Exemplo n.º 1
0
    /// <summary>
    /// Creates, configures, and starts an asynchronous AMR audio encoder for the
    /// requested codec. Supported values: "amrnb" (8 kHz / 7950 bps) and
    /// "amrwb" (16 kHz / 18250 bps).
    /// </summary>
    /// <param name="codec">Codec selector: "amrnb" or "amrwb".</param>
    /// <exception cref="ArgumentException">Thrown when <paramref name="codec"/> is not recognized.</exception>
    private void initEncoder(string codec)
    {
        MediaFormat format = new MediaFormat();

        switch (codec)
        {
        case "amrnb":
            audioEncoder = MediaCodec.CreateEncoderByType(MediaFormat.MimetypeAudioAmrNb);
            format.SetString(MediaFormat.KeyMime, MediaFormat.MimetypeAudioAmrNb);
            format.SetInteger(MediaFormat.KeySampleRate, 8000);
            // 7950 bps is one of the discrete AMR-NB bitrate modes.
            format.SetInteger(MediaFormat.KeyBitRate, 7950);
            break;

        case "amrwb":
            audioEncoder = MediaCodec.CreateEncoderByType(MediaFormat.MimetypeAudioAmrWb);
            format.SetString(MediaFormat.KeyMime, MediaFormat.MimetypeAudioAmrWb);
            format.SetInteger(MediaFormat.KeySampleRate, 16000);
            // 18250 bps is one of the discrete AMR-WB bitrate modes.
            format.SetInteger(MediaFormat.KeyBitRate, 18250);
            break;

        default:
            // ArgumentException is more specific than the bare Exception the
            // original threw; callers catching Exception still work.
            throw new ArgumentException("Unknown recorder codec selected " + codec, nameof(codec));
        }

        format.SetInteger(MediaFormat.KeyChannelCount, 1);
        format.SetInteger(MediaFormat.KeyMaxInputSize, bufferSize);
        // The callback must be registered before Configure() for async mode.
        audioEncoder.SetCallback(this);
        audioEncoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
        audioEncoder.Start();
    }
        /**
         * Edits a video file, saving the contents to a new file. This involves decoding and
         * re-encoding, not to mention conversions between YUV and RGB, and so may be lossy.
         * <p>
         * If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
         * output, but it's not practical to support all OEM formats. By using a SurfaceTexture
         * for output and a Surface for input, we can avoid issues with obscure formats and can
         * use a fragment shader to do transformations.
         */
        private VideoChunks editVideoFile(VideoChunks inputData)
        {
            if (AppSettings.Logging.SendToConsole)
            {
                Log.Debug(TAG, "editVideoFile " + mWidth + "x" + mHeight);
            }
            VideoChunks   outputData    = new VideoChunks();
            MediaCodec    decoder       = null;
            MediaCodec    encoder       = null;
            InputSurface  inputSurface  = null;
            OutputSurface outputSurface = null;

            try {
                MediaFormat inputFormat = inputData.getMediaFormat();
                // Create an encoder format that matches the input format. (Might be able to just
                // re-use the format used to generate the video, since we want it to be the same.)
                MediaFormat outputFormat = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
                outputFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecInfo.CodecCapabilities.COLORFormatSurface);
                outputFormat.SetInteger(MediaFormat.KeyBitRate, inputFormat.GetInteger(MediaFormat.KeyBitRate));
                outputFormat.SetInteger(MediaFormat.KeyFrameRate, inputFormat.GetInteger(MediaFormat.KeyFrameRate));
                outputFormat.SetInteger(MediaFormat.KeyIFrameInterval, inputFormat.GetInteger(MediaFormat.KeyIFrameInterval));
                outputData.setMediaFormat(outputFormat);
                encoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
                encoder.Configure(outputFormat, null, null, MediaCodecConfigFlags.Encode);
                inputSurface = new InputSurface(encoder.CreateInputSurface());
                inputSurface.MakeCurrent();
                encoder.Start();
                // OutputSurface uses the EGL context created by InputSurface.
                decoder       = MediaCodec.CreateDecoderByType(MIME_TYPE);
                outputSurface = new OutputSurface();
                outputSurface.ChangeFragmentShader(FRAGMENT_SHADER);
                decoder.Configure(inputFormat, outputSurface.Surface, null, 0);
                decoder.Start();
                editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
            } finally {
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "shutting down encoder, decoder");
                }
                // Release each resource in its own nested finally so that an exception
                // thrown by one Release()/Stop() cannot leak the resources after it
                // (the original flat sequence would skip the decoder if the encoder's
                // Stop() threw). The first exception still propagates to the caller.
                try {
                    if (outputSurface != null)
                    {
                        outputSurface.Release();
                    }
                } finally {
                    try {
                        if (inputSurface != null)
                        {
                            inputSurface.Release();
                        }
                    } finally {
                        try {
                            if (encoder != null)
                            {
                                encoder.Stop();
                                encoder.Release();
                            }
                        } finally {
                            if (decoder != null)
                            {
                                decoder.Stop();
                                decoder.Release();
                            }
                        }
                    }
                }
            }
            return(outputData);
        }
Exemplo n.º 3
0
        //@Override
        //@SuppressLint({ "InlinedApi", "NewApi" })
        /// <summary>
        /// Starts AAC ("audio/mp4a-latm") encoding of microphone input via MediaCodec
        /// and hands the encoded stream to the RTP packetizer. Sets mStreaming on success.
        /// </summary>
        protected override void encodeWithMediaCodec()
        {
            // Double the minimum AudioRecord buffer to reduce the risk of overrun
            // between reads.
            int bufferSize = AudioRecord.GetMinBufferSize(mQuality.samplingRate, ChannelIn.Mono, Encoding.Pcm16bit) * 2;

            ((AACLATMPacketizer)mPacketizer).setSamplingRate(mQuality.samplingRate);

            mAudioRecord = new AudioRecord(Android.Media.AudioSource.Mic, mQuality.samplingRate, Android.Media.ChannelIn.Mono, Android.Media.Encoding.Pcm16bit, bufferSize);
            mMediaCodec  = MediaCodec.CreateEncoderByType("audio/mp4a-latm");
            MediaFormat format = new MediaFormat();

            format.SetString(MediaFormat.KeyMime, "audio/mp4a-latm");
            format.SetInteger(MediaFormat.KeyBitRate, mQuality.bitRate);
            format.SetInteger(MediaFormat.KeyChannelCount, 1);
            format.SetInteger(MediaFormat.KeySampleRate, mQuality.samplingRate);
            format.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecInfo.CodecProfileLevel.AACObjectLC);
            format.SetInteger(MediaFormat.KeyMaxInputSize, bufferSize);
            mMediaCodec.Configure(format, null, null, MediaCodecConfigFlags.Encode);
            mAudioRecord.StartRecording();
            mMediaCodec.Start();

            MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);

            // NOTE(review): the original also stored mMediaCodec.GetInputBuffers() in an
            // unused local; the deprecated call had no observable effect here, so it
            // was removed.

            mThread = new Thread(this);
            mThread.Start();

            // The packetizer encapsulates this stream in an RTP stream and send it over the network
            mPacketizer.setInputStream(inputStream);
            mPacketizer.start();

            mStreaming = true;
        }
Exemplo n.º 4
0
        /**
         * Configures encoder and muxer state, and prepares the input Surface.  Initializes
         * mEncoder, mMuxer, mInputSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
         */
        private void prepareEncoder()
        {
            mBufferInfo = new MediaCodec.BufferInfo();

            MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, _width, _height);

            // Set some properties.  Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.
            format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
            format.SetInteger(MediaFormat.KeyBitRate, _bitRate);
            format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
            format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);
            if (VERBOSE)
            {
                Log.Debug(TAG, "format: " + format);
            }

            // Create a MediaCodec encoder, and configure it with our format.  Get a Surface
            // we can use for input and wrap it with a class that handles the EGL work.
            //
            // If you want to have two EGL contexts -- one for display, one for recording --
            // you will likely want to defer instantiation of CodecInputSurface until after the
            // "display" EGL context is created, then modify the eglCreateContext call to
            // take eglGetCurrentContext() as the share_context argument.
            mEncoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
            mEncoder.Configure(format, null, null, MediaCodec.ConfigureFlagEncode);
            _inputSurface = new InputSurface(mEncoder.CreateInputSurface());
            mEncoder.Start();

            // Output filename.  Ideally this would use Context.getFilesDir() rather than a
            // hard-coded output directory.
            string outputPath = System.IO.Path.Combine(_workingDirectory, "test." + _width + "x" + _height + ".mp4");

            Log.Info(TAG, "Output file is " + outputPath);

            // Create a MediaMuxer.  We can't add the video track and start() the muxer here,
            // because our MediaFormat doesn't have the Magic Goodies.  These can only be
            // obtained from the encoder after it has started processing data.
            //
            // We're not actually interested in multiplexing audio.  We just want to convert
            // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
            //
            // The MediaMuxer constructor may throw (e.g. on an unwritable path).  The
            // original wrapped it in "throw new System.Exception(e.Message)", which
            // discarded the exception type and stack trace; letting it propagate
            // unchanged preserves both, and callers catching Exception still work.
            mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MuxerOutputMpeg4);

            mTrackIndex   = -1;
            mMuxerStarted = false;
        }
Exemplo n.º 5
0
        /// <summary>
        /// Creates an asynchronous H.264 (AVC) encoder for the given frame size and
        /// starts it, dispatching codec callbacks on a dedicated background thread.
        /// </summary>
        /// <param name="width">Frame width in pixels.</param>
        /// <param name="height">Frame height in pixels.</param>
        public VideoStreamer(int width, int height)
        {
            // NOTE(review): the original also constructed a MediaCodecList into an
            // unused local; the object was never consulted, so it was removed.
            MediaFormat videoFormat = MediaFormat.CreateVideoFormat(MediaFormat.MimetypeVideoAvc, width, height);
            MediaCodec  encoder     = MediaCodec.CreateEncoderByType(MediaFormat.MimetypeVideoAvc);

            _backgroundThread = new HandlerThread("EncoderBackgroundWorker");
            _backgroundThread.Start();
            _backgroundHandler = new Handler(_backgroundThread.Looper);

            // Callback must be registered before Configure() for async operation.
            encoder.SetCallback(new EncoderCallback(), _backgroundHandler);
            encoder.Configure(videoFormat, null, null, MediaCodecConfigFlags.Encode);
            encoder.Start();
        }
Exemplo n.º 6
0
        /// <summary>
        /// Sets up the video encoder and the MP4 muxer.  The muxer is deliberately left
        /// un-started: the video track can only be added after the encoder has emitted
        /// its output format, so _TrackIndex and _MuxerStarted are reset here.
        /// </summary>
        private void PrepareEncoder()
        {
            MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);

            // No suitable codec for this MIME type: give up quietly.
            if (codecInfo == null)
            {
                return;
            }

            int colorFormat;

            try
            {
                colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
            }
            catch
            {
                // Detection failed; fall back to the common semi-planar YUV420 layout.
                colorFormat = (int)MediaCodecCapabilities.Formatyuv420semiplanar;
            }

            MediaFormat encoderFormat = MediaFormat.CreateVideoFormat(MIME_TYPE, _Width, _Height);

            encoderFormat.SetInteger(MediaFormat.KeyColorFormat, colorFormat);
            encoderFormat.SetInteger(MediaFormat.KeyBitRate, _BitRate);
            encoderFormat.SetInteger(MediaFormat.KeyFrameRate, _frameRate);
            encoderFormat.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);

            _Encoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
            _Encoder.Configure(encoderFormat, null, null, MediaCodecConfigFlags.Encode);
            _Encoder.Start();

            //  Create a MediaMuxer.  We can't add the video track and start() the muxer here,
            //  because our MediaFormat doesn't have the Magic Goodies.  These can only be
            //  obtained from the encoder after it has started processing data.
            try
            {
                _Muxer = new MediaMuxer(_Filepath, MuxerOutputType.Mpeg4);
            }
            catch (Exception e)
            {
                Log.Error(TAG, e.Message, e);
                throw;
            }

            _TrackIndex   = -1;
            _MuxerStarted = false;
        }
Exemplo n.º 7
0
        /// <summary>
        /// Creates and starts an AVC surface-input encoder, wrapping its input Surface
        /// in an InputSurface and making its EGL context current.
        /// </summary>
        private void SetupEncoder()
        {
            MediaFormat fmt = MediaFormat.CreateVideoFormat(MediaHelper.MIME_TYPE_AVC, mWidth, mHeight);

            // Surface color format is required for Surface-based input.
            fmt.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
            fmt.SetInteger(MediaFormat.KeyBitRate, mBitRate);
            fmt.SetInteger(MediaFormat.KeyFrameRate, mFrameRate);
            fmt.SetInteger(MediaFormat.KeyIFrameInterval, mIFrameInterval);

            mEncoder = MediaCodec.CreateEncoderByType(MediaHelper.MIME_TYPE_AVC);
            mEncoder.Configure(fmt, null, null, MediaCodecConfigFlags.Encode);

            // The input surface must be created between Configure() and Start().
            mInputSurface = new InputSurface(mEncoder.CreateInputSurface());
            mInputSurface.MakeCurrent();

            mEncoder.Start();
        }
Exemplo n.º 8
0
        /// <summary>
        /// Starts the asynchronous audio encoder.  Idempotent: a second call while
        /// already running is a no-op.  Clears any buffers queued from a previous run.
        /// </summary>
        public void start()
        {
            // Guard against double-start.
            if (running)
            {
                return;
            }

            running = true;

            // Discard stale buffers left over from an earlier session.
            lock (availableBuffers)
            {
                availableBuffers.Clear();
            }

            audioEncoder = MediaCodec.CreateEncoderByType(encoderMimeType);
            // Register the callback before Configure() so async mode is active.
            audioEncoder.SetCallback(this);
            audioEncoder.Configure(mediaFormat, null, null, MediaCodecConfigFlags.Encode);
            audioEncoder.Start();
        }
Exemplo n.º 9
0
        /**
         * Configures encoder and muxer state, and prepares the input Surface.  Initializes
         * mEncoder, mMuxer, mInputSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
         */
        private void prepareEncoder(string outputPath)
        {
            _bfi             = new MediaCodec.BufferInfo();
            LatestOutputPath = outputPath;
            MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, _width, _height);

            // Set some properties.  Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.

            format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
            format.SetInteger(MediaFormat.KeyBitRate, _bitRate);
            format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
            format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);
            if (AppSettings.Logging.SendToConsole)
            {
                Log.Debug(TAG, "format: " + format);
            }

            // Create a MediaCodec encoder, and configure it with our format.  Get a Surface
            // we can use for input and wrap it with a class that handles the EGL work.
            //
            // If you want to have two EGL contexts -- one for display, one for recording --
            // you will likely want to defer instantiation of CodecInputSurface until after the
            // "display" EGL context is created, then modify the eglCreateContext call to
            // take eglGetCurrentContext() as the share_context argument.
            mEncoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
            mEncoder.Configure(format, null, null, MediaCodec.ConfigureFlagEncode);
            _inputSurface = new InputSurface(mEncoder.CreateInputSurface());
            mEncoder.Start();

            Log.Info(TAG, "Output file is " + outputPath);

            // The MediaMuxer constructor may throw (e.g. on an unwritable path).  The
            // original wrapped it in "throw new System.Exception(e.Message)", which
            // discarded the exception type and stack trace; letting it propagate
            // unchanged preserves both, and callers catching Exception still work.
            _muxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MuxerOutputMpeg4);

            mTrackIndex  = -1;
            MuxerStarted = false;
        }
Exemplo n.º 10
0
        // https://github.com/lanhq147/SampleMediaFrame/blob/e2f20ff9eef73318e5a9b4de15458c5c2eb0fd46/app/src/main/java/com/google/android/exoplayer2/video/av/HWRecorder.java

        /// <summary>
        /// Sets up the H.264 video encoder (surface input), the AAC audio encoder, the
        /// MP4 muxer, and EGL, then starts the audio encoding task.
        /// </summary>
        /// <returns>true on success; false when the muxer, EGL setup, or audio file fails.</returns>
        public bool BeginEncoding(int resX, int resY, int rateNumer, int rateDenom, int videoBitRate, int audioBitRate, string audioFile, string outputFile)
        {
            videoBufferInfo = new MediaCodec.BufferInfo();
            audioBufferInfo = new MediaCodec.BufferInfo();

            frameRateNumer = rateNumer;
            frameRateDenom = rateDenom;

            MediaFormat videoFormat = MediaFormat.CreateVideoFormat(VideoMimeType, resX, resY);

            videoFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
            // Bitrates are supplied in kbps; MediaFormat expects bps.
            videoFormat.SetInteger(MediaFormat.KeyBitRate, videoBitRate * 1000);
            videoFormat.SetFloat(MediaFormat.KeyFrameRate, rateNumer / (float)rateDenom);
            videoFormat.SetInteger(MediaFormat.KeyIFrameInterval, 4);
            videoFormat.SetInteger(MediaFormat.KeyProfile, (int)MediaCodecProfileType.Avcprofilehigh);
            videoFormat.SetInteger(MediaFormat.KeyLevel, (int)MediaCodecProfileLevel.Avclevel31);

            videoEncoder = MediaCodec.CreateEncoderByType(VideoMimeType);
            videoEncoder.Configure(videoFormat, null, null, MediaCodecConfigFlags.Encode);
            surface = videoEncoder.CreateInputSurface();
            videoEncoder.Start();

            MediaFormat audioFormat = MediaFormat.CreateAudioFormat(AudioMimeType, 44100, 1);

            audioFormat.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecProfileType.Aacobjectlc);
            audioFormat.SetInteger(MediaFormat.KeyBitRate, audioBitRate * 1000);

            audioEncoder = MediaCodec.CreateEncoderByType(AudioMimeType);
            audioEncoder.Configure(audioFormat, null, null, MediaCodecConfigFlags.Encode);
            audioEncoder.Start();

            try
            {
                muxer = new MediaMuxer(outputFile, MuxerOutputType.Mpeg4);
            }
            catch
            {
                return(false);
            }

            videoTrackIndex = -1;
            audioTrackIndex = -1;
            muxerStarted    = false;

            if (!ElgInitialize())
            {
                return(false);
            }

            // File.ReadAllBytes never returns null -- it throws on failure -- so the
            // original "audioData == null" check was dead while a missing/unreadable
            // file escaped as an exception.  Map read failures onto this method's
            // boolean failure contract instead.
            try
            {
                audioData = File.ReadAllBytes(audioFile);
            }
            catch
            {
                return(false);
            }

            DrainEncoder(videoEncoder, videoBufferInfo, videoTrackIndex, false);
            DrainEncoder(audioEncoder, audioBufferInfo, audioTrackIndex, false);

            audioEncodingTask = Task.Factory.StartNew(AudioEncodeThread, TaskCreationOptions.LongRunning);

            return(true);
        }