public bool Initialize()
        {
            // Tear down any previous decoding session first; abort if that fails.
            _initialized = false;
            if (!StopDecoder())
            {
                return false;
            }

            // Build and configure the audio decoder for the configured
            // mime type / sample rate / channel count.
            _mediaFormat = GetMediaFormat(_mimeType, _sampleRate, _channels);
            _mediaCodec  = MediaCodec.CreateDecoderByType(_mimeType);
            _mediaCodec.Configure(
                format: _mediaFormat,
                surface: null,
                crypto: null,
                flags: MediaCodecConfigFlags.None);

            // Start audio output and the codec before the worker threads run.
            _audioTrack = GetAudioTrack();
            _audioTrack.Play();
            _mediaCodec.Start();

            _encoderThread = GetEncoderThread();
            _encoderThread.Start();

            _decoderThread = GetDecoderThread();
            _decoderThread.Start();

            _initialized = true;
            return true;
        }
Beispiel #2
0
    /// <summary>
    /// Creates and starts the AMR audio encoder for the requested codec,
    /// using the standard mono sample-rate/bit-rate pair for each variant.
    /// </summary>
    /// <param name="codec">Either "amrnb" or "amrwb".</param>
    /// <exception cref="ArgumentException">Thrown for any other codec name.</exception>
    private void initEncoder(string codec)
    {
        MediaFormat format = new MediaFormat();

        switch (codec)
        {
        case "amrnb":
            audioEncoder = MediaCodec.CreateEncoderByType(MediaFormat.MimetypeAudioAmrNb);
            format.SetString(MediaFormat.KeyMime, MediaFormat.MimetypeAudioAmrNb);
            format.SetInteger(MediaFormat.KeySampleRate, 8000);
            format.SetInteger(MediaFormat.KeyBitRate, 7950);
            break;

        case "amrwb":
            audioEncoder = MediaCodec.CreateEncoderByType(MediaFormat.MimetypeAudioAmrWb);
            format.SetString(MediaFormat.KeyMime, MediaFormat.MimetypeAudioAmrWb);
            format.SetInteger(MediaFormat.KeySampleRate, 16000);
            format.SetInteger(MediaFormat.KeyBitRate, 18250);
            break;

        default:
            // Fix: throw the specific argument-exception type instead of the base
            // Exception (any existing catch (Exception) handlers still match).
            throw new ArgumentException("Unknown recorder codec selected " + codec, nameof(codec));
        }

        format.SetInteger(MediaFormat.KeyChannelCount, 1);
        format.SetInteger(MediaFormat.KeyMaxInputSize, bufferSize);
        audioEncoder.SetCallback(this);
        audioEncoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
        audioEncoder.Start();
    }
Beispiel #3
0
    /// <summary>
    /// MediaCodec async callback: a decoded audio buffer is ready. Copies the
    /// PCM data out, returns the buffer to the codec, then plays the data.
    /// </summary>
    public override void OnOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info)
    {
        if (!running)
        {
            return;
        }
        try
        {
            var ob = audioDecoder.GetOutputBuffer(index);

            ob.Position(info.Offset);
            ob.Limit(info.Offset + info.Size);

            byte[] decoded_data = new byte[info.Size];
            ob.Get(decoded_data);

            // Fix: release the buffer as soon as the data has been copied out.
            // Previously the release happened after Write(); if Write() threw,
            // the catch below swallowed the exception and the buffer was never
            // returned, eventually stalling the codec. (This also matches the
            // sibling handler that forwards data to decodedDataCallback.)
            audioDecoder.ReleaseOutputBuffer(index, false);

            if (audioPlayer.Write(decoded_data, 0, decoded_data.Length) == 0)
            {
                // TODO drop frames
            }
        }
        catch (Exception e)
        {
            Logging.error("Exception occured while playing audio stream: " + e);
        }
    }
        /**
         * Edits a video file, saving the contents to a new file. This involves decoding and
         * re-encoding, not to mention conversions between YUV and RGB, and so may be lossy.
         * <p>
         * If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
         * output, but it's not practical to support all OEM formats. By using a SurfaceTexture
         * for output and a Surface for input, we can avoid issues with obscure formats and can
         * use a fragment shader to do transformations.
         */
        private VideoChunks editVideoFile(VideoChunks inputData)
        {
            if (AppSettings.Logging.SendToConsole)
            {
                Log.Debug(TAG, "editVideoFile " + mWidth + "x" + mHeight);
            }
            VideoChunks   outputData    = new VideoChunks();
            MediaCodec    decoder       = null;
            MediaCodec    encoder       = null;
            InputSurface  inputSurface  = null;
            OutputSurface outputSurface = null;

            try {
                MediaFormat inputFormat = inputData.getMediaFormat();
                // Create an encoder format that matches the input format. (Might be able to just
                // re-use the format used to generate the video, since we want it to be the same.)
                MediaFormat outputFormat = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
                outputFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecInfo.CodecCapabilities.COLORFormatSurface);
                outputFormat.SetInteger(MediaFormat.KeyBitRate, inputFormat.GetInteger(MediaFormat.KeyBitRate));
                outputFormat.SetInteger(MediaFormat.KeyFrameRate, inputFormat.GetInteger(MediaFormat.KeyFrameRate));
                outputFormat.SetInteger(MediaFormat.KeyIFrameInterval, inputFormat.GetInteger(MediaFormat.KeyIFrameInterval));
                outputData.setMediaFormat(outputFormat);
                encoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
                encoder.Configure(outputFormat, null, null, MediaCodecConfigFlags.Encode);
                inputSurface = new InputSurface(encoder.CreateInputSurface());
                inputSurface.MakeCurrent();
                encoder.Start();
                // OutputSurface uses the EGL context created by InputSurface.
                decoder       = MediaCodec.CreateDecoderByType(MIME_TYPE);
                outputSurface = new OutputSurface();
                outputSurface.ChangeFragmentShader(FRAGMENT_SHADER);
                decoder.Configure(inputFormat, outputSurface.Surface, null, 0);
                decoder.Start();
                editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
            } finally {
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "shutting down encoder, decoder");
                }
                if (outputSurface != null)
                {
                    outputSurface.Release();
                }
                if (inputSurface != null)
                {
                    inputSurface.Release();
                }
                // Fix: each codec shutdown is guarded individually. Previously a
                // throwing encoder.Stop() in this finally block would skip decoder
                // cleanup entirely (leaking it) and could mask the original
                // exception from the try block.
                if (encoder != null)
                {
                    try
                    {
                        encoder.Stop();
                        encoder.Release();
                    }
                    catch (Exception e)
                    {
                        Log.Error(TAG, "encoder shutdown failed: " + e);
                    }
                }
                if (decoder != null)
                {
                    try
                    {
                        decoder.Stop();
                        decoder.Release();
                    }
                    catch (Exception e)
                    {
                        Log.Error(TAG, "decoder shutdown failed: " + e);
                    }
                }
            }
            return(outputData);
        }
        /**
         * Checks the video file to see if the contents match our expectations. We decode the
         * video to a Surface and check the pixels with GL.
         */
        private void checkVideoFile(VideoChunks inputData)
        {
            OutputSurface surface = null;
            MediaCodec    decoder = null;

            // Reset the per-run measurement before decoding.
            mLargestColorDelta = -1;
            if (AppSettings.Logging.SendToConsole)
            {
                Log.Debug(TAG, "checkVideoFile");
            }
            try {
                // Decode to an off-screen surface so pixels can be checked with GL.
                surface = new OutputSurface(mWidth, mHeight);
                MediaFormat format = inputData.getMediaFormat();
                decoder = MediaCodec.CreateDecoderByType(MIME_TYPE);
                decoder.Configure(format, surface.Surface, null, 0);
                decoder.Start();
                int badFrames = checkVideoData(inputData, decoder, surface);
                if (badFrames != 0)
                {
                    fail("Found " + badFrames + " bad frames");
                }
            } finally {
                // Release GL/codec resources even when the check fails.
                // NOTE(review): if decoder.Stop() throws here, the Log.Info below is
                // skipped — confirm whether that matters for the test harness.
                if (surface != null)
                {
                    surface.Release();
                }
                if (decoder != null)
                {
                    decoder.Stop();
                    decoder.Release();
                }
                Log.Info(TAG, "Largest color delta: " + mLargestColorDelta);
            }
        }
        // Sets up a MediaExtractor over the stream and creates (but does not start)
        // a decoder for the first video track.
        // NOTE(review): this constructor looks unfinished — several locals below are
        // computed but never used, and the decode pipeline is never configured or
        // started; confirm intended behavior before relying on it.
        internal VideoLengthExtractor(System.IO.Stream stream, long reportedLength)
        {
            _reportedLength = reportedLength;
            extractor       = new MediaExtractor();
            extractor.SetDataSource(new ReadSeekStreamMediaSource(stream));
            var format = SelectFirstVideoTrack() ?? throw new Exception("Stream has no video track");

            // Get the original video size
            var naturalWidth  = format.GetInteger(MediaFormat.KeyWidth);  // unused — presumably intended for later scaling; verify
            var naturalHeight = format.GetInteger(MediaFormat.KeyHeight); // unused



            decoder = MediaCodec.CreateDecoderByType(format.GetString(MediaFormat.KeyMime));

            ///Surface.



            //extractor.SeekTo(0, MediaExtractorSeekTo.)
            var info = new MediaCodec.BufferInfo(); // unused — left over from an in-progress decode loop
            //info.

            //videoLengthExtractor.
        }
Beispiel #7
0
        /// <summary>
        /// MediaCodec async callback: a decoded audio buffer is ready. Copies the
        /// bytes out, returns the buffer, then forwards the data to the callback.
        /// </summary>
        public override void OnOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info)
        {
            // Ignore late callbacks once stop() has been requested.
            if (!running)
            {
                return;
            }
            try
            {
                // Copy the decoded bytes out of the codec-owned buffer.
                var outputBuffer = audioDecoder.GetOutputBuffer(index);
                outputBuffer.Position(info.Offset);
                outputBuffer.Limit(info.Offset + info.Size);

                var payload = new byte[info.Size];
                outputBuffer.Get(payload);

                // Return the buffer before invoking the (potentially slow) callback.
                audioDecoder.ReleaseOutputBuffer(index, false);

                decodedDataCallback.onDecodedData(payload);
            }
            catch (Exception e)
            {
                Logging.error("Exception occured while playing audio stream: " + e);
            }
        }
Beispiel #8
0
        /// <summary>
        /// Stops the decoder and discards any queued frames. Idempotent: only the
        /// first call performs the teardown.
        /// </summary>
        public void stop()
        {
            if (!running)
            {
                return;
            }
            running = false;

            // Drop queued work before tearing the codec down.
            lock (pendingFrames)
            {
                pendingFrames.Clear();
                availableBuffers.Clear();
            }

            var decoder = audioDecoder;
            if (decoder != null)
            {
                try
                {
                    decoder.Stop();
                    decoder.Release();
                }
                catch (Exception)
                {
                    // Best-effort: the codec may already be in an error state.
                }
                decoder.Dispose();
                audioDecoder = null;
            }
        }
Beispiel #9
0
        /// <summary>
        /// Creates and starts an asynchronous H.264 decoder whose callbacks run on a
        /// dedicated handler thread.
        /// </summary>
        /// <param name="callback_obj">Receiver for decoded-frame callbacks.</param>
        /// <param name="width">Frame width in pixels.</param>
        /// <param name="height">Frame height in pixels.</param>
        /// <returns>Always true; configuration errors propagate as exceptions.</returns>
        public bool setup(DecoderCallback callback_obj, int width, int height) //format_hint is aviFileContent
        {
            callbackThread = new HandlerThread("H264DecoderHandler");
            callbackThread.Start();
            handler = new Handler(callbackThread.Looper);

            mDecoder     = MediaCodec.CreateDecoderByType(MIME);
            mCallbackObj = callback_obj;
            myCallback   = new MyCallback(mDecoder, mCallbackObj);
            mDecoder.SetCallback(myCallback, handler);

            //mOutputFormat = mDecoder.GetOutputFormat(); // option B
            inputFormat = MediaFormat.CreateVideoFormat(MIME, width, height);
            inputFormat.SetInteger(MediaFormat.KeyMaxInputSize, width * height);
            inputFormat.SetInteger("durationUs", 63446722);
            //inputFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatyuv420semiplanar);
            //inputFormat.SetInteger(MediaFormat.KeyIFrameInterval, 60);

            // Fix: removed the no-op `catch (Exception ex) { throw ex; }` wrapper,
            // which destroyed the original stack trace. Exceptions now propagate
            // unchanged, exactly as `throw;` would have behaved.
            mDecoder.Configure(inputFormat, null, null, 0 /* Decoder */);

            Console.WriteLine("before mDecoder.Start()");
            mDecoder.Start();
            Console.WriteLine("after mDecoder.Start()");

            return(true);
        }
        /**
         * Constructs the {@link MediaCodecWrapper} wrapper object around the video codec.
         * The codec is created using the encapsulated information in the
         * {@link MediaFormat} object.
         *
         * @param trackFormat The format of the media object to be decoded.
         * @param surface Surface to render the decoded frames.
         * @return
         */
        public static MediaCodecWrapper fromVideoFormat(MediaFormat trackFormat,
                                                        Surface surface)
        {
            MediaCodecWrapper result     = null;
            MediaCodec        videoCodec = null;

            // BEGIN_INCLUDE(create_codec)
            string mimeType = trackFormat.GetString(MediaFormat.KeyMime);

            // Check to see if this is actually a video mime type. If it is, then create
            // a codec that can decode this mime type.
            // Fix: a mime type is a video type iff it *starts* with "video/";
            // Contains() could also match unrelated types. Also guard against a
            // format that has no mime key (GetString would return null).
            if (mimeType != null && mimeType.StartsWith("video/", StringComparison.Ordinal))
            {
                videoCodec = MediaCodec.CreateDecoderByType(mimeType);
                videoCodec.Configure(trackFormat, surface, null, 0);
            }

            // If codec creation was successful, then create a wrapper object around the
            // newly created codec.
            if (videoCodec != null)
            {
                result = new MediaCodecWrapper(videoCodec);
            }
            // END_INCLUDE(create_codec)

            return(result);
        }
Beispiel #11
0
        /// <summary>
        /// Called when MediaCodec wants a new frame to decode. Pops the next raw
        /// frame from the video queue and submits it to the codec.
        /// </summary>
        /// <param name="codec">Codec.</param>
        /// <param name="index">Index.</param>
        public override void OnInputBufferAvailable(MediaCodec codec, int index)
        {
            if (_videoQueue.Size < 1)
            {
                // FIXME: Is it proper to enqueue an empty
                // buffer like this?
                codec.QueueInputBuffer(index, 0, 0, 0, MediaCodecBufferFlags.None);
                return;
            }

            var frame = _videoQueue.Back();
            _videoQueue.PopBack();

            if (frame == null)
            {
                return;
            }

            // Copy the raw frame into the codec's pre-allocated input buffer and
            // submit it for decoding.
            Java.Nio.ByteBuffer inputBuffer = codec.GetInputBuffer(index);
            inputBuffer.Put(frame);
            codec.QueueInputBuffer(index, 0, frame.Length, 0, MediaCodecBufferFlags.None);
        }
        /// <summary>
        /// Tears down the RTSP client and the decoder. Safe to call before the RTSP
        /// client was started and safe to call more than once.
        /// </summary>
        protected override void Dispose(bool disposing)
        {
            if (Control != null && videoView != null)
            {
                //videoView.Prepared -= OnVideoViewPrepared;
            }

            if (Element != null)
            {
                Element.UpdateStatus -= OnUpdateStatus;
            }

            // Fix: rtspCancel is null until rtspClientStart() runs (and is set back
            // to null below), so an unconditional Cancel() threw
            // NullReferenceException on early or repeated disposal.
            rtspCancel?.Cancel();
            Thread.Sleep(100); // give the RTSP worker a moment to observe cancellation
            try
            {
                codec?.Stop();
            }
            catch
            {
                // best-effort: codec may already be stopped or in an error state
            }

            try
            {
                codec?.Dispose();
            }
            catch
            {
            }

            rtspCancel = null;
            codec      = null;

            base.Dispose(disposing);
        }
            /// <summary>
            /// MediaCodec async callback: hand the decoded buffer straight back with
            /// render == true so the codec renders it to the configured surface.
            /// </summary>
            public override void OnOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info)
            {
                codec.ReleaseOutputBuffer(index, true);
            }
Beispiel #14
0
        //@Override
        //@SuppressLint({ "InlinedApi", "NewApi" })
        /// <summary>
        /// Starts AAC ("audio/mp4a-latm") encoding of microphone input via MediaCodec
        /// and hands the encoded stream to the RTP packetizer.
        /// </summary>
        protected override void encodeWithMediaCodec()
        {
            // Twice the minimum AudioRecord buffer, to reduce the risk of overruns.
            int bufferSize = AudioRecord.GetMinBufferSize(mQuality.samplingRate, ChannelIn.Mono, Encoding.Pcm16bit) * 2;

            ((AACLATMPacketizer)mPacketizer).setSamplingRate(mQuality.samplingRate);

            mAudioRecord = new AudioRecord(Android.Media.AudioSource.Mic, mQuality.samplingRate, Android.Media.ChannelIn.Mono, Android.Media.Encoding.Pcm16bit, bufferSize);
            mMediaCodec  = MediaCodec.CreateEncoderByType("audio/mp4a-latm");
            MediaFormat format = new MediaFormat();

            // Mono AAC-LC at the configured sample rate / bit rate.
            format.SetString(MediaFormat.KeyMime, "audio/mp4a-latm");
            format.SetInteger(MediaFormat.KeyBitRate, mQuality.bitRate);
            format.SetInteger(MediaFormat.KeyChannelCount, 1);
            format.SetInteger(MediaFormat.KeySampleRate, mQuality.samplingRate);
            format.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecInfo.CodecProfileLevel.AACObjectLC);
            format.SetInteger(MediaFormat.KeyMaxInputSize, bufferSize);
            mMediaCodec.Configure(format, null, null, MediaCodecConfigFlags.Encode);
            mAudioRecord.StartRecording();
            mMediaCodec.Start();

            // Wraps the codec's output as a readable stream for the packetizer.
            MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);

            Java.Nio.ByteBuffer[] inputBuffers = mMediaCodec.GetInputBuffers(); // NOTE(review): result never used — confirm whether this call is needed

            // Worker thread constructed over `this` — presumably this class's run()
            // feeds captured audio into the codec; confirm against the class body.
            mThread = new Thread(this);


            mThread.Start();

            // The packetizer encapsulates this stream in an RTP stream and send it over the network
            mPacketizer.setInputStream(inputStream);
            mPacketizer.start();

            mStreaming = true;
        }
 /// <summary>
 /// Releases resources and ends the encoding/decoding session.
 /// </summary>
 public void stopAndRelease()
 {
     var codec = mDecoder;
     codec.Stop();
     codec.Release();
     mDecoder = null;
     mHandler = null;
 }
        protected override void OnDraw(Canvas canvas)
        {
            // Kick off the RTSP client the first time we are asked to draw.
            if (rtspCancel == null)
            {
                rtspClientStart();
            }

            // Lazily build the decoder once the stream type (H.264/H.265) is known.
            if (codec == null && (h264 || h265))
            {
                string mime = h265
                    ? MediaFormat.MimetypeVideoHevc
                    : MediaFormat.MimetypeVideoAvc;

                codec     = MediaCodec.CreateDecoderByType(mime);
                callbacks = new CallBacks(this);
                codec.SetCallback(callbacks);

                var mediafmt = MediaFormat.CreateVideoFormat(mime, 1920, 1080);
                codec.Configure(mediafmt, videoView.Holder.Surface, null, MediaCodecConfigFlags.None);
                codec.Start();
            }

            base.OnDraw(canvas);
        }
        /// <summary>
        /// Returns a decoder to the per-mime pool, or disposes it when the pool is
        /// already over capacity. Starts the background reaper task if needed.
        /// </summary>
        public static void ReturnDecoder(string mime, MediaCodec decoder)
        {
            lock (_decoders)
            {
                // Look up (or lazily create) the pool bucket for this mime type.
                List <Entry> codecs;
                if (!_decoders.TryGetValue(mime, out codecs))
                {
                    codecs = new List <Entry>();
                    _decoders.Add(mime, codecs);
                }

                if (codecs.Count > _maxDecodersInPool)
                {
                    // Pool is full: drop the codec instead of caching it.
                    decoder.Dispose();
                    return;
                }

                codecs.Add(new Entry(decoder));

                // Ensure the reaper that evicts stale codecs is alive.
                if (_allocationManager == null || _allocationManager.IsCompleted)
                {
                    _allocationManager = Task.Factory.StartNew(
                        RemoveUnusedCodecs, CancellationToken.None, TaskCreationOptions.LongRunning, TaskScheduler.Default);
                }
            }
        }
Beispiel #18
0
    /// <summary>
    /// MediaCodec async callback: an encoded audio buffer is ready. Copies the
    /// payload out, returns the buffer, and queues the data for consumers.
    /// </summary>
    public override void OnOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info)
    {
        if (!running)
        {
            return;
        }
        try
        {
            // Copy the encoded payload out of the codec-owned buffer.
            var outputBuffer = audioEncoder.GetOutputBuffer(index);
            outputBuffer.Position(info.Offset);
            outputBuffer.Limit(info.Offset + info.Size);

            var encoded = new byte[info.Size];
            outputBuffer.Get(encoded, 0, info.Size);

            // Hand the buffer back before taking the queue lock.
            audioEncoder.ReleaseOutputBuffer(index, false);

            lock (outputBuffers)
            {
                outputBuffers.Add(encoded);
            }
        }
        catch (Exception e)
        {
            Logging.error("Exception occured while recording audio stream: " + e);
        }
    }
Beispiel #19
0
 /// <summary>
 /// Stops the encoder and clears the free-buffer list. Idempotent: only the
 /// first call performs the teardown.
 /// </summary>
 public void stop()
 {
     if (!running)
     {
         return;
     }
     running = false;

     var encoder = audioEncoder;
     if (encoder != null)
     {
         try
         {
             encoder.Stop();
             encoder.Release();
         }
         catch (Exception)
         {
             // Best-effort: the codec may already be in an error state.
         }
         encoder.Dispose();
         audioEncoder = null;
     }

     lock (availableBuffers)
     {
         availableBuffers.Clear();
     }
 }
        /// <summary>
        /// Creates and starts an asynchronous AVC decoder rendering onto _surface.
        /// The SPS/PPS parameters are accepted but not yet wired up (see TODO).
        /// </summary>
        public void SetupVideo(int width, int height, byte[] spsData, byte[] ppsData)
        {
            // Describe the incoming H.264 elementary stream.
            MediaFormat videoFormat = MediaFormat.CreateVideoFormat(
                mime: MediaFormat.MimetypeVideoAvc,
                width: width,
                height: height);

            /*
             * TODO: Use SPS / PPS
             * videoFormat.SetByteBuffer("csd-0", Java.Nio.ByteBuffer.Wrap(spsData));
             * videoFormat.SetByteBuffer("csd-1", Java.Nio.ByteBuffer.Wrap(ppsData));
             */

            videoFormat.SetInteger(MediaFormat.KeyMaxInputSize, 100000);

            _videoCodec = MediaCodec.CreateDecoderByType(MediaFormat.MimetypeVideoAvc);
            _videoCodec.SetCallback(this);
            _videoCodec.Configure(format: videoFormat,
                                  surface: new Surface(_surface),
                                  crypto: null,
                                  flags: MediaCodecConfigFlags.None);
            _videoCodec.Start();
        }
        /// <summary>
        /// MediaCodec async callback: release the output buffer with render == true
        /// so the codec renders the frame to the configured surface.
        /// </summary>
        public override void OnOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info)
        {
            // Fix: removed the dead GetOutputBuffer(index) call — its result was
            // never used; rendering is done by the codec itself on release.
            codec.ReleaseOutputBuffer(index, true);
        }
            /// <summary>
            /// MediaCodec async callback: record the free input-buffer index so the
            /// feeding code can pop it from the stack later.
            /// </summary>
            public override void OnInputBufferAvailable(MediaCodec codec, int index)
            {
                buffers.Push(index);
            }
Beispiel #23
0
 /// <summary>
 /// Stops the stream, releasing whichever backend (MediaRecorder or MediaCodec)
 /// is currently in use. No-op when not streaming.
 /// </summary>
 public void stop()
 {
     if (mStreaming)
     {
         try {
             if (mMode == MODE_MEDIARECORDER_API)
             {
                 mMediaRecorder.Stop();
                 mMediaRecorder.Release();
                 mMediaRecorder = null;
                 closeSockets();
                 mPacketizer.stop();
             }
             else
             {
                 // For the MediaCodec path, stop the packetizer before the codec.
                 mPacketizer.stop();
                 mMediaCodec.Stop();
                 mMediaCodec.Release();
                 mMediaCodec = null;
             }
         } catch (Exception e) {
             // Fix: log the full exception. Exception.StackTrace can be null, so the
             // old `e.StackTrace.ToString()` could itself throw NullReferenceException
             // from inside this catch block.
             System.Diagnostics.Trace.WriteLine(e.ToString());
         }
         mStreaming = false;
     }
 }
Beispiel #24
0
        /// <summary>
        /// MediaCodec async callback: pushes the decoded sample to the audio sink
        /// without blocking the codec thread, then returns the buffer.
        /// </summary>
        public override void OnOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info)
        {
            Java.Nio.ByteBuffer decodedSample = codec.GetOutputBuffer(index);

            _audioTrack.Write(decodedSample, 4096, WriteMode.NonBlocking);

            codec.ReleaseOutputBuffer(index, true);
        }
        // Decodes one clip (via MediaExtractor + an AVC decoder rendering to
        // mOutputSurface) and resamples it into the encoder pipeline.
        private void FeedClipToEncoder(SamplerClip clip)
        {
            mLastSampleTime = 0;

            MediaCodec decoder = null;

            MediaExtractor extractor = SetupExtractorForClip(clip);

            if (extractor == null)
            {
                return; // clip could not be opened; nothing to feed
            }

            int trackIndex = GetVideoTrackIndex(extractor);

            extractor.SelectTrack(trackIndex);

            MediaFormat clipFormat = extractor.GetTrackFormat(trackIndex);

            // Seek to the clip's start (if one was set) and snap the recorded start
            // time to the sync frame the extractor actually landed on.
            if (clip.getStartTime() != -1)
            {
                extractor.SeekTo(clip.getStartTime() * 1000, MediaExtractorSeekTo.PreviousSync);
                clip.setStartTime(extractor.SampleTime / 1000);
            }

            try
            {
                decoder        = MediaCodec.CreateDecoderByType(MediaHelper.MIME_TYPE_AVC);
                mOutputSurface = new OutputSurface();

                decoder.Configure(clipFormat, mOutputSurface.Surface, null, 0);
                decoder.Start();

                ResampleVideo(extractor, decoder, clip);
            }
            catch (System.Exception e)
            {
                // NOTE(review): failures are silently swallowed and the clip is
                // skipped — confirm this best-effort behavior is intended; `e` is
                // never logged.
            }
            finally
            {
                // Release decode resources whether or not resampling succeeded.
                if (mOutputSurface != null)
                {
                    mOutputSurface.Release();
                }
                if (decoder != null)
                {
                    decoder.Stop();
                    decoder.Release();
                }

                if (extractor != null)
                {
                    extractor.Release();
                    extractor = null;
                }
            }
        }
 /// <summary>
 /// Wraps an already-configured codec: starts it, caches its buffer arrays, and
 /// sets up the bookkeeping queues of free input/output buffer indices.
 /// </summary>
 private MediaCodecWrapper(MediaCodec codec)
 {
     mDecoder = codec;
     codec.Start();
     mInputBuffers           = codec.GetInputBuffers();
     mOutputBuffers          = codec.GetOutputBuffers();
     mOutputBufferInfo       = new MediaCodec.BufferInfo[mOutputBuffers.Length];
     // Fix: the capacity hints were transposed — the input queue should be sized
     // by the number of input buffers and vice versa. (Capacity only; Queue<T>
     // grows as needed, so the old code worked but was misleading.)
     mAvailableInputBuffers  = new Queue <int>(mInputBuffers.Length);
     mAvailableOutputBuffers = new Queue <int>(mOutputBuffers.Length);
 }
        // Builds an AVC decoder from the captured SPS/PPS, attaches it to this
        // view's surface, and resizes the view to match the video aspect ratio.
        private void Init()
        {
            // NOTE(review): width is inferred from the SPS blob length — a 14-byte
            // SPS is assumed to mean 1280 wide, anything else 960. Fragile; confirm
            // against the actual streams this is used with.
            if (sps.Length == 14)
                decoderWidth = 1280;
            else
                decoderWidth = 960;

            MediaFormat videoFormat = MediaFormat.CreateVideoFormat("video/avc", decoderWidth, decoderHeight);
            // csd-0/csd-1 carry the codec-specific SPS/PPS data.
            videoFormat.SetByteBuffer("csd-0", ByteBuffer.Wrap(sps));
            videoFormat.SetByteBuffer("csd-1", ByteBuffer.Wrap(pps));

            string str = videoFormat.GetString("mime");
            try
            {
                var cdx = MediaCodec.CreateDecoderByType(str);
                cdx.Configure(videoFormat, Holder.Surface, (MediaCrypto)null, 0);
                cdx.SetVideoScalingMode(VideoScalingMode.ScaleToFit);
                cdx.Start();

                codec = cdx;
            }
            catch (Exception ex)
            {
//handle
                // NOTE(review): failures are swallowed and `codec` stays null, yet
                // bConfigured is still set true below — confirm that is intended.
            }

            bConfigured = true;

            //set surface aspect ratio
            MainActivity.getActivity().RunOnUiThread(() =>
            {
                // Letterbox/pillarbox: fit the video inside the screen while
                // preserving its aspect ratio.
                float videoProportion = (float)decoderWidth / (float)decoderHeight;

                var size = new Android.Graphics.Point();
                MainActivity.getActivity().WindowManager.DefaultDisplay.GetSize(size);
                int screenWidth = size.X;
                int screenHeight = size.Y;
                float screenProportion = (float)screenWidth / (float)screenHeight;

                var lp = this.LayoutParameters;
                if (videoProportion > screenProportion)
                {
                    // Video is wider than the screen: full width, reduced height.
                    lp.Width = screenWidth;
                    lp.Height = (int)((float)screenWidth / videoProportion);
                }
                else
                {
                    // Video is taller than the screen: full height, reduced width.
                    lp.Width = (int)(videoProportion * (float)screenHeight);
                    lp.Height = screenHeight;
                }

                this.LayoutParameters = lp;
            });

        }
        /// <summary>
        /// Opens the media file, selects the requested track (audio or video),
        /// configures a decoder for it, and starts the worker. Idempotent: returns
        /// immediately when already initialized. On failure, releases resources and
        /// rethrows.
        /// </summary>
        public void Initialize(IServiceRegistry services, string url, long startPosition, long length)
        {
            if (isInitialized)
            {
                return;
            }

            try
            {
                inputFile = new Java.IO.File(url);
                if (!inputFile.CanRead())
                {
                    throw new Exception(string.Format("Unable to read: {0} ", inputFile.AbsolutePath));
                }

                inputFileDescriptor = new Java.IO.FileInputStream(inputFile);

                // ===================================================================================================
                // Initialize the audio media extractor
                mediaExtractor = new MediaExtractor();
                mediaExtractor.SetDataSource(inputFileDescriptor.FD, startPosition, length);

                var videoTrackIndex = FindTrack(mediaExtractor, MediaType.Video);
                var audioTrackIndex = FindTrack(mediaExtractor, MediaType.Audio);
                HasAudioTrack = audioTrackIndex >= 0;

                mediaTrackIndex = MediaType == MediaType.Audio ? audioTrackIndex : videoTrackIndex;
                if (mediaTrackIndex < 0)
                {
                    throw new Exception(string.Format($"No {MediaType} track found in: {inputFile.AbsolutePath}"));
                }

                mediaExtractor.SelectTrack(mediaTrackIndex);

                var trackFormat = mediaExtractor.GetTrackFormat(mediaTrackIndex);
                MediaDuration = TimeSpanExtensions.FromMicroSeconds(trackFormat.GetLong(MediaFormat.KeyDuration));

                ExtractMediaMetadata(trackFormat);

                // Create a MediaCodec mediadecoder, and configure it with the MediaFormat from the mediaExtractor
                // It's very important to use the format from the mediaExtractor because it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
                var mime = trackFormat.GetString(MediaFormat.KeyMime);
                MediaDecoder = MediaCodec.CreateDecoderByType(mime);
                MediaDecoder.Configure(trackFormat, decoderOutputSurface, null, 0);

                isInitialized = true;

                StartWorker();
            }
            catch (Exception)
            {
                Release();
                // Fix: rethrow with `throw;` — the old `throw e;` reset the original
                // stack trace, hiding where the failure actually occurred.
                throw;
            }
        }
Beispiel #29
0
 /// <summary>
 /// MediaCodec async callback: an input buffer is ready to be filled. Defers the
 /// actual work to a thread-pool task so the codec's callback thread is not
 /// blocked while waiting on the lock.
 /// NOTE(review): Task.Run gives no ordering guarantee, so buffer indices may be
 /// processed out of arrival order — confirm the inner handler tolerates that.
 /// </summary>
 override public void OnInputBufferAvailable(MediaCodec mc, int inputBufferId)
 {
     Console.WriteLine("called OnInputBufferAvailable at Decoder");
     Task.Run(() =>
     {
         lock (this) // NOTE(review): lock(this) is risky if external code also locks this instance — consider a private gate object
         {
             OnInputBufferAvailableInner(mc, inputBufferId);
         }
     });
 }
Beispiel #30
0
 /// <summary>
 /// MediaCodec async callback: publish the free input-buffer index to the
 /// producer side, unless we have already been stopped.
 /// </summary>
 public override void OnInputBufferAvailable(MediaCodec codec, int index)
 {
     if (running)
     {
         lock (availableBuffers)
         {
             availableBuffers.Add(index);
         }
     }
 }