Code Example #1
        protected override void ExtractMediaMetadata(MediaFormat format)
        {
            var audioChannels   = format.GetInteger(MediaFormat.KeyChannelCount);
            var audioSampleRate = format.GetInteger(MediaFormat.KeySampleRate);

            MediaMetadata = new MediaCodecAudioMetadata(audioChannels, audioSampleRate, MediaDuration);
        }
Code Example #2
        /**
         * Edits a video file, saving the contents to a new file. This involves decoding and
         * re-encoding, not to mention conversions between YUV and RGB, and so may be lossy.
         * <p>
         * If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
         * output, but it's not practical to support all OEM formats. By using a SurfaceTexture
         * for output and a Surface for input, we can avoid issues with obscure formats and can
         * use a fragment shader to do transformations.
         */
        private VideoChunks editVideoFile(VideoChunks inputData)
        {
            if (AppSettings.Logging.SendToConsole)
            {
                Log.Debug(TAG, "editVideoFile " + mWidth + "x" + mHeight);
            }
            VideoChunks   outputData    = new VideoChunks();
            MediaCodec    decoder       = null;
            MediaCodec    encoder       = null;
            InputSurface  inputSurface  = null;
            OutputSurface outputSurface = null;

            try {
                MediaFormat inputFormat = inputData.getMediaFormat();
                // Create an encoder format that matches the input format. (Might be able to just
                // re-use the format used to generate the video, since we want it to be the same.)
                MediaFormat outputFormat = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
                outputFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecInfo.CodecCapabilities.COLORFormatSurface);
                outputFormat.SetInteger(MediaFormat.KeyBitRate, inputFormat.GetInteger(MediaFormat.KeyBitRate));
                outputFormat.SetInteger(MediaFormat.KeyFrameRate, inputFormat.GetInteger(MediaFormat.KeyFrameRate));
                outputFormat.SetInteger(MediaFormat.KeyIFrameInterval, inputFormat.GetInteger(MediaFormat.KeyIFrameInterval));
                outputData.setMediaFormat(outputFormat);
                encoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
                encoder.Configure(outputFormat, null, null, MediaCodecConfigFlags.Encode);
                inputSurface = new InputSurface(encoder.CreateInputSurface());
                inputSurface.MakeCurrent();
                encoder.Start();
                // OutputSurface uses the EGL context created by InputSurface.
                decoder       = MediaCodec.CreateDecoderByType(MIME_TYPE);
                outputSurface = new OutputSurface();
                outputSurface.ChangeFragmentShader(FRAGMENT_SHADER);
                decoder.Configure(inputFormat, outputSurface.Surface, null, 0);
                decoder.Start();
                editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
            } finally {
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "shutting down encoder, decoder");
                }
                if (outputSurface != null)
                {
                    outputSurface.Release();
                }
                if (inputSurface != null)
                {
                    inputSurface.Release();
                }
                if (encoder != null)
                {
                    encoder.Stop();
                    encoder.Release();
                }
                if (decoder != null)
                {
                    decoder.Stop();
                    decoder.Release();
                }
            }
            return(outputData);
        }
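
The FRAGMENT_SHADER constant referenced above is not part of this snippet. For illustration only, a hypothetical value could be a GLES external-texture shader that applies a simple color transformation to each decoded frame:

        // Hypothetical FRAGMENT_SHADER value (an assumption, not shown in the original):
        // an external-texture shader that swaps the red and blue channels.
        private const string FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "varying vec2 vTextureCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "    gl_FragColor = texture2D(sTexture, vTextureCoord).bgra;\n" +
            "}\n";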
Code Example #3
        protected override void ExtractMediaMetadata(MediaFormat format)
        {
            var videoWidth  = format.GetInteger(MediaFormat.KeyWidth);
            var videoHeight = format.GetInteger(MediaFormat.KeyHeight);

            Logger.Verbose(string.Format("Video size: ({0}x{1})", videoWidth, videoHeight));

            MediaMetadata = new MediaCodecVideoMetadata(videoWidth, videoHeight, MediaDuration);
        }
Code Example #4
        /**
         * Starts decoding and playing back the audio track of the given media file.
         *
         * @param path the path of the media file to play
         */
        public void startPlay(string path)
        {
            eosReceived = false;
            mExtractor = new MediaExtractor();
            try
            {
                mExtractor.SetDataSource(path);
            }
            catch (IOException e)
            {
                e.PrintStackTrace();
            }

            int channel = 0;
            for (int i = 0; i < mExtractor.TrackCount; i++)
            {
                MediaFormat format = mExtractor.GetTrackFormat(i);
                string mime = format.GetString(MediaFormat.KeyMime);
                if (mime.StartsWith("audio/"))
                {
                    mExtractor.SelectTrack(i);
                    Log.Debug("TAG", "format : " + format);
                    ByteBuffer csd = format.GetByteBuffer("csd-0");

                    for (int k = 0; k < csd.Capacity(); ++k)
                    {
                        Log.Error("TAG", "csd : " + csd.ToArray<Byte>()[k]);
                    }
                    mSampleRate = format.GetInteger(MediaFormat.KeySampleRate);
                    channel = format.GetInteger(MediaFormat.KeyChannelCount);
                    break;
                }
            }
            MediaFormat format2 = makeAACCodecSpecificData(MediaCodecInfo.CodecProfileLevel.AACObjectLC, mSampleRate, channel);
            if (format2 == null)
                return;

            mDecoder = MediaCodec.CreateDecoderByType("audio/mp4a-latm");
            if (mDecoder == null)
            {
                Log.Error("DecodeActivity", "Can't create audio decoder!");
                return;
            }

            // Configure the decoder with the codec-specific data built above.
            mDecoder.Configure(format2, null, null, 0);
            mDecoder.Start();

            new Thread(AACDecoderAndPlayRunnable).Start();
        }
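
The makeAACCodecSpecificData helper called above is not shown. A minimal sketch, assuming it builds the standard two-byte AAC AudioSpecificConfig ("csd-0") from the profile, sample-rate index, and channel count, and assuming the profile argument binds to the MediaCodecProfileType enum, could look like this:

        // Sketch of the assumed makeAACCodecSpecificData helper (not the original implementation).
        private MediaFormat makeAACCodecSpecificData(MediaCodecProfileType audioProfile, int sampleRate, int channelConfig)
        {
            int[] samplingFreqs = { 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000 };
            int sampleIndex = System.Array.IndexOf(samplingFreqs, sampleRate);
            if (sampleIndex == -1)
            {
                return null; // unsupported sample rate
            }

            MediaFormat format = new MediaFormat();
            format.SetString(MediaFormat.KeyMime, "audio/mp4a-latm");
            format.SetInteger(MediaFormat.KeySampleRate, sampleRate);
            format.SetInteger(MediaFormat.KeyChannelCount, channelConfig);

            // AudioSpecificConfig layout: 5 bits object type, 4 bits frequency index, 4 bits channel config.
            ByteBuffer csd = ByteBuffer.Allocate(2);
            csd.Put(0, (sbyte)(((int)audioProfile << 3) | (sampleIndex >> 1)));
            csd.Put(1, (sbyte)(((sampleIndex << 7) & 0x80) | (channelConfig << 3)));
            format.SetByteBuffer("csd-0", csd);
            return format;
        }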
Code Example #5
        public static int GetMediaFormatPropertyInteger(Uri uri, string key, int defaultValue)
        {
            int value = defaultValue;

            MediaExtractor extractor = new MediaExtractor();

            try
            {
                extractor.SetDataSource(uri.ToString());
            }
            catch (System.Exception e)
            {
                return(value);
            }

            MediaFormat format = GetTrackFormat(extractor, MIME_TYPE_AVC);

            extractor.Release();

            if (format != null && format.ContainsKey(key))
            {
                value = format.GetInteger(key);
            }

            return(value);
        }
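
GetTrackFormat and MIME_TYPE_AVC are assumed by the example above; MIME_TYPE_AVC would typically be "video/avc". A minimal sketch of such a helper, returning the first track whose MIME type starts with the given prefix (or null if none matches), might be:

        // Sketch of the assumed GetTrackFormat helper (hypothetical, not the original).
        private static MediaFormat GetTrackFormat(MediaExtractor extractor, string mimeType)
        {
            for (int i = 0; i < extractor.TrackCount; i++)
            {
                MediaFormat trackFormat = extractor.GetTrackFormat(i);
                string mime = trackFormat.GetString(MediaFormat.KeyMime);
                if (mime != null && mime.StartsWith(mimeType))
                {
                    return trackFormat;
                }
            }
            return null; // no matching track found
        }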
Code Example #6
 private int getInt(MediaFormat mediaFormat, string key, int defaultValue)
 {
     if (mediaFormat.ContainsKey(key))
     {
         return mediaFormat.GetInteger(key);
     }
     return defaultValue;
 }
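
As a brief usage note, this helper reads optional keys with a fallback; for example (hypothetical mediaFormat variable):

 // Fall back to 30 fps when the track does not report a frame rate.
 int frameRate = getInt(mediaFormat, MediaFormat.KeyFrameRate, 30);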
Code Example #7
        public MediaFormat CreateVideoOutputFormat(MediaFormat inputFormat)
        {
            int width = inputFormat.GetInteger(MediaFormat.KeyWidth);
            int height = inputFormat.GetInteger(MediaFormat.KeyHeight);
            int longer, shorter, outWidth, outHeight;

            if (width >= height)
            {
                longer    = width;
                shorter   = height;
                outWidth  = LONGER_LENGTH;
                outHeight = SHORTER_LENGTH;
            }
            else
            {
                shorter   = width;
                longer    = height;
                outWidth  = SHORTER_LENGTH;
                outHeight = LONGER_LENGTH;
            }

            //if (longer * 9 != shorter * 16)
            //{
            //    throw new OutputFormatUnavailableException("This video is not 16:9, and is not able to transcode. (" + width + "x" + height + ")");
            //}
            if (shorter <= SHORTER_LENGTH)
            {
#if DEBUG
                System.Console.WriteLine("This video is less or equal to 720p, pass-through. (" + width + "x" + height + ")");
#endif

                return(null);
            }

            MediaFormat format = MediaFormat.CreateVideoFormat("video/avc", outWidth, outHeight);
            format.SetInteger(MediaFormat.KeyBitRate, mVideoBitrate);
            format.SetInteger(MediaFormat.KeyFrameRate, 30);
            format.SetInteger(MediaFormat.KeyIFrameInterval, 3);
            // this is obsolete: MediaCodecInfo.CodecCapabilities.COLORFormatSurface, so using MediaCodecCapabilities.Formatsurface instead
            format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);

            return(format);
        }
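
LONGER_LENGTH and SHORTER_LENGTH are not included in this snippet; given the "less or equal to 720p" pass-through check, a plausible definition (an assumption) would be:

        // Assumed target-size constants for the 16:9 / 720p output described above.
        private const int LONGER_LENGTH  = 1280;
        private const int SHORTER_LENGTH = 720;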
Code Example #8
        override public void OnOutputBufferAvailable(MediaCodec mc, int outputBufferId, BufferInfo info)
        {
            ByteBuffer  outputBuffer = mDecoder.GetOutputBuffer(outputBufferId);
            MediaFormat bufferFormat = mDecoder.GetOutputFormat(outputBufferId); // option A

            Console.WriteLine("decoded buffer format:" + bufferFormat.ToString());

            // bufferFormat is equivalent to mOutputFormat
            // outputBuffer is ready to be processed or rendered.

            Console.WriteLine("OnOutputBufferAvailable: outputBufferId = " + outputBufferId.ToString());
            byte[] decoded_data = new byte[info.Size];
            outputBuffer.Position(info.Offset);
            outputBuffer.Get(decoded_data, 0, info.Size);
            mDecoder.ReleaseOutputBuffer(outputBufferId, false);
            Console.WriteLine("call OnDecodeFrame from decoder!");

            Console.WriteLine("bufferFormat.getInteger(MediaFormat.KeyWidth)=" + bufferFormat.GetInteger(MediaFormat.KeyWidth).ToString() + " bufferFormat.getInteger(MediaFormat.KeyHeight)=" + bufferFormat.GetInteger(MediaFormat.KeyHeight).ToString());
            mCallbackObj.OnDecodeFrame(decoded_data, bufferFormat.GetInteger(MediaFormat.KeyWidth), bufferFormat.GetInteger(MediaFormat.KeyHeight), bufferFormat.GetInteger(MediaFormat.KeyColorFormat));
        }
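
The mCallbackObj field above is assumed to implement a small frame callback; a hypothetical shape of that interface, matching the call site, is:

        // Hypothetical callback interface assumed by the handler above.
        public interface IDecodeFrameCallback
        {
            void OnDecodeFrame(byte[] data, int width, int height, int colorFormat);
        }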
Code Example #9
        public MediaFormat CreateAudioOutputFormat(MediaFormat inputFormat)
        {
            if (mAudioBitrate == AUDIO_BITRATE_AS_IS || mAudioChannels == AUDIO_CHANNELS_AS_IS)
            {
                return(null);
            }

            // Use original sample rate, as resampling is not supported yet.
            MediaFormat format = MediaFormat.CreateAudioFormat(MediaFormatExtraConstants.MimetypeAudioAac,
                                                               inputFormat.GetInteger(MediaFormat.KeySampleRate),
                                                               mAudioChannels);

            // this is obsolete: MediaCodecInfo.CodecProfileLevel.AACObjectLC, so using MediaCodecProfileType.Aacobjectlc instead
            format.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecProfileType.Aacobjectlc);
            format.SetInteger(MediaFormat.KeyBitRate, mAudioBitrate);
            return(format);
        }
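
AUDIO_BITRATE_AS_IS and AUDIO_CHANNELS_AS_IS are sentinel values meaning "keep the source setting"; a plausible definition (an assumption) is:

        // Assumed sentinel values for the pass-through check above.
        public const int AUDIO_BITRATE_AS_IS  = -1;
        public const int AUDIO_CHANNELS_AS_IS = -1;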
Code Example #10
        public static MediaFormat GetAudioTrackFormat(string filepath, Android.Net.Uri inputUri = null)
        {
            MediaExtractor extractor = new MediaExtractor();

            if (inputUri != null)
            {
                extractor.SetDataSource(Android.App.Application.Context, inputUri, null);
            }
            else if (filepath != null)
            {
                extractor.SetDataSource(filepath);
            }
            int trackCount = extractor.TrackCount;
            int bufferSize = -1;

            for (int i = 0; i < trackCount; i++)
            {
                MediaFormat format             = extractor.GetTrackFormat(i);
                string      mime               = format.GetString(MediaFormat.KeyMime);
                bool        selectCurrentTrack = false;
                if (mime.StartsWith("audio/"))
                {
                    selectCurrentTrack = true;
                }
                else if (mime.StartsWith("video/"))
                {
                    selectCurrentTrack = false;
                }
                if (selectCurrentTrack)
                {
                    extractor.SelectTrack(i);
                    if (format.ContainsKey(MediaFormat.KeyMaxInputSize))
                    {
                        int newSize = format.GetInteger(MediaFormat.KeyMaxInputSize);
                        bufferSize = newSize > bufferSize ? newSize : bufferSize;
                    }
                    return(format);
                }
            }
            return(null);
        }
Code Example #11
 public Task <bool> TrimAsync(int startMS, int lengthMS, string inputPath, string outputPath)
 {
     return(Task.Run <bool>(() =>
     {
         try
         {
             bool didOperationSucceed = false;
             MediaExtractor extractor = new MediaExtractor();
             extractor.SetDataSource(inputPath);
             int trackCount = extractor.TrackCount;
             // Set up MediaMuxer for the destination.
             MediaMuxer muxer;
             muxer = new MediaMuxer(outputPath, MuxerOutputType.Mpeg4);
             // Set up the tracks and retrieve the max buffer size for selected
             // tracks.
             Dictionary <int, int> indexDict = new Dictionary <int, int>(trackCount);
             int bufferSize = -1;
             for (int i = 0; i < trackCount; i++)
             {
                 MediaFormat format = extractor.GetTrackFormat(i);
                 string mime = format.GetString(MediaFormat.KeyMime);
                 bool selectCurrentTrack = false;
                 if (mime.StartsWith("audio/"))
                 {
                     selectCurrentTrack = true;
                 }
                 else if (mime.StartsWith("video/"))
                 {
                     selectCurrentTrack = true;
                 }
                 if (selectCurrentTrack)
                 {
                     extractor.SelectTrack(i);
                     int dstIndex = muxer.AddTrack(format);
                     indexDict.Add(i, dstIndex);
                     if (format.ContainsKey(MediaFormat.KeyMaxInputSize))
                     {
                         int newSize = format.GetInteger(MediaFormat.KeyMaxInputSize);
                         bufferSize = newSize > bufferSize ? newSize : bufferSize;
                     }
                 }
             }
             if (bufferSize < 0)
             {
                 bufferSize = 1337; // Fallback when no selected track reports KeyMaxInputSize; in practice it is usually set by this point.
             }
             // Set up the orientation and starting time for extractor.
             MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever();
             retrieverSrc.SetDataSource(inputPath);
             string degreesString = retrieverSrc.ExtractMetadata(MetadataKey.VideoRotation);
             if (degreesString != null)
             {
                 int degrees = int.Parse(degreesString);
                 if (degrees >= 0)
                 {
                     muxer.SetOrientationHint(degrees);
                 }
             }
             if (startMS > 0)
             {
                 extractor.SeekTo(startMS * 1000, MediaExtractorSeekTo.ClosestSync);
             }
             // Copy the samples from MediaExtractor to MediaMuxer. We will loop
             // for copying each sample and stop when we get to the end of the source
             // file or exceed the end time of the trimming.
             int offset = 0;
             int trackIndex = -1;
             ByteBuffer dstBuf = ByteBuffer.Allocate(bufferSize);
             MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
             try
             {
                 muxer.Start();
                 while (true)
                 {
                     bufferInfo.Offset = offset;
                     bufferInfo.Size = extractor.ReadSampleData(dstBuf, offset);
                     if (bufferInfo.Size < 0)
                     {
                         bufferInfo.Size = 0;
                         break;
                     }
                     else
                     {
                         bufferInfo.PresentationTimeUs = extractor.SampleTime;
                         if (lengthMS > 0 && bufferInfo.PresentationTimeUs > ((startMS + lengthMS - 1) * 1000))
                         {
                             Console.WriteLine("The current sample is over the trim end time.");
                             break;
                         }
                         else
                         {
                             bufferInfo.Flags = ConvertMediaExtractorSampleFlagsToMediaCodecBufferFlags(extractor.SampleFlags);
                             trackIndex = extractor.SampleTrackIndex;
                             muxer.WriteSampleData(indexDict[trackIndex], dstBuf, bufferInfo);
                             extractor.Advance();
                         }
                     }
                 }
                 muxer.Stop();
                 didOperationSucceed = true;
                 //deleting the old file
                 //JFile file = new JFile(srcPath);
                 //file.Delete();
             }
             catch (IllegalStateException e)
             {
                 // Swallow the exception due to malformed source.
                 Console.WriteLine("The source video file is malformed");
             }
             finally
             {
                 muxer.Release();
             }
             return didOperationSucceed;
         }
         catch (System.Exception xx)
         {
             return false;
         }
     }));
 }
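
ConvertMediaExtractorSampleFlagsToMediaCodecBufferFlags is referenced above but not shown. A minimal sketch, assuming it simply maps sync samples to key frames, could be:

 // Sketch of the assumed flag-conversion helper (hypothetical implementation).
 private static MediaCodecBufferFlags ConvertMediaExtractorSampleFlagsToMediaCodecBufferFlags(MediaExtractorSampleFlags sampleFlags)
 {
     MediaCodecBufferFlags flags = MediaCodecBufferFlags.None;
     if (sampleFlags.HasFlag(MediaExtractorSampleFlags.Sync))
     {
         flags |= MediaCodecBufferFlags.KeyFrame; // sync samples become key frames
     }
     return flags;
 }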
Code Example #12
        /**
         * Tests extraction from an MP4 to a series of PNG files.
         * <p>
         * We scale the video to 640x480 for the PNG just to demonstrate that we can scale the
         * video with the GPU.  If the input video has a different aspect ratio, we could preserve
         * it by adjusting the GL viewport to get letterboxing or pillarboxing, but generally if
         * you're extracting frames you don't want black bars.
         */
        public void extractMpegFrames(int saveWidth, int saveHeight)
        {
            MediaCodec         decoder       = null;
            CodecOutputSurface outputSurface = null;
            MediaExtractor     extractor     = null;

            try
            {
                // The input path must be absolute. The MediaExtractor error messages aren't very useful,
                // so check whether the input file exists and throw a better exception if it's not there.
                File inputFile = new File(_filesdir, INPUT_FILE);
                if (!inputFile.CanRead())
                {
                    throw new FileNotFoundException("Unable to read " + inputFile);
                }

                extractor = new MediaExtractor();
                extractor.SetDataSource(inputFile.ToString());
                int trackIndex = selectTrack(extractor);
                if (trackIndex < 0)
                {
                    throw new RuntimeException("No video track found in " + inputFile);
                }
                extractor.SelectTrack(trackIndex);

                MediaFormat format = extractor.GetTrackFormat(trackIndex);

                if (VERBOSE)
                {
                    Log.Info(TAG, "Video size is " + format.GetInteger(MediaFormat.KeyWidth) + "x" + format.GetInteger(MediaFormat.KeyHeight));
                }


                // Could use width/height from the MediaFormat to get full-size frames.

                outputSurface = new CodecOutputSurface(saveWidth, saveHeight);

                // Create a MediaCodec decoder, and configure it with the MediaFormat from the
                // extractor.  It's very important to use the format from the extractor because
                // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
                String mime = format.GetString(MediaFormat.KeyMime);
                decoder = MediaCodec.CreateDecoderByType(mime);
                decoder.Configure(format, outputSurface.getSurface(), null, 0);
                decoder.Start();

                doExtract(extractor, trackIndex, decoder, outputSurface);
            }
            finally
            {
                // release everything we grabbed
                if (outputSurface != null)
                {
                    outputSurface.release();
                    outputSurface = null;
                }
                if (decoder != null)
                {
                    decoder.Stop();
                    decoder.Release();
                    decoder = null;
                }
                if (extractor != null)
                {
                    extractor.Release();
                    extractor = null;
                }
            }
        }
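
The selectTrack helper used above is not part of the snippet. A minimal sketch, assuming it returns the index of the first video track (or -1 if none is found), is:

        // Sketch of the assumed selectTrack helper.
        private int selectTrack(MediaExtractor extractor)
        {
            for (int i = 0; i < extractor.TrackCount; i++)
            {
                MediaFormat trackFormat = extractor.GetTrackFormat(i);
                string mime = trackFormat.GetString(MediaFormat.KeyMime);
                if (mime != null && mime.StartsWith("video/"))
                {
                    return i;
                }
            }
            return -1; // no video track found
        }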
Code Example #13
        /**
         * Converts the image obtained from the decoder to NV21.
         */
        private void convertToNV21(int k)
        {
            byte[] buffer = new byte[3 * mSize / 2];

            int  stride = mWidth, sliceHeight = mHeight;
            int  colorFormat = mDecoderColorFormat;
            bool planar      = false;

            if (mDecOutputFormat != null)
            {
                MediaFormat format = mDecOutputFormat;
                if (format != null)
                {
                    if (format.ContainsKey("slice-height"))
                    {
                        sliceHeight = format.GetInteger("slice-height");
                        if (sliceHeight < mHeight)
                        {
                            sliceHeight = mHeight;
                        }
                    }
                    if (format.ContainsKey("stride"))
                    {
                        stride = format.GetInteger("stride");
                        if (stride < mWidth)
                        {
                            stride = mWidth;
                        }
                    }
                    if (format.ContainsKey(MediaFormat.KeyColorFormat) && format.GetInteger(MediaFormat.KeyColorFormat) > 0)
                    {
                        colorFormat = format.GetInteger(MediaFormat.KeyColorFormat);
                    }
                }
            }

            switch ((MediaCodecCapabilities)colorFormat)
            {
            case MediaCodecCapabilities.Formatyuv420semiplanar:
            case MediaCodecCapabilities.Formatyuv420packedsemiplanar:
            case MediaCodecCapabilities.TiFormatyuv420packedsemiplanar:
                planar = false;
                break;

            case MediaCodecCapabilities.Formatyuv420planar:
            case MediaCodecCapabilities.Formatyuv420packedplanar:
                planar = true;
                break;
            }

            for (int i = 0; i < mSize; i++)
            {
                if (i % mWidth == 0)
                {
                    i += stride - mWidth;
                }
                buffer[i] = mDecodedVideo[k][i];
            }

            if (!planar)
            {
                for (int i = 0, j = 0; j < mSize / 4; i += 1, j += 1)
                {
                    if (i % mWidth / 2 == 0)
                    {
                        i += (stride - mWidth) / 2;
                    }
                    buffer[mSize + 2 * j + 1] = mDecodedVideo[k][stride * sliceHeight + 2 * i];
                    buffer[mSize + 2 * j]     = mDecodedVideo[k][stride * sliceHeight + 2 * i + 1];
                }
            }
            else
            {
                for (int i = 0, j = 0; j < mSize / 4; i += 1, j += 1)
                {
                    if (i % mWidth / 2 == 0)
                    {
                        i += (stride - mWidth) / 2;
                    }
                    buffer[mSize + 2 * j + 1] = mDecodedVideo[k][stride * sliceHeight + i];
                    buffer[mSize + 2 * j]     = mDecodedVideo[k][stride * sliceHeight * 5 / 4 + i];
                }
            }

            mDecodedVideo[k] = buffer;
        }
Code Example #14
        private static bool genVideoUsingMuxer(String srcPath, String dstPath, long startMicroSeconds, long endMicroSeconds, bool useAudio, bool useVideo)
        {
            if (startMicroSeconds == endMicroSeconds)
            {
                throw new InvalidParameterException("End time must be greater than start time.");
            }
            // Set up MediaExtractor to read from the source.
            MediaExtractor extractor = new MediaExtractor();

            extractor.SetDataSource(srcPath);
            int trackCount = extractor.TrackCount;
            // Set up MediaMuxer for the destination.
            var muxer = new MediaMuxer(dstPath, MuxerOutputType.Mpeg4);
            // Set up the tracks and retrieve the max buffer size for selected
            // tracks.
            Dictionary <int, int> indexMap = new Dictionary <int, int>(trackCount);
            int bufferSize = -1;

            for (int i = 0; i < trackCount; i++)
            {
                MediaFormat format             = extractor.GetTrackFormat(i);
                String      mime               = format.GetString(MediaFormat.KeyMime);
                bool        selectCurrentTrack = false;

                if (mime.StartsWith("audio/") && useAudio)
                {
                    selectCurrentTrack = true;
                }
                else if (mime.StartsWith("video/") && useVideo)
                {
                    selectCurrentTrack = true;
                }

                if (selectCurrentTrack)
                {
                    extractor.SelectTrack(i);
                    int dstIndex = muxer.AddTrack(format);
                    indexMap.Add(i, dstIndex);

                    if (format.ContainsKey(MediaFormat.KeyMaxInputSize))
                    {
                        int newSize = format.GetInteger(MediaFormat.KeyMaxInputSize);
                        bufferSize = newSize > bufferSize? newSize : bufferSize;
                    }
                }
            }

            if (bufferSize < 0)
            {
                bufferSize = DEFAULT_BUFFER_SIZE;
            }
            // Set up the orientation and starting time for extractor.
            MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever();

            retrieverSrc.SetDataSource(srcPath);
            String degreesString = retrieverSrc.ExtractMetadata(MetadataKey.VideoRotation);

            if (degreesString != null)
            {
                int degrees = Integer.ParseInt(degreesString);
                if (degrees >= 0)
                {
                    muxer.SetOrientationHint(degrees);
                }
            }

            if (startMicroSeconds > 0)
            {
                extractor.SeekTo(startMicroSeconds, MediaExtractorSeekTo.ClosestSync);
            }
            // Copy the samples from MediaExtractor to MediaMuxer. We will loop
            // for copying each sample and stop when we get to the end of the source
            // file or exceed the end time of the trimming.
            int        offset     = 0;
            int        trackIndex = -1;
            ByteBuffer dstBuf     = ByteBuffer.Allocate(bufferSize);

            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

            try
            {
                muxer.Start();
                while (true)
                {
                    bufferInfo.Offset = offset;
                    bufferInfo.Size   = extractor.ReadSampleData(dstBuf, offset);
                    if (bufferInfo.Size < 0)
                    {
                        Log.Info(LOGTAG, "Saw input EOS.");
                        bufferInfo.Size = 0;
                        break;
                    }
                    else
                    {
                        bufferInfo.PresentationTimeUs = extractor.SampleTime;
                        if (endMicroSeconds > 0 && bufferInfo.PresentationTimeUs > endMicroSeconds)
                        {
                            Log.Info(LOGTAG, "The current sample is over the trim end time.");
                            break;
                        }
                        else
                        {
                            bufferInfo.Flags = GetSyncsampleflags(extractor.SampleFlags); // map the extractor sample flags to MediaCodec buffer flags
                            trackIndex       = extractor.SampleTrackIndex;
                            muxer.WriteSampleData(indexMap[trackIndex], dstBuf, bufferInfo);
                            extractor.Advance();
                        }
                    }
                }
                muxer.Stop();
            }
            catch (IllegalStateException e)
            {
                // Swallow the exception due to malformed source.
                Log.Info(LOGTAG, "The source video file is malformed");
                return(false);
            }
            finally
            {
                muxer.Release();
            }
            return(true);
        }
Code Example #15
        public static Result DecodeAudio(FileDescriptor descriptor, long offset, long length)
        {
            using (var extractor = new MediaExtractor())
            {
                extractor.SetDataSource(descriptor, offset, length);

                MediaFormat format = null;
                string      mime   = null;
                for (int i = 0; i < extractor.TrackCount; i++)
                {
                    format = extractor.GetTrackFormat(i);
                    mime   = format.GetString(MediaFormat.KeyMime);

                    if (!mime.StartsWith("audio/"))
                    {
                        continue;
                    }
                    extractor.SelectTrack(i);
                }

                if (format == null || !mime.StartsWith("audio/"))
                {
                    throw new ContentLoadException("Could not find any audio track.");
                }

                int  sampleRate   = format.GetInteger(MediaFormat.KeySampleRate);
                long duration     = format.GetLong(MediaFormat.KeyDuration);
                int  channels     = format.GetInteger(MediaFormat.KeyChannelCount);
                int  samples      = (int)(sampleRate * duration / 1000000d);
                var  output       = new byte[samples * 2];
                int  timeoutsLeft = 1000;

                var decoder = MediaCodecPool.RentDecoder(mime);
                try
                {
                    decoder.Configure(format, null, null, MediaCodecConfigFlags.None);
                    decoder.Start();

                    ByteBuffer[] inputBuffers  = decoder.GetInputBuffers();
                    ByteBuffer[] outputBuffers = decoder.GetOutputBuffers();

                    var  bufferInfo  = new MediaCodec.BufferInfo();
                    int  totalOffset = 0;
                    bool endOfStream = false;
                    while (true)
                    {
                        // We don't need an endOfStream local, but it
                        // saves us a few calls to the decoder.
                        if (!endOfStream)
                        {
                            int inputBufIndex = decoder.DequeueInputBuffer(5000);
                            if (inputBufIndex >= 0)
                            {
                                int size = extractor.ReadSampleData(inputBuffers[inputBufIndex], 0);
                                if (size > 0)
                                {
                                    decoder.QueueInputBuffer(
                                        inputBufIndex, 0, size, extractor.SampleTime, MediaCodecBufferFlags.None);
                                }

                                if (!extractor.Advance())
                                {
                                    endOfStream = true;
                                    decoder.QueueInputBuffer(
                                        inputBufIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
                                }
                            }
                        }

                        int decoderStatus = decoder.DequeueOutputBuffer(bufferInfo, 5000);
                        if (decoderStatus >= 0)
                        {
                            IntPtr bufferPtr = outputBuffers[decoderStatus].GetDirectBufferAddress();
                            IntPtr offsetPtr = bufferPtr + bufferInfo.Offset;
                            int    size      = bufferInfo.Size;
                            Marshal.Copy(offsetPtr, output, totalOffset, size);

                            decoder.ReleaseOutputBuffer(decoderStatus, render: false);
                            totalOffset += size;

                            if (bufferInfo.Flags.HasFlag(MediaCodecBufferFlags.EndOfStream))
                            {
                                if (totalOffset != output.Length)
                                {
                                    throw new ContentLoadException(
                                              "Reached end of stream before reading expected amount of samples.");
                                }
                                break;
                            }
                        }
                        else if (decoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                        {
                            outputBuffers = decoder.GetOutputBuffers();
                        }
                        else if (decoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                        {
                            if (timeoutsLeft-- <= 0)
                            {
                                break;
                            }
                        }
                    }
                }
                finally
                {
                    decoder.Stop();
                    MediaCodecPool.ReturnDecoder(mime, decoder);
                }

                if (timeoutsLeft <= 0)
                {
                    throw new ContentLoadException("Could not load sound effect in designated time frame.");
                }
                return(new Result(output, sampleRate, channels, mime));
            }
        }
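
MediaCodecPool.RentDecoder and ReturnDecoder are assumed by the example above. A minimal sketch of such a pool, which hands out decoders per MIME type and resets returned ones for reuse (an assumption, not the original implementation), could be:

        // Hypothetical decoder pool matching the RentDecoder/ReturnDecoder calls above.
        // Assumes System.Collections.Generic and Android.Media are imported.
        internal static class MediaCodecPool
        {
            private static readonly Dictionary<string, Stack<MediaCodec>> _decoders =
                new Dictionary<string, Stack<MediaCodec>>();

            public static MediaCodec RentDecoder(string mime)
            {
                lock (_decoders)
                {
                    if (_decoders.TryGetValue(mime, out Stack<MediaCodec> stack) && stack.Count > 0)
                    {
                        return stack.Pop();
                    }
                }
                return MediaCodec.CreateDecoderByType(mime);
            }

            public static void ReturnDecoder(string mime, MediaCodec decoder)
            {
                decoder.Reset(); // API 21+: make the codec configurable again before pooling it
                lock (_decoders)
                {
                    if (!_decoders.TryGetValue(mime, out Stack<MediaCodec> stack))
                    {
                        _decoders[mime] = stack = new Stack<MediaCodec>();
                    }
                    stack.Push(decoder);
                }
            }
        }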
Code Example #16
        private static String printAudioMetadata(Context context, MediaFormat mediaFormat)
        {
            if (mediaFormat == null)
            {
                return("\n");
            }
            StringBuilder stringBuilder = new StringBuilder();

            if (mediaFormat.ContainsKey(MediaFormat.KeyMime))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_mime_type, mediaFormat.GetString(MediaFormat.KeyMime)));
            }
            if (mediaFormat.ContainsKey(MediaFormat.KeyChannelCount))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_channel_count, mediaFormat.GetInteger(MediaFormat.KeyChannelCount)));
            }
            if (mediaFormat.ContainsKey(MediaFormat.KeyBitRate))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_bitrate, mediaFormat.GetInteger(MediaFormat.KeyBitRate)));
            }
            if (mediaFormat.ContainsKey(MediaFormat.KeyDuration))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_duration, mediaFormat.GetLong(MediaFormat.KeyDuration)));
            }
            if (mediaFormat.ContainsKey(MediaFormat.KeySampleRate))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_sampling_rate, mediaFormat.GetInteger(MediaFormat.KeySampleRate)));
            }
            return(stringBuilder.ToString());
        }
Code Example #17
        private static String printVideoMetadata(Context context, MediaFormat mediaFormat)
        {
            if (mediaFormat == null)
            {
                return("\n");
            }
            StringBuilder stringBuilder = new StringBuilder();

            if (mediaFormat.ContainsKey(MediaFormat.KeyMime))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_mime_type, mediaFormat.GetString(MediaFormat.KeyMime)));
            }
            if (mediaFormat.ContainsKey(MediaFormat.KeyWidth))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_width, mediaFormat.GetInteger(MediaFormat.KeyWidth)));
            }
            if (mediaFormat.ContainsKey(MediaFormat.KeyHeight))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_height, mediaFormat.GetInteger(MediaFormat.KeyHeight)));
            }
            if (mediaFormat.ContainsKey(MediaFormat.KeyBitRate))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_bitrate, mediaFormat.GetInteger(MediaFormat.KeyBitRate)));
            }
            if (mediaFormat.ContainsKey(MediaFormat.KeyDuration))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_duration, mediaFormat.GetLong(MediaFormat.KeyDuration)));
            }
            if (mediaFormat.ContainsKey(MediaFormat.KeyFrameRate))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_frame_rate, MediaFormatUtils.GetFrameRate(mediaFormat, new Java.Lang.Integer(0)).IntValue()));
            }
            if (mediaFormat.ContainsKey(MediaFormat.KeyIFrameInterval))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_key_frame_interval, MediaFormatUtils.GetIFrameInterval(mediaFormat, new Java.Lang.Integer(0)).IntValue()));
            }
            if (mediaFormat.ContainsKey(KEY_ROTATION))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_rotation, mediaFormat.GetInteger(KEY_ROTATION)));
            }
            return(stringBuilder.ToString());
        }
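
KEY_ROTATION above is a string constant for the format's rotation entry, which older bindings may not expose directly; a matching definition would be:

        // Rotation key as used above; equivalent to MediaFormat.KEY_ROTATION ("rotation-degrees").
        private const string KEY_ROTATION = "rotation-degrees";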
Code Example #18
        private static String printImageMetadata(Context context, MediaFormat mediaFormat)
        {
            if (mediaFormat == null)
            {
                return("\n");
            }
            StringBuilder stringBuilder = new StringBuilder();

            if (mediaFormat.ContainsKey(MediaFormat.KeyMime))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_mime_type, mediaFormat.GetString(MediaFormat.KeyMime)));
            }
            if (mediaFormat.ContainsKey(MediaFormat.KeyWidth))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_width, mediaFormat.GetInteger(MediaFormat.KeyWidth)));
            }
            if (mediaFormat.ContainsKey(MediaFormat.KeyHeight))
            {
                stringBuilder.AppendLine(context.GetString(Resource.String.stats_height, mediaFormat.GetInteger(MediaFormat.KeyHeight)));
            }
            return(stringBuilder.ToString());
        }
Code Example #19
        /// <summary>
        /// If both the inputPath string and inputUri are non-null, this
        /// method will use the Uri; otherwise, set one or the other.
        ///
        /// They cannot both be null.
        /// </summary>
        /// <param name="startMs">the start ms for trimming</param>
        /// <param name="endMs">the final ms for trimming</param>
        /// <param name="inputPath">optional input path string</param>
        /// <param name="muxer">the muxer to use for writing bytes</param>
        /// <param name="trackIndexOverride">the track index for muxer read/write to</param>
        /// <param name="bufferInfo">an input bufferinfo to get properties from</param>
        /// <param name="outputPath">the output path for method to check after finished encoding</param>
        /// <param name="ptOffset">the presentation time offset for audio, used in syncing audio and video</param>
        /// <param name="inputUri">optional inputUri to read from</param>
        /// <returns>the output path if the muxed output file exists when encoding finishes, otherwise null</returns>
        public async Task <string> HybridMuxingTrimmer(int startMs, int endMs, string inputPath, MediaMuxer muxer, int trackIndexOverride = -1, BufferInfo bufferInfo = null, string outputPath = null, long ptOffset = 0, Android.Net.Uri inputUri = null)
        {
            var tio = trackIndexOverride;
            await Task.Run(() =>
            {
                if (outputPath == null)
                {
                    outputPath = FileToMp4.LatestOutputPath;
                }
                MediaExtractor ext = new MediaExtractor();
                if (inputUri != null)
                {
                    ext.SetDataSource(Android.App.Application.Context, inputUri, null);
                }
                else
                {
                    ext.SetDataSource(inputPath);
                }
                int trackCount = ext.TrackCount;
                Dictionary <int, int> indexDict = new Dictionary <int, int>(trackCount);
                int bufferSize = -1;
                for (int i = 0; i < trackCount; i++)
                {
                    MediaFormat format      = ext.GetTrackFormat(i);
                    string mime             = format.GetString(MediaFormat.KeyMime);
                    bool selectCurrentTrack = false;
                    if (mime.StartsWith("audio/"))
                    {
                        selectCurrentTrack = true;
                    }
                    else if (mime.StartsWith("video/"))
                    {
                        selectCurrentTrack = false;
                    }                                                                   /*rerouted to gl video encoder*/
                    if (selectCurrentTrack)
                    {
                        ext.SelectTrack(i);
                        if (tio != -1)
                        {
                            indexDict.Add(i, i);
                        }
                        if (format.ContainsKey(MediaFormat.KeyMaxInputSize))
                        {
                            int newSize = format.GetInteger(MediaFormat.KeyMaxInputSize);
                            bufferSize  = newSize > bufferSize ? newSize : bufferSize;
                        }
                    }
                }
                MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever();
                if (!System.String.IsNullOrWhiteSpace(inputPath))
                {
                    retrieverSrc.SetDataSource(inputPath);
                }
                else
                {
                    retrieverSrc.SetDataSource(Android.App.Application.Context, inputUri);
                }
                string degreesString = retrieverSrc.ExtractMetadata(MetadataKey.VideoRotation);
                 if (degreesString != null) // unused ATM but will be useful for a stabilized video view in streaming
                 {
                     int degrees = int.Parse(degreesString);
                     if (degrees >= 0)
                     {
                         // muxer.SetOrientationHint(degrees); // the muxer won't accept this param once started
                     }
                 }
                 if (startMs > 0)
                 {
                     ext.SeekTo(startMs * 1000, MediaExtractorSeekTo.ClosestSync);
                 }
                 int offset = 0;
                 if (bufferInfo == null)
                 {
                     bufferInfo = new MediaCodec.BufferInfo();
                 }
                 ByteBuffer dstBuf = ByteBuffer.Allocate(bufferSize);
                 long us = endMs * 1000;
                 long uo = us + ptOffset;
                 int cf  = 0;
                 try
                {
                    FileToMp4.AudioEncodingInProgress = true;
                    while (true)
                    {
                        bufferInfo.Offset = offset;
                        bufferInfo.Size   = ext.ReadSampleData(dstBuf, offset);
                        if (bufferInfo.Size < 0)
                        {
                            bufferInfo.Size = 0; break;
                        }
                        else
                        {
                            cf++;
                            bufferInfo.PresentationTimeUs = ext.SampleTime + ptOffset;
                            if (ext.SampleTime >= us)
                            {
                                break;
                            }                                    //out of while
                            else
                            {
                                bufferInfo.Flags = MFlags2MCodecBuff(ext.SampleFlags);
                                if (tio == -1)
                                {
                                    muxer.WriteSampleData(FileToMp4.LatestAudioTrackIndex, dstBuf, bufferInfo);
                                }
                                else
                                {
                                    muxer.WriteSampleData(tio, dstBuf, bufferInfo);
                                }
                                if (cf >= 240) //only send the muxer eventargs once every x frames to reduce CPU load
                                {
                                    Notify(ext.SampleTime, us);
                                    cf = 0;
                                }
                            }
                            ext.Advance();
                        }
                    }
                }
                 catch (Java.Lang.IllegalStateException e)
                {
                    this.Progress.Invoke(new MuxerEventArgs(ext.SampleTime, us, null, true, true));
                    Console.WriteLine("The source video file is malformed");
                }
                 catch (Java.Lang.Exception ex)
                {
                    this.Progress.Invoke(new MuxerEventArgs(ext.SampleTime, us, null, true, true));
                    Console.WriteLine(ex.Message);
                }
                 if (AppSettings.Logging.SendToConsole)
                {
                    System.Console.WriteLine($"DrainEncoder audio finished @ {bufferInfo.PresentationTimeUs}");
                }
             });

            FileToMp4.AudioEncodingInProgress = false;
            try
            {
                if (!FileToMp4.VideoEncodingInProgress)
                {
                    muxer.Stop();
                    muxer.Release();
                    muxer = null;
                }
            }
            catch (Java.Lang.Exception ex) { Log.Debug("MuxingEncoder", ex.Message); }
            if (outputPath != null)
            {
                var success = System.IO.File.Exists(outputPath);
                if (success)
                {
                    this.Progress.Invoke(new MuxerEventArgs(endMs * 1000, endMs, outputPath, true));
                    return(outputPath);
                }
            }

            return(null); //nothing to look for
        }
Code Example #20
File: FramesExtract.cs  Project: borisblanc/GrowPea
        public void PrepareEncoder(string path, File _downloaddir)
        {
            MediaCodec     _Decoder  = null;
            MediaExtractor extractor = null;

            _downloadsfilesdir = _downloaddir;


            try {
                //for (int i = 0; i < extractor.TrackCount; i++)
                //{
                //    MediaFormat Format = extractor.GetTrackFormat(i);
                //    //MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, 640, 360);
                //    String mime = Format.GetString(MediaFormat.KeyMime);
                //    if (mime.StartsWith("video/"))
                //    {

                //            extractor.SelectTrack(i);
                //            _Decoder = MediaCodec.CreateEncoderByType(mime);

                //            _Decoder.Configure(Format, null, null, 0);
                //            break;

                //    }
                //}

                extractor = new MediaExtractor();
                extractor.SetDataSource(path);
                int trackIndex = selectTrack(extractor);
                //if (trackIndex < 0)
                //{
                //    throw new RuntimeException("No video track found in " + inputFile);
                //}
                extractor.SelectTrack(trackIndex);

                MediaFormat format = extractor.GetTrackFormat(trackIndex);

                _Width  = format.GetInteger(MediaFormat.KeyWidth);
                _Height = format.GetInteger(MediaFormat.KeyHeight);


                // Could use width/height from the MediaFormat to get full-size frames.

                //outputSurface = new CodecOutputSurface(saveWidth, saveHeight);

                // Create a MediaCodec decoder, and configure it with the MediaFormat from the
                // extractor.  It's very important to use the format from the extractor because
                // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
                String mime = format.GetString(MediaFormat.KeyMime);
                _Decoder = MediaCodec.CreateDecoderByType(mime);
                _Decoder.Configure(format, null, null, 0);
                _Decoder.Start();



                Decode(_Decoder, extractor);
            }
            catch (Exception e)
            {
                Log.Error(TAG, e.Message, e);
                throw;
            }
            finally
            {
                // release everything we grabbed
                //if (outputSurface != null)
                //{
                //    outputSurface.release();
                //    outputSurface = null;
                //}
                if (_Decoder != null)
                {
                    _Decoder.Stop();
                    _Decoder.Release();
                    _Decoder = null;
                }
                if (extractor != null)
                {
                    extractor.Release();
                    extractor = null;
                }
            }

            _TrackIndex = -1;
            //_MuxerStarted = false;
        }