Example #1
        private static unsafe void Main(string[] args)
        {
            // register the native library search path from LD_LIBRARY_PATH
            InteropHelper.RegisterLibrariesSearchPath(Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH));

            string url = @"http://www.fileformat.info/format/mpeg/sample/031699cb978244b8a3adf1e81cb2ac7c/download";

            FFmpegInvoke.av_register_all();
            FFmpegInvoke.avcodec_register_all();
            FFmpegInvoke.avformat_network_init();


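            // allocate the demuxer context; avformat_open_input fills it from the URL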
            AVFormatContext *pFormatContext = FFmpegInvoke.avformat_alloc_context();

            if (FFmpegInvoke.avformat_open_input(&pFormatContext, url, null, null) != 0)
            {
                throw new Exception("Could not open file");
            }

            if (FFmpegInvoke.av_find_stream_info(pFormatContext) < 0)
            {
                throw new Exception("Could not find stream info");
            }

            AVStream *pStream = null;

            for (int i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new Exception("Could not found video stream");
            }

            AVCodecContext codecContext    = *(pStream->codec);
            int            width           = codecContext.width;
            int            height          = codecContext.height;
            AVPixelFormat  sourcePixFmt    = codecContext.pix_fmt;
            AVCodecID      codecId         = codecContext.codec_id;
            var            convertToPixFmt = AVPixelFormat.PIX_FMT_BGR24;
            SwsContext *   pConvertContext = FFmpegInvoke.sws_getContext(width, height, sourcePixFmt,
                                                                         width, height, convertToPixFmt,
                                                                         FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);

            if (pConvertContext == null)
            {
                throw new Exception("Could not initialize the conversion context");
            }

            var pConvertedFrame          = (AVPicture *)FFmpegInvoke.avcodec_alloc_frame();
            int convertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(convertToPixFmt, width, height);
            var pConvertedFrameBuffer    = (byte *)FFmpegInvoke.av_malloc((uint)convertedFrameBufferSize);

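            // wire the converted-frame buffer into the AVPicture's data planes so sws_scale can write into it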
            FFmpegInvoke.avpicture_fill(pConvertedFrame, pConvertedFrameBuffer, convertToPixFmt, width, height);

            AVCodec *pCodec = FFmpegInvoke.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new Exception("Unsupported codec");
            }

            AVCodecContext *pCodecContext = FFmpegInvoke.avcodec_alloc_context3(pCodec);

            if ((pCodec->capabilities & FFmpegInvoke.CODEC_CAP_TRUNCATED) == FFmpegInvoke.CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= FFmpegInvoke.CODEC_FLAG_TRUNCATED;
            }

            if (FFmpegInvoke.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new Exception("Could not open codec");
            }

            AVFrame *pDecodedFrame = FFmpegInvoke.avcodec_alloc_frame();

            var       packet  = new AVPacket();
            AVPacket *pPacket = &packet;

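            // set packet fields to defaults; av_read_frame fills data and size on each call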
            FFmpegInvoke.av_init_packet(pPacket);

            int frameNumber = 0;

            while (frameNumber < 100)
            {
                Console.WriteLine("frame: {0}", frameNumber);

                if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
                {
                    throw new Exception("Could not read frame");
                }

                if (pPacket->stream_index != pStream->index)
                {
                    continue;
                }

                int gotPicture = 0;
                int size       = FFmpegInvoke.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket);
                if (size < 0)
                {
                    throw new Exception(string.Format("Error while decoding frame {0}", frameNumber));
                }

                if (gotPicture == 1)
                {
                    byte **src = &pDecodedFrame->data_0;
                    byte **dst = &pConvertedFrame->data_0;
                    FFmpegInvoke.sws_scale(pConvertContext, src, pDecodedFrame->linesize, 0,
                                           height, dst, pConvertedFrame->linesize);

                    byte *convertedFrameAddress = pConvertedFrame->data_0;

                    var imageBufferPtr = new IntPtr(convertedFrameAddress);

                    using (var bitmap = new Bitmap(width, height, pConvertedFrame->linesize[0], PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        bitmap.Save(@"frame.buffer.jpg", ImageFormat.Jpeg);
                    }
                }
                frameNumber++;
            }

            FFmpegInvoke.av_free(pConvertedFrame);
            FFmpegInvoke.av_free(pConvertedFrameBuffer);
            FFmpegInvoke.sws_freeContext(pConvertContext);

            FFmpegInvoke.av_free(pDecodedFrame);
            FFmpegInvoke.avcodec_close(pCodecContext);
            FFmpegInvoke.avformat_close_input(&pFormatContext);
        }
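
A note on Example #1: av_read_frame allocates a new buffer for every packet it returns, and the loop above never releases it, so a long run leaks memory (the skipped non-video packets leak too). A minimal sketch of the missing cleanup, assuming these FFmpegInvoke bindings expose av_free_packet, the packet-release call of that FFmpeg generation:

                // at the end of each iteration (and before the `continue`), release the packet:
                FFmpegInvoke.av_free_packet(pPacket);
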
Example #2
        private static unsafe void Main(string[] args)
        {
            Console.WriteLine(@"Current directory: " + Environment.CurrentDirectory);
            Console.WriteLine(@"Runnung in {0}-bit mode.", Environment.Is64BitProcess ? @"64" : @"32");

            // register path to ffmpeg
            switch (Environment.OSVersion.Platform)
            {
            case PlatformID.Win32NT:
            case PlatformID.Win32S:
            case PlatformID.Win32Windows:
                var ffmpegPath = string.Format(@"../../../../FFmpeg/bin/{0}", Environment.Is64BitProcess ? @"x64" : @"x86");
                InteropHelper.RegisterLibrariesSearchPath(ffmpegPath);
                break;

            case PlatformID.Unix:
            case PlatformID.MacOSX:
                var libraryPath = Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH);
                InteropHelper.RegisterLibrariesSearchPath(libraryPath);
                break;
            }

            // decode 100 frames from a URL or a local path

            //string url = @"../../sample_mpeg4.mp4";
            var url = @"http://hubblesource.stsci.edu/sources/video/clips/details/images/centaur_1.mpg";

            ffmpeg.av_register_all();
            ffmpeg.avcodec_register_all();
            ffmpeg.avformat_network_init();


            var pFormatContext = ffmpeg.avformat_alloc_context();

            if (ffmpeg.avformat_open_input(&pFormatContext, url, null, null) != 0)
            {
                throw new ApplicationException(@"Could not open file");
            }

            if (ffmpeg.avformat_find_stream_info(pFormatContext, null) < 0)
            {
                throw new ApplicationException(@"Could not find stream info");
            }

            AVStream *pStream = null;

            for (var i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new ApplicationException(@"Could not found video stream");
            }

            var codecContext    = *pStream->codec;
            var width           = codecContext.width;
            var height          = codecContext.height;
            var sourcePixFmt    = codecContext.pix_fmt;
            var codecId         = codecContext.codec_id;
            var convertToPixFmt = AVPixelFormat.PIX_FMT_BGR24;
            var pConvertContext = ffmpeg.sws_getContext(width, height, sourcePixFmt,
                                                        width, height, convertToPixFmt,
                                                        ffmpeg.SWS_FAST_BILINEAR, null, null, null);

            if (pConvertContext == null)
            {
                throw new ApplicationException(@"Could not initialize the conversion context");
            }

            var pConvertedFrame          = ffmpeg.avcodec_alloc_frame();
            var convertedFrameBufferSize = ffmpeg.avpicture_get_size(convertToPixFmt, width, height);
            var pConvertedFrameBuffer    = (sbyte *)ffmpeg.av_malloc((ulong)convertedFrameBufferSize);

            ffmpeg.avpicture_fill((AVPicture *)pConvertedFrame, pConvertedFrameBuffer, convertToPixFmt, width, height);

            var pCodec = ffmpeg.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new ApplicationException(@"Unsupported codec");
            }

            // Reuse the codec context that comes with the stream info. Initially a fresh
            // context was allocated instead:
            // AVCodecContext* pCodecContext = ffmpeg.avcodec_alloc_context3(pCodec); // but that does not work for all codecs
            var pCodecContext = &codecContext;

            if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
            }

            if (ffmpeg.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new ApplicationException(@"Could not open codec");
            }

            var pDecodedFrame = ffmpeg.av_frame_alloc();

            var packet  = new AVPacket();
            var pPacket = &packet;

            ffmpeg.av_init_packet(pPacket);

            var frameNumber = 0;

            while (frameNumber < 100)
            {
                if (ffmpeg.av_read_frame(pFormatContext, pPacket) < 0)
                {
                    throw new ApplicationException(@"Could not read frame");
                }

                if (pPacket->stream_index != pStream->index)
                {
                    continue;
                }

                var gotPicture = 0;
                var size       = ffmpeg.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket);
                if (size < 0)
                {
                    throw new ApplicationException(string.Format(@"Error while decoding frame {0}", frameNumber));
                }

                if (gotPicture == 1)
                {
                    Console.WriteLine(@"frame: {0}", frameNumber);

                    var src       = &pDecodedFrame->data0;
                    var dst       = &pConvertedFrame->data0;
                    var srcStride = pDecodedFrame->linesize;
                    var dstStride = pConvertedFrame->linesize;
                    ffmpeg.sws_scale(pConvertContext, src, srcStride, 0, height, dst, dstStride);

                    var convertedFrameAddress = pConvertedFrame->data0;

                    var imageBufferPtr = new IntPtr(convertedFrameAddress);

                    var linesize = dstStride[0];
                    using (var bitmap = new Bitmap(width, height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        bitmap.Save(@"frame.buffer.jpg", ImageFormat.Jpeg);
                    }

                    frameNumber++;
                }
            }

            ffmpeg.av_free(pConvertedFrame);
            ffmpeg.av_free(pConvertedFrameBuffer);
            ffmpeg.sws_freeContext(pConvertContext);

            ffmpeg.av_free(pDecodedFrame);
            ffmpeg.avcodec_close(pCodecContext);
            ffmpeg.avformat_close_input(&pFormatContext);
        }
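
Both decoding examples so far overwrite frame.buffer.jpg on every iteration, so only the last decoded frame survives on disk. A small variation of the save step that keeps each of the 100 frames, reusing the loop's own frameNumber counter:

                    using (var bitmap = new Bitmap(width, height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        bitmap.Save(string.Format(@"frame.{0:D3}.jpg", frameNumber), ImageFormat.Jpeg);
                    }
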
Example #3
        public unsafe void video_encode_example(
            string outputFilename,
            int codec_id)
        {
            // register path to ffmpeg
            switch (Environment.OSVersion.Platform)
            {
            case PlatformID.Win32NT:
            case PlatformID.Win32S:
            case PlatformID.Win32Windows:
                var ffmpegPath = $@"../../../../FFmpeg/bin/{(Environment.Is64BitProcess ? @"x64" : @"x86")}";
                InteropHelper.RegisterLibrariesSearchPath(ffmpegPath);
                break;

            case PlatformID.Unix:
            case PlatformID.MacOSX:
                var libraryPath = Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH);
                InteropHelper.RegisterLibrariesSearchPath(libraryPath);
                break;
            }

            ffmpeg.av_register_all();
            ffmpeg.avcodec_register_all();
            ffmpeg.avformat_network_init();

            Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

            AVCodec *       codec;
            AVCodecContext *c = null;
            int             i, ret;
            //FILE* f;
            AVFrame * frame;
            AVPacket *pkt;
            // MPEG sequence end code, from the original C example:
            // uint8_t endcode[] = { 0, 0, 1, 0xb7 };
            var endcode = new byte[] { 0, 0, 1, 0xb7 };


            // find the encoder for the requested codec id (e.g. H.264)
            codec = ffmpeg.avcodec_find_encoder((AVCodecID)codec_id);
            if (codec == null)
            {
                throw new ApplicationException(@"Unsupported codec");
            }

            c = ffmpeg.avcodec_alloc_context3(codec);
            if (c == null)
            {
                throw new ApplicationException("Could not allocate video codec context\n");
            }

            /* put sample parameters */
            c->bit_rate = 400000;
            /* resolution must be a multiple of two */
            c->width  = 640;
            c->height = 360;
            /* frames per second */
            c->time_base = new AVRational {
                num = 1, den = 25
            };


            /* emit one intra frame every ten frames
             * check frame pict_type before passing frame
             * to encoder, if frame->pict_type is AV_PICTURE_TYPE_I
             * then gop_size is ignored and the output of encoder
             * will always be I frame irrespective to gop_size
             */
            c->gop_size     = 10;
            c->max_b_frames = 1;
            c->pix_fmt      = AVPixelFormat.AV_PIX_FMT_YUV420P;

            if ((AVCodecID)codec_id == AVCodecID.AV_CODEC_ID_H264)
            {
                ffmpeg.av_opt_set(c->priv_data, "preset", "slow", 0);
            }

            /* open it */
            if (ffmpeg.avcodec_open2(c, codec, null) < 0)
            {
                throw new ApplicationException("Could not open codec\n");
            }

            if (File.Exists(outputFilename))
            {
                File.Delete(outputFilename);
            }

            // File.Open throws on failure, so no null check is needed
            var fileStream = File.Open(outputFilename, FileMode.OpenOrCreate);

            frame = ffmpeg.av_frame_alloc();
            if (frame == null)
            {
                throw new ApplicationException($"Could not allocate video frame\n");
            }

            frame->format = (int)c->pix_fmt;
            frame->width  = c->width;
            frame->height = c->height;

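            // allocate reference-counted data planes matching format/width/height (32-byte aligned)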
            ret = ffmpeg.av_frame_get_buffer(frame, 32);
            if (ret < 0)
            {
                throw new ApplicationException("Could not allocate the video frame data\n");
            }

            var dstData = new byte_ptrArray4();

            dstData[0] = frame->data[0];
            dstData[1] = frame->data[1];
            dstData[2] = frame->data[2];
            dstData[3] = frame->data[3];

            var dstLineSize = new int_array4();

            dstLineSize[0] = frame->linesize[0];
            dstLineSize[1] = frame->linesize[1];
            dstLineSize[2] = frame->linesize[2];
            dstLineSize[3] = frame->linesize[3];

            /* the image can be allocated by any means and av_image_alloc() is
             * just the most convenient way if av_malloc() is to be used */
            ret = ffmpeg.av_image_alloc(
                ref dstData,
                ref dstLineSize,
                c->width,
                c->height,
                c->pix_fmt, 32);

            if (ret < 0)
            {
                throw new ApplicationException("Could not allocate raw picture buffer\n");
            }
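
            // NOTE: the planes allocated by av_image_alloc above are never attached to `frame`;
            // sws_scale below writes into the buffers from av_frame_get_buffer, so this
            // allocation only mirrors the original C example (and is never freed here).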

            //
            // RGB to YUV:
            //    http://stackoverflow.com/questions/16667687/how-to-convert-rgb-from-yuv420p-for-ffmpeg-encoder
            //
            // Pointer to the RGBA source pixels; assigned from the marshalled managed buffer inside the loop
            byte *unmanagedRgba32Array = null;


            var ctx = ffmpeg.sws_getContext(
                c->width,
                c->height,
                AVPixelFormat.AV_PIX_FMT_RGBA,
                c->width,
                c->height,
                AVPixelFormat.AV_PIX_FMT_YUV420P,
                ffmpeg.SWS_FAST_BILINEAR,
                null,
                null,
                null);


            pkt = ffmpeg.av_packet_alloc();


            /* encode 1438 frames of video (about 57 seconds at 25 fps) */
            for (i = 0; i < 1438; i++)
            {
                ffmpeg.av_init_packet(pkt);
                pkt->data = null; // packet data will be allocated by the encoder
                pkt->size = 0;

                fileStream.Flush();

                var managedRgba32Array = ProcessBitmap(i);

                int    size = Marshal.SizeOf(managedRgba32Array[0]) * managedRgba32Array.Length;
                IntPtr pnt  = Marshal.AllocHGlobal(size);
                Marshal.Copy(managedRgba32Array, 0, pnt, managedRgba32Array.Length);

                unmanagedRgba32Array = (byte *)pnt;

                var inData = new byte *[1];
                inData[0] = unmanagedRgba32Array;


                // NOTE: In a more general setting, the rows of your input image may
                //       be padded; that is, the bytes per row may not be 4 * width.
                //       In such cases, inLineSize should be set to that padded width.
                //
                //int inLinesize[1] = { 4 * c->width }; // RGBA stride
                var inLineSize = new int[1];
                inLineSize[0] = 4 * c->width;

                ffmpeg.sws_scale(ctx, inData, inLineSize, 0, c->height, frame->data, frame->linesize);

                Marshal.FreeHGlobal(pnt);

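                // presentation timestamp in c->time_base ticks (1/25 s per tick here)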
                frame->pts = i;

                /* encode the image */
                Encode(c, frame, pkt, fileStream);
            }

            /* flush the encoder */
            Encode(c, null, pkt, fileStream);

            /* add sequence end code to have a real MPEG file */
            fileStream.Write(endcode, 0, endcode.Length);
            fileStream.Close();

            ffmpeg.avcodec_free_context(&c);
            ffmpeg.av_frame_free(&frame);
            ffmpeg.av_packet_free(&pkt);
        }
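
The Encode helper called in Example #3 is not part of this listing. A minimal sketch of what it could look like with the send/receive API, assuming bindings that expose avcodec_send_frame and avcodec_receive_packet (available from FFmpeg 3.1 on); the signature below is this sketch's own, not necessarily the author's:

        private static unsafe void Encode(AVCodecContext *c, AVFrame *frame, AVPacket *pkt, FileStream output)
        {
            // a null frame flushes the encoder
            var ret = ffmpeg.avcodec_send_frame(c, frame);
            if (ret < 0)
            {
                throw new ApplicationException("Error sending a frame for encoding");
            }

            while (ret >= 0)
            {
                ret = ffmpeg.avcodec_receive_packet(c, pkt);
                if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN) || ret == ffmpeg.AVERROR_EOF)
                {
                    return; // the encoder needs more input, or has been fully drained
                }
                if (ret < 0)
                {
                    throw new ApplicationException("Error during encoding");
                }

                // copy the encoded bytes out of unmanaged memory and append them to the file
                var buffer = new byte[pkt->size];
                Marshal.Copy((IntPtr)pkt->data, buffer, 0, pkt->size);
                output.Write(buffer, 0, buffer.Length);

                ffmpeg.av_packet_unref(pkt);
            }
        }
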
Example #4
        private static unsafe void Main(string[] args)
        {
            Console.WriteLine("Runnung in {0}-bit mode.", Environment.Is64BitProcess ? "64" : "32");

            // register path to ffmpeg
            switch (Environment.OSVersion.Platform)
            {
            case PlatformID.Win32NT:
            case PlatformID.Win32S:
            case PlatformID.Win32Windows:
                string ffmpegPath = string.Format(@"../../../FFmpeg/bin/windows/{0}", Environment.Is64BitProcess ? "x64" : "x86");
                InteropHelper.RegisterLibrariesSearchPath(ffmpegPath);
                break;

            case PlatformID.Unix:
            case PlatformID.MacOSX:
                string libraryPath = Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH);
                InteropHelper.RegisterLibrariesSearchPath(libraryPath);
                break;
            }

            // decode 100 frames from a URL

            string url = @"http://hubblesource.stsci.edu/sources/video/clips/details/images/centaur_1.mpg";

            FFmpegInvoke.av_register_all();
            FFmpegInvoke.avcodec_register_all();
            FFmpegInvoke.avformat_network_init();


            AVFormatContext *pFormatContext = FFmpegInvoke.avformat_alloc_context();

            if (FFmpegInvoke.avformat_open_input(&pFormatContext, url, null, null) != 0)
            {
                throw new Exception("Could not open file");
            }

            if (FFmpegInvoke.avformat_find_stream_info(pFormatContext, null) < 0)
            {
                throw new Exception("Could not find stream info");
            }

            AVStream *pStream = null;

            for (int i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new Exception("Could not found video stream");
            }

            AVCodecContext codecContext    = *(pStream->codec);
            int            width           = codecContext.width;
            int            height          = codecContext.height;
            AVPixelFormat  sourcePixFmt    = codecContext.pix_fmt;
            AVCodecID      codecId         = codecContext.codec_id;
            var            convertToPixFmt = AVPixelFormat.PIX_FMT_BGR24;
            SwsContext *   pConvertContext = FFmpegInvoke.sws_getContext(width, height, sourcePixFmt,
                                                                         width, height, convertToPixFmt,
                                                                         FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);

            if (pConvertContext == null)
            {
                throw new Exception("Could not initialize the conversion context");
            }

            var pConvertedFrame          = (AVPicture *)FFmpegInvoke.avcodec_alloc_frame();
            int convertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(convertToPixFmt, width, height);
            var pConvertedFrameBuffer    = (byte *)FFmpegInvoke.av_malloc((uint)convertedFrameBufferSize);

            FFmpegInvoke.avpicture_fill(pConvertedFrame, pConvertedFrameBuffer, convertToPixFmt, width, height);

            AVCodec *pCodec = FFmpegInvoke.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new Exception("Unsupported codec");
            }

            // Reuse the codec context that comes with the stream info.
            // Alternatively a fresh context could be allocated: (but that does not work for all codecs)
            // AVCodecContext* pCodecContext = FFmpegInvoke.avcodec_alloc_context3(pCodec);
            AVCodecContext *pCodecContext = &codecContext;

            if ((pCodec->capabilities & FFmpegInvoke.CODEC_CAP_TRUNCATED) == FFmpegInvoke.CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= FFmpegInvoke.CODEC_FLAG_TRUNCATED;
            }

            if (FFmpegInvoke.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new Exception("Could not open codec");
            }

            AVFrame *pDecodedFrame = FFmpegInvoke.avcodec_alloc_frame();

            var       packet  = new AVPacket();
            AVPacket *pPacket = &packet;

            FFmpegInvoke.av_init_packet(pPacket);

            int frameNumber = 0;

            while (frameNumber < 100)
            {
                Console.WriteLine("frame: {0}", frameNumber);

                if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
                {
                    throw new Exception("Could not read frame");
                }

                if (pPacket->stream_index != pStream->index)
                {
                    continue;
                }

                int gotPicture = 0;
                int size       = FFmpegInvoke.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket);
                if (size < 0)
                {
                    throw new Exception(string.Format("Error while decoding frame {0}", frameNumber));
                }

                if (gotPicture == 1)
                {
                    byte **src = &pDecodedFrame->data_0;
                    byte **dst = &pConvertedFrame->data_0;
                    FFmpegInvoke.sws_scale(pConvertContext, src, pDecodedFrame->linesize, 0,
                                           height, dst, pConvertedFrame->linesize);

                    byte *convertedFrameAddress = pConvertedFrame->data_0;

                    var imageBufferPtr = new IntPtr(convertedFrameAddress);

                    using (var bitmap = new Bitmap(width, height, pConvertedFrame->linesize[0], PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        bitmap.Save(@"frame.buffer.jpg", ImageFormat.Jpeg);
                    }
                }
                frameNumber++;
            }

            FFmpegInvoke.av_free(pConvertedFrame);
            FFmpegInvoke.av_free(pConvertedFrameBuffer);
            FFmpegInvoke.sws_freeContext(pConvertContext);

            FFmpegInvoke.av_free(pDecodedFrame);
            FFmpegInvoke.avcodec_close(pCodecContext);
            FFmpegInvoke.avformat_close_input(&pFormatContext);
        }
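
One more caveat shared by Examples #1, #2, and #4: av_read_frame returns a negative value at normal end of stream as well as on real errors, so a short input makes these loops throw instead of finishing cleanly. A gentler sketch of the read step:

                if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
                {
                    break; // end of stream (or a read error): stop decoding instead of throwing
                }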