// Test handler: builds a BGR24 AVFrame around a generated bitmap, converts it to
// YUV420P via VideoConverter, rebuilds a Bitmap from the converted bytes and shows it.
private unsafe void Button_Test_Click(object sender, RoutedEventArgs e)
        {
            Bitmap bitmap = CreateTestBitmap();

            // Allocate an empty frame shell (deprecated API; newer FFmpeg uses av_frame_alloc).
            AVFrame *inFrame = FFmpegInvoke.avcodec_alloc_frame();

            if (inFrame == null)
            {
                throw new Exception("Could not allocate video frame");
            }
            inFrame->width  = bitmap.Width;
            inFrame->height = bitmap.Height;
            inFrame->format = (int)AVPixelFormat.AV_PIX_FMT_BGR24;

            // Allocate the frame's pixel buffer (32-byte aligned planes).
            // NOTE(review): UpdateFrame below re-points data_0 at the bitmap's memory via
            // avpicture_fill, so this buffer appears to be orphaned (leaked) — confirm.
            int ret1 = FFmpegInvoke.av_image_alloc(&inFrame->data_0, inFrame->linesize, bitmap.Width, bitmap.Height, AVPixelFormat.AV_PIX_FMT_BGR24, 32);

            if (ret1 < 0)
            {
                throw new Exception("Could not allocate raw picture buffer");
            }

            VideoHelper.UpdateFrame(inFrame, bitmap);
            VideoConverter converterToYuv = new VideoConverter(AVPixelFormat.AV_PIX_FMT_YUV420P);
            var            data           = converterToYuv.ConvertFrame(inFrame);

            var bitmap2 = VideoHelper.CreateBitmap(data, bitmap.Width, bitmap.Height);

            // NOTE(review): inFrame is never freed here — leaks once per click; verify
            // whether ownership is transferred somewhere not visible in this snippet.
            SetImageSource(bitmap2);
        }
Example #2
0
        /// <summary>
        /// Looks up the decoder for <c>CodecId</c>, allocates and opens a codec
        /// context for it, and allocates the frame decoded pictures are written into.
        /// </summary>
        /// <exception cref="Exception">Thrown when any native allocation or the codec open fails.</exception>
        public VideoDecoder()
        {
            AVCodec *pCodec = FFmpegInvoke.avcodec_find_decoder(CodecId);

            if (pCodec == null)
            {
                throw new Exception("Codec not found");
            }

            // Context must exist before it can be opened against the codec.
            codec_context = FFmpegInvoke.avcodec_alloc_context3(pCodec);

            if (codec_context == null)
            {
                throw new Exception("Could not allocate video codec context");
            }

            int openResult = FFmpegInvoke.avcodec_open2(codec_context, pCodec, null);

            if (openResult < 0)
            {
                throw new Exception("Could not open codec");
            }

            // Reusable output frame for TryDecode.
            avFrame = FFmpegInvoke.avcodec_alloc_frame();

            if (avFrame == null)
            {
                throw new Exception("Could not allocate video frame");
            }
        }
        // Points the frame's data planes at the bitmap's pixel memory (no copy is made).
        // NOTE(review): UnlockBits is called immediately after avpicture_fill, so the
        // frame keeps pointers into memory GDI+ may invalidate after unlock. This works
        // in practice for in-memory bitmaps but is not guaranteed — confirm lifetime
        // against callers before relying on the frame after the bitmap is disposed.
        public unsafe static void UpdateFrame(AVFrame *avFrame, Bitmap bitmap)
        {
            Rectangle  rect    = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
            // ReadWrite lock even though the bitmap is only read here — presumably to
            // force GDI+ to expose the backing buffer directly; verify before changing.
            BitmapData bmpData = bitmap.LockBits(rect, ImageLockMode.ReadWrite, bitmap.PixelFormat);

            // Overwrites avFrame->data/linesize to reference bmpData.Scan0 directly.
            FFmpegInvoke.avpicture_fill((AVPicture *)avFrame, (byte *)bmpData.Scan0, (AVPixelFormat)avFrame->format, bitmap.Width, bitmap.Height);
            bitmap.UnlockBits(bmpData);
        }
Example #4
0
        /// <summary>Releases the native codec context and the decode frame.</summary>
        protected override void DisposeUnmanaged()
        {
            // Close the codec before freeing the frame it decoded into.
            FFmpegInvoke.avcodec_close(codec_context);

            // Pin the field itself so FFmpeg both frees the frame and nulls the
            // stored pointer in place.
            fixed(AVFrame **ppFrame = &avFrame)
            {
                FFmpegInvoke.avcodec_free_frame(ppFrame);
            }
        }
Example #5
0
 /// <summary>
 /// Registers FFmpeg codecs, formats and network support exactly once per process.
 /// </summary>
 static void InitFFMpeg()
 {
     if (!FFMpegInitialised)
     {
         FFmpegInvoke.av_register_all();
         FFmpegInvoke.avcodec_register_all();
         FFmpegInvoke.avformat_network_init();

         // Bug fix: the flag was never set, so the guard above never took effect
         // and registration re-ran on every call.
         FFMpegInitialised = true;
     }
 }
Example #6
0
        /// <summary>Releases the decoding context and frees the decode frame.</summary>
        protected override void DisposeOverride()
        {
            FFmpegInvoke.avcodec_close(_pDecodingContext);

            // Bug fix: previously av_frame_free was given the address of a stack
            // COPY of _pFrame, so it nulled the copy and left the field dangling
            // (use-after-free risk on any later access). Pin the field itself so
            // FFmpeg frees the frame and clears the stored pointer in place.
            fixed(AVFrame **ppFrame = &_pFrame)
            {
                FFmpegInvoke.av_frame_free(ppFrame);
            }
        }
Example #7
0
        /// <summary>
        /// Puts the architecture-specific ffmpeg binary folder on the native library
        /// search path and initialises FFmpeg's codec, format and network subsystems.
        /// </summary>
        public static void Initialize()
        {
            // Pick the binaries matching the current process bitness.
            string arch       = Environment.Is64BitProcess ? "x64" : "x86";
            string ffmpegPath = string.Format(@"ffmpeg/{0}", arch);

            InteropHelper.RegisterLibrariesSearchPath(ffmpegPath);

            FFmpegInvoke.av_register_all();
            FFmpegInvoke.avcodec_register_all();
            FFmpegInvoke.avformat_network_init();
        }
Example #8
0
        /// <summary>
        /// Finds the decoder for <c>CodecId</c>, allocates a decoding context and opens it.
        /// </summary>
        /// <exception cref="VideoDecoderException">Codec missing, allocation failed, or open failed.</exception>
        public VideoDecoder()
        {
            AVCodec *pCodec = FFmpegInvoke.avcodec_find_decoder(CodecId);

            if (pCodec == null)
            {
                throw new VideoDecoderException("Unsupported codec.");
            }

            _pDecodingContext = FFmpegInvoke.avcodec_alloc_context3(pCodec);

            // Bug fix: avcodec_alloc_context3 can return null on allocation failure;
            // previously the null context was passed straight into avcodec_open2.
            if (_pDecodingContext == null)
            {
                throw new VideoDecoderException("Could not allocate codec context.");
            }

            if (FFmpegInvoke.avcodec_open2(_pDecodingContext, pCodec, null) < 0)
            {
                throw new VideoDecoderException("Could not open codec.");
            }
        }
Example #9
0
        /// <summary>
        /// Decodes a single packet into <paramref name="frame"/>.
        /// </summary>
        /// <returns>True when a complete picture was produced.</returns>
        public bool TryDecode(ref AVPacket packet, ref AVFrame frame)
        {
            int gotPicture = 0;

            // Pin both managed structs for the duration of the native call.
            fixed(AVPacket *pPacket = &packet)
            fixed(AVFrame * pFrame = &frame)
            {
                if (FFmpegInvoke.avcodec_decode_video2(_pDecodingContext, pFrame, &gotPicture, pPacket) < 0)
                {
                    // Decode errors are non-fatal; the caller sees "no picture".
                    Trace.TraceWarning("Error while decoding frame.");
                }
            }

            return gotPicture == 1;
        }
Example #10
0
        /// <summary>
        /// Converts <paramref name="frame"/> to the target pixel format, returning the
        /// converted pixel bytes. The sws context and output buffer are created lazily
        /// on first use, sized from the first frame seen.
        /// </summary>
        /// <returns>The shared output buffer containing the converted frame.</returns>
        public byte[] ConvertFrame(ref AVFrame frame)
        {
            // Bug fix: the conversion below was nested inside this if-block, so only
            // the very first call ever converted (and the method had no return path —
            // and no closing brace — for the initialized case). Initialization is now
            // one-time while conversion runs on every call.
            if (_initialized == false)
            {
                Initialize(frame.width, frame.height, (AVPixelFormat)frame.format);
            }

            fixed(AVFrame *pFrame = &frame)
            fixed(byte *pOutputData = &_outputData[0])
            {
                byte **pSrcData = &(pFrame)->data_0;
                byte **pDstData = &(_pCurrentFrame)->data_0;

                // Re-point the destination frame at the pinned managed buffer for
                // the duration of this call.
                _pCurrentFrame->data_0 = pOutputData;
                FFmpegInvoke.sws_scale(_pContext, pSrcData, pFrame->linesize, 0, frame.height, pDstData, _pCurrentFrame->linesize);
            }

            return(_outputData);
        }
Example #11
0
        /// <summary>
        /// One-time setup for the converter: creates the sws scaling context, the
        /// destination frame and the managed output buffer for the given geometry.
        /// </summary>
        /// <exception cref="VideoConverterException">Native context or frame allocation failed.</exception>
        private void Initialize(int width, int height, AVPixelFormat inFormat)
        {
            _pContext = FFmpegInvoke.sws_getContext(width, height, inFormat, width, height, _pixelFormat, FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);
            if (_pContext == null)
            {
                throw new VideoConverterException("Could not initialize the conversion context.");
            }

            _pCurrentFrame = FFmpegInvoke.avcodec_alloc_frame();

            // Bug fix: the allocation result was previously used unchecked.
            if (_pCurrentFrame == null)
            {
                throw new VideoConverterException("Could not allocate the conversion frame.");
            }

            int outputDataSize = FFmpegInvoke.avpicture_get_size(_pixelFormat, width, height);

            _outputData = new byte[outputDataSize];
            fixed(byte *pOutputData = &_outputData[0])
            {
                // NOTE(review): the plane pointers computed here reference the array
                // while pinned; ConvertFrame re-pins and rewrites data_0 on every call.
                FFmpegInvoke.avpicture_fill((AVPicture *)_pCurrentFrame, pOutputData, _pixelFormat, width, height);
            }

            // Bug fix: the flag was set FIRST, so a failure above left the converter
            // permanently marked as initialized with half-built native state.
            _initialized = true;
        }
Example #12
0
        /// <summary>
        /// Decodes one packet into the decoder's internal frame and exposes that
        /// frame through <paramref name="pFrame"/>.
        /// </summary>
        /// <returns>True when a complete picture is ready in <paramref name="pFrame"/>.</returns>
        public bool TryDecode(ref AVPacket packet, out AVFrame *pFrame)
        {
            int gotPicture = 0;

            fixed(AVPacket *pPacket = &packet)
            {
                if (FFmpegInvoke.avcodec_decode_video2(codec_context, avFrame, &gotPicture, pPacket) < 0)
                {
                    // Non-fatal: report and fall through to "no picture".
                    Console.WriteLine("Error while decoding frame.");
                }
            }

            // The out parameter always refers to the shared internal frame,
            // valid only until the next decode call.
            pFrame = avFrame;
            return gotPicture == 1;
        }
Example #13
0
        /// <summary>
        /// Wraps the raw byte buffer in a packet and decodes it into a fresh frame.
        /// </summary>
        /// <returns>True when a complete picture was decoded into <paramref name="frame"/>.</returns>
        public bool TryDecode(ref byte[] data, out AVFrame frame)
        {
            int gotPicture = 0;

            frame = new AVFrame();

            // Pin the input bytes and the output frame across the native call.
            fixed(byte *pData = &data[0])
            fixed(AVFrame * pFrame = &frame)
            {
                var packet = new AVPacket { data = pData, size = data.Length };

                if (FFmpegInvoke.avcodec_decode_video2(_pDecodingContext, pFrame, &gotPicture, &packet) < 0)
                {
                    // Non-fatal: the caller simply gets "no picture".
                    Trace.TraceWarning("Error while decoding frame.");
                }
            }

            return gotPicture == 1;
        }
        // Tears down all native encoder state. The order is deliberate: packet first,
        // then codec context, then each frame's image buffer before the frame itself.
        protected override void DisposeUnmanaged()
        {
            fixed(AVPacket *p = &_pkt)
            {
                FFmpegInvoke.av_free_packet(p);
            }

            FFmpegInvoke.avcodec_close(_codec_context);
            FFmpegInvoke.av_free(_codec_context);

            // Free the av_image_alloc'd plane buffer before freeing the frame shell.
            FFmpegInvoke.av_freep(&_avFrameYUV->data_0);
            fixed(AVFrame **p = &_avFrameYUV)
            {
                FFmpegInvoke.avcodec_free_frame(p);
            }

            // NOTE(review): EncodeFrame re-points _avFrameBGR->data_0 at caller memory
            // via avpicture_fill — confirm av_freep here still targets the buffer
            // allocated in the constructor and not the caller's pointer.
            FFmpegInvoke.av_freep(&_avFrameBGR->data_0);
            fixed(AVFrame **p = &_avFrameBGR)
            {
                FFmpegInvoke.avcodec_free_frame(p);
            }
        }
        /// <summary>
        /// Encodes one BGR frame (raw pixels at <paramref name="rgb"/>) to the codec's
        /// pixel format and returns the encoded bytes, or null when the encoder
        /// buffered the frame and produced no output yet.
        /// </summary>
        /// <param name="rgb">Pointer to raw input pixels in INPUT_PIXEL_FORMAT at the frame's geometry.</param>
        /// <exception cref="Exception">The native encode call failed.</exception>
        public byte[] EncodeFrame(IntPtr rgb)
        {
            fixed(AVPacket *packet = &_pkt)
            {
                FFmpegInvoke.av_init_packet(packet);
                _pkt.data = null; // encoder allocates the output buffer
                _pkt.size = 0;

                // Wrap the caller's pixels in the BGR frame (no copy), then convert.
                FFmpegInvoke.avpicture_fill((AVPicture *)_avFrameBGR, (byte *)rgb, INPUT_PIXEL_FORMAT, _avFrameBGR->width, _avFrameBGR->height);
                var convertedBytes = _converter.ConvertFrame(_avFrameBGR);

                // Bug fix: avpicture_fill points _avFrameYUV's planes into the managed
                // convertedBytes array, so the array must stay PINNED for the whole
                // encode call. Previously the fixed block ended before
                // avcodec_encode_video2 ran, leaving the frame referencing
                // GC-relocatable memory (intermittent corruption/crash risk).
                fixed(byte *yuv = &convertedBytes[0])
                {
                    FFmpegInvoke.avpicture_fill((AVPicture *)_avFrameYUV, yuv, CODEC_PIXEL_FORMAT, _avFrameYUV->width, _avFrameYUV->height);

                    int got_output;
                    var ret = FFmpegInvoke.avcodec_encode_video2(_codec_context, packet, _avFrameYUV, &got_output);

                    if (ret < 0)
                    {
                        throw new Exception("Error encoding frame");
                    }

                    if (got_output != 0)
                    {
                        // Copy the encoder-owned buffer out before releasing the packet.
                        byte[] arr = new byte[_pkt.size];
                        Marshal.Copy((IntPtr)_pkt.data, arr, 0, _pkt.size);
                        FFmpegInvoke.av_free_packet(packet);
                        return(arr);
                    }
                    else
                    {
                        // Frame buffered by the encoder; nothing to return yet.
                        return(null);
                    }
                }
            }
        }
Example #16
0
        // Demo entry point: opens a video URL, finds the first video stream, decodes
        // 100 frames, converts each to BGR24 and saves it as frame.buffer.jpg.
        private static unsafe void Main(string[] args)
        {
            // NOTE(review): "Runnung" is a typo in a runtime string; left as-is here.
            Console.WriteLine("Runnung in {0}-bit mode.", Environment.Is64BitProcess ? "64" : "32");

            // register path to ffmpeg
            switch (Environment.OSVersion.Platform)
            {
            case PlatformID.Win32NT:
            case PlatformID.Win32S:
            case PlatformID.Win32Windows:
                string ffmpegPath = string.Format(@"../../../FFmpeg/bin/windows/{0}", Environment.Is64BitProcess ? "x64" : "x86");
                InteropHelper.RegisterLibrariesSearchPath(ffmpegPath);
                break;

            case PlatformID.Unix:
            case PlatformID.MacOSX:
                string libraryPath = Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH);
                InteropHelper.RegisterLibrariesSearchPath(libraryPath);
                break;
            }

            // decode 100 frame from url or path

            //string url = @"../../sample_mpeg4.mp4";
            string url = @"http://hubblesource.stsci.edu/sources/video/clips/details/images/centaur_1.mpg";

            // Global FFmpeg registration (required once before any other call).
            FFmpegInvoke.av_register_all();
            FFmpegInvoke.avcodec_register_all();
            FFmpegInvoke.avformat_network_init();


            AVFormatContext *pFormatContext = FFmpegInvoke.avformat_alloc_context();

            if (FFmpegInvoke.avformat_open_input(&pFormatContext, url, null, null) != 0)
            {
                throw new Exception("Could not open file");
            }

            if (FFmpegInvoke.avformat_find_stream_info(pFormatContext, null) != 0)
            {
                throw new Exception("Could not find stream info");
            }

            // Locate the first video stream in the container.
            AVStream *pStream = null;

            for (int i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new Exception("Could not found video stream");
            }

            // Copy the stream's codec context by value; the decoder reuses it below.
            AVCodecContext codecContext    = *(pStream->codec);
            int            width           = codecContext.width;
            int            height          = codecContext.height;
            AVPixelFormat  sourcePixFmt    = codecContext.pix_fmt;
            AVCodecID      codecId         = codecContext.codec_id;
            var            convertToPixFmt = AVPixelFormat.PIX_FMT_BGR24;
            SwsContext *   pConvertContext = FFmpegInvoke.sws_getContext(width, height, sourcePixFmt,
                                                                         width, height, convertToPixFmt,
                                                                         FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);

            if (pConvertContext == null)
            {
                throw new Exception("Could not initialize the conversion context");
            }

            // Destination picture + manually allocated BGR24 buffer for sws_scale output.
            var pConvertedFrame          = (AVPicture *)FFmpegInvoke.avcodec_alloc_frame();
            int convertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(convertToPixFmt, width, height);
            var pConvertedFrameBuffer    = (byte *)FFmpegInvoke.av_malloc((uint)convertedFrameBufferSize);

            FFmpegInvoke.avpicture_fill(pConvertedFrame, pConvertedFrameBuffer, convertToPixFmt, width, height);

            AVCodec *pCodec = FFmpegInvoke.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new Exception("Unsupported codec");
            }

            // Reusing codec context from stream info,
            // as an alternative way it could look like this: (but it works not for all kind of codecs)
            // AVCodecContext* pCodecContext = FFmpegInvoke.avcodec_alloc_context3(pCodec);
            // NOTE(review): this is the address of a stack local; it is only valid for
            // the lifetime of Main — fine here, but do not let it escape.
            AVCodecContext *pCodecContext = &codecContext;

            if ((pCodec->capabilities & FFmpegInvoke.CODEC_CAP_TRUNCATED) == FFmpegInvoke.CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= FFmpegInvoke.CODEC_FLAG_TRUNCATED;
            }

            if (FFmpegInvoke.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new Exception("Could not open codec");
            }

            AVFrame *pDecodedFrame = FFmpegInvoke.avcodec_alloc_frame();

            var       packet  = new AVPacket();
            AVPacket *pPacket = &packet;

            FFmpegInvoke.av_init_packet(pPacket);

            int frameNumber = 0;

            while (frameNumber < 100)
            {
                if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
                {
                    throw new Exception("Could not read frame");
                }

                // Skip packets belonging to other (e.g. audio) streams.
                // NOTE(review): the packet read above is never released with
                // av_free_packet, here or after decode — leaks per packet.
                if (pPacket->stream_index != pStream->index)
                {
                    continue;
                }

                int gotPicture = 0;
                int size       = FFmpegInvoke.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket);
                if (size < 0)
                {
                    throw new Exception(string.Format("Error while decoding frame {0}", frameNumber));
                }

                if (gotPicture == 1)
                {
                    Console.WriteLine("frame: {0}", frameNumber);

                    // Convert the decoded frame into the preallocated BGR24 picture.
                    byte **src = &pDecodedFrame->data_0;
                    byte **dst = &pConvertedFrame->data_0;
                    FFmpegInvoke.sws_scale(pConvertContext, src, pDecodedFrame->linesize, 0,
                                           height, dst, pConvertedFrame->linesize);

                    byte *convertedFrameAddress = pConvertedFrame->data_0;

                    var imageBufferPtr = new IntPtr(convertedFrameAddress);

                    // Wrap the native buffer in a Bitmap (no copy) just long enough to save it.
                    int linesize = pConvertedFrame->linesize[0];
                    using (var bitmap = new Bitmap(width, height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        bitmap.Save(@"frame.buffer.jpg", ImageFormat.Jpeg);
                    }

                    frameNumber++;

                    // Throttle to roughly one frame per second for the demo.
                    System.Threading.Thread.Sleep(1000);
                }
            }

            // Teardown in reverse order of setup.
            FFmpegInvoke.av_free(pConvertedFrame);
            FFmpegInvoke.av_free(pConvertedFrameBuffer);
            FFmpegInvoke.sws_freeContext(pConvertContext);

            FFmpegInvoke.av_free(pDecodedFrame);
            FFmpegInvoke.avcodec_close(pCodecContext);
            FFmpegInvoke.avformat_close_input(&pFormatContext);
        }
Example #17
0
 // Releases the native decoding context. The frame (if any) is owned elsewhere.
 protected override void DisposeOverride()
 {
     FFmpegInvoke.avcodec_close(_pDecodingContext);
 }
Example #18
0
 // One-time FFmpeg registration, run before the first VideoDecoder is constructed.
 static VideoDecoder()
 {
     FFmpegInvoke.av_register_all();
     FFmpegInvoke.avcodec_register_all();
 }
Example #19
0
        // Decodes a single representative frame from InputLocation, converts it to
        // BGR24 and returns it as an OpenCV Mat. The result is cached in Frame and
        // returned directly on subsequent calls.
        public unsafe Mat GetFrame()
        {
            if (ProcessedFrame)
            {
                return(Frame);
            }

            AVFormatContext *pFormatContext = FFmpegInvoke.avformat_alloc_context();

            if (FFmpegInvoke.avformat_open_input(&pFormatContext, InputLocation, null, null) != 0)
            {
                throw new Exception("Failed to open " + InputLocation);
            }

            // Locate the first video stream.
            AVStream *pStream = null;

            for (int iStream = 0; iStream < pFormatContext->nb_streams; iStream++)
            {
                if (pFormatContext->streams[iStream]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[iStream];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new Exception("No video stream found in " + InputLocation);
            }
            AVCodecContext *pCodecContext = pStream->codec;
            AVCodecID       CodecID       = pCodecContext->codec_id;
            AVCodec *       pCodec        = FFmpegInvoke.avcodec_find_decoder(CodecID);

            if (pCodec == null)
            {
                throw new Exception("Unsupported codec in " + InputLocation);
            }


            // NOTE(review): the return value of avcodec_open2 is not checked here.
            FFmpegInvoke.avcodec_open2(pCodecContext, pCodec, null);

            var       packet  = new AVPacket();
            AVPacket *pPacket = &packet;

            FFmpegInvoke.av_init_packet(pPacket);


            //Read into pPacket and check.
            // NOTE(review): a zero-initialized packet has stream_index == 0, so if the
            // video stream's index is 0 this loop never reads anything and the first
            // decode below runs on an empty packet — confirm against real inputs.
            while (pPacket->stream_index != pStream->index)
            {
                if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
                {
                    throw new Exception("Failed reading frame in " + InputLocation);
                }
            }

            //Do the video decode.
            AVFrame *pFrame = FFmpegInvoke.avcodec_alloc_frame();


            //Try to seek to first keyframe.
            //FFmpegInvoke.av_seek_frame(pFormatContext, pStream->index, pFormatContext->start_time, 0);

            int GotPicture = 0;
            int Attempts   = 0;

            //bool GoodFrame = false;

            // Keep feeding packets until the decoder emits a picture (max 100 tries).
            while (GotPicture != 1 && Attempts < 100)
            {
                // NOTE(review): once stream_index matches it stays matched, so this
                // inner loop only reads on the first outer iteration; later decodes
                // re-submit the same packet — verify this is intentional.
                while (pPacket->stream_index != pStream->index)
                {
                    if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
                    {
                        throw new Exception(String.Format("Failed reading frame in {0}, frame:{1}", InputLocation, Attempts + 1));
                    }
                    //This should check for keyframes...
                    if (pPacket->flags == 0)
                    {
                        Attempts++; continue;
                    }
                }

                int Status = FFmpegInvoke.avcodec_decode_video2(pCodecContext, pFrame, &GotPicture, pPacket);
                if (Status <= 0)
                {
                    throw new Exception(string.Format("Error decoding video in {0} frame:{1}", InputLocation, Attempts + 1));
                }

                Attempts++;

                if (GotPicture == 1)
                {
                    Console.WriteLine("Successfully decoded a frame at " + Attempts);
                }
            }
            if (GotPicture == 0)
            {
                throw new Exception("Failed to decode a good frame in " + InputLocation);
            }

            //Conversion to Mat
            //http://stackoverflow.com/questions/29263090/ffmpeg-avframe-to-opencv-mat-conversion
            //

            int FrameHeight = pFrame->height;
            int FrameWidth  = pFrame->width;

            // Same-size colorspace conversion from the codec's format to BGR24.
            SwsContext *pConvertContext = FFmpegInvoke.sws_getContext(FrameWidth,
                                                                      FrameHeight,
                                                                      pCodecContext->pix_fmt,
                                                                      FrameWidth,
                                                                      FrameHeight,
                                                                      AVPixelFormat.PIX_FMT_BGR24,
                                                                      FFmpegInvoke.SWS_FAST_BILINEAR,
                                                                      null, null, null);

            if (pConvertContext == null)
            {
                throw new Exception("Failed to initialise Mat conversion context.");
            }

            var pConvertedFrame = (AVPicture *)FFmpegInvoke.avcodec_alloc_frame();

            //Setup the converted frame
            int ConvertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(AVPixelFormat.PIX_FMT_BGR24, FrameWidth, FrameHeight);
            var pConvertedFrameBuffer    = (byte *)FFmpegInvoke.av_malloc((uint)ConvertedFrameBufferSize);

            if (FFmpegInvoke.avpicture_fill(pConvertedFrame, pConvertedFrameBuffer, AVPixelFormat.PIX_FMT_BGR24, FrameWidth, FrameHeight) < 0)
            {
                throw new Exception("Failed to setup conversion frame");
            }

            byte **src = &pFrame->data_0;
            byte **dst = &pConvertedFrame->data_0;

            FFmpegInvoke.sws_scale(pConvertContext, src, pFrame->linesize, 0, FrameHeight,
                                   dst, pConvertedFrame->linesize);

            var imageBufferPtr = new IntPtr(pConvertedFrame->data_0);

            // NOTE(review): if Mat wraps the buffer without copying, the av_free of
            // pConvertedFrame below frees the frame shell but pConvertedFrameBuffer
            // (the pixel data) is never freed — verify Mat's copy semantics.
            Frame = new Mat(FrameHeight, FrameWidth, Emgu.CV.CvEnum.DepthType.Cv8U, 3, imageBufferPtr, *pConvertedFrame->linesize);

            //Cleanup
            FFmpegInvoke.av_free(pFrame);
            FFmpegInvoke.av_free(pConvertedFrame);
            FFmpegInvoke.sws_freeContext(pConvertContext);
            FFmpegInvoke.avcodec_close(pCodecContext);
            FFmpegInvoke.avformat_close_input(&pFormatContext);

            ProcessedFrame = true;
            return(Frame);
        }
Example #20
0
        // Demo entry point: opens a video URL, finds the first video stream, reads up
        // to 100 packets, converts each decoded frame to BGR24 and saves it to disk.
        private static unsafe void Main(string[] args)
        {
            // register LD_LIBRARY_PATH on window
            InteropHelper.RegisterLibrariesSearchPath(Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH));

            string url = @"http://www.fileformat.info/format/mpeg/sample/031699cb978244b8a3adf1e81cb2ac7c/download";

            // Global FFmpeg registration (required once before any other call).
            FFmpegInvoke.av_register_all();
            FFmpegInvoke.avcodec_register_all();
            FFmpegInvoke.avformat_network_init();


            AVFormatContext *pFormatContext = FFmpegInvoke.avformat_alloc_context();

            if (FFmpegInvoke.avformat_open_input(&pFormatContext, url, null, null) != 0)
            {
                throw new Exception("Could not open file");
            }

            // Fix: use avformat_find_stream_info (the supported API, and the one the
            // sibling example uses) instead of the deprecated av_find_stream_info.
            if (FFmpegInvoke.avformat_find_stream_info(pFormatContext, null) != 0)
            {
                throw new Exception("Could not find stream info");
            }

            // Locate the first video stream.
            AVStream *pStream = null;

            for (int i = 0; i < pFormatContext->nb_streams; i++)
            {
                // Bug fix: the loop previously tested and took streams[0] on every
                // iteration (the index variable was unused), so any file whose first
                // stream is not video was never matched.
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new Exception("Could not found video stream");
            }

            // Snapshot the stream's codec parameters for the converter setup below.
            AVCodecContext codecContext    = *(pStream->codec);
            int            width           = codecContext.width;
            int            height          = codecContext.height;
            AVPixelFormat  sourcePixFmt    = codecContext.pix_fmt;
            AVCodecID      codecId         = codecContext.codec_id;
            var            convertToPixFmt = AVPixelFormat.PIX_FMT_BGR24;
            SwsContext *   pConvertContext = FFmpegInvoke.sws_getContext(width, height, sourcePixFmt,
                                                                         width, height, convertToPixFmt,
                                                                         FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);

            if (pConvertContext == null)
            {
                throw new Exception("Could not initialize the conversion context");
            }

            // Destination picture + manually allocated BGR24 buffer for sws_scale output.
            var pConvertedFrame          = (AVPicture *)FFmpegInvoke.avcodec_alloc_frame();
            int convertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(convertToPixFmt, width, height);
            var pConvertedFrameBuffer    = (byte *)FFmpegInvoke.av_malloc((uint)convertedFrameBufferSize);

            FFmpegInvoke.avpicture_fill(pConvertedFrame, pConvertedFrameBuffer, convertToPixFmt, width, height);

            AVCodec *pCodec = FFmpegInvoke.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new Exception("Unsupported codec");
            }

            AVCodecContext *pCodecContext = FFmpegInvoke.avcodec_alloc_context3(pCodec);

            if ((pCodec->capabilities & FFmpegInvoke.CODEC_CAP_TRUNCATED) == FFmpegInvoke.CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= FFmpegInvoke.CODEC_FLAG_TRUNCATED;
            }

            if (FFmpegInvoke.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new Exception("Could not open codec");
            }

            AVFrame *pDecodedFrame = FFmpegInvoke.avcodec_alloc_frame();

            var       packet  = new AVPacket();
            AVPacket *pPacket = &packet;

            FFmpegInvoke.av_init_packet(pPacket);

            int frameNumber = 0;

            while (frameNumber < 100)
            {
                Console.WriteLine("frame: {0}", frameNumber);

                if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
                {
                    throw new Exception("Could not read frame");
                }

                // Skip packets from other (e.g. audio) streams.
                if (pPacket->stream_index != pStream->index)
                {
                    // Bug fix: release the skipped packet's buffer; it previously
                    // leaked on every non-video packet.
                    FFmpegInvoke.av_free_packet(pPacket);
                    continue;
                }

                int gotPicture = 0;
                int size       = FFmpegInvoke.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket);
                if (size < 0)
                {
                    throw new Exception(string.Format("Error while decoding frame {0}", frameNumber));
                }

                if (gotPicture == 1)
                {
                    // Convert the decoded frame into the preallocated BGR24 picture.
                    byte **src = &pDecodedFrame->data_0;
                    byte **dst = &pConvertedFrame->data_0;
                    FFmpegInvoke.sws_scale(pConvertContext, src, pDecodedFrame->linesize, 0,
                                           height, dst, pConvertedFrame->linesize);

                    byte *convertedFrameAddress = pConvertedFrame->data_0;

                    var imageBufferPtr = new IntPtr(convertedFrameAddress);

                    // Wrap the native buffer in a Bitmap (no copy) just long enough to save it.
                    using (var bitmap = new Bitmap(width, height, pConvertedFrame->linesize[0], PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        bitmap.Save(@"frame.buffer.jpg", ImageFormat.Jpeg);
                    }
                }

                // Bug fix: free each consumed packet (the decoder copies what it needs).
                FFmpegInvoke.av_free_packet(pPacket);

                frameNumber++;
            }

            // Teardown in reverse order of setup.
            FFmpegInvoke.av_free(pConvertedFrame);
            FFmpegInvoke.av_free(pConvertedFrameBuffer);
            FFmpegInvoke.sws_freeContext(pConvertContext);

            FFmpegInvoke.av_free(pDecodedFrame);
            FFmpegInvoke.avcodec_close(pCodecContext);
            FFmpegInvoke.avformat_close_input(&pFormatContext);
        }
        // Builds the full encoder pipeline for the given geometry and frame rate:
        // a BGR->codec-format converter, an opened codec context, and two frames
        // (YUV destination for the encoder, BGR staging for caller input).
        // Call order matters: all context fields and options are set BEFORE avcodec_open2.
        public VideoEncoder(int width, int height, int fps)
        {
            _converter = new VideoConverter(CODEC_PIXEL_FORMAT);

            AVCodec *codec = FFmpegInvoke.avcodec_find_encoder(CODEC_ID);

            if (codec == null)
            {
                throw new Exception("Codec not found");
            }

            _codec_context = FFmpegInvoke.avcodec_alloc_context3(codec);
            if (_codec_context == null)
            {
                throw new Exception("Could not allocate video codec context");
            }

            _codec_context->bit_rate  = 50000;
            _codec_context->width     = width;
            _codec_context->height    = height;
            // time_base is the inverse of the frame rate: 1/fps seconds per tick.
            _codec_context->time_base = new AVRational()
            {
                num = 1, den = fps
            };
            _codec_context->gop_size     = 10; // emit one intra frame every ten frames
            _codec_context->max_b_frames = 1;
            _codec_context->pix_fmt      = CODEC_PIXEL_FORMAT;
            // Codec-private option; must be set before the context is opened.
            FFmpegInvoke.av_opt_set(_codec_context->priv_data, "preset", "fast", 0);
            if (FFmpegInvoke.avcodec_open2(_codec_context, codec, null) < 0)
            {
                throw new Exception("Could not open codec");
            }

            // Destination frame in the codec's pixel format, with its own image buffer.
            _avFrameYUV = FFmpegInvoke.avcodec_alloc_frame();
            if (_avFrameYUV == null)
            {
                throw new Exception("Could not allocate video frame");
            }
            _avFrameYUV->format = (int)CODEC_PIXEL_FORMAT;
            _avFrameYUV->width  = width;
            _avFrameYUV->height = height;

            // 32-byte aligned plane allocation, freed in DisposeUnmanaged via av_freep.
            var ret1 = FFmpegInvoke.av_image_alloc(&_avFrameYUV->data_0, _avFrameYUV->linesize, width, height, CODEC_PIXEL_FORMAT, 32);

            if (ret1 < 0)
            {
                throw new Exception("Could not allocate raw picture buffer");
            }

            // Staging frame for incoming BGR pixels (EncodeFrame re-points its data
            // at the caller's buffer, so this allocation is mostly a shape template).
            _avFrameBGR = FFmpegInvoke.avcodec_alloc_frame();
            if (_avFrameBGR == null)
            {
                throw new Exception("Could not allocate video frame");
            }
            _avFrameBGR->format = (int)INPUT_PIXEL_FORMAT;
            _avFrameBGR->width  = width;
            _avFrameBGR->height = height;

            var ret2 = FFmpegInvoke.av_image_alloc(&_avFrameBGR->data_0, _avFrameBGR->linesize, width, height, INPUT_PIXEL_FORMAT, 32);

            if (ret2 < 0)
            {
                throw new Exception("Could not allocate raw picture buffer");
            }
        }