Example #1
        public DecoderStream(MediaFile file, ref AVStream stream)
        {
            // Initialize instance variables
            m_disposed = false;
            m_position = m_bufferUsedSize = 0;
            m_file     = file;
            m_avStream = stream;

            m_avCodecCtx = *m_avStream.codec;

            // Open the decoding codec
            AVCodec *avCodec = FFmpeg.avcodec_find_decoder(m_avCodecCtx.codec_id);

            if (avCodec == null)
            {
                throw new DecoderException("No decoder found");
            }

            if (FFmpeg.avcodec_open(ref m_avCodecCtx, avCodec) < 0)
            {
                throw new DecoderException("Error opening codec");
            }

            m_codecOpen = true;
        }
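
The constructor leaves the codec open (m_codecOpen = true) and tracks m_disposed, so a matching cleanup path is implied. A minimal sketch of what that could look like, assuming the binding also exposes avcodec_close with the same ref-style signature as avcodec_open above (an assumption, not something this example confirms):

        // Hypothetical counterpart to the constructor above.
        // FFmpeg.avcodec_close(ref ...) is assumed to mirror the ref-based
        // FFmpeg.avcodec_open(ref ...) binding used in the constructor.
        public void Dispose()
        {
            if (m_disposed)
            {
                return;
            }
            if (m_codecOpen)
            {
                FFmpeg.avcodec_close(ref m_avCodecCtx);
                m_codecOpen = false;
            }
            m_disposed = true;
        }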
Example #2
        public void TestEncode()
        {
            AVLog.Callback += (level, msg) => Console.WriteLine(level + ": " + msg);
            AVLog.Level     = (int)AVLogLevel.Trace;

            _codecContext.Open2(AVCodec.FindDecoder(_codecContext.CodecId));
            AVPacket packet;
            var      frame = new AVFrame();

            var codec         = AVCodec.FindEncoderByName("png");
            var encodeContext = new AVCodecContext(codec)
            {
                PixelFormat = AVPixelFormat.RGB24,
                Width       = _codecContext.Width,
                Height      = _codecContext.Height
            };

            encodeContext.Open2(codec);

            var convContext = SWSContext.GetContext(_codecContext.Width, _codecContext.Height, AVPixelFormat.YUV420P,
                                                    AVPixelFormat.RGB24);
            var convBuffer = new AVPicture(AVPixelFormat.RGB24, _codecContext.Width, _codecContext.Height).AsFrame();

            int frameCounter = 0;
            int readCounter  = 0;

            while ((packet = _formatContext.ReadFrame()) != null)
            {
                if (packet.StreamIndex != 0)
                {
                    continue;
                }
                bool pic = _codecContext.DecodeVideo2(packet, frame);
                if (!pic)
                {
                    continue;
                }

                readCounter++;
                convContext.Scale(frame, convBuffer, 0, _codecContext.Height);

                var outPacket = encodeContext.EncodeVideo2(convBuffer);
                if (outPacket != null)
                {
                    frameCounter++;

                    // for checking the frames, not part of normal test
                    //var data = new byte[outPacket.Size];
                    //Marshal.Copy(outPacket.Data, data, 0, data.Length);
                    //File.WriteAllBytes((frameCounter++) + ".png", data);
                }
            }

            Assert.AreEqual(readCounter, frameCounter);
            Assert.AreEqual(245, readCounter);

            encodeContext.Close();
            _codecContext.Close();
        }
Example #3
 public static unsafe bool DecodeVideo2(AVCodecContext codecContext, AVFrame frame, out bool frameFinished, AVPacket packet)
 {
     int ffNum = 0;
     bool ok =
         FFmpegInvoke.avcodec_decode_video2(codecContext.NativeObj, frame.NativeObj, &ffNum, packet.NativeObj) >=
         0;
     frameFinished = ffNum > 0;
     return ok;
 }
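
A sketch of how this wrapper might be consumed in a decode loop; the ReadFrame call and field names are borrowed from examples #2 and #15, and the loop itself is illustrative rather than confirmed API usage:

 // Hypothetical decode loop over the DecodeVideo2 wrapper above.
 var frame = new AVFrame();
 AVPacket packet;
 while ((packet = _formatContext.ReadFrame()) != null)
 {
     bool frameFinished;
     if (!DecodeVideo2(_codecContext, frame, out frameFinished, packet))
     {
         throw new Exception("avcodec_decode_video2 reported an error");
     }
     if (frameFinished)
     {
         // A complete picture is now available in 'frame'.
     }
 }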
Example #4
        protected Format(AVCodecID eCodecID, IntPtr pAVCC, byte nThreads, AVFieldOrder eFieldsOrder)
            : this()
        {
            helper.Initialize();
            _pCodec     = NULL;
            nBufferSize = 0;
            int nResult = 0;

            _bEncode        = false;
            pAVCodecContext = pAVCC;
            _bAVCodecContextAllocationInternal = false;
            AVMediaType eAVMediaType = AVMediaType.AVMEDIA_TYPE_UNKNOWN;

            if (NULL != pAVCodecContext)
            {
                stAVCodecContext = (AVCodecContext)Marshal.PtrToStructure(pAVCodecContext, typeof(AVCodecContext));
                eAVMediaType     = stAVCodecContext.codec_type;
            }

            if (AVMediaType.AVMEDIA_TYPE_UNKNOWN == eAVMediaType)
            {
                if (CodecIDRawGet() != eCodecID)
                {
                    //if (AVCodecID.CODEC_ID_H264_MOBILE == eCodecID)
                    //	eCodecID = AVCodecID.CODEC_ID_H264;
                    if (NULL == (_pCodec = Functions.avcodec_find_encoder(eCodecID)))
                    {
                        throw new Exception("can't find codec " + eCodecID.ToString());
                    }
                }
                if (NULL == pAVCodecContext)
                {
                    //lock (helper._oSyncRootGlobal)
                    {
                        pAVCodecContext = Functions.avcodec_alloc_context3(_pCodec);
                        _bAVCodecContextAllocationInternal = true;
                    }
                }
                else
                {
                    //lock (helper._oSyncRootGlobal)
                    nResult = Functions.avcodec_get_context_defaults3(pAVCodecContext, _pCodec);
                }
                stAVCodecContext          = (AVCodecContext)Marshal.PtrToStructure(pAVCodecContext, typeof(AVCodecContext));
                stAVCodecContext.codec_id = eCodecID;
                _bEncode = true;
            }
            if (1 > nThreads)
            {
                nThreads = (byte)Environment.ProcessorCount;
            }
            stAVCodecContext.thread_count = nThreads;
            stAVCodecContext.field_order  = eFieldsOrder;
            Marshal.StructureToPtr(stAVCodecContext, pAVCodecContext, true);
        }
Example #5
        /// <summary>
        /// Closes the BMP decoder, releasing the picture buffer still held by the codec context, if any.
        /// </summary>
        /// <param name="avctx">The codec context being closed.</param>
        /// <returns>Always 0.</returns>
        public override int close(AVCodecContext avctx)
        {
            var c = (BMPContext)avctx.priv_data;

            if (!c.picture.data[0].IsNull)
            {
                avctx.release_buffer(avctx, c.picture);
            }

            return(0);
        }
Example #6
        /// <summary>
        /// Closes the Targa decoder, releasing the picture buffer still held by the codec context, if any.
        /// </summary>
        /// <param name="avctx">The codec context being closed.</param>
        /// <returns>Always 0.</returns>
        public override int close(AVCodecContext avctx)
        {
            var s = (TargaContext)avctx.priv_data;

            if (!s.picture.data[0].IsNull)
            {
                avctx.release_buffer(avctx, s.picture);
            }

            return(0);
        }
Example #7
        /// <summary>
        /// Initializes the BMP decoder: allocates the private context and publishes its picture as the coded frame.
        /// </summary>
        /// <param name="avctx">The codec context being initialized.</param>
        /// <returns>Always 0.</returns>
        public override int init(AVCodecContext avctx)
        {
            if (avctx.priv_data == null)
            {
                avctx.priv_data = new BMPContext();
            }
            var s = (BMPContext)avctx.priv_data;

            Functions.avcodec_get_frame_defaults(s.picture);

            avctx.coded_frame = s.picture;

            return(0);
        }
Example #8
        private object Decode <TType>(byte[] Data, AVCodec AVCodec, Action <AVCodecContext, AVPacket, TType> Action)
        {
            var context = new AVCodecContext();
            var packet  = new AVPacket();

            packet.data = Pointer <byte> .Create(new AllocatedMemory(Data));

            packet.size = Data.Length;

            context.get_buffer = (AVCodecContext, AVFrame) =>
            {
                var width  = AVCodecContext.width;
                var height = AVCodecContext.height;
                AVFrame.linesize[0] = width * 4;
                AVFrame.data[0]     = CLib.malloc(AVFrame.linesize[0] * height);
                return(0);
            };

            context.release_buffer = (AVCodecContext, AVFrame) =>
            {
                CLib.free(AVFrame.data[0]);
            };

            AVCodec.init(context);
            try
            {
                object obj = null;
                if (AVCodec.decode(context, ref obj, packet) < 0)
                {
                    throw(new Exception());
                }
                Action(context, packet, (TType)obj);
                return(obj);
            }
            finally
            {
                AVCodec.close(context);
            }
        }
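
A sketch of how this helper might be invoked; the BMPDecoder type and the lambda are purely illustrative, though example #16 does show the BMP decoder placing an AVFrame into the output object:

        // Hypothetical call, assuming a ported BMP decoder as in examples #7 and #16.
        var picture = (AVFrame)Decode<AVFrame>(File.ReadAllBytes("image.bmp"), new BMPDecoder(),
            (ctx, pkt, frame) => Console.WriteLine("decoded {0}x{1}", ctx.width, ctx.height));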
Example #9
File: log.cs Project: ilyi1116/ffmpeg.net
 static public void av_log(AVCodecContext avctx, int Level, string Format, params object[] Params)
 {
     Console.Error.WriteLine("{0}", Format);
     //throw new NotImplementedException();
 }
Example #10
        public AudioEncoderStream(string Filename, EncoderInformation EncoderInfo)
        {
            // Initialize instance variables
            m_filename = Filename;
            m_disposed = m_fileOpen = false;
            m_buffer   = new FifoMemoryStream();

            // Open FFmpeg
            FFmpeg.av_register_all();

            // Initialize the output format context
            m_avFormatCtx = FFmpeg.av_alloc_format_context();

            // Get output format
            m_avFormatCtx.oformat = FFmpeg.guess_format(EncoderInfo.Codec.ShortName, null, null);
            if (m_avFormatCtx.oformat == null)
            {
                throw new EncoderException("Could not find output format.");
            }

            FFmpeg.av_set_parameters(ref m_avFormatCtx, null);

            // Initialize the new output stream
            AVStream *stream = FFmpeg.av_new_stream(ref m_avFormatCtx, 1);

            if (stream == null)
            {
                throw new EncoderException("Could not alloc output audio stream");
            }

            m_avStream = *stream;

            // Initialize output codec context
            m_avCodecCtx = *m_avStream.codec;

            m_avCodecCtx.codec_id        = EncoderInfo.Codec.CodecID;
            m_avCodecCtx.codec_type      = CodecType.CODEC_TYPE_AUDIO;
            m_avCodecCtx.sample_rate     = EncoderInfo.SampleRate;
            m_avCodecCtx.channels        = EncoderInfo.Channels;
            m_avCodecCtx.bits_per_sample = EncoderInfo.SampleSize;
            m_avCodecCtx.bit_rate        = EncoderInfo.Bitrate;

            if (EncoderInfo.VBR)
            {
                m_avCodecCtx.flags         |= FFmpeg.CODEC_FLAG_QSCALE;
                m_avCodecCtx.global_quality = EncoderInfo.FFmpegQualityScale;
            }

            // Open codec
            AVCodec *outCodec = FFmpeg.avcodec_find_encoder(m_avCodecCtx.codec_id);

            if (outCodec == null)
            {
                throw new EncoderException("Could not find encoder");
            }

            if (FFmpeg.avcodec_open(ref m_avCodecCtx, outCodec) < 0)
            {
                throw new EncoderException("Could not open codec.");
            }

            // Open and prep file
            if (FFmpeg.url_fopen(ref m_avFormatCtx.pb, m_filename, FFmpeg.URL_WRONLY) < 0)
            {
                throw new EncoderException("Could not open output file.");
            }

            m_fileOpen = true;

            FFmpeg.av_write_header(ref m_avFormatCtx);
        }
Example #11
            public override Frame[] Convert(Format cFormatTarget, Frame cFrameSource)             // pAVFrameSource contains bytes in this object's format!!!
            {
                List <Frame> aRetVal = new List <Frame>();

                if (null == cFormatTarget || !(cFormatTarget is Format.Video))
                {
                    throw new Exception("target format is null or has a wrong type");
                }
                Format.Video cFormatVideoTarget = (Format.Video)cFormatTarget;
                try
                {
                    if (ePixelFormat == cFormatVideoTarget.ePixelFormat && nHeight == cFormatVideoTarget.nHeight && nWidth == cFormatVideoTarget.nWidth)
                    {
                        return new Frame[] { new Frame(cFrameSource.aBytesCopy)
                                             {
                                                 nPTS = cFrameSource.nPTS, bKeyframe = cFrameSource.bKeyframe
                                             } };
                    }
                    if (eCodecID == cFormatTarget.eCodecID || NULL != _pCodec)
                    {
                        throw new NotImplementedException(); //TODO: finish converting from encoded to raw
                    }
                    cFrameSource = Transform(cFormatVideoTarget, cFrameSource);

                    int nSize;
                    if (NULL == cFrameSource)
                    {
                        (new Logger()).WriteWarning("Format.Video.Convert: IntPtr.Zero == cFrameSource.AVFrameGet()");
                    }
                    if (NULL == cFormatVideoTarget.pAVCodecContext)
                    {
                        (new Logger()).WriteWarning("Format.Video.Convert: IntPtr.Zero == cFormatVideoTarget.pAVCodecContext");
                    }
                    if (null == _cFrame)
                    {
                        _cFrame      = new Frame(cFormatVideoTarget.nBufferSize);
                        _cFrame.nPTS = 0;
                    }
                    cFrameSource.nPTS = _cFrame.nPTS;
                    nSize             = Functions.avcodec_encode_video(cFormatVideoTarget.pAVCodecContext, _cFrame.aBuffer, _cFrame.nLengthBuffer, cFrameSource.pBytes);
                    if (0 > nSize)
                    {
                        throw new Exception("video encoding failed:" + nSize);
                    }
                    if (0 < nSize)
                    {
                        aRetVal.Add(new Frame(null, _cFrame.aBuffer.Take(nSize).ToArray()));

                        AVCodecContext stAVCodecContext = (AVCodecContext)Marshal.PtrToStructure(cFormatVideoTarget.pAVCodecContext, typeof(AVCodecContext));
                        if (NULL != stAVCodecContext.coded_frame)
                        {
                            AVFrame cAVFrame = (AVFrame)Marshal.PtrToStructure(stAVCodecContext.coded_frame, typeof(AVFrame));
                            aRetVal[0].nPTS      = cAVFrame.pts;
                            aRetVal[0].bKeyframe = 0 < cAVFrame.key_frame;
                        }
                    }
                    _cFrame.nPTS++;
                }
                catch (Exception ex)
                {
                    (new Logger()).WriteError(ex);
                }
                return(aRetVal.ToArray());
            }
Example #12
 public static void Close(AVCodecContext codecContext)
 {
     FFmpegInvoke.avcodec_close(codecContext.NativeObj);
 }
Example #13
 static public void avcodec_set_dimensions(AVCodecContext s, int width, int height)
 {
     s.width  = width;
     s.height = height;
 }
Example #14
 public static bool Open2(AVCodecContext context, AVCodec codec)
 {
     return FFmpegInvoke.avcodec_open2(context.NativeObj, codec.NativeObj, null) >= 0;
 }
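
Paired with the FindDecoder wrapper used in example #2, a typical call site might look like the following sketch (the error handling is an assumption):

 // Hypothetical pairing of FindDecoder and Open2, mirroring
 // avcodec_find_decoder + avcodec_open2 in the C API.
 var codec = AVCodec.FindDecoder(context.CodecId);
 if (codec == null || !Open2(context, codec))
 {
     throw new Exception("Could not find or open decoder");
 }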
Example #15
 public void Setup()
 {
     _formatContext = AVFormat.OpenInput("test.ts");
     _formatContext.FindStreamInfo();
     _codecContext = _formatContext.GetStream(0).CodecContext;
 }
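
A fixture like this usually wants a matching teardown; a sketch, where _codecContext.Close() appears in example #2 but _formatContext.CloseInput() is an assumed wrapper over avformat_close_input:

 public void TearDown()
 {
     _codecContext.Close();       // Close() is shown in example #2
     _formatContext.CloseInput(); // assumed wrapper over avformat_close_input
 }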
Example #16
        /// <summary>
        /// Decodes a single BMP image from the packet into the context's picture buffer.
        /// </summary>
        /// <param name="avctx">The codec context.</param>
        /// <param name="outputData">Receives the decoded picture on success.</param>
        /// <param name="avpkt">The packet holding the complete BMP file.</param>
        /// <returns>The number of bytes consumed, or a negative value on error.</returns>
        public override int decode(AVCodecContext avctx, ref object outputData, AVPacket avpkt)
        {
            Pointer <byte> buf = avpkt.data;
            int            buf_size = avpkt.size;
            BMPContext     s = (BMPContext)avctx.priv_data;
            AVFrame        p = s.picture;
            uint           fsize, hsize;
            int            width, height;
            int            depth;
            BiCompression  comp;
            uint           ihsize;
            int            i, n, linesize;
            var            rgb   = new uint[3];
            uint           alpha = 0;
            Pointer <byte> ptr;
            int            dsize;
            Pointer <byte> buf0 = buf;

            if (buf_size < 14)
            {
                log.av_log(avctx, log.AV_LOG_ERROR, "buf size too small (%d)\n", buf_size);
                return(-1);
            }

            if (bytestream.get_byte(ref buf) != 'B' || bytestream.get_byte(ref buf) != 'M')
            {
                log.av_log(avctx, log.AV_LOG_ERROR, "bad magic number\n");
                return(-1);
            }

            fsize = bytestream.get_le32(ref buf);

            if (buf_size < fsize)
            {
                log.av_log(avctx, log.AV_LOG_ERROR, "not enough data (%d < %d), trying to decode anyway\n", buf_size, fsize);
                fsize = (uint)buf_size;
            }

            buf += 2;                              /* reserved1 */
            buf += 2;                              /* reserved2 */

            hsize  = bytestream.get_le32(ref buf); /* header size */
            ihsize = bytestream.get_le32(ref buf); /* more header size */

            if (ihsize + 14 > hsize)
            {
                log.av_log(avctx, log.AV_LOG_ERROR, "invalid header size %d\n", hsize);
                return(-1);
            }

            /* sometimes the file size is set to some header's size; set the real size in that case */
            if (fsize == 14 || fsize == ihsize + 14)
            {
                fsize = (uint)(buf_size - 2);
            }

            if (fsize <= hsize)
            {
                log.av_log(avctx, log.AV_LOG_ERROR, "declared file size is less than header size (%d < %d)\n", fsize, hsize);
                return(-1);
            }

            switch (ihsize)
            {
            case  40:                     // windib
            case  56:                     // windib v3
            case  64:                     // OS/2 v2
            case 108:                     // windib v4
            case 124:                     // windib v5
                width  = (int)bytestream.get_le32(ref buf);
                height = (int)bytestream.get_le32(ref buf);
                break;

            case  12:                     // OS/2 v1
                width  = (int)bytestream.get_le16(ref buf);
                height = (int)bytestream.get_le16(ref buf);
                break;

            default:
                log.av_log(avctx, log.AV_LOG_ERROR, "unsupported BMP file, patch welcome\n");
                return(-1);
            }

            if (bytestream.get_le16(ref buf) != 1)             /* planes */
            {
                log.av_log(avctx, log.AV_LOG_ERROR, "invalid BMP header\n");
                return(-1);
            }

            depth = bytestream.get_le16(ref buf);

            if (ihsize == 40 || ihsize == 64 || ihsize == 56)
            {
                comp = (BiCompression)bytestream.get_le32(ref buf);
            }
            else
            {
                comp = BiCompression.BMP_RGB;
            }

            if (comp != BiCompression.BMP_RGB && comp != BiCompression.BMP_BITFIELDS && comp != BiCompression.BMP_RLE4 && comp != BiCompression.BMP_RLE8)
            {
                log.av_log(avctx, log.AV_LOG_ERROR, "BMP coding %d not supported\n", comp);
                return(-1);
            }

            if (comp == BiCompression.BMP_BITFIELDS)
            {
                buf   += 20;
                rgb[0] = bytestream.get_le32(ref buf);
                rgb[1] = bytestream.get_le32(ref buf);
                rgb[2] = bytestream.get_le32(ref buf);
                if (ihsize >= 108)
                {
                    alpha = bytestream.get_le32(ref buf);
                }
            }


            avctx.width  = width;
            avctx.height = height > 0 ? height : -height;

            avctx.pix_fmt = PixelFormat.PIX_FMT_NONE;

            switch (depth)
            {
            case 32:
                if (comp == BiCompression.BMP_BITFIELDS)
                {
                    if (rgb[0] == 0xFF000000 && rgb[1] == 0x00FF0000 && rgb[2] == 0x0000FF00)
                    {
                        avctx.pix_fmt = (alpha != 0) ? PixelFormat.PIX_FMT_ABGR : PixelFormat.PIX_FMT_0BGR;
                    }
                    else if (rgb[0] == 0x00FF0000 && rgb[1] == 0x0000FF00 && rgb[2] == 0x000000FF)
                    {
                        avctx.pix_fmt = (alpha != 0) ? PixelFormat.PIX_FMT_BGRA : PixelFormat.PIX_FMT_BGR0;
                    }
                    else if (rgb[0] == 0x0000FF00 && rgb[1] == 0x00FF0000 && rgb[2] == 0xFF000000)
                    {
                        avctx.pix_fmt = (alpha != 0) ? PixelFormat.PIX_FMT_ARGB : PixelFormat.PIX_FMT_0RGB;
                    }
                    else if (rgb[0] == 0x000000FF && rgb[1] == 0x0000FF00 && rgb[2] == 0x00FF0000)
                    {
                        avctx.pix_fmt = (alpha != 0) ? PixelFormat.PIX_FMT_RGBA : PixelFormat.PIX_FMT_RGB0;
                    }
                    else
                    {
                        log.av_log(avctx, log.AV_LOG_ERROR, "Unknown bitfields %0X %0X %0X\n", rgb[0], rgb[1], rgb[2]);
                        return(error.AVERROR(error.EINVAL));
                    }
                }
                else
                {
                    avctx.pix_fmt = PixelFormat.PIX_FMT_BGRA;
                }
                break;

            case 24:
                avctx.pix_fmt = PixelFormat.PIX_FMT_BGR24;
                break;

            case 16:
                if (comp == BiCompression.BMP_RGB)
                {
                    avctx.pix_fmt = PixelFormat.PIX_FMT_RGB555;
                }
                else if (comp == BiCompression.BMP_BITFIELDS)
                {
                    if (rgb[0] == 0xF800 && rgb[1] == 0x07E0 && rgb[2] == 0x001F)
                    {
                        avctx.pix_fmt = PixelFormat.PIX_FMT_RGB565;
                    }
                    else if (rgb[0] == 0x7C00 && rgb[1] == 0x03E0 && rgb[2] == 0x001F)
                    {
                        avctx.pix_fmt = PixelFormat.PIX_FMT_RGB555;
                    }
                    else if (rgb[0] == 0x0F00 && rgb[1] == 0x00F0 && rgb[2] == 0x000F)
                    {
                        avctx.pix_fmt = PixelFormat.PIX_FMT_RGB444;
                    }
                    else
                    {
                        log.av_log(avctx, log.AV_LOG_ERROR, "Unknown bitfields %0X %0X %0X\n", rgb[0], rgb[1], rgb[2]);
                        return(error.AVERROR(error.EINVAL));
                    }
                }
                break;

            case 8:
                if (hsize - ihsize - 14 > 0)
                {
                    avctx.pix_fmt = PixelFormat.PIX_FMT_PAL8;
                }
                else
                {
                    avctx.pix_fmt = PixelFormat.PIX_FMT_GRAY8;
                }
                break;

            case 1:
            case 4:
                if (hsize - ihsize - 14 > 0)
                {
                    avctx.pix_fmt = PixelFormat.PIX_FMT_PAL8;
                }
                else
                {
                    log.av_log(avctx, log.AV_LOG_ERROR, "Unknown palette for %d-colour BMP\n", 1 << depth);
                    return(-1);
                }
                break;

            default:
                log.av_log(avctx, log.AV_LOG_ERROR, "depth %d not supported\n", depth);
                return(-1);
            }

            if (avctx.pix_fmt == PixelFormat.PIX_FMT_NONE)
            {
                log.av_log(avctx, log.AV_LOG_ERROR, "unsupported pixel format\n");
                return(-1);
            }

            if (!p.data[0].IsNull)
            {
                avctx.release_buffer(avctx, p);
            }

            p.reference = 0;
            if (avctx.get_buffer(avctx, p) < 0)
            {
                log.av_log(avctx, log.AV_LOG_ERROR, "get_buffer() failed\n");
                return(-1);
            }

            p.pict_type = AVPictureType.AV_PICTURE_TYPE_I;

            p.key_frame = 1;

            buf   = buf0 + hsize;
            dsize = (int)(buf_size - hsize);

            /* Line size in file multiple of 4 */
            n = (int)(((avctx.width * depth + 31) / 8) & ~3);

            if (n * avctx.height > dsize && comp != BiCompression.BMP_RLE4 && comp != BiCompression.BMP_RLE8)
            {
                log.av_log(avctx, log.AV_LOG_ERROR, "not enough data (%d < %d)\n", dsize, n * avctx.height);
                return(-1);
            }

            // RLE may skip decoding some picture areas, so blank picture before decoding
            if (comp == BiCompression.BMP_RLE4 || comp == BiCompression.BMP_RLE8)
            {
                CLib.memset(p.data[0], 0, avctx.height * p.linesize[0]);
            }

            if (depth == 4 || depth == 8)
            {
                CLib.memset(p.data[1], 0, 1024);
            }

            if (height > 0)
            {
                ptr      = p.data[0] + (avctx.height - 1) * p.linesize[0];
                linesize = -p.linesize[0];
            }
            else
            {
                ptr      = p.data[0];
                linesize = p.linesize[0];
            }

            if (avctx.pix_fmt == PixelFormat.PIX_FMT_PAL8)
            {
                int colors = (1 << depth);
                if (ihsize >= 36)
                {
                    int t;
                    buf = buf0 + 46;
                    t   = (int)bytestream.get_le32(ref buf);
                    if (t < 0 || t > (int)(1 << depth))
                    {
                        log.av_log(avctx, log.AV_LOG_ERROR, "Incorrect number of colors - %X for bitdepth %d\n", t, depth);
                    }
                    else if (t != 0)
                    {
                        colors = t;
                    }
                }
                buf = buf0 + 14 + ihsize;                  //palette location
                if ((hsize - ihsize - 14) < (colors << 2)) // OS/2 bitmap, 3 bytes per palette entry
                {
                    for (i = 0; i < colors; i++)
                    {
                        var a = p.data[1].CastPointer <uint>();
                        a[i] = (uint)((0xff << 24) | bytestream.get_le24(ref buf));
                    }
                }
                else
                {
                    for (i = 0; i < colors; i++)
                    {
                        var a = p.data[1].CastPointer <uint>();
                        a[i] = (uint)((0xFFU << 24) | bytestream.get_le32(ref buf));
                    }
                }
                buf = buf0 + hsize;
            }

            if (comp == BiCompression.BMP_RLE4 || comp == BiCompression.BMP_RLE8)
            {
                if (height < 0)
                {
                    p.data[0]    += p.linesize[0] * (avctx.height - 1);
                    p.linesize[0] = -p.linesize[0];
                }

                //ff_msrle_decode(avctx, (AVPicture)p, depth, buf, dsize);
                Unimplemented.Mark();

                if (height < 0)
                {
                    p.data[0]    += p.linesize[0] * (avctx.height - 1);
                    p.linesize[0] = -p.linesize[0];
                }
            }
            else
            {
                switch (depth)
                {
                case 1:
                    for (i = 0; i < avctx.height; i++)
                    {
                        for (int j = 0; j < n; j++)
                        {
                            ptr[j * 8 + 0] = (byte)((buf[j] >> 7));
                            ptr[j * 8 + 1] = (byte)((buf[j] >> 6) & 1);
                            ptr[j * 8 + 2] = (byte)((buf[j] >> 5) & 1);
                            ptr[j * 8 + 3] = (byte)((buf[j] >> 4) & 1);
                            ptr[j * 8 + 4] = (byte)((buf[j] >> 3) & 1);
                            ptr[j * 8 + 5] = (byte)((buf[j] >> 2) & 1);
                            ptr[j * 8 + 6] = (byte)((buf[j] >> 1) & 1);
                            ptr[j * 8 + 7] = (byte)((buf[j] >> 0) & 1);
                        }
                        buf += n;
                        ptr += linesize;
                    }
                    break;

                case 8:
                case 24:
                case 32:
                    for (i = 0; i < avctx.height; i++)
                    {
                        //Console.WriteLine("i={0}, BytesPerRow={1}, linesize={2}", i, n, linesize);
                        CLib.memcpy(ptr, buf, n);
                        buf += n;
                        ptr += linesize;
                    }
                    break;

                case 4:
                    for (i = 0; i < avctx.height; i++)
                    {
                        for (int j = 0; j < n; j++)
                        {
                            ptr[j * 2 + 0] = (byte)((buf[j] >> 4) & 0xF);
                            ptr[j * 2 + 1] = (byte)(buf[j] & 0xF);
                        }
                        buf += n;
                        ptr += linesize;
                    }
                    break;

                case 16:
                    for (i = 0; i < avctx.height; i++)
                    {
                        Pointer <ushort> src = buf.CastPointer <ushort>();
                        Pointer <ushort> dst = ptr.CastPointer <ushort>();

                        for (int j = 0; j < avctx.width; j++)
                        {
                            dst[0] = av_bswap.av_le2ne16(src[0]);
                            src++;
                            dst++;
                        }

                        buf += n;
                        ptr += linesize;
                    }
                    break;

                default:
                    log.av_log(avctx, log.AV_LOG_ERROR, "BMP decoder is broken\n");
                    return(-1);
                }
            }

            outputData = s.picture;

            return(buf_size);
        } // decode
Example #17
        public override int decode(AVCodecContext avctx, ref object outdata, AVPacket avpkt)
        {
            outdata = null;

            TargaContext s = (TargaContext)avctx.priv_data;
            //AVFrame *picture = data;
            AVFrame        p = s.picture;
            Pointer <byte> dst;
            int            stride;
            TargaCompr     compr;
            int            idlen, pal, y, w, h, bpp, flags;
            int            first_clr, colors, csize;

            bytestream2.init(ref s.gb, avpkt.data, avpkt.size);

            /* parse image header */
            idlen     = bytestream2.get_byte(ref s.gb);
            pal       = bytestream2.get_byte(ref s.gb);
            compr     = (TargaCompr)bytestream2.get_byte(ref s.gb);
            first_clr = bytestream2.get_le16(ref s.gb);
            colors    = bytestream2.get_le16(ref s.gb);
            csize     = bytestream2.get_byte(ref s.gb);
            bytestream2.skip(ref s.gb, 4);             /* 2: x, 2: y */
            w   = bytestream2.get_le16(ref s.gb);
            h   = bytestream2.get_le16(ref s.gb);
            bpp = bytestream2.get_byte(ref s.gb);

            if (bytestream2.get_bytes_left(ref s.gb) <= idlen)
            {
                log.av_log(avctx, log.AV_LOG_ERROR, "Not enough data to read header\n");
                return(error.AVERROR_INVALIDDATA);
            }

            flags = bytestream2.get_byte(ref s.gb);

            if ((pal == 0) && ((first_clr != 0) || (colors != 0) || (csize != 0)))
            {
                log.av_log(avctx, log.AV_LOG_WARNING, "File without colormap has colormap information set.\n");
                // the specification says we should ignore those values in this case
                first_clr = colors = csize = 0;
            }


            // skip identifier if any
            bytestream2.skip(ref s.gb, idlen);

            switch (bpp)
            {
            case 8:
                avctx.pix_fmt = ((TargaCompr)((int)compr & (~(int)TargaCompr.TGA_RLE)) == TargaCompr.TGA_BW) ? PixelFormat.PIX_FMT_GRAY8 : PixelFormat.PIX_FMT_PAL8;
                break;

            case 15:
            case 16:
                avctx.pix_fmt = PixelFormat.PIX_FMT_RGB555LE;
                break;

            case 24:
                avctx.pix_fmt = PixelFormat.PIX_FMT_BGR24;
                break;

            case 32:
                avctx.pix_fmt = PixelFormat.PIX_FMT_BGRA;
                break;

            default:
                log.av_log(avctx, log.AV_LOG_ERROR, "Bit depth %i is not supported\n", bpp);
                return(-1);
            }


            if (!s.picture.data[0].IsNull)
            {
                avctx.release_buffer(avctx, s.picture);
            }

            if (imgutils.av_image_check_size((uint)w, (uint)h, 0, avctx) != 0)
            {
                return(-1);
            }

            if (w != avctx.width || h != avctx.height)
            {
                Functions.avcodec_set_dimensions(avctx, w, h);
            }

            if (avctx.get_buffer(avctx, p) < 0)
            {
                log.av_log(avctx, log.AV_LOG_ERROR, "get_buffer() failed\n");
                return(-1);
            }

            if ((flags & 0x20) != 0)
            {
                dst    = p.data[0];
                stride = p.linesize[0];
            }
            else
            {
                //image is upside-down
                dst    = p.data[0] + p.linesize[0] * (h - 1);
                stride = -p.linesize[0];
            }


            if (colors != 0)
            {
                int pal_size, pal_sample_size;
                if ((colors + first_clr) > 256)
                {
                    log.av_log(avctx, log.AV_LOG_ERROR, "Incorrect palette: %i colors with offset %i\n", colors, first_clr);
                    return(-1);
                }
                switch (csize)
                {
                case 24: pal_sample_size = 3; break;

                case 16:
                case 15: pal_sample_size = 2; break;

                default:
                    log.av_log(avctx, log.AV_LOG_ERROR, "Palette entry size %i bits is not supported\n", csize);
                    return(-1);
                }
                pal_size = colors * pal_sample_size;
                if (avctx.pix_fmt != PixelFormat.PIX_FMT_PAL8)
                {
                    //should not occur but skip palette anyway
                    bytestream2.skip(ref s.gb, pal_size);
                }
                else
                {
                    int t;
                    var ppal = p.data[1].CastPointer <uint>() + first_clr;

                    if (bytestream2.get_bytes_left(ref s.gb) < pal_size)
                    {
                        log.av_log(avctx, log.AV_LOG_ERROR, "Not enough data to read palette\n");
                        return(error.AVERROR_INVALIDDATA);
                    }
                    switch (pal_sample_size)
                    {
                    case 3:
                        /* RGB24 */
                        for (t = 0; t < colors; t++)
                        {
                            ppal[0] = (0xffU << 24) | bytestream2.get_le24u(ref s.gb);
                            ppal++;
                        }
                        break;

                    case 2:
                        /* RGB555 */
                        for (t = 0; t < colors; t++)
                        {
                            var v = (uint)bytestream2.get_le16u(ref s.gb);
                            v = ((v & 0x7C00) << 9) |
                                ((v & 0x03E0) << 6) |
                                ((v & 0x001F) << 3);
                            /* left bit replication */
                            v      |= (v & 0xE0E0E0U) >> 5;
                            ppal[0] = (0xffU << 24) | v;
                            ppal++;
                        }
                        break;
                    }
                    p.palette_has_changed = 1;
                }
            }

            if ((compr & (~TargaCompr.TGA_RLE)) == TargaCompr.TGA_NODATA)
            {
                CLib.memset(p.data[0], 0, p.linesize[0] * h);
            }
            else
            {
                if ((compr & TargaCompr.TGA_RLE) != 0)
                {
                    //int res = targa_decode_rle(avctx, s, dst, w, h, stride, bpp);
                    //if (res < 0) return res;
                    throw (new NotImplementedException());
                }
                else
                {
                    var img_size = w * ((bpp + 1) >> 3);
                    if (bytestream2.get_bytes_left(ref s.gb) < img_size * h)
                    {
                        log.av_log(avctx, log.AV_LOG_ERROR, "Not enough data available for image\n");
                        return(error.AVERROR_INVALIDDATA);
                    }
                    for (y = 0; y < h; y++)
                    {
                        bytestream2.get_bufferu(ref s.gb, dst, img_size);
                        dst += stride;
                    }
                }
            }


            if ((flags & 0x10) != 0)             // right-to-left, needs horizontal flip
            {
                int x;
                for (y = 0; y < h; y++)
                {
                    var line = p.data[0].GetOffsetPointer(y * p.linesize[0]);
                    for (x = 0; x < w >> 1; x++)
                    {
                        switch (bpp)
                        {
                        case 32:
                            line.CastPointer <uint>().SwapValuesAtOffsets((x), (w - x - 1));
                            break;

                        case 24:
                            line.CastPointer <byte>().SwapValuesAtOffsets((3 * x + 0), (3 * w - 3 * x - 3));
                            line.CastPointer <byte>().SwapValuesAtOffsets((3 * x + 1), (3 * w - 3 * x - 2));
                            line.CastPointer <byte>().SwapValuesAtOffsets((3 * x + 2), (3 * w - 3 * x - 1));
                            break;

                        case 16:
                            line.CastPointer <ushort>().SwapValuesAtOffsets((x), (w - x - 1));
                            break;

                        case 8:
                            line.CastPointer <byte>().SwapValuesAtOffsets((x), (w - x - 1));
                            break;
                        }
                    }
                }
            }

            outdata = s.picture;

            return(avpkt.size);
        }
Example #18
 /**
  * Flush buffers.
  * Will be called when seeking
  */
 //void (*flush)(AVCodecContext *);
 virtual public void flush(AVCodecContext avctx)
 {
     throw(new NotImplementedException());
 }
Example #19
 /// <summary>
 /// int (*close)(AVCodecContext *);
 /// </summary>
 /// <param name="avctx"></param>
 /// <returns></returns>
 virtual public int close(AVCodecContext avctx)
 {
     throw(new NotImplementedException());
 }
Example #20
 //int (*decode)(AVCodecContext *, void *outdata, int *outdata_size, AVPacket *avpkt);
 //public Unimplemented decode;
 //virtual public int decode(AVCodecContext avctx, Pointer<byte> outdata, ref int outdata_size, AVPacket avpkt)
 virtual public int decode(AVCodecContext avctx, ref object outdata, AVPacket avpkt)
 {
     throw (new NotImplementedException());
 }
Example #21
        private static unsafe void Main(string[] args)
        {
            // register LD_LIBRARY_PATH on Windows
            InteropHelper.RegisterLibrariesSearchPath(Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH));

            string url = @"http://www.fileformat.info/format/mpeg/sample/031699cb978244b8a3adf1e81cb2ac7c/download";

            FFmpegInvoke.av_register_all();
            FFmpegInvoke.avcodec_register_all();
            FFmpegInvoke.avformat_network_init();


            AVFormatContext *pFormatContext = FFmpegInvoke.avformat_alloc_context();

            if (FFmpegInvoke.avformat_open_input(&pFormatContext, url, null, null) != 0)
            {
                throw new Exception("Could not open file");
            }

            if (FFmpegInvoke.av_find_stream_info(pFormatContext) != 0)
            {
                throw new Exception("Could not find stream info");
            }

            AVStream *pStream = null;

            for (int i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new Exception("Could not found video stream");
            }

            AVCodecContext codecContext    = *(pStream->codec);
            int            width           = codecContext.width;
            int            height          = codecContext.height;
            AVPixelFormat  sourcePixFmt    = codecContext.pix_fmt;
            AVCodecID      codecId         = codecContext.codec_id;
            var            convertToPixFmt = AVPixelFormat.PIX_FMT_BGR24;
            SwsContext *   pConvertContext = FFmpegInvoke.sws_getContext(width, height, sourcePixFmt,
                                                                         width, height, convertToPixFmt,
                                                                         FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);

            if (pConvertContext == null)
            {
                throw new Exception("Could not initialize the conversion context");
            }

            var pConvertedFrame          = (AVPicture *)FFmpegInvoke.avcodec_alloc_frame();
            int convertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(convertToPixFmt, width, height);
            var pConvertedFrameBuffer    = (byte *)FFmpegInvoke.av_malloc((uint)convertedFrameBufferSize);

            FFmpegInvoke.avpicture_fill(pConvertedFrame, pConvertedFrameBuffer, convertToPixFmt, width, height);

            AVCodec *pCodec = FFmpegInvoke.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new Exception("Unsupported codec");
            }

            AVCodecContext *pCodecContext = FFmpegInvoke.avcodec_alloc_context3(pCodec);

            if ((pCodec->capabilities & FFmpegInvoke.CODEC_CAP_TRUNCATED) == FFmpegInvoke.CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= FFmpegInvoke.CODEC_FLAG_TRUNCATED;
            }

            if (FFmpegInvoke.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new Exception("Could not open codec");
            }

            AVFrame *pDecodedFrame = FFmpegInvoke.avcodec_alloc_frame();

            var       packet  = new AVPacket();
            AVPacket *pPacket = &packet;

            FFmpegInvoke.av_init_packet(pPacket);

            int frameNumber = 0;

            while (frameNumber < 100)
            {
                Console.WriteLine("frame: {0}", frameNumber);

                if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
                {
                    throw new Exception("Could not read frame");
                }

                if (pPacket->stream_index != pStream->index)
                {
                    continue;
                }

                int gotPicture = 0;
                int size       = FFmpegInvoke.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket);
                if (size < 0)
                {
                    throw new Exception(string.Format("Error while decoding frame {0}", frameNumber));
                }

                if (gotPicture == 1)
                {
                    byte **src = &pDecodedFrame->data_0;
                    byte **dst = &pConvertedFrame->data_0;
                    FFmpegInvoke.sws_scale(pConvertContext, src, pDecodedFrame->linesize, 0,
                                           height, dst, pConvertedFrame->linesize);

                    byte *convertedFrameAddress = pConvertedFrame->data_0;

                    var imageBufferPtr = new IntPtr(convertedFrameAddress);

                    using (var bitmap = new Bitmap(width, height, pConvertedFrame->linesize[0], PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        bitmap.Save(@"frame.buffer.jpg", ImageFormat.Jpeg);
                    }
                }
                frameNumber++;
            }

            FFmpegInvoke.av_free(pConvertedFrame);
            FFmpegInvoke.av_free(pConvertedFrameBuffer);
            FFmpegInvoke.sws_freeContext(pConvertContext);

            FFmpegInvoke.av_free(pDecodedFrame);
            FFmpegInvoke.avcodec_close(pCodecContext);
            FFmpegInvoke.avformat_close_input(&pFormatContext);
        }
Example #22
 public static bool CopyContext(AVCodecContext dest, AVCodecContext src)
 {
     return FFmpegInvoke.avcodec_copy_context(dest.NativeObj, src.NativeObj) == 0;
 }
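
avcodec_copy_context returns 0 on success, hence the == 0 test. A sketch of the classic use, duplicating a stream's codec context before opening it so the demuxer's own context stays untouched; only CopyContext comes from this example, the other calls reuse wrappers shown in examples #2 and #15:

 // Hypothetical use of CopyContext.
 var streamContext = _formatContext.GetStream(0).CodecContext;
 var decodeContext = new AVCodecContext(AVCodec.FindDecoder(streamContext.CodecId));
 if (!CopyContext(decodeContext, streamContext))
 {
     throw new Exception("Could not copy codec context");
 }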
Example #23
File: Program.cs Project: TomoNag/mpeg
        private static unsafe void Main(string[] args)
        {
            Console.WriteLine("Runnung in {0}-bit mode.", Environment.Is64BitProcess ? "64" : "32");

            // register path to ffmpeg
            switch (Environment.OSVersion.Platform)
            {
            case PlatformID.Win32NT:
            case PlatformID.Win32S:
            case PlatformID.Win32Windows:
                string ffmpegPath = string.Format(@"../../../FFmpeg/bin/windows/{0}", Environment.Is64BitProcess ? "x64" : "x86");
                InteropHelper.RegisterLibrariesSearchPath(ffmpegPath);
                break;

            case PlatformID.Unix:
            case PlatformID.MacOSX:
                string libraryPath = Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH);
                InteropHelper.RegisterLibrariesSearchPath(libraryPath);
                break;
            }

            // decode 100 frame from url or path

            //string url = @"../../sample_mpeg4.mp4";
            string url = @"http://hubblesource.stsci.edu/sources/video/clips/details/images/centaur_1.mpg";

            FFmpegInvoke.av_register_all();
            FFmpegInvoke.avcodec_register_all();
            FFmpegInvoke.avformat_network_init();


            AVFormatContext *pFormatContext = FFmpegInvoke.avformat_alloc_context();

            if (FFmpegInvoke.avformat_open_input(&pFormatContext, url, null, null) != 0)
            {
                throw new Exception("Could not open file");
            }

            if (FFmpegInvoke.avformat_find_stream_info(pFormatContext, null) != 0)
            {
                throw new Exception("Could not find stream info");
            }

            AVStream *pStream = null;

            for (int i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new Exception("Could not found video stream");
            }

            AVCodecContext codecContext    = *(pStream->codec);
            int            width           = codecContext.width;
            int            height          = codecContext.height;
            AVPixelFormat  sourcePixFmt    = codecContext.pix_fmt;
            AVCodecID      codecId         = codecContext.codec_id;
            var            convertToPixFmt = AVPixelFormat.PIX_FMT_BGR24;
            SwsContext *   pConvertContext = FFmpegInvoke.sws_getContext(width, height, sourcePixFmt,
                                                                         width, height, convertToPixFmt,
                                                                         FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);

            if (pConvertContext == null)
            {
                throw new Exception("Could not initialize the conversion context");
            }

            var pConvertedFrame          = (AVPicture *)FFmpegInvoke.avcodec_alloc_frame();
            int convertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(convertToPixFmt, width, height);
            var pConvertedFrameBuffer    = (byte *)FFmpegInvoke.av_malloc((uint)convertedFrameBufferSize);

            FFmpegInvoke.avpicture_fill(pConvertedFrame, pConvertedFrameBuffer, convertToPixFmt, width, height);

            AVCodec *pCodec = FFmpegInvoke.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new Exception("Unsupported codec");
            }

            // Reuse the codec context from the stream info; alternatively it could be
            // allocated like this (but that does not work for all kinds of codecs):
            // AVCodecContext* pCodecContext = FFmpegInvoke.avcodec_alloc_context3(pCodec);
            AVCodecContext *pCodecContext = &codecContext;

            if ((pCodec->capabilities & FFmpegInvoke.CODEC_CAP_TRUNCATED) == FFmpegInvoke.CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= FFmpegInvoke.CODEC_FLAG_TRUNCATED;
            }

            if (FFmpegInvoke.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new Exception("Could not open codec");
            }

            AVFrame *pDecodedFrame = FFmpegInvoke.avcodec_alloc_frame();

            var       packet  = new AVPacket();
            AVPacket *pPacket = &packet;

            FFmpegInvoke.av_init_packet(pPacket);

            int frameNumber = 0;

            while (frameNumber < 100)
            {
                if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
                {
                    throw new Exception("Could not read frame");
                }

                if (pPacket->stream_index != pStream->index)
                {
                    continue;
                }

                int gotPicture = 0;
                int size       = FFmpegInvoke.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket);
                if (size < 0)
                {
                    throw new Exception(string.Format("Error while decoding frame {0}", frameNumber));
                }

                if (gotPicture == 1)
                {
                    Console.WriteLine("frame: {0}", frameNumber);

                    byte **src = &pDecodedFrame->data_0;
                    byte **dst = &pConvertedFrame->data_0;
                    FFmpegInvoke.sws_scale(pConvertContext, src, pDecodedFrame->linesize, 0,
                                           height, dst, pConvertedFrame->linesize);

                    byte *convertedFrameAddress = pConvertedFrame->data_0;

                    var imageBufferPtr = new IntPtr(convertedFrameAddress);

                    int linesize = pConvertedFrame->linesize[0];
                    using (var bitmap = new Bitmap(width, height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        bitmap.Save(@"frame.buffer.jpg", ImageFormat.Jpeg);
                    }

                    frameNumber++;

                    System.Threading.Thread.Sleep(1000);
                }
            }

            FFmpegInvoke.av_free(pConvertedFrame);
            FFmpegInvoke.av_free(pConvertedFrameBuffer);
            FFmpegInvoke.sws_freeContext(pConvertContext);

            FFmpegInvoke.av_free(pDecodedFrame);
            FFmpegInvoke.avcodec_close(pCodecContext);
            FFmpegInvoke.avformat_close_input(&pFormatContext);
        }
Example #24
        /// <summary>
        /// Is invoked when the application is started.
        /// </summary>
        /// <param name="args">The command line arguments that were passed to the application.</param>
        public static void Main(string[] args)
        {
            // Initializes the Codecs and formats
            LibAVFormat.av_register_all();

            // Asks the user for a file name to load
            Console.Write("File name: ");
            string fileName = Console.ReadLine();

            // Loads a video
            IntPtr formatContextPointer;

            if (LibAVFormat.avformat_open_input(out formatContextPointer, fileName, IntPtr.Zero, IntPtr.Zero) < 0)
            {
                Console.WriteLine($"An error occurred while opening the video: {fileName}.");
                return;
            }
            AVFormatContext formatContext = Marshal.PtrToStructure <AVFormatContext>(formatContextPointer);

            Console.WriteLine($"Opened video file {formatContext.filename}.");

            // Retrieve stream information of the video
            if (LibAVFormat.avformat_find_stream_info(formatContextPointer, IntPtr.Zero) < 0)
            {
                Console.WriteLine("An error occurred while retrieving the stream information of the video.");
                return;
            }

            // Finds the first video stream in the video
            Console.WriteLine($"Found {formatContext.nb_streams} stream(s) in the video file.");
            int videoStreamId = -1;

            for (int i = 0; i < formatContext.nb_streams; i++)
            {
                AVStream       stream       = Marshal.PtrToStructure <AVStream>(Marshal.PtrToStructure <IntPtr>(IntPtr.Add(formatContext.streams, i * IntPtr.Size)));
                AVCodecContext codecContext = Marshal.PtrToStructure <AVCodecContext>(stream.codec);
                if (codecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    videoStreamId = i;
                    break;
                }
            }
            if (videoStreamId == -1)
            {
                Console.WriteLine("No video stream found.");
                return;
            }
            AVStream       videoStream       = Marshal.PtrToStructure <AVStream>(Marshal.PtrToStructure <IntPtr>(IntPtr.Add(formatContext.streams, videoStreamId * IntPtr.Size)));
            AVCodecContext videoCodecContext = Marshal.PtrToStructure <AVCodecContext>(videoStream.codec);

            // Finds the decoder for the video stream
            IntPtr codecPointer = LibAVCodec.avcodec_find_decoder(videoCodecContext.codec_id);

            if (codecPointer == IntPtr.Zero)
            {
                Console.WriteLine("The video codec is not supported.");
                return;
            }
            AVCodec videoCodec = Marshal.PtrToStructure <AVCodec>(codecPointer);

            Console.WriteLine($"Using the {videoCodec.long_name} codec to decode the video stream.");

            // Opens the codec for the video stream
            if (LibAVCodec.avcodec_open2(videoStream.codec, codecPointer, IntPtr.Zero) < 0)
            {
                Console.WriteLine("The codec {videoCodec.long_name} could not be opened.");
                return;
            }
            Console.WriteLine("Successfully loaded codec.");

            // Allocates video frames for the original decoded frame and the frame in RGB (which is then later stored in a file)
            IntPtr framePointer    = LibAVUtil.av_frame_alloc();
            IntPtr frameRgbPointer = LibAVUtil.av_frame_alloc();

            // Determines the required buffer size and allocates the buffer for the RGB frame
            int    numBytes = LibAVCodec.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_RGB24, videoCodecContext.width, videoCodecContext.height);
            IntPtr buffer   = LibAVUtil.av_malloc(new UIntPtr((uint)(numBytes * sizeof(byte))));

            // Assigns appropriate parts of buffer to image planes in frameRgb, note that frameRgb is an AVFrame, but AVFrame is a superset of AVPicture
            LibAVCodec.avpicture_fill(frameRgbPointer, buffer, AVPixelFormat.AV_PIX_FMT_RGB24, videoCodecContext.width, videoCodecContext.height);
            AVFrame frameRgb = Marshal.PtrToStructure <AVFrame>(frameRgbPointer);

            // Cycles over all frames of the video and dumps the frames to file
            Console.WriteLine("Decoding vidoe frames...");
            int    frameIndex    = 0;
            IntPtr packetPointer = Marshal.AllocHGlobal(Marshal.SizeOf <AVPacket>());

            while (LibAVFormat.av_read_frame(formatContextPointer, packetPointer) >= 0)
            {
                AVPacket packet = Marshal.PtrToStructure <AVPacket>(packetPointer);
                if (packet.stream_index == videoStreamId)
                {
                    // Decodes video frame
                    int frameFinished = 0;
                    LibAVCodec.avcodec_decode_video2(videoStream.codec, framePointer, ref frameFinished, packetPointer);
                    AVFrame frame = Marshal.PtrToStructure <AVFrame>(framePointer);

                    // Checks if the video frame was properly decoded
                    if (frameFinished != 0)
                    {
                        // Converts the image from its native format to RGB
                        IntPtr scaleContextPointer = LibSwScale.sws_getContext(videoCodecContext.width, videoCodecContext.height, videoCodecContext.pix_fmt,
                                                                               videoCodecContext.width, videoCodecContext.height, AVPixelFormat.AV_PIX_FMT_RGB24, ScalingFlags.SWS_BILINEAR, IntPtr.Zero,
                                                                               IntPtr.Zero, IntPtr.Zero);
                        LibSwScale.sws_scale(scaleContextPointer, frame.data, frame.linesize, 0, videoCodecContext.height, frameRgb.data, frameRgb.linesize);
                        frameRgb = Marshal.PtrToStructure <AVFrame>(frameRgbPointer);

                        // Checks if this is one of frames 25 through 30, if so then it is stored to disk
                        frameIndex++;
                        if (frameIndex > 24 && frameIndex <= 30)
                        {
                            Console.WriteLine($"Writing frame {frameIndex} to file...");
                            string frameFileName = Path.Combine(Path.GetDirectoryName(fileName), $"frame-{frameIndex}.ppm");
                            Program.SaveFrame(frameRgb, videoCodecContext.width, videoCodecContext.height, frameFileName);
                        }
                    }
                }

                // Frees the packet that was allocated by av_read_frame
                LibAVCodec.av_free_packet(packetPointer);
            }
            Console.WriteLine("Finished decoding of the video.");

            // Frees and closes all acquired resources
            LibAVUtil.av_free(buffer);
            LibAVUtil.av_free(frameRgbPointer);
            LibAVUtil.av_free(framePointer);
            LibAVCodec.avcodec_close(videoStream.codec);
            IntPtr formatContextPointerPointer = Marshal.AllocHGlobal(Marshal.SizeOf <IntPtr>());

            Marshal.StructureToPtr(formatContextPointer, formatContextPointerPointer, false);
            LibAVFormat.avformat_close_input(formatContextPointerPointer);
            Marshal.FreeHGlobal(formatContextPointerPointer);
            Console.WriteLine("Freed all acquired resources.");
        }