/// <summary>
/// Converts a raw source image buffer (laid out as _srcPixelFormat at
/// _srcWidth x _srcHeight) into the converter's destination pixel format,
/// returning an AVFrame that wraps the converter-owned destination buffers.
/// </summary>
/// <param name="pSrcData">Pointer to the source image bytes; must remain valid (pinned) for the duration of the call.</param>
/// <returns>
/// A frame whose data/linesize reference _dstData/_dstLinesize.
/// NOTE(review): the returned frame aliases converter-owned buffers, so it is
/// presumably only valid until the next Convert call — confirm with callers.
/// </returns>
public AVFrame Convert(byte *pSrcData)
        {
            byte_ptrArray4 src       = new byte_ptrArray4();
            int_array4     srcStride = new int_array4();

            // Derive per-plane pointers and strides into pSrcData for the source format.
            ffmpeg.av_image_fill_arrays(ref src, ref srcStride, pSrcData, _srcPixelFormat, _srcWidth, _srcHeight, 1).ThrowExceptionIfError();

            // Scale/convert the source planes into the pre-allocated destination buffers.
            ffmpeg.sws_scale(_pConvertContext, src, srcStride, 0, _srcHeight, _dstData, _dstLinesize).ThrowExceptionIfError();

            // Copy the destination pointer/stride arrays into the 8-element AVFrame fields.
            var data = new byte_ptrArray8();

            data.UpdateFrom(_dstData);
            var linesize = new int_array8();

            linesize.UpdateFrom(_dstLinesize);

            return(new AVFrame
            {
                data = data,
                linesize = linesize,
                width = _dstWidth,
                height = _dstHeight,
                format = (int)_dstPixelFormat
            });
        }
Example #2
0
        /// <summary>
        /// Converts sourceFrame into the destination pixel format/size, returning an
        /// AVFrame that wraps the converter-owned destination buffers
        /// (_dstData/_dstLinesize), sized to _destinationSize.
        /// </summary>
        /// <param name="sourceFrame">Decoded source frame; its data/linesize/height feed sws_scale.</param>
        /// <returns>A frame aliasing the converter's destination buffers.</returns>
        public AVFrame Convert(AVFrame sourceFrame)
        {
            try
            {
                ffmpeg.sws_scale(_pConvertContext,
                                 sourceFrame.data, sourceFrame.linesize,
                                 0, sourceFrame.height,
                                 _dstData, _dstLinesize);
            }
            catch (AccessViolationException)
            {
                // BUGFIX: the original re-threw `new AccessViolationException(ex.ToString())`,
                // discarding the original stack trace. `throw;` preserves it. (Since .NET 4
                // an AV from native code is a corrupted-state exception and normally cannot
                // be caught here at all.)
                throw;
            }

            var data = new byte_ptrArray8();

            data.UpdateFrom(_dstData);

            var linesize = new int_array8();

            linesize.UpdateFrom(_dstLinesize);

            return(new AVFrame
            {
                data = data,
                linesize = linesize,
                width = _destinationSize.Width,
                height = _destinationSize.Height
            });
        }
Example #3
0
        /// <summary>
        /// Converts sourceFrame into the destination format/size via sws_scale and
        /// returns a frame wrapping the converter-owned destination buffers, carrying
        /// over the source frame's pkt_dts, pts and sample_rate.
        /// </summary>
        /// <param name="sourceFrame">Decoded source frame to convert.</param>
        /// <returns>A frame aliasing _dstData/_dstLinesize, sized to _destinationSize.</returns>
        public AVFrame Convert(AVFrame sourceFrame)
        {
            // Scale/convert the source planes into the pre-allocated destination buffers.
            ffmpeg.sws_scale(_pConvertContext,
                             sourceFrame.data, sourceFrame.linesize,
                             0, sourceFrame.height,
                             _dstData, _dstLinesize);

            var convertedData = new byte_ptrArray8();
            convertedData.UpdateFrom(_dstData);

            var convertedLinesize = new int_array8();
            convertedLinesize.UpdateFrom(_dstLinesize);

            // Assemble the result, preserving the source timing metadata.
            var result = new AVFrame();
            result.data        = convertedData;
            result.linesize    = convertedLinesize;
            result.width       = _destinationSize.Width;
            result.height      = _destinationSize.Height;
            result.pkt_dts     = sourceFrame.pkt_dts;
            result.pts         = sourceFrame.pts;
            result.sample_rate = sourceFrame.sample_rate;
            return result;
        }
Example #4
0
 /// <summary>Copies all 8 elements of the native fixed-size array into the destination span.</summary>
 /// <param name="src">Source native array (8 ints).</param>
 /// <param name="dst">Destination span; must hold at least 8 elements.</param>
 public static void CopyTo(this int_array8 src, Span <int> dst)
 {
     for (int i = 0; i < 8; i++)
     {
         dst[i] = src[(uint)i];
     }
 }
    /// <summary>
    /// Receives a frame from the RTP stream, converts its pixel format
    /// (YUV420P -> RGB24) and returns it.
    ///
    /// Blocking call.
    /// </summary>
    /// <returns>The received, format-converted frame (wraps the converter-owned destination buffers).</returns>
    public AVFrame ReceiveFrame()
    {
        // Release references held from the previous iteration.
        ffmpeg.av_frame_unref(this._frame);
        ffmpeg.av_packet_unref(this._packet);

        // Read the next packet from the stream.
        // NOTE(review): return codes of av_read_frame / avcodec_send_packet /
        // avcodec_receive_frame are not checked — on EOF, EAGAIN, or error this
        // falls through and scales stale frame data. Confirm callers tolerate this.
        ffmpeg.av_read_frame(this._formatContext, this._packet);

        // Decode the packet into this._frame.
        ffmpeg.avcodec_send_packet(this._codecContext, this._packet);
        ffmpeg.avcodec_receive_frame(this._codecContext, this._frame);

        // Convert the frame's pixel format (YUV420P -> RGB24) into the
        // pre-allocated destination buffers.
        ffmpeg.sws_scale(this._convertContext,
                         this._frame->data, this._frame->linesize, 0, this._frame->height,
                         this._convertDstData, this._convertDstLinesize);

        // Wrap the converter-owned destination buffers in a new AVFrame.
        var data = new byte_ptrArray8();

        data.UpdateFrom(this._convertDstData);
        var linesize = new int_array8();

        linesize.UpdateFrom(this._convertDstLinesize);

        return(new AVFrame
        {
            data = data,
            linesize = linesize,
            width = (int)this._frame->width,
            height = (int)this._frame->height
        });
    }
Example #6
0
        /// <summary>
        /// Encodes every "frame.*.jpg" in the working directory into a raw H.264 stream
        /// ("out.h264") at 25 fps, converting each BGR24 bitmap to YUV420P first.
        /// Only ffmpeg-based players (ffplay, vlc) can play the raw stream; mux it for
        /// other players.
        /// </summary>
        private static unsafe void EncodeImagesToH264()
        {
            var frameFiles = Directory.GetFiles(".", "frame.*.jpg").OrderBy(x => x).ToArray();

            var outputFileName         = "out.h264";
            var fps                    = 25;
            var sourcePixelFormat      = AVPixelFormat.AV_PIX_FMT_BGR24;
            var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;

            // BUGFIX: the first-frame Image was never disposed in the original; only
            // its Size is needed, so read it inside a using block.
            Size sourceSize;
            using (var firstFrameImage = Image.FromFile(frameFiles.First()))
            {
                sourceSize = firstFrameImage.Size;
            }
            var destinationSize = sourceSize;

            using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
            {
                using (var fs = File.Open(outputFileName, FileMode.Create)) // be advise only ffmpeg based player (like ffplay or vlc) can play this file, for the others you need to go through muxing
                {
                    using (var vse = new H264VideoStreamEncoder(fs, fps, destinationSize))
                    {
                        var frameNumber = 0;
                        foreach (var frameFile in frameFiles)
                        {
                            byte[] bitmapData;

                            // Load the frame and extract its raw BGR24 bytes, disposing eagerly.
                            using (var frameImage = Image.FromFile(frameFile))
                                using (var frameBitmap = frameImage is Bitmap bitmap ? bitmap : new Bitmap(frameImage))
                                {
                                    bitmapData = GetBitmapData(frameBitmap);
                                }

                            fixed(byte *pBitmapData = bitmapData)
                            {
                                // Single packed plane: stride = bytes per row.
                                var data = new byte_ptrArray8 {
                                    [0] = pBitmapData
                                };
                                var linesize = new int_array8 {
                                    [0] = bitmapData.Length / sourceSize.Height
                                };
                                var frame = new AVFrame
                                {
                                    data     = data,
                                    linesize = linesize,
                                    height   = sourceSize.Height
                                };
                                var convertedFrame = vfc.Convert(frame);

                                convertedFrame.pts = frameNumber * fps;
                                vse.Encode(convertedFrame);
                            }

                            Console.WriteLine($"frame: {frameNumber}");
                            frameNumber++;
                        }
                    }
                }
            }
        }
Example #7
0
        /// <summary>
        /// Converts a raw source image buffer into the destination pixel format,
        /// returning an AVFrame that wraps the converter-owned destination buffers.
        /// </summary>
        /// <param name="srcData">Raw source image bytes (_srcPixelFormat at _srcWidth x _srcHeight).</param>
        /// <returns>A frame aliasing _dstData/_dstLinesize, sized to _dstSize.</returns>
        public AVFrame Convert(byte[] srcData)
        {
            byte_ptrArray4 src       = new byte_ptrArray4();
            int_array4     srcStride = new int_array4();

            fixed(byte *pSrcData = srcData)
            {
                // Derive per-plane pointers and strides into the pinned source buffer.
                ffmpeg.av_image_fill_arrays(ref src, ref srcStride, pSrcData, _srcPixelFormat, _srcWidth, _srcHeight, 1).ThrowExceptionIfError();

                // BUGFIX: sws_scale must run while srcData is still pinned — `src`
                // holds raw pointers into the managed array, which the GC is free to
                // move once the fixed block ends. The original scaled after unpinning.
                ffmpeg.sws_scale(_pConvertContext, src, srcStride, 0, _srcHeight, _dstData, _dstLinesize).ThrowExceptionIfError();
            }

            // Wrap the converter-owned destination buffers in a new AVFrame.
            var data = new byte_ptrArray8();

            data.UpdateFrom(_dstData);
            var linesize = new int_array8();

            linesize.UpdateFrom(_dstLinesize);

            return(new AVFrame
            {
                data = data,
                linesize = linesize,
                width = _dstWidth,
                height = _dstHeight
            });
        }
        /// <summary>
        /// Converts frame into the destination format/size, returning a frame that
        /// wraps the converter-owned destination buffers, or a zero-sized frame on
        /// failure (callers detect failure via width == 0 / height == 0).
        /// </summary>
        /// <param name="frame">Decoded source frame (passed by value; only the local copy is touched).</param>
        public AVFrame Convert(AVFrame frame)
        {
            try
            {
                // NOTE(review): av_frame_copy_props(dst, src) copies metadata FROM
                // _dstFrame INTO the local copy `frame`, which is discarded when this
                // method returns — the arguments look swapped. TODO confirm intent.
                int result = ffmpeg.av_frame_copy_props(&frame, _dstFrame);


                if (result >= 0)
                {
                    result = ffmpeg.sws_scale(_pConvertContext,
                                              frame.data, frame.linesize, 0, frame.height,
                                              _dstData, _dstLinesize);
                }



                if (result < 0)
                {
                    // Failure sentinel: zero-sized frame.
                    return(new AVFrame
                    {
                        width = 0,
                        height = 0
                    });
                }

                var data = new byte_ptrArray8();
                data.UpdateFrom(_dstData);
                var linesize = new int_array8();
                linesize.UpdateFrom(_dstLinesize);

                return(new AVFrame
                {
                    data = data,
                    linesize = linesize,
                    width = _dstWidth,
                    height = _dstHeight,
                    format = (int)_dstPixelFormat
                });
            }
            catch
            {
                // NOTE(review): catch-all swallows the exception and returns the same
                // zero-sized sentinel — consider logging before discarding.
                return(new AVFrame
                {
                    width = 0,
                    height = 0
                });
            }

            //ffmpeg.sws_scale(_pConvertContext,
            //    frame.data, frame.linesize, 0, frame.height,
            //    _dstFrame->data, _dstFrame->linesize);
            //return _dstFrame;
        }
        /// <summary>
        /// Converts a raw (planar) source buffer into the destination format, returning
        /// a frame that wraps the converter-owned destination buffers.
        /// </summary>
        /// <param name="srcData">Raw source image bytes; pinned for the whole conversion.</param>
        public AVFrame Convert(byte[] srcData)
        {
            // NOTE(review): line sizes combine _srcPixelFormat with _dstSize.Width —
            // mixing the source format with the destination width looks wrong unless
            // the two sizes are always equal. Confirm against the constructor.
            int linesz0 = ffmpeg.av_image_get_linesize(_srcPixelFormat, _dstSize.Width, 0);
            int linesz1 = ffmpeg.av_image_get_linesize(_srcPixelFormat, _dstSize.Width, 1);
            int linesz2 = ffmpeg.av_image_get_linesize(_srcPixelFormat, _dstSize.Width, 2);

            fixed(byte *pSrcData = srcData)
            {
                // NOTE(review): plane 1/2 offsets are a single row (linesz0, then
                // linesz0 + linesz1) rather than full plane sizes (linesize * plane
                // height) — for planar formats this would point U/V at the wrong
                // place. TODO confirm; av_image_fill_arrays computes correct offsets.
                var srcFrameData = new byte_ptrArray8 {
                    [0] = pSrcData,
                    [1] = (linesz1 > 0) ? pSrcData + linesz0 : null,
                    [2] = (linesz2 > 0) ? pSrcData + linesz0 + linesz1: null,
                };
                var srcLinesize = new int_array8 {
                    [0] = linesz0,
                    [1] = linesz1,
                    [2] = linesz2
                };

                AVFrame srcFrame = new AVFrame
                {
                    data     = srcFrameData,
                    linesize = srcLinesize,
                    width    = _srcSize.Width,
                    height   = _srcSize.Height
                };

                // Scale/convert while the source array is still pinned.
                ffmpeg.sws_scale(_pConvertContext, srcFrame.data, srcFrame.linesize, 0, srcFrame.height, _dstData, _dstLinesize).ThrowExceptionIfError();

                var data = new byte_ptrArray8();

                data.UpdateFrom(_dstData);
                var linesize = new int_array8();

                linesize.UpdateFrom(_dstLinesize);

                return(new AVFrame
                {
                    data = data,
                    linesize = linesize,
                    width = _dstSize.Width,
                    height = _dstSize.Height
                });
            }
        }
        /// <summary>
        /// Demo/stub encoder path: builds a single zero-filled 1920x1080 frame and
        /// encodes it. NOTE(review): despite the name, no images are read (the loading
        /// code is commented out) and only one frame is encoded — frameNumber stays 0,
        /// so pts is always 0. Confirm whether this is intentional scaffolding.
        /// </summary>
        private unsafe void EncodeImagesToH264()
        {
            var outputFileName    = "out.h264";
            var fps               = 25;
            var sourceSize        = new Size(1920, 1080);
            var sourcePixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;
            var destinationSize   = sourceSize;
            //var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;
            var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUVJ420P;

            // 6220800 = 1920 * 1080 * 3 bytes (BGR24); left zero-filled (black frame).
            byte[] bitmapData = new byte[6220800];

            //using (var frameImage = Image.FromFile(frameFile))
            //using (var frameBitmap = frameImage is Bitmap bitmap ? bitmap : new Bitmap(frameImage))
            //{
            //    bitmapData = GetBitmapData(frameBitmap);
            //}

            int frameNumber = 0;

            fixed(byte *pBitmapData = bitmapData)
            {
                // NOTE(review): only data[0] is set while linesize[1]/[2] carry the
                // planar Y/U/V strides — a packed BGR24 buffer with planar strides is
                // inconsistent. Confirm what Encode expects.
                var data = new byte_ptrArray8 {
                    [0] = pBitmapData
                };
                var linesize = new int_array8 {
                    [0] = _linesizeY,
                    [1] = _linesizeU,
                    [2] = _linesizeV,
                };
                var frame = new AVFrame
                {
                    data     = data,
                    linesize = linesize,
                    height   = sourceSize.Height,
                    width    = sourceSize.Width
                };

                frame.pts = frameNumber * fps;
                Encode(frame);
            }
        }
        /// <summary>
        /// Converts sourceFrame into the destination format/size via sws_scale and
        /// returns a frame wrapping the converter-owned destination buffers, sized to
        /// _destinationSize.
        /// </summary>
        /// <param name="sourceFrame">Decoded source frame to convert.</param>
        public unsafe AVFrame Convert(AVFrame sourceFrame)
        {
            // Scale/convert the source planes into the pre-allocated destination buffers.
            ffmpeg.sws_scale(this._pConvertContext, sourceFrame.data, sourceFrame.linesize, 0, sourceFrame.height, this._dstData, this._dstLinesize);

            var data = new byte_ptrArray8();
            data.UpdateFrom(this._dstData);

            var linesize = new int_array8();
            linesize.UpdateFrom(this._dstLinesize);

            return new AVFrame
            {
                data     = data,
                linesize = linesize,
                width    = this._destinationSize.Width,
                height   = this._destinationSize.Height
            };
        }
Example #12
0
        /// <summary>
        /// Converts sourceFrame into the configured output resolution and returns a
        /// frame wrapping the converter-owned destination buffers.
        /// </summary>
        /// <param name="sourceFrame">Decoded source frame to convert.</param>
        public AVFrame Convert(AVFrame sourceFrame)
        {
            // Cleanup: the original captured the sws_scale result and a dereferenced
            // copy of the SwsContext into unused locals (debug leftovers); removed.
            ffmpeg.sws_scale(_pConvertContext, sourceFrame.data, sourceFrame.linesize, 0, sourceFrame.height, _dstData, _dstLinesize);

            var data = new byte_ptrArray8();

            data.UpdateFrom(_dstData);
            var linesize = new int_array8();

            linesize.UpdateFrom(_dstLinesize);

            return(new AVFrame
            {
                data = data,
                linesize = linesize,
                width = configuration.outputResolution.width,
                height = configuration.outputResolution.height
            });
        }
Example #13
0
        /// <summary>
        /// When IsConvert is set, converts framep into the configured FrameSize and
        /// returns a newly allocated frame wrapping the converter-owned destination
        /// buffers, unreferencing framep; otherwise returns framep unchanged.
        /// </summary>
        /// <param name="framep">Decoded source frame.</param>
        /// <returns>A newly allocated converted frame, or framep when no conversion is configured.</returns>
        public AVFrame *Convert(AVFrame *framep)
        {
            if (this.IsConvert)
            {
                ffmpeg.sws_scale(convert_context, framep->data, framep->linesize, 0, framep->height, _dstData, _dstLinesize);

                // BUGFIX: the original called av_frame_alloc twice in a row, leaking
                // the first allocation; allocate exactly once.
                AVFrame *tmp = ffmpeg.av_frame_alloc();
                tmp->best_effort_timestamp = framep->best_effort_timestamp;
                tmp->width  = FrameSize.Width;
                tmp->height = FrameSize.Height;
                tmp->data   = new byte_ptrArray8();
                tmp->data.UpdateFrom(_dstData);
                tmp->linesize = new int_array8();
                tmp->linesize.UpdateFrom(_dstLinesize);

                ffmpeg.av_frame_unref(framep);
                return(tmp);
            }
            else
            {
                return(framep);
            }
        }
Example #14
0
        /// <summary>
        /// Reads and decodes one video frame, optionally converts it with sws_scale,
        /// renders it to an in-memory BMP and enqueues it with its timestamp.
        /// Returns false on EOF, error, or cancellation.
        /// </summary>
        private bool EnqueueOneFrame()
        {
            try
            {
                int error;
                do
                {
                    try
                    {
                        // Pull packets until one belongs to the video stream.
                        do
                        {
                            error = ffmpeg.av_read_frame(format_context, packet);
                            if (error == ffmpeg.AVERROR_EOF)
                            {
                                DS = DecodingState.Stopped;
                                return(false);
                            }

                            if (error != 0)
                            {
                                Trace.TraceError("av_read_frame eof or error.\n");
                            }
                        } while (packet->stream_index != video_stream->index);

                        if (ffmpeg.avcodec_send_packet(codec_context, packet) < 0)
                        {
                            Trace.TraceError("avcodec_send_packet error\n");
                        }
                    }
                    finally
                    {
                        ffmpeg.av_packet_unref(packet);
                    }

                    error = ffmpeg.avcodec_receive_frame(codec_context, frame);
                } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
                if (error != 0)
                {
                    Trace.TraceError("error.\n");
                }

                AVFrame outframe;
                if (IsConvert)
                {
                    ffmpeg.sws_scale(convert_context, frame->data, frame->linesize, 0, frame->height, _dstData, _dstLinesize);

                    var data = new byte_ptrArray8();
                    data.UpdateFrom(_dstData);
                    var linesize = new int_array8();
                    linesize.UpdateFrom(_dstLinesize);

                    outframe = new AVFrame
                    {
                        data     = data,
                        linesize = linesize,
                        width    = FrameSize.Width,
                        height   = FrameSize.Height
                    };
                }
                else
                {
                    // Shallow struct copy: outframe.data still points at frame's buffers.
                    outframe = *frame;
                }

                // BUGFIX: the original finally block called av_frame_unref/av_free on the
                // ADDRESS of this stack-allocated struct, handing stack memory to the
                // native allocator (heap corruption) and risking a double-unref of the
                // decoder's buffers. The stack copy needs no native freeing; only the
                // managed disposables below must be released (they were leaked before).
                using (MemoryStream ms = new MemoryStream())
                using (Bitmap bitmaptmp = new Bitmap(outframe.width, outframe.height, outframe.linesize[0], PixelFormat.Format32bppArgb, (IntPtr)outframe.data[0]))
                {
                    bitmaptmp.Save(ms, ImageFormat.Bmp);
                    decodedframes.Enqueue(new CDecodedFrame()
                    {
                        Time = (frame->best_effort_timestamp - video_stream->start_time) * ((double)video_stream->time_base.num / (double)video_stream->time_base.den) * 1000, Bitmap = ms.GetBuffer()
                    });
                }

                cts.Token.ThrowIfCancellationRequested();
                return(true);
            }
            catch (Exception e)
            {
                Trace.TraceError(e.ToString());
                DS = DecodingState.Stopped;
                return(false);
            }
        }
Example #15
0
        /// <summary>
        /// Scaler worker-thread loop: claims decoded frames from the shared frame pool,
        /// converts them to BGR24 with this worker's SwsContext, and stores the
        /// converted frame back in the pool. Exits on Stop, or when the pool drains
        /// after EOF.
        /// </summary>
        private unsafe void VideoScaleTask()
        {
            AVFrame vFrame;
            int     scalerNum = scalerId;
            int     scalePos;

            // Claim a unique worker index.
            // NOTE(review): lock(this) is an anti-pattern — external code can lock the
            // same instance; a private readonly gate object would be safer.
            lock (this) { scalerId++; }

            while (true)
            {
                if (state == State.Stop)
                {
                    break;
                }

                // During a seek, clear this worker's flag and wait for the seek to finish.
                if (state == State.Seek)
                {
                    scaleSeek[scalerNum] = false;
                    while (state == State.Seek)
                    {
                        Thread.Sleep(100);
                    }
                }

                // Claim the next decoded (F_FRAME) slot, scanning circularly from fPut.
                lock (this)
                {
                    scalePos = -1;
                    for (int i = fPool.fPut, count = 0; count < fPool.fSize; i = (i + 1) % fPool.fSize, count++)
                    {
                        if (fPool.status[i] == FrameBuffer.eFrameStatus.F_FRAME)
                        {
                            fPool.status[i] = FrameBuffer.eFrameStatus.F_SCALING;
                            scalePos        = i;
                            break;
                        }
                    }
                }

                // No slot to scale: finish at EOF, otherwise poll again shortly.
                if (scalePos == -1)
                {
                    if (isEOF == true)
                    {
                        break;
                    }
                    Thread.Sleep(20);
                    continue;
                }

                // Allocate the destination BGR24 buffer for this slot.
                // NOTE(review): the freeing of this av_malloc'd buffer is not visible
                // here — presumably released by the consumer of RGBFrame; confirm.
                fPool._convertedFrameBufferPtr[scalePos] = (IntPtr)ffmpeg.av_malloc((ulong)fPool.convertedFrameBufferSize);

                ffmpeg.av_image_fill_arrays(
                    ref fPool._dstData[scalePos],
                    ref fPool._dstLinesize[scalePos],
                    (byte *)fPool._convertedFrameBufferPtr[scalePos],
                    AVPixelFormat.AV_PIX_FMT_BGR24,
                    (int)frameSize.Width,
                    (int)frameSize.Height, 1);

                // Convert the claimed frame into the freshly filled destination planes.
                vFrame = fPool.vFrame[scalePos];
                ffmpeg.sws_scale(swsCtxVideo[scalerNum],
                                 vFrame.data, vFrame.linesize, 0, vFrame.height, fPool._dstData[scalePos], fPool._dstLinesize[scalePos]);

                var data = new byte_ptrArray8();
                data.UpdateFrom(fPool._dstData[scalePos]);
                var linesize = new int_array8();
                linesize.UpdateFrom(fPool._dstLinesize[scalePos]);

                AVFrame frame_converted = new AVFrame
                {
                    data     = data,
                    linesize = linesize,
                    width    = (int)frameSize.Width,
                    height   = (int)frameSize.Height
                };

                fPool.RGBFrame[scalePos] = frame_converted;

                // Release the source frame's buffers and mark the slot as scaled.
                ffmpeg.av_frame_unref(&vFrame);
                fPool.status[scalePos] = FrameBuffer.eFrameStatus.F_SCALE;
            }
        }
Example #16
0
 /// <summary>Wraps the given native fixed-size int array in this managed holder.</summary>
 /// <param name="instance">Native 8-element int array to wrap.</param>
 public IntArray(int_array8 instance) => nativeObj = instance;
        /// <summary>
        /// Opens the video file named in args[0], decodes every video frame, converts
        /// each to BGR24 and saves it as frame.NNNNNNNN.jpg in the working directory.
        /// </summary>
        static unsafe void Main(string[] args)
        {
            #region Chapter 1

            // BUGFIX: the original tested args.Length < 0 (never true), so a missing
            // argument fell through and crashed on args[0] instead of printing usage.
            if (args.Length < 1)
            {
                Console.WriteLine($"Please enter a paramter for input file name!");
                Console.WriteLine($"ex ) {System.Reflection.Assembly.GetExecutingAssembly().GetName().Name} test.mp4");
                return;
            }

            if (!System.IO.File.Exists(args[0]))
            {
                Console.WriteLine($"The file, {args[0]} doesn't exist!");
                return;
            }

            FFmpegBinariesHelper.RegisterFFmpegBinaries();
            Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

            Console.WriteLine($"The file, {args[0]} will be used.");
            var filePath = args[0];

            AVFormatContext *pFormatCtx = ffmpeg.avformat_alloc_context();
            if (ffmpeg.avformat_open_input(&pFormatCtx, filePath, null, null) != 0)
            {
                Console.WriteLine($"The file, {filePath} can't be opened!");
                return;
            }

            if (ffmpeg.avformat_find_stream_info(pFormatCtx, null) < 0)
            {
                Console.WriteLine($"Can't find streams of {filePath}!");
                return;
            }

            for (int i = 0; i < pFormatCtx->nb_streams; i++)
            {
                ffmpeg.av_dump_format(pFormatCtx, i, filePath, 0);
            }

            AVCodec *pCodec          = null;
            int      videoCodecIndex = ffmpeg.av_find_best_stream(pFormatCtx, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &pCodec, 0);

            // BUGFIX: av_find_best_stream reports failure with negative AVERROR codes
            // (AVERROR_STREAM_NOT_FOUND, AVERROR_DECODER_NOT_FOUND), not just -1.
            if (videoCodecIndex < 0)
            {
                Console.WriteLine($"Can't find a video stream of {filePath}!");
                return;
            }

            AVCodecContext *pCodecCtx = ffmpeg.avcodec_alloc_context3(pCodec);

            if (pCodecCtx == null)
            {
                Console.WriteLine($"Can't find a video codec context of {filePath}!");
                return;
            }

            if (ffmpeg.avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoCodecIndex]->codecpar) < 0)
            {
                Console.WriteLine($"Can't set codec params to its context!");
                return;
            }

            if (ffmpeg.avcodec_open2(pCodecCtx, pCodec, null) < 0)
            {
                Console.WriteLine($"Can't open {filePath} with codec, {ffmpeg.avcodec_get_name(pCodec->id)}");
                return;
            }

            var codecName   = ffmpeg.avcodec_get_name(pCodec->id);
            var frameSize   = new Size(pCodecCtx->width, pCodecCtx->height);
            var pixelFormat = pCodecCtx->pix_fmt;

            Console.WriteLine($"Succeeded to open the file, {filePath}({codecName}, {frameSize.Width}X{frameSize.Height}, {pixelFormat})");

            #endregion

            var sourceSize        = frameSize;
            var sourcePixelForamt = pixelFormat;
            var destSize          = sourceSize;
            var destPixelFormat   = AVPixelFormat.AV_PIX_FMT_BGR24;

            var convertContext = ffmpeg.sws_getContext(sourceSize.Width, sourceSize.Height,
                                                       sourcePixelForamt,
                                                       destSize.Width, destSize.Height,
                                                       destPixelFormat,
                                                       ffmpeg.SWS_FAST_BILINEAR,
                                                       null,
                                                       null, null);

            // Robustness: sws_getContext returns null on failure.
            if (convertContext == null)
            {
                Console.WriteLine($"Can't create a conversion context!");
                return;
            }

            int numBytes = ffmpeg.av_image_get_buffer_size(destPixelFormat, destSize.Width, destSize.Height, 1);

            if (numBytes <= 0)
            {
                Console.WriteLine($"Can't get proper buffer size.");
                return;
            }


            var buffer       = Marshal.AllocHGlobal(numBytes);
            var destData     = new byte_ptrArray4();
            var destLinesize = new int_array4();

            // Point destData/destLinesize at the unmanaged BGR24 buffer.
            ffmpeg.av_image_fill_arrays(ref destData, ref destLinesize, (byte *)buffer, destPixelFormat, destSize.Width, destSize.Height, 1);

            AVFrame *pFrame = ffmpeg.av_frame_alloc();
            if (pFrame == null)
            {
                Console.WriteLine($"Failed to allocate frame");
                return;
            }



            AVFrame *pFrameRgb = ffmpeg.av_frame_alloc();
            if (pFrameRgb == null)
            {
                Console.WriteLine($"Failed to allocate RGB frame");
                return;
            }


            AVPacket *pPacket     = ffmpeg.av_packet_alloc();
            int       frameNumber = 0;
            int       error       = 0;

            while (true)
            {
                do
                {
                    try
                    {
                        // Pull packets until one belongs to the video stream.
                        do
                        {
                            error = ffmpeg.av_read_frame(pFormatCtx, pPacket);

                            if (error == ffmpeg.AVERROR_EOF)
                            {
                                Console.WriteLine($"The frame reached end at {frameNumber - 1}");
                                goto READ_END;
                            }

                            error.ThrowExceptionIfError();
                        } while (pPacket->stream_index != videoCodecIndex);

                        ffmpeg.avcodec_send_packet(pCodecCtx, pPacket);
                    }
                    finally
                    {
                        ffmpeg.av_packet_unref(pPacket);
                    }

                    error = ffmpeg.avcodec_receive_frame(pCodecCtx, pFrame);

                    // BUGFIX: on AVERROR(EAGAIN) the decoder has produced no frame yet;
                    // the original still ran sws_scale and saved a stale/garbage image.
                    if (error == ffmpeg.AVERROR(ffmpeg.EAGAIN))
                    {
                        continue;
                    }
                    error.ThrowExceptionIfError();

                    // Convert the decoded frame to BGR24 and wrap the destination buffer.
                    var result = ffmpeg.sws_scale(convertContext, pFrame->data, pFrame->linesize, 0, pCodecCtx->height, destData, destLinesize);
                    var data   = new byte_ptrArray8();
                    data.UpdateFrom(destData);
                    var lineSize = new int_array8();
                    lineSize.UpdateFrom(destLinesize);

                    var destFrame = new AVFrame()
                    {
                        data = data, linesize = lineSize, width = destSize.Width, height = destSize.Height
                    };

                    using (var bitmap = new Bitmap(destFrame.width, destFrame.height, destFrame.linesize[0], PixelFormat.Format24bppRgb, (IntPtr)destFrame.data[0]))
                        bitmap.Save($"frame.{frameNumber:D8}.jpg", ImageFormat.Jpeg);
                    Console.WriteLine($"frame: {frameNumber}");

                    frameNumber++;
                } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
            }

READ_END:
            // BUGFIX: the original called error.ThrowExceptionIfError() here, but this
            // label is only reached with error == AVERROR_EOF (a negative code), so
            // normal completion always threw and skipped the cleanup below.

            // BUGFIX: pair allocations with their dedicated free functions —
            // av_frame_free / av_packet_free also release the structs' internal
            // buffers, unlike av_frame_unref + av_free.
            ffmpeg.av_frame_free(&pFrame);
            ffmpeg.av_frame_free(&pFrameRgb);
            ffmpeg.av_packet_free(&pPacket);

            Marshal.FreeHGlobal(buffer);
            ffmpeg.avcodec_free_context(&pCodecCtx);
            ffmpeg.avformat_close_input(&pFormatCtx);
            ffmpeg.sws_freeContext(convertContext);


            Console.ReadKey();
        }