protected override void DisposeUnmanaged()
{
    fixed (AVPacket* p = &_pkt)
    {
        FFmpegInvoke.av_free_packet(p);
    }

    FFmpegInvoke.avcodec_close(_codec_context);
    FFmpegInvoke.av_free(_codec_context);

    FFmpegInvoke.av_freep(&_avFrameYUV->data_0);
    fixed (AVFrame** p = &_avFrameYUV)
    {
        FFmpegInvoke.avcodec_free_frame(p);
    }

    FFmpegInvoke.av_freep(&_avFrameBGR->data_0);
    fixed (AVFrame** p = &_avFrameBGR)
    {
        FFmpegInvoke.avcodec_free_frame(p);
    }
}
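// NOTE (added sketch, not part of the original sample): one way a DisposeUnmanaged
// override like the one above is usually driven is the standard dispose pattern.
// The UnmanagedResource base class below is hypothetical; only the abstract
// DisposeUnmanaged hook corresponds to the override shown.
public abstract class UnmanagedResource : IDisposable
{
    private bool _disposed;

    // Implemented by subclasses to release native FFmpeg allocations (see above).
    protected abstract void DisposeUnmanaged();

    public void Dispose()
    {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    protected virtual void Dispose(bool disposing)
    {
        if (_disposed)
        {
            return;
        }

        // Unmanaged resources are released whether we got here via Dispose() or the finalizer.
        DisposeUnmanaged();
        _disposed = true;
    }

    ~UnmanagedResource()
    {
        Dispose(false);
    }
}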
private static unsafe void Main(string[] args)
{
    Console.WriteLine("Running in {0}-bit mode.", Environment.Is64BitProcess ? "64" : "32");

    // register path to ffmpeg
    switch (Environment.OSVersion.Platform)
    {
        case PlatformID.Win32NT:
        case PlatformID.Win32S:
        case PlatformID.Win32Windows:
            string ffmpegPath = string.Format(@"../../../FFmpeg/bin/windows/{0}", Environment.Is64BitProcess ? "x64" : "x86");
            InteropHelper.RegisterLibrariesSearchPath(ffmpegPath);
            break;
        case PlatformID.Unix:
        case PlatformID.MacOSX:
            string libraryPath = Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH);
            InteropHelper.RegisterLibrariesSearchPath(libraryPath);
            break;
    }

    // decode 100 frames from a URL or path
    //string url = @"../../sample_mpeg4.mp4";
    string url = @"http://hubblesource.stsci.edu/sources/video/clips/details/images/centaur_1.mpg";

    FFmpegInvoke.av_register_all();
    FFmpegInvoke.avcodec_register_all();
    FFmpegInvoke.avformat_network_init();

    AVFormatContext* pFormatContext = FFmpegInvoke.avformat_alloc_context();
    if (FFmpegInvoke.avformat_open_input(&pFormatContext, url, null, null) != 0)
    {
        throw new Exception("Could not open file");
    }

    if (FFmpegInvoke.avformat_find_stream_info(pFormatContext, null) < 0)
    {
        throw new Exception("Could not find stream info");
    }

    // pick the first video stream
    AVStream* pStream = null;
    for (int i = 0; i < pFormatContext->nb_streams; i++)
    {
        if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
        {
            pStream = pFormatContext->streams[i];
            break;
        }
    }

    if (pStream == null)
    {
        throw new Exception("Could not find video stream");
    }

    AVCodecContext codecContext = *(pStream->codec);
    int width = codecContext.width;
    int height = codecContext.height;
    AVPixelFormat sourcePixFmt = codecContext.pix_fmt;
    AVCodecID codecId = codecContext.codec_id;

    var convertToPixFmt = AVPixelFormat.PIX_FMT_BGR24;
    SwsContext* pConvertContext = FFmpegInvoke.sws_getContext(width, height, sourcePixFmt,
                                                              width, height, convertToPixFmt,
                                                              FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);
    if (pConvertContext == null)
    {
        throw new Exception("Could not initialize the conversion context");
    }

    var pConvertedFrame = (AVPicture*)FFmpegInvoke.avcodec_alloc_frame();
    int convertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(convertToPixFmt, width, height);
    var pConvertedFrameBuffer = (byte*)FFmpegInvoke.av_malloc((uint)convertedFrameBufferSize);
    FFmpegInvoke.avpicture_fill(pConvertedFrame, pConvertedFrameBuffer, convertToPixFmt, width, height);

    AVCodec* pCodec = FFmpegInvoke.avcodec_find_decoder(codecId);
    if (pCodec == null)
    {
        throw new Exception("Unsupported codec");
    }

    // Reuse the codec context from the stream info.
    // Alternatively it could be allocated like this (but that does not work for every codec):
    // AVCodecContext* pCodecContext = FFmpegInvoke.avcodec_alloc_context3(pCodec);
    AVCodecContext* pCodecContext = &codecContext;

    if ((pCodec->capabilities & FFmpegInvoke.CODEC_CAP_TRUNCATED) == FFmpegInvoke.CODEC_CAP_TRUNCATED)
    {
        pCodecContext->flags |= FFmpegInvoke.CODEC_FLAG_TRUNCATED;
    }

    if (FFmpegInvoke.avcodec_open2(pCodecContext, pCodec, null) < 0)
    {
        throw new Exception("Could not open codec");
    }

    AVFrame* pDecodedFrame = FFmpegInvoke.avcodec_alloc_frame();

    var packet = new AVPacket();
    AVPacket* pPacket = &packet;
    FFmpegInvoke.av_init_packet(pPacket);

    int frameNumber = 0;
    while (frameNumber < 100)
    {
        if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
        {
            throw new Exception("Could not read frame");
        }

        if (pPacket->stream_index != pStream->index)
        {
            continue;
        }

        int gotPicture = 0;
        int size = FFmpegInvoke.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket);
        if (size < 0)
        {
            throw new Exception(string.Format("Error while decoding frame {0}", frameNumber));
        }

        if (gotPicture == 1)
        {
            Console.WriteLine("frame: {0}", frameNumber);

            // convert the decoded frame to BGR24 and save it as a JPEG
            byte** src = &pDecodedFrame->data_0;
            byte** dst = &pConvertedFrame->data_0;
            FFmpegInvoke.sws_scale(pConvertContext, src, pDecodedFrame->linesize, 0, height, dst, pConvertedFrame->linesize);

            byte* convertedFrameAddress = pConvertedFrame->data_0;
            var imageBufferPtr = new IntPtr(convertedFrameAddress);
            int linesize = pConvertedFrame->linesize[0];
            using (var bitmap = new Bitmap(width, height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr))
            {
                bitmap.Save(@"frame.buffer.jpg", ImageFormat.Jpeg);
            }

            frameNumber++;
            System.Threading.Thread.Sleep(1000);
        }
    }

    FFmpegInvoke.av_free(pConvertedFrame);
    FFmpegInvoke.av_free(pConvertedFrameBuffer);
    FFmpegInvoke.sws_freeContext(pConvertContext);
    FFmpegInvoke.av_free(pDecodedFrame);
    FFmpegInvoke.avcodec_close(pCodecContext);
    FFmpegInvoke.avformat_close_input(&pFormatContext);
}
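// NOTE (added sketch, not part of the original sample): the read loop above never
// releases the buffers that av_read_frame attaches to the packet, so a long run
// leaks memory. The helper below shows the same loop with per-iteration cleanup
// via av_free_packet (the call already used in DisposeUnmanaged above). The helper
// name and the maxFrames parameter are made up for illustration.
private static unsafe void DecodeWithPacketCleanup(AVFormatContext* pFormatContext,
                                                   AVCodecContext* pCodecContext,
                                                   AVStream* pStream,
                                                   AVFrame* pDecodedFrame,
                                                   int maxFrames)
{
    var packet = new AVPacket();
    AVPacket* pPacket = &packet;
    FFmpegInvoke.av_init_packet(pPacket);

    int frameNumber = 0;
    while (frameNumber < maxFrames)
    {
        if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
        {
            throw new Exception("Could not read frame");
        }

        try
        {
            if (pPacket->stream_index != pStream->index)
            {
                continue;
            }

            int gotPicture = 0;
            if (FFmpegInvoke.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket) < 0)
            {
                throw new Exception(string.Format("Error while decoding frame {0}", frameNumber));
            }

            if (gotPicture == 1)
            {
                frameNumber++;
            }
        }
        finally
        {
            // av_read_frame allocates the packet data; release it every iteration.
            FFmpegInvoke.av_free_packet(pPacket);
        }
    }
}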
public unsafe Mat GetFrame()
{
    if (ProcessedFrame)
    {
        return Frame;
    }

    AVFormatContext* pFormatContext = FFmpegInvoke.avformat_alloc_context();
    if (FFmpegInvoke.avformat_open_input(&pFormatContext, InputLocation, null, null) != 0)
    {
        throw new Exception("Failed to open " + InputLocation);
    }

    // Find the first video stream.
    AVStream* pStream = null;
    for (int iStream = 0; iStream < pFormatContext->nb_streams; iStream++)
    {
        if (pFormatContext->streams[iStream]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
        {
            pStream = pFormatContext->streams[iStream];
            break;
        }
    }

    if (pStream == null)
    {
        throw new Exception("No video stream found in " + InputLocation);
    }

    AVCodecContext* pCodecContext = pStream->codec;
    AVCodecID CodecID = pCodecContext->codec_id;
    AVCodec* pCodec = FFmpegInvoke.avcodec_find_decoder(CodecID);
    if (pCodec == null)
    {
        throw new Exception("Unsupported codec in " + InputLocation);
    }

    FFmpegInvoke.avcodec_open2(pCodecContext, pCodec, null);

    var packet = new AVPacket();
    AVPacket* pPacket = &packet;
    FFmpegInvoke.av_init_packet(pPacket);

    // Read into pPacket until the packet belongs to the video stream.
    do
    {
        if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
        {
            throw new Exception("Failed reading frame in " + InputLocation);
        }
    } while (pPacket->stream_index != pStream->index);

    // Do the video decode.
    AVFrame* pFrame = FFmpegInvoke.avcodec_alloc_frame();

    // Try to seek to first keyframe.
    //FFmpegInvoke.av_seek_frame(pFormatContext, pStream->index, pFormatContext->start_time, 0);

    int GotPicture = 0;
    int Attempts = 0;
    //bool GoodFrame = false;
    while (GotPicture != 1 && Attempts < 100)
    {
        if (Attempts > 0)
        {
            // The first video packet was already read above; fetch the next one on later attempts.
            do
            {
                if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
                {
                    throw new Exception(string.Format("Failed reading frame in {0}, frame:{1}", InputLocation, Attempts + 1));
                }
            } while (pPacket->stream_index != pStream->index);
        }

        // This should check for keyframes...
        if (pPacket->flags == 0)
        {
            Attempts++;
            continue;
        }

        int Status = FFmpegInvoke.avcodec_decode_video2(pCodecContext, pFrame, &GotPicture, pPacket);
        if (Status < 0)
        {
            throw new Exception(string.Format("Error decoding video in {0} frame:{1}", InputLocation, Attempts + 1));
        }

        Attempts++;
        if (GotPicture == 1)
        {
            Console.WriteLine("Successfully decoded a frame at " + Attempts);
        }
    }

    if (GotPicture == 0)
    {
        throw new Exception("Failed to decode a good frame in " + InputLocation);
    }

    // Conversion to Mat
    // http://stackoverflow.com/questions/29263090/ffmpeg-avframe-to-opencv-mat-conversion
    int FrameHeight = pFrame->height;
    int FrameWidth = pFrame->width;
    SwsContext* pConvertContext = FFmpegInvoke.sws_getContext(FrameWidth, FrameHeight, pCodecContext->pix_fmt,
                                                              FrameWidth, FrameHeight, AVPixelFormat.PIX_FMT_BGR24,
                                                              FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);
    if (pConvertContext == null)
    {
        throw new Exception("Failed to initialise Mat conversion context.");
    }

    // Set up the converted frame.
    var pConvertedFrame = (AVPicture*)FFmpegInvoke.avcodec_alloc_frame();
    int ConvertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(AVPixelFormat.PIX_FMT_BGR24, FrameWidth, FrameHeight);
    var pConvertedFrameBuffer = (byte*)FFmpegInvoke.av_malloc((uint)ConvertedFrameBufferSize);
    if (FFmpegInvoke.avpicture_fill(pConvertedFrame, pConvertedFrameBuffer, AVPixelFormat.PIX_FMT_BGR24, FrameWidth, FrameHeight) < 0)
    {
        throw new Exception("Failed to setup conversion frame");
    }

    byte** src = &pFrame->data_0;
    byte** dst = &pConvertedFrame->data_0;
    FFmpegInvoke.sws_scale(pConvertContext, src, pFrame->linesize, 0, FrameHeight, dst, pConvertedFrame->linesize);

    // The Mat wraps pConvertedFrameBuffer without copying, so that buffer is intentionally not freed below.
    var imageBufferPtr = new IntPtr(pConvertedFrame->data_0);
    Frame = new Mat(FrameHeight, FrameWidth, Emgu.CV.CvEnum.DepthType.Cv8U, 3, imageBufferPtr, pConvertedFrame->linesize[0]);

    // Cleanup
    FFmpegInvoke.av_free(pFrame);
    FFmpegInvoke.av_free(pConvertedFrame);
    FFmpegInvoke.sws_freeContext(pConvertContext);
    FFmpegInvoke.avcodec_close(pCodecContext);
    FFmpegInvoke.avformat_close_input(&pFormatContext);

    ProcessedFrame = true;
    return Frame;
}
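// NOTE (added sketch, not part of the original sample): the "should check for
// keyframes" comment above can be implemented by testing the packet's
// AV_PKT_FLAG_KEY bit. The value 0x0001 comes from the FFmpeg headers
// (avcodec.h); it is declared locally here in case the binding does not expose
// the constant.
private const int AV_PKT_FLAG_KEY = 0x0001;

private static unsafe bool IsKeyFramePacket(AVPacket* pPacket)
{
    return (pPacket->flags & AV_PKT_FLAG_KEY) != 0;
}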
private static unsafe void Main(string[] args)
{
    // register the FFmpeg library search path from LD_LIBRARY_PATH
    InteropHelper.RegisterLibrariesSearchPath(Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH));

    string url = @"http://www.fileformat.info/format/mpeg/sample/031699cb978244b8a3adf1e81cb2ac7c/download";

    FFmpegInvoke.av_register_all();
    FFmpegInvoke.avcodec_register_all();
    FFmpegInvoke.avformat_network_init();

    AVFormatContext* pFormatContext = FFmpegInvoke.avformat_alloc_context();
    if (FFmpegInvoke.avformat_open_input(&pFormatContext, url, null, null) != 0)
    {
        throw new Exception("Could not open file");
    }

    if (FFmpegInvoke.av_find_stream_info(pFormatContext) < 0)
    {
        throw new Exception("Could not find stream info");
    }

    // pick the first video stream
    AVStream* pStream = null;
    for (int i = 0; i < pFormatContext->nb_streams; i++)
    {
        if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
        {
            pStream = pFormatContext->streams[i];
            break;
        }
    }

    if (pStream == null)
    {
        throw new Exception("Could not find video stream");
    }

    AVCodecContext codecContext = *(pStream->codec);
    int width = codecContext.width;
    int height = codecContext.height;
    AVPixelFormat sourcePixFmt = codecContext.pix_fmt;
    AVCodecID codecId = codecContext.codec_id;

    var convertToPixFmt = AVPixelFormat.PIX_FMT_BGR24;
    SwsContext* pConvertContext = FFmpegInvoke.sws_getContext(width, height, sourcePixFmt,
                                                              width, height, convertToPixFmt,
                                                              FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);
    if (pConvertContext == null)
    {
        throw new Exception("Could not initialize the conversion context");
    }

    var pConvertedFrame = (AVPicture*)FFmpegInvoke.avcodec_alloc_frame();
    int convertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(convertToPixFmt, width, height);
    var pConvertedFrameBuffer = (byte*)FFmpegInvoke.av_malloc((uint)convertedFrameBufferSize);
    FFmpegInvoke.avpicture_fill(pConvertedFrame, pConvertedFrameBuffer, convertToPixFmt, width, height);

    AVCodec* pCodec = FFmpegInvoke.avcodec_find_decoder(codecId);
    if (pCodec == null)
    {
        throw new Exception("Unsupported codec");
    }

    AVCodecContext* pCodecContext = FFmpegInvoke.avcodec_alloc_context3(pCodec);
    if ((pCodec->capabilities & FFmpegInvoke.CODEC_CAP_TRUNCATED) == FFmpegInvoke.CODEC_CAP_TRUNCATED)
    {
        pCodecContext->flags |= FFmpegInvoke.CODEC_FLAG_TRUNCATED;
    }

    if (FFmpegInvoke.avcodec_open2(pCodecContext, pCodec, null) < 0)
    {
        throw new Exception("Could not open codec");
    }

    AVFrame* pDecodedFrame = FFmpegInvoke.avcodec_alloc_frame();

    var packet = new AVPacket();
    AVPacket* pPacket = &packet;
    FFmpegInvoke.av_init_packet(pPacket);

    int frameNumber = 0;
    while (frameNumber < 100)
    {
        Console.WriteLine("frame: {0}", frameNumber);

        if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
        {
            throw new Exception("Could not read frame");
        }

        if (pPacket->stream_index != pStream->index)
        {
            continue;
        }

        int gotPicture = 0;
        int size = FFmpegInvoke.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket);
        if (size < 0)
        {
            throw new Exception(string.Format("Error while decoding frame {0}", frameNumber));
        }

        if (gotPicture == 1)
        {
            // convert to BGR24 and save the frame as a JPEG
            byte** src = &pDecodedFrame->data_0;
            byte** dst = &pConvertedFrame->data_0;
            FFmpegInvoke.sws_scale(pConvertContext, src, pDecodedFrame->linesize, 0, height, dst, pConvertedFrame->linesize);

            byte* convertedFrameAddress = pConvertedFrame->data_0;
            var imageBufferPtr = new IntPtr(convertedFrameAddress);
            using (var bitmap = new Bitmap(width, height, pConvertedFrame->linesize[0], PixelFormat.Format24bppRgb, imageBufferPtr))
            {
                bitmap.Save(@"frame.buffer.jpg", ImageFormat.Jpeg);
            }
        }

        frameNumber++;
    }

    FFmpegInvoke.av_free(pConvertedFrame);
    FFmpegInvoke.av_free(pConvertedFrameBuffer);
    FFmpegInvoke.sws_freeContext(pConvertContext);
    FFmpegInvoke.av_free(pDecodedFrame);
    FFmpegInvoke.avcodec_close(pCodecContext);
    FFmpegInvoke.avformat_close_input(&pFormatContext);
}
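// NOTE (added sketch, not part of the original sample): in the example above the
// codec context comes from avcodec_alloc_context3, so closing it is not enough;
// it should also be freed, mirroring the avcodec_close/av_free pair used in
// DisposeUnmanaged at the top of this page. The cleanup would then end like this:
//
//     FFmpegInvoke.avcodec_close(pCodecContext);
//     FFmpegInvoke.av_free(pCodecContext);
//     FFmpegInvoke.avformat_close_input(&pFormatContext);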