// P/Invoke binding for libswscale's sws_getColorspaceDetails: reads back the scaler's
// colorspace coefficient tables and range/brightness/contrast/saturation settings
// through the supplied output pointers. Return-value semantics follow the native
// libswscale API (negative on failure) — confirm against the bound FFmpeg version.
public static extern int sws_getColorspaceDetails(SwsContext* c, int** inv_table, int* srcRange, int** table, int* dstRange, int* brightness, int* contrast, int* saturation);
// P/Invoke binding for libswscale's sws_getCachedContext: returns a scaler context for
// the given source/destination geometry, pixel formats and flags, reusing `context`
// when it already matches (per the native API; the old context must not be used if a
// new one is returned). Returns null on allocation failure.
public static extern SwsContext* sws_getCachedContext(SwsContext* context, int srcW, int srcH, AVPixelFormat srcFormat, int dstW, int dstH, AVPixelFormat dstFormat, int flags, SwsFilter* srcFilter, SwsFilter* dstFilter, double* param);
// P/Invoke binding for libswscale's sws_scale: converts/scales a horizontal slice of the
// source image (plane pointers `srcSlice`, per-plane strides `srcStride`, starting at row
// `srcSliceY`, `srcSliceH` rows tall) into the destination planes `dst`/`dstStride`.
// Returns the height of the output slice (native API convention).
public static extern int sws_scale(SwsContext* c, byte** srcSlice, int* srcStride, int srcSliceY, int srcSliceH, byte** dst, int* dstStride);
// P/Invoke binding for libswscale's sws_setColorspaceDetails: configures the scaler's
// colorspace coefficient tables and range/brightness/contrast/saturation. Counterpart
// of sws_getColorspaceDetails above; returns negative if the context does not support
// colorspace details (native API convention).
public static extern int sws_setColorspaceDetails(SwsContext* c, int* inv_table, int srcRange, int* table, int dstRange, int brightness, int contrast, int saturation);
// P/Invoke binding for libswscale's sws_init_context: initializes an allocated scaler
// context with optional source/destination filters. Returns zero on success, negative
// on error (native API convention).
public static extern int sws_init_context(SwsContext* sws_context, SwsFilter* srcFilter, SwsFilter* dstFilter);
// P/Invoke binding for libswscale's sws_freeContext: releases a scaler context.
// Safe to call with null per the native API; the pointer must not be used afterwards.
public static extern void sws_freeContext(SwsContext* swsContext);
// P/Invoke binding for the legacy libswscale sws_scale_ordered (deprecated upstream in
// favor of sws_scale).
// NOTE(review): the final parameter is declared `int dstStride` (scalar) while sws_scale
// above takes `int* dstStride` (per-plane array). This looks like a binding mistake that
// would corrupt marshalling — verify against the native prototype before using.
public static extern int sws_scale_ordered(SwsContext* context, byte** src, int* srcStride, int srcSliceY, int srcSliceH, byte** dst, int dstStride);
// Unimplemented managed overload of sws_scale taking a single source plane pointer
// instead of a plane array. Deliberate placeholder: always throws.
// NOTE(review): parameter names `v1`/`v2` do not reveal intent — presumably `v1` maps to
// srcSliceY and `unmanagedPointer` to the first source plane; confirm before implementing.
public static void sws_scale(SwsContext* sws_context, byte* unmanagedPointer, int* linesize1, int v1, int height, byte** v2, int* linesize2) { throw new NotImplementedException(); }
/// <summary>
/// Reads packets from <paramref name="pFormatContext"/> until one frame of the video
/// stream <paramref name="vidStream"/> is decoded, converts it into the pre-allocated
/// BGR24 picture <paramref name="pConvertedFrame"/> via <paramref name="pConvertContext"/>,
/// and wraps the converted buffer in a <see cref="Bitmap"/>.
/// Returns null when the end of the stream is reached (or a read error occurs).
/// </summary>
/// <param name="createCopy">
/// When true, returns a deep copy so the result does not alias the shared native
/// conversion buffer, which is overwritten by the next extracted frame.
/// </param>
/// <param name="timeBase">Stream time base used by <c>ToTimeSpan</c> to convert the timestamp.</param>
/// <param name="pos">Best-effort presentation timestamp of the decoded frame.</param>
/// <exception cref="Exception">Thrown when avcodec_decode_video2 reports a decode error.</exception>
private unsafe static Bitmap ExtractNextImage(AVFormatContext* pFormatContext, AVCodecContext* pCodecContext, AVPacket* pPacket, AVStream* vidStream, SwsContext* pConvertContext, AVFrame* pDecodedFrame, AVPicture* pConvertedFrame, int width, int height, bool createCopy, double timeBase, out TimeSpan pos)
{
    pos = new TimeSpan();
    Bitmap result = null;
    int gotPicture = 0;
    while (gotPicture != 1)
    {
        if (ffmpeg.av_read_frame(pFormatContext, pPacket) < 0)
        {
            // End of stream (or read error): no more frames to extract.
            result = null;
            break;
        }
        try
        {
            // Skip packets that do not belong to the selected video stream.
            if (pPacket->stream_index != vidStream->index)
                continue;

            gotPicture = 0;
            int size = ffmpeg.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket);
            if (size < 0)
                throw new Exception("Error while decoding frame!");

            if (gotPicture == 1)
            {
                // Get current position from the frame's best-effort timestamp.
                pos = ToTimeSpan(ffmpeg.av_frame_get_best_effort_timestamp(pDecodedFrame), timeBase);

                // Convert the decoded frame into the pre-allocated BGR24 buffer.
                sbyte** src = &pDecodedFrame->data0;
                sbyte** dst = &pConvertedFrame->data0;
                ffmpeg.sws_scale(pConvertContext, src, pDecodedFrame->linesize, 0, height, dst, pConvertedFrame->linesize);

                var imageBufferPtr = new IntPtr(pConvertedFrame->data0);
                int linesize = pConvertedFrame->linesize[0];
                if (createCopy)
                {
                    // BUGFIX: the wrapper Bitmap aliasing the native buffer was previously
                    // leaked whenever a copy was requested — dispose it after copying.
                    using (var img = new Bitmap(width, height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        result = new Bitmap(img);
                    }
                }
                else
                {
                    result = new Bitmap(width, height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr);
                }
            }
        }
        finally
        {
            // BUGFIX: release the packet payload after every successful av_read_frame
            // (including skipped packets) — previously every packet leaked. Pairs with
            // the old decode API used above; confirm the binding exposes av_free_packet.
            ffmpeg.av_free_packet(pPacket);
        }
    }
    return result;
}
/// <summary>
/// Decodes the next frame of <paramref name="vidStream"/> from this instance's
/// <c>AVFormatContext</c>, converts it to BGR24 and returns it as a <see cref="Bitmap"/>.
/// Returns null when the stream ends (<paramref name="end"/> set to true) or when the
/// frame is closer than <paramref name="delay"/> milliseconds to <paramref name="prev"/>
/// (frame-skipping for thumbnail/preview extraction).
/// </summary>
/// <param name="createCopy">
/// When true, returns a deep copy so the result does not alias the shared native
/// conversion buffer, which is overwritten by the next extracted frame.
/// </param>
/// <param name="pos">Best-effort presentation timestamp of the decoded frame.</param>
/// <param name="end">True when end of stream (or a read error) was reached.</param>
/// <exception cref="Exception">Thrown when avcodec_decode_video2 reports a decode error.</exception>
private unsafe Bitmap ExtractNextImage2(AVCodecContext* pCodecContext, AVPacket* pPacket, AVStream* vidStream, SwsContext* pConvertContext, AVFrame* pDecodedFrame, AVPicture* pConvertedFrame, int width, int height, bool createCopy, double timeBase, int? delay, TimeSpan prev, out TimeSpan pos, out bool end)
{
    pos = new TimeSpan();
    end = false;
    Bitmap result = null;
    int gotPicture = 0;
    while (gotPicture != 1)
    {
        if (ffmpeg.av_read_frame(this.AVFormatContext, pPacket) < 0)
        {
            // End of stream (or read error): signal the caller and stop.
            end = true;
            result = null;
            break;
        }
        try
        {
            // Skip packets that do not belong to the selected video stream.
            if (pPacket->stream_index != vidStream->index)
                continue;

            gotPicture = 0;
            int size = ffmpeg.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket);
            if (size < 0)
                throw new Exception("Error while decoding frame!");

            if (gotPicture == 1)
            {
                // Get current position from the frame's best-effort timestamp.
                pos = ToTimeSpan(ffmpeg.av_frame_get_best_effort_timestamp(pDecodedFrame), timeBase);

                // Frame skipping: too close to the previously returned frame.
                // (Packet is still released by the finally block below.)
                if (delay.HasValue && prev != TimeSpan.Zero && (pos - prev).TotalMilliseconds < delay)
                {
                    return null;
                }

                // Convert the decoded frame into the pre-allocated BGR24 buffer,
                // using the codec's own height for the source slice.
                sbyte** src = &pDecodedFrame->data0;
                sbyte** dst = &pConvertedFrame->data0;
                int src_height = pCodecContext->height;
                ffmpeg.sws_scale(pConvertContext, src, pDecodedFrame->linesize, 0, src_height, dst, pConvertedFrame->linesize);

                var imageBufferPtr = new IntPtr(pConvertedFrame->data0);
                int linesize = pConvertedFrame->linesize[0];
                if (createCopy)
                {
                    // BUGFIX: `createCopy` was previously ignored here, unlike the sibling
                    // ExtractNextImage — callers asking for a copy received a bitmap
                    // aliasing the shared buffer. Honor it and dispose the wrapper.
                    using (var img = new Bitmap(width, height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        result = new Bitmap(img);
                    }
                }
                else
                {
                    result = new Bitmap(width, height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr);
                }
            }
        }
        finally
        {
            // BUGFIX: release the packet payload after every successful av_read_frame
            // (including skipped packets and the frame-skip early return) — previously
            // every packet leaked. Confirm the binding exposes av_free_packet.
            ffmpeg.av_free_packet(pPacket);
        }
    }
    return result;
}
/// <summary>
/// Demo entry point: opens a network MP4, finds the first video stream, decodes up to
/// 500 frames, converts each to BGR24 and saves it as "frame.buffer.N.jpg" in the
/// working directory, then tears everything down.
/// </summary>
public static void Main(string[] args)
{
    // Locate native FFmpeg libraries and register formats/codecs/network support.
    WrapperUtils.RegisterLibrariesPathSimple("ffmpeg-x64", "ffmpeg-x86");
    AvFormat.RegisterAll();
    AvCodec.RegisterAll();
    AvFormat.NetworkInit();

    AvFormatContext format = AvFormatContext.Allocate();
    if (!format.OpenInput(@"http://www.quirksmode.org/html5/videos/big_buck_bunny.mp4"))
    {
        throw new Exception("Failed to open file :(");
    }
    if (!format.FindStreamInfo())
    {
        throw new Exception("Failed to find stream info :(");
    }

    // Pick the first video stream.
    AvStream pStream = null;
    foreach (AvStream avStream in format.Streams)
    {
        if (avStream.Codec.Type == AvMediaType.Video)
        {
            pStream = avStream;
            break;
        }
    }
    if (pStream == null)
    {
        throw new Exception("Could not find video stream :(");
    }

    AvCodecContext codecContext = pStream.Codec;
    int width = codecContext.Width;
    int height = codecContext.Height;
    AvPixelFormat sourceFormat = codecContext.PixelFormat;
    AvPixelFormat targetFormat = AvPixelFormat.Bgr24; // GDI+ 24bpp layout

    SwsContext convertContext = SwsContext.Get(width, height, sourceFormat, width, height, targetFormat, SwsFlags.FastBilinear);
    if (convertContext == null)
    {
        throw new Exception("Could not initialize the conversion context");
    }

    // Destination frame backed by a manually allocated BGR24 buffer.
    AvFrame convertedFrame = AvFrame.Allocate();
    int convertedFrameBufferSize = AvPicture.GetSize(targetFormat, width, height);
    SByteBuffer convertedFrameBuffer = AvUtil.Malloc((ulong)convertedFrameBufferSize);
    ((AvPicture)convertedFrame).Fill(convertedFrameBuffer, targetFormat, width, height);

    AvCodec codec = AvCodec.FindDecoder(codecContext.Id);
    if (codec == null)
    {
        throw new Exception("Unsupported codec");
    }
    if (codec.HasCapability(CodecCapabilities.Truncated))
    {
        codecContext.Flags |= CodecFlags.Truncated;
    }
    if (!codecContext.Open2(codec))
    {
        throw new Exception("Could not open codec");
    }

    AvFrame frame = AvFrame.Allocate();
    AvPacket packet = AvPacket.Create();
    packet.Init();

    int frameNumber = 0;
    while (frameNumber < 500)
    {
        if (!format.ReadFrame(packet))
        {
            // BUGFIX: end of stream is a normal condition for a finite file — stop
            // reading instead of throwing (previously this threw
            // "Could not read frame!" whenever the input had fewer than 500 frames).
            break;
        }
        if (packet.StreamIndex != pStream.Index)
        {
            // Not our video stream; skip.
            // NOTE(review): packet payloads are never explicitly released in this loop —
            // verify whether the wrapper's ReadFrame/Init recycle them or a per-packet
            // free call is required to avoid leaking.
            continue;
        }

        int gotPicture;
        int size = codecContext.DecodeVideo2(frame, out gotPicture, packet);
        if (size < 0)
        {
            throw new Exception("Error while decoding frame " + frameNumber);
        }
        if (gotPicture == 1)
        {
            Console.WriteLine($"Frame: {frameNumber}");

            // Convert the decoded frame into the pre-allocated BGR24 buffer.
            SByteBufferArray src = frame.Data;
            SByteBufferArray dst = convertedFrame.Data;
            IntArray srcStride = frame.LineSize;
            IntArray dstStride = convertedFrame.LineSize;
            convertContext.Scale(src, srcStride, 0, height, dst, dstStride);

            // Wrap the native buffer (no copy) just long enough to encode a JPEG.
            int linesize = dstStride[0];
            using (Bitmap bitmap = new Bitmap(width, height, linesize, PixelFormat.Format24bppRgb, convertedFrame.Data0))
            {
                bitmap.Save(@"frame.buffer." + frameNumber + ".jpg", ImageFormat.Jpeg);
            }
            frameNumber++;
        }
    }

    // Teardown in reverse order of acquisition.
    convertedFrame.Free();
    convertedFrameBuffer.Free();
    convertContext.Free();
    frame.Free();
    codecContext.Close();
    format.CloseInput();
}