/// <summary>
/// Create the actual Bitmap from the dataPacket.
/// This is done by using both the input- and output data.
/// </summary>
/// <param name="videoInputData">Input data; contains input codec, frame data etc.</param>
/// <param name="videoOutputData">Output data; contains output codec (settings).</param>
/// <returns>The Bitmap as result from the frame conversion, null when the operation failed.</returns>
private Bitmap VideoDataToBitmap(VideoInputData videoInputData, VideoOuputData videoOutputData)
{
    // Allocate the decode (input) and conversion (output) frames.
    IntPtr pFrame = _ffmpeg.AvcodecAllocFrame();
    IntPtr pOutFrame = _ffmpeg.AvcodecAllocFrame();

    // Set by the decoder: non-zero once a complete picture has been produced.
    int gotPicture = 0;

    // Return / output bitmap object; stays null if no frame could be converted.
    Bitmap bitmap = null;

    // Video packet (pointer) to decode.
    IntPtr pPacket = Allocate<AvPacket>();
    AvPacket packet;
    try
    {
        // Read packets until one video frame has been decoded, converted and encoded.
        // NOTE(review): packets for non-video streams are not unreferenced per
        // iteration here — confirm the wrapper's AvReadFrame/AvFreePacket semantics.
        while (_ffmpeg.AvReadFrame(videoInputData.PInputFormatContext, pPacket) >= 0)
        {
            packet = PtrToStructure<AvPacket>(pPacket);

            // Skip packets that do not belong to the video stream.
            if (packet.StreamIndex != videoInputData.VideoStartIndex)
            {
                continue;
            }

            // Decode the (input) video frame.
            _ffmpeg.AvcodecDecodeVideo(videoInputData.PInputCodecContext, pFrame, ref gotPicture, packet.Data, packet.Size);
            if (gotPicture == 0)
            {
                // The decoder needs more packets before a full picture is available.
                continue;
            }

            // Create the output buffer sized for a BGRA picture of the input dimensions.
            int bufSize = _ffmpeg.AvpictureGetSize((int)PixelFormat.PixFmtBgra, videoInputData.Width, videoInputData.Height);
            byte[] outbuf = new byte[bufSize];

            // BUGFIX: the buffer must be explicitly pinned before its address is
            // handed to native code. Marshal.UnsafeAddrOfPinnedArrayElement does NOT
            // pin the array itself, so the GC could move it while FFmpeg writes to it.
            GCHandle outbufHandle = GCHandle.Alloc(outbuf, GCHandleType.Pinned);
            IntPtr swsContext = IntPtr.Zero;
            try
            {
                IntPtr pOutbuf = outbufHandle.AddrOfPinnedObject();
                _ffmpeg.AvpictureFill(pOutFrame, pOutbuf, (int)PixelFormat.PixFmtBgra, videoInputData.Width, videoInputData.Height);

                AvFrame frame = PtrToStructure<AvFrame>(pFrame);
                AvFrame outFrame = PtrToStructure<AvFrame>(pOutFrame);

                // Conversion context: input pixel format -> BGRA, same dimensions.
                swsContext = _ffmpeg.SwsGetContext(videoInputData.Width, videoInputData.Height, (int)videoInputData.VideoCodecContext.PixFmt,
                                                   videoInputData.Width, videoInputData.Height, (int)PixelFormat.PixFmtBgra,
                                                   SwsFastBilinear, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);

                // BUGFIX: guard against a failed context allocation before scaling;
                // the original passed a possibly-null context straight to SwsScale.
                if (swsContext == IntPtr.Zero)
                {
#if DEBUG
                    Log.Info("Could not create the scaler context, no bitmap was created.");
#endif
                    // Retrying with identical parameters would fail again; give up.
                    break;
                }

                _ffmpeg.SwsScale(swsContext, frame.Data, frame.Linesize, 0, videoInputData.Height, outFrame.Data, outFrame.Linesize);

                // Encode the RAW output frame with the output codec.
                int outLength = _ffmpeg.AvcodecEncodeVideo(videoOutputData.POutputCodecContext, pOutbuf, bufSize, pOutFrame);

                // Check if the frame encode succeeded.
                if (outLength > 0)
                {
                    try
                    {
                        bitmap = CopyRawDataToBitmap(videoInputData, outbuf);
                        break; // A bitmap has been created, stop searching for a frame.
                    }
                    catch (Exception)
                    {
#if DEBUG
                        Log.Info("An error occurred while creating the output bitmap.");
#endif
                    }
                }
#if DEBUG
                else
                {
                    Log.Info("Output buffer is empty, no bitmap was created.");
                }
#endif
            }
            finally
            {
                if (swsContext != IntPtr.Zero)
                {
                    _ffmpeg.SwsFreeContext(swsContext);
                }

                // Always release the pin so the GC can manage the buffer again.
                outbufHandle.Free();
            }
        }
    }
    finally
    {
        // Release native resources in reverse order of allocation.
        _ffmpeg.AvFree(pOutFrame);
        _ffmpeg.AvFree(pFrame);
        _ffmpeg.AvFreePacket(pPacket);
        Marshal.FreeHGlobal(pPacket);
    }

    return bitmap;
}
public static void Main(string[] args)
{
    // Bootstrap the native FFmpeg libraries and register all formats/codecs.
    WrapperUtils.RegisterLibrariesPathSimple("ffmpeg-x64", "ffmpeg-x86");
    AvFormat.RegisterAll();
    AvCodec.RegisterAll();
    AvFormat.NetworkInit();

    // Open the remote demo clip and probe its streams.
    AvFormatContext formatContext = AvFormatContext.Allocate();
    if (!formatContext.OpenInput(@"http://www.quirksmode.org/html5/videos/big_buck_bunny.mp4"))
    {
        throw new Exception("Failed to open file :(");
    }

    if (!formatContext.FindStreamInfo())
    {
        throw new Exception("Failed to find stream info :(");
    }

    // Pick the first stream whose codec carries video.
    AvStream videoStream = null;
    foreach (AvStream candidate in formatContext.Streams)
    {
        if (candidate.Codec.Type != AvMediaType.Video)
        {
            continue;
        }

        videoStream = candidate;
        break;
    }

    if (videoStream == null)
    {
        throw new Exception("Could not find video stream :(");
    }

    AvCodecContext codecContext = videoStream.Codec;
    int width = codecContext.Width;
    int height = codecContext.Height;

    // Conversion target: same dimensions, BGR24 so GDI+ can consume the pixels.
    AvPixelFormat sourceFormat = codecContext.PixelFormat;
    AvPixelFormat targetFormat = AvPixelFormat.Bgr24;

    SwsContext convertContext = SwsContext.Get(width, height, sourceFormat, width, height, targetFormat, SwsFlags.FastBilinear);
    if (convertContext == null)
    {
        throw new Exception("Could not initialize the conversion context");
    }

    // Destination frame backed by a manually allocated native buffer.
    AvFrame convertedFrame = AvFrame.Allocate();
    int convertedFrameBufferSize = AvPicture.GetSize(targetFormat, width, height);
    SByteBuffer convertedFrameBuffer = AvUtil.Malloc((ulong)convertedFrameBufferSize);
    ((AvPicture)convertedFrame).Fill(convertedFrameBuffer, targetFormat, width, height);

    // Locate and open the matching decoder.
    AvCodec codec = AvCodec.FindDecoder(codecContext.Id);
    if (codec == null)
    {
        throw new Exception("Unsupported codec");
    }

    if (codec.HasCapability(CodecCapabilities.Truncated))
    {
        codecContext.Flags |= CodecFlags.Truncated;
    }

    if (!codecContext.Open2(codec))
    {
        throw new Exception("Could not open codec");
    }

    AvFrame frame = AvFrame.Allocate();
    AvPacket packet = AvPacket.Create();
    packet.Init();

    // Decode and dump the first 500 pictures as JPEG files.
    // NOTE(review): hitting end-of-stream before 500 frames throws here rather
    // than exiting cleanly — confirm that is the intended demo behavior.
    int frameNumber = 0;
    while (frameNumber < 500)
    {
        if (!formatContext.ReadFrame(packet))
        {
            throw new Exception("Could not read frame!");
        }

        // Only feed packets that belong to the selected video stream.
        if (packet.StreamIndex == videoStream.Index)
        {
            int gotPicture;
            int bytesConsumed = codecContext.DecodeVideo2(frame, out gotPicture, packet);
            if (bytesConsumed < 0)
            {
                throw new Exception("Error while decoding frame " + frameNumber);
            }

            if (gotPicture == 1)
            {
                Console.WriteLine($"Frame: {frameNumber}");

                // Convert the decoded picture into the BGR24 destination frame.
                SByteBufferArray sourcePlanes = frame.Data;
                SByteBufferArray targetPlanes = convertedFrame.Data;
                IntArray sourceStrides = frame.LineSize;
                IntArray targetStrides = convertedFrame.LineSize;
                convertContext.Scale(sourcePlanes, sourceStrides, 0, height, targetPlanes, targetStrides);

                // Wrap the native buffer in a Bitmap and write it out as JPEG.
                int stride = targetStrides[0];
                using (Bitmap outputImage = new Bitmap(width, height, stride, PixelFormat.Format24bppRgb, convertedFrame.Data0))
                {
                    outputImage.Save(@"frame.buffer." + frameNumber + ".jpg", ImageFormat.Jpeg);
                }

                frameNumber++;
            }
        }
    }

    // Tear everything down in the reverse order it was set up.
    convertedFrame.Free();
    convertedFrameBuffer.Free();
    convertContext.Free();
    frame.Free();
    codecContext.Close();
    formatContext.CloseInput();
}