/// <summary>
/// Copies a raw 32bpp BGRA frame buffer into a new <see cref="Bitmap"/>.
/// </summary>
/// <param name="videoInputData">Input data object; supplies the frame width and height.</param>
/// <param name="outbuf">Raw pixel data buffer (4 bytes per pixel, row-major, tightly packed).</param>
/// <returns>A 32bppPArgb <see cref="Bitmap"/> created from the output buffer.</returns>
private static Bitmap CopyRawDataToBitmap(VideoInputData videoInputData, byte[] outbuf)
{
	Bitmap bitmap = new Bitmap(videoInputData.Width, videoInputData.Height, System.Drawing.Imaging.PixelFormat.Format32bppPArgb);
	Rectangle bitmapRect = new Rectangle(0, 0, videoInputData.Width, videoInputData.Height);
	BitmapData bitmapData = bitmap.LockBits(bitmapRect, ImageLockMode.WriteOnly, System.Drawing.Imaging.PixelFormat.Format32bppPArgb);
	try
	{
		// Source rows are tightly packed: 4 bytes per pixel, no padding.
		int imageStride = videoInputData.Width * 4;
		if (bitmapData.Stride == imageStride)
		{
			// Strides match: one bulk copy. Clamp to the locked buffer size so an
			// oversized source buffer can never overrun the bitmap's memory.
			int byteCount = Math.Min(outbuf.Length, imageStride * videoInputData.Height);
			Marshal.Copy(outbuf, 0, bitmapData.Scan0, byteCount);
		}
		else
		{
			// Stride mismatch (bitmap rows are padded): copy row by row.
			// BUGFIX: the previous code used the Marshal.Copy(IntPtr, IntPtr[], int, int)
			// overload, which copies IntPtr values into a managed array — not pixel bytes.
			// The correct overload copies bytes from the managed buffer to unmanaged memory.
			for (int i = 0; i < videoInputData.Height; i++)
			{
				IntPtr rowStart = new IntPtr(bitmapData.Scan0.ToInt64() + (long)i * bitmapData.Stride);
				Marshal.Copy(outbuf, i * imageStride, rowStart, imageStride);
			}
		}
	}
	finally
	{
		// Always unlock, even if a copy throws, so the bitmap is not left locked.
		bitmap.UnlockBits(bitmapData);
	}
	return bitmap;
}
/// <summary>
/// Create an output codec using the settings found in the VideoInputData.
/// </summary>
/// <param name="videoInputData">The input video settings, probably found in the source packet.</param>
/// <returns>The output codec settings (RAW video codec), or null when the codec could not be loaded or opened.</returns>
private VideoOuputData CreateOutputCodec(VideoInputData videoInputData)
{
	// Locate the RAW video encoder; no allocation has happened yet on failure.
	IntPtr pOutputCodec = _ffmpeg.AvcodecFindEncoder(CodecId.CodecIdRawvideo);
	if (pOutputCodec == IntPtr.Zero)
	{
#if DEBUG
		Log.Info("Could not load output codec.");
#endif
		return(null);
	}

	// Setup target encoding context (output settings): copy the managed struct,
	// mutate it, then write it back to the unmanaged codec context.
	VideoOuputData videoOutputData = new VideoOuputData(_ffmpeg, _ffmpeg.AvcodecAllocContext());
	AvCodecContext outputCodecContext = PtrToStructure<AvCodecContext>(videoOutputData.POutputCodecContext);
	outputCodecContext.Width = videoInputData.Width;
	outputCodecContext.Height = videoInputData.Height;
	outputCodecContext.PixFmt = OutputPixelFormat;
	Marshal.StructureToPtr(outputCodecContext, videoOutputData.POutputCodecContext, false);

	if (_ffmpeg.AvcodecOpen(videoOutputData.POutputCodecContext, pOutputCodec) < 0)
	{
#if DEBUG
		Log.Info("Could not open output codec.");
#endif
		// BUGFIX: the allocated output context was leaked on this failure path;
		// dispose it before bailing out (callers only dispose a non-null result).
		videoOutputData.Dispose();
		return(null);
	}
	return(videoOutputData);
}
/// <summary>
/// Convert a single video frame into a Bitmap using the data from <paramref name="dataPacket"/>.
/// When <paramref name="headerPacket"/> is null, the data packet itself is used as the header source.
/// </summary>
/// <param name="dataPacket">Contains the frame data; must not be null.</param>
/// <param name="headerPacket">Packet used as header source; null is allowed.</param>
/// <returns>The resulting Bitmap, or null when the conversion failed.</returns>
public Bitmap FrameToBitmap(IResultNode dataPacket, IResultNode headerPacket)
{
	// FFmpeg must have been registered before any conversion is possible.
	if (!_ffmpegRegistered)
	{
		return null;
	}
	if (dataPacket == null)
	{
		throw new ArgumentNullException("dataPacket", "Invalid input parameters(null) for FFmpeg frame convertion.");
	}

	// Fall back to the data packet when no separate header packet was supplied.
	IResultNode effectiveHeader = headerPacket ?? dataPacket;

	try
	{
		// Detect the input codec from the packet data.
		using (VideoInputData inputData = CreateCodecFromPacket(dataPacket, effectiveHeader))
		{
			if (inputData == null)
			{
				return null;
			}

			// Prepare the RAW video output codec for re-encoding the decoded frame.
			using (VideoOuputData outputData = CreateOutputCodec(inputData))
			{
				return (outputData == null) ? null : VideoDataToBitmap(inputData, outputData);
			}
		}
	}
	catch (Exception)
	{
		// Any failure during decode/encode yields a null bitmap by design.
		return null;
	}
	finally
	{
		// Release the memory stream registered for this conversion, success or not.
		lock (_videoMemoryStreams)
		{
			_videoMemoryStreams.Remove(_convertId);
		}
	}
}
/// <summary>
/// Create the actual Bitmap from the packets read out of the input format context.
/// Decodes frames from the input stream, converts the first complete picture to BGRA
/// via swscale, re-encodes it with the RAW output codec, and copies the result into a Bitmap.
/// </summary>
/// <param name="videoInputData">Input data: input codec context, format context, frame dimensions.</param>
/// <param name="videoOutputData">Output data: the opened RAW output codec context.</param>
/// <returns>The Bitmap created from the first decodable frame, or null when no frame could be converted.</returns>
private Bitmap VideoDataToBitmap(VideoInputData videoInputData, VideoOuputData videoOutputData)
{
	// Allocate the decode target frame and the BGRA conversion target frame.
	IntPtr pFrame = _ffmpeg.AvcodecAllocFrame();
	IntPtr pOutFrame = _ffmpeg.AvcodecAllocFrame();

	// Set by the decoder to non-zero once a complete picture is available.
	int gotPicture = 0;

	// Return / output bitmap object; stays null if no frame converts successfully.
	Bitmap bitmap = null;

	// Unmanaged packet buffer filled by AvReadFrame on each iteration.
	IntPtr pPacket = Allocate<AvPacket>();
	AvPacket packet;
	try
	{
		// Read packets until a frame converts or the stream is exhausted.
		// NOTE(review): packets read here are never released per-iteration; only the
		// final packet is freed in the outer finally. Presumably the wrapper tolerates
		// this, but it looks like a per-packet leak — verify against the FFmpeg binding.
		while (_ffmpeg.AvReadFrame(videoInputData.PInputFormatContext, pPacket) >= 0)
		{
			packet = PtrToStructure<AvPacket>(pPacket);

			// Is this a packet from the video stream?
			if (packet.StreamIndex == videoInputData.VideoStartIndex)
			{
				// Decode (input) video frame into pFrame.
				_ffmpeg.AvcodecDecodeVideo(videoInputData.PInputCodecContext, pFrame, ref gotPicture, packet.Data, packet.Size);
				if (gotPicture != 0)
				{
					// A full picture was decoded: build the BGRA output buffer for it.
					int bufSize = _ffmpeg.AvpictureGetSize((int)PixelFormat.PixFmtBgra, videoInputData.Width, videoInputData.Height);
					byte[] outbuf = new byte[bufSize];
					// NOTE(review): UnsafeAddrOfPinnedArrayElement is used on an array that
					// is never explicitly pinned; a GC compaction between here and the last
					// native use of the address could invalidate it — confirm/pin.
					_ffmpeg.AvpictureFill(pOutFrame, Marshal.UnsafeAddrOfPinnedArrayElement(outbuf, 0), (int)PixelFormat.PixFmtBgra, videoInputData.Width, videoInputData.Height);
					AvFrame frame = PtrToStructure<AvFrame>(pFrame);
					AvFrame outFrame = PtrToStructure<AvFrame>(pOutFrame);
					IntPtr swsContext = IntPtr.Zero;
					try
					{
						// Create the pixel-format conversion context (input format -> BGRA,
						// same dimensions, fast bilinear scaling).
						swsContext = _ffmpeg.SwsGetContext(videoInputData.Width, videoInputData.Height, (int)videoInputData.VideoCodecContext.PixFmt, videoInputData.Width, videoInputData.Height, (int)PixelFormat.PixFmtBgra, SwsFastBilinear, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);
						// Convert the decoded frame into the BGRA output frame/buffer.
						_ffmpeg.SwsScale(swsContext, frame.Data, frame.Linesize, 0, videoInputData.Height, outFrame.Data, outFrame.Linesize);

						// Encode the converted frame with the RAW output codec; for raw video
						// this effectively serializes the pixels back into outbuf.
						int outLength = _ffmpeg.AvcodecEncodeVideo(videoOutputData.POutputCodecContext, Marshal.UnsafeAddrOfPinnedArrayElement(outbuf, 0), bufSize, pOutFrame);

						// Check if the frame encode succeeded (non-empty output).
						if (outLength > 0)
						{
							try
							{
								bitmap = CopyRawDataToBitmap(videoInputData, outbuf);
								break; // A bitmap has been created, stop searching for a frame.
							}
							catch (Exception)
							{
								// Swallowed on purpose: keep scanning for another decodable frame.
#if DEBUG
								Log.Info("An error occurred while creating the output bitmap.");
#endif
							}
						}
#if DEBUG
						else
						{
							Log.Info("Ouput buffer is empty, no bitmap was created.");
						}
#endif
					}
					finally
					{
						// Always release the swscale context created for this frame.
						if (swsContext != IntPtr.Zero)
						{
							_ffmpeg.SwsFreeContext(swsContext);
						}
					}
				}
			}
		}
	}
	finally
	{
		// Release the frames and the packet buffer regardless of outcome.
		_ffmpeg.AvFree(pOutFrame);
		_ffmpeg.AvFree(pFrame);
		_ffmpeg.AvFreePacket(pPacket);
		Marshal.FreeHGlobal(pPacket);
	}
	return(bitmap);
}
/// <summary>
/// Creates an input codec, based on the settings found in the dataPacket/headerPacket.
/// Loads the packet data into the internal buffer, opens it as an FFmpeg input file,
/// and opens a decoder for the first video stream found.
/// </summary>
/// <param name="dataPacket">Contains the frame data.</param>
/// <param name="headerPacket">Contains the frame header data.</param>
/// <returns>The input data wrapper for the opened video stream, or null on any failure.</returns>
private VideoInputData CreateCodecFromPacket(IResultNode dataPacket, IResultNode headerPacket)
{
	// Load the framedata from the packet to the byte buffer.
	if (!PacketDataToBuffer(dataPacket, headerPacket))
	{
#if DEBUG
		Log.Info("No frame header information is available.");
#endif
		return(null);
	}

	IntPtr pInputFormatContext;
	if (_ffmpeg.AvOpenInputFile(out pInputFormatContext, DefraserProtocolPrefix + _convertId, IntPtr.Zero, 0, IntPtr.Zero) < 0)
	{
#if DEBUG
		Log.Info("Could not open input file.");
#endif
		return(null);
	}

	// NOTE(review): pInputFormatContext is not closed on the failure paths below;
	// the close call presumably lives in the FFmpeg wrapper — confirm and release it
	// before each early return to avoid leaking the format context.
	if (_ffmpeg.AvFindStreamInfo(pInputFormatContext) < 0)
	{
#if DEBUG
		Log.Info("Could not find (input)stream information.");
#endif
		return(null);
	}

	VideoInputData videoInputData = null;
	AvFormatContext formatContext = PtrToStructure<AvFormatContext>(pInputFormatContext);

	// Scan the streams for the first video stream and open a decoder for it.
	for (int i = 0; i < formatContext.NbStreams; ++i)
	{
		AvStream stream = PtrToStructure<AvStream>(formatContext.Streams[i]);
		AvCodecContext codec = PtrToStructure<AvCodecContext>(stream.Codec);
		if (codec.CodecType == CodecType.CodecTypeVideo)
		{
			// 'codec' was just marshalled from stream.Codec; no need to re-read it
			// before the decoder has been opened (the open call is what mutates it).
			IntPtr pVideoCodec = _ffmpeg.AvcodecFindDecoder(codec.CodecId);
			if (pVideoCodec == IntPtr.Zero)
			{
#if DEBUG
				Log.Info("could not find input codec");
#endif
				return(null);
			}
			if (_ffmpeg.AvcodecOpen(stream.Codec, pVideoCodec) < 0)
			{
#if DEBUG
				Log.Info("Could not open input codec.");
#endif
				return(null);
			}
			// Setup object used to decode (input) frames. The codec context is
			// deliberately re-read here: opening the codec fills in fields
			// (e.g. pixel format) that the pre-open snapshot does not have.
			videoInputData = new VideoInputData(_ffmpeg, pInputFormatContext, stream.Codec, PtrToStructure<AvCodecContext>(stream.Codec), i);
			break; // stop searching for a video stream
		}
	}

	if (videoInputData == null)
	{
#if DEBUG
		Log.Info("Could not find video stream.");
#endif
		return(null);
	}
	return(videoInputData);
}