/// <summary>
/// Create an output codec using the settings found in the VideoInputData.
/// </summary>
/// <param name="videoInputData">The input video settings, probably found in the source packet.</param>
/// <returns>The output codec settings, RAW video codec; null when the codec could not be loaded or opened.</returns>
private VideoOuputData CreateOutputCodec(VideoInputData videoInputData)
{
    // Open output codec
    IntPtr pOutputCodec = _ffmpeg.AvcodecFindEncoder(CodecId.CodecIdRawvideo);
    if (pOutputCodec == IntPtr.Zero)
    {
#if DEBUG
        Log.Info("Could not load output codec.");
#endif
        return null;
    }

    // Setup target encoding context (output settings): copy the managed struct out,
    // apply the input dimensions and the desired pixel format, then write it back.
    VideoOuputData videoOutputData = new VideoOuputData(_ffmpeg, _ffmpeg.AvcodecAllocContext());
    AvCodecContext outputCodecContext = PtrToStructure<AvCodecContext>(videoOutputData.POutputCodecContext);
    outputCodecContext.Width = videoInputData.Width;
    outputCodecContext.Height = videoInputData.Height;
    outputCodecContext.PixFmt = OutputPixelFormat;
    Marshal.StructureToPtr(outputCodecContext, videoOutputData.POutputCodecContext, false);

    if (_ffmpeg.AvcodecOpen(videoOutputData.POutputCodecContext, pOutputCodec) < 0)
    {
#if DEBUG
        Log.Info("Could not open output codec.");
#endif
        // BUGFIX: the caller never sees this instance on the failure path, so
        // returning without disposing leaked the allocated codec context.
        videoOutputData.Dispose();
        return null;
    }
    return videoOutputData;
}
/// <summary>
/// Start a frame convert using the Data from the dataPacket.
/// If headerPacket is not null, it will be used as header source.
/// </summary>
/// <param name="dataPacket">Contains the frame Data.</param>
/// <param name="headerPacket">Datapacket as headersource, null is allowed.</param>
/// <returns>The Bitmap as result from the frame conversion, null when the operation failed.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="dataPacket"/> is null.</exception>
public Bitmap FrameToBitmap(IResultNode dataPacket, IResultNode headerPacket)
{
    // FFmpeg must have been registered before any conversion can run.
    if (!_ffmpegRegistered)
    {
        return null;
    }
    if (dataPacket == null)
    {
        throw new ArgumentNullException("dataPacket", "Invalid input parameters(null) for FFmpeg frame convertion.");
    }
    if (headerPacket == null)
    {
        // Fall back to the data packet itself as the header source.
        headerPacket = dataPacket;
    }

    try
    {
        // Detect the codec from the Data packet
        using (VideoInputData videoInputData = CreateCodecFromPacket(dataPacket, headerPacket))
        {
            if (videoInputData == null)
            {
                return null;
            }

            // Create the RAW video output codec
            using (VideoOuputData videoOutputData = CreateOutputCodec(videoInputData))
            {
                if (videoOutputData == null)
                {
                    return null;
                }

                // Create the bitmap from the first video frame in Data
                return VideoDataToBitmap(videoInputData, videoOutputData);
            }
        }
    }
    catch (Exception)
    {
        // Conversion is best-effort: any native/interop failure yields null.
        // CONSISTENCY FIX: log under DEBUG like every other failure path in this file.
#if DEBUG
        Log.Info("An unexpected error occurred during the frame conversion.");
#endif
        return null;
    }
    finally
    {
        // Always release the memory stream registered for this conversion id.
        lock (_videoMemoryStreams)
        {
            _videoMemoryStreams.Remove(_convertId);
        }
    }
}
/// <summary>
/// Create the actual Bitmap from the dataPacket.
/// This is done by decoding the first video frame, converting it to BGRA with
/// swscale and re-encoding it with the RAW output codec.
/// </summary>
/// <param name="videoInputData">Input Data, contains input codec, frame Data etc.</param>
/// <param name="videoOutputData">Output Data, contains output codec (settings).</param>
/// <returns>The Bitmap as result from the frame conversion, null when the operation failed.</returns>
private Bitmap VideoDataToBitmap(VideoInputData videoInputData, VideoOuputData videoOutputData)
{
    // Allocate video frames (decoded input frame and converted output frame)
    IntPtr pFrame = _ffmpeg.AvcodecAllocFrame();
    IntPtr pOutFrame = _ffmpeg.AvcodecAllocFrame();

    // Set by the decoder when a complete picture has been produced.
    int gotPicture = 0;

    // Return / output bitmap object
    Bitmap bitmap = null;

    // Video packet (pointer) to decode
    IntPtr pPacket = Allocate<AvPacket>();

    try
    {
        while (_ffmpeg.AvReadFrame(videoInputData.PInputFormatContext, pPacket) >= 0)
        {
            AvPacket packet = PtrToStructure<AvPacket>(pPacket);

            // Skip packets that do not belong to the video stream.
            if (packet.StreamIndex != videoInputData.VideoStartIndex)
            {
                continue;
            }

            // Decode (input) video frame; keep reading until a full picture arrives.
            _ffmpeg.AvcodecDecodeVideo(videoInputData.PInputCodecContext, pFrame, ref gotPicture, packet.Data, packet.Size);
            if (gotPicture == 0)
            {
                continue;
            }

            // Create the output buffer sized for a BGRA picture.
            int bufSize = _ffmpeg.AvpictureGetSize((int)PixelFormat.PixFmtBgra, videoInputData.Width, videoInputData.Height);
            byte[] outbuf = new byte[bufSize];

            // BUGFIX: the buffer address is handed to native code, so the array must
            // actually be pinned. Marshal.UnsafeAddrOfPinnedArrayElement does NOT pin;
            // the GC could relocate the array while FFmpeg writes through the raw pointer.
            GCHandle outbufHandle = GCHandle.Alloc(outbuf, GCHandleType.Pinned);
            IntPtr swsContext = IntPtr.Zero;
            try
            {
                IntPtr pOutbuf = outbufHandle.AddrOfPinnedObject();
                _ffmpeg.AvpictureFill(pOutFrame, pOutbuf, (int)PixelFormat.PixFmtBgra, videoInputData.Width, videoInputData.Height);

                AvFrame frame = PtrToStructure<AvFrame>(pFrame);
                AvFrame outFrame = PtrToStructure<AvFrame>(pOutFrame);

                // Convert the decoded frame from its source pixel format to BGRA.
                swsContext = _ffmpeg.SwsGetContext(videoInputData.Width, videoInputData.Height,
                                                   (int)videoInputData.VideoCodecContext.PixFmt,
                                                   videoInputData.Width, videoInputData.Height,
                                                   (int)PixelFormat.PixFmtBgra,
                                                   SwsFastBilinear, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);
                _ffmpeg.SwsScale(swsContext, frame.Data, frame.Linesize, 0, videoInputData.Height,
                                 outFrame.Data, outFrame.Linesize);

                // Encode RAW output frame into the pinned buffer.
                int outLength = _ffmpeg.AvcodecEncodeVideo(videoOutputData.POutputCodecContext, pOutbuf, bufSize, pOutFrame);

                // Check if frame encode succeeded
                if (outLength > 0)
                {
                    try
                    {
                        bitmap = CopyRawDataToBitmap(videoInputData, outbuf);
                        break; // A bitmap has been created, stop searching for a frame.
                    }
                    catch (Exception)
                    {
#if DEBUG
                        Log.Info("An error occurred while creating the output bitmap.");
#endif
                    }
                }
#if DEBUG
                else
                {
                    Log.Info("Ouput buffer is empty, no bitmap was created.");
                }
#endif
            }
            finally
            {
                if (swsContext != IntPtr.Zero)
                {
                    _ffmpeg.SwsFreeContext(swsContext);
                }
                outbufHandle.Free();
            }
        }
    }
    finally
    {
        // NOTE(review): packets produced by AvReadFrame are only released once, after
        // the loop; FFmpeg's av_read_frame allocates data per packet, so a per-iteration
        // free may be required to avoid leaking skipped packets — confirm wrapper semantics.
        _ffmpeg.AvFree(pOutFrame);
        _ffmpeg.AvFree(pFrame);
        _ffmpeg.AvFreePacket(pPacket);
        Marshal.FreeHGlobal(pPacket);
    }
    return bitmap;
}