/// <summary>
/// Converts/scales the supplied source frame into the configured destination pixel
/// format and size using a lazily created, cached SwScale context. No-op when the
/// source pixel format already matches the destination format (GetYData/GetImage
/// then read the source frame directly).
/// </summary>
/// <param name="pSrcFrameInfo">Unmanaged pointer to the decoded source AVFrame.</param>
/// <param name="srcFrameInfo">Managed copy of the same AVFrame, marshaled by the caller.</param>
/// <exception cref="InvalidOperationException">
/// SwScale context creation, avpicture_fill, or sws_scale failed.
/// </exception>
public void ProcessImage(IntPtr pSrcFrameInfo, FFmpeg.AVFrame srcFrameInfo)
{
    int ret;

    // Cached per-field Y-plane copies belong to the previous frame; invalidate them.
    _yDataBytes.Clear();

    _pSrcFrameInfo = pSrcFrameInfo;
    _srcFrameInfo = srcFrameInfo;

    FFmpeg.AVPixelFormat dstPixelFormat = _dstAVPixelFormat[(int)DstPixelFormat];
    if (_srcCodecContext.pix_fmt == dstPixelFormat)
    {
        // No conversion required.
        return;
    }

    if (_pSwsContext == IntPtr.Zero)
    {
        // First frame: create the scaler plus the destination frame/buffer once.
        // NOTE(review): the context is keyed to the first frame's dimensions; if the
        // stream changed resolution mid-play this cached context would be stale.
        _pSwsContext = FFmpeg.SwScale.sws_getContext(
            _srcFrameInfo.width, _srcFrameInfo.height, _srcCodecContext.pix_fmt,
            DstSize.Width, DstSize.Height, dstPixelFormat,
            ResizeMethod, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);
        if (_pSwsContext == IntPtr.Zero)
        {
            throw new InvalidOperationException("Failed to create SwScale context");
        }

        _dstFrameBytesLength = FFmpeg.avpicture_get_size((int)dstPixelFormat, DstSize.Width, DstSize.Height);
        _pDstFrameBytes = Marshal.AllocHGlobal(_dstFrameBytesLength);
        RtlZeroMemory(_pDstFrameBytes, _dstFrameBytesLength);

        _pDstFrameInfo = FFmpeg.avcodec_alloc_frame();
        ret = FFmpeg.avpicture_fill(_pDstFrameInfo, _pDstFrameBytes, dstPixelFormat, DstSize.Width, DstSize.Height);
        if (ret < 0)
        {
            throw new InvalidOperationException("Failed to fill picture: " + ret.ToString());
        }

        // avpicture_fill does not populate these AVFrame members; write them directly
        // into the unmanaged struct at the pre-computed field offsets, then refresh
        // the managed copy.
        Marshal.WriteInt32(_pDstFrameInfo, _marshalOffset_AVFrame_width, DstSize.Width);
        Marshal.WriteInt32(_pDstFrameInfo, _marshalOffset_AVFrame_height, DstSize.Height);
        Marshal.WriteInt32(_pDstFrameInfo, _marshalOffset_AVFrame_interlaced_frame, _srcFrameInfo.interlaced_frame);
        _dstFrameInfo = (FFmpeg.AVFrame)Marshal.PtrToStructure(_pDstFrameInfo, typeof(FFmpeg.AVFrame));
    }

    //FFmpeg.av_log_set_callback(new FFmpeg.AVLogCallback(AVLogCallback));

    // sws_scale expects pointers to the data[] and linesize[] arrays embedded
    // inside the unmanaged AVFrame structs.
    IntPtr pScaleSrcData = _pSrcFrameInfo + _marshalOffset_AVFrame_data;
    IntPtr pScaleSrcLineSize = _pSrcFrameInfo + _marshalOffset_AVFrame_linesize;
    IntPtr pScaleDstData = _pDstFrameInfo + _marshalOffset_AVFrame_data;
    IntPtr pScaleDstLineSize = _pDstFrameInfo + _marshalOffset_AVFrame_linesize;

    ret = FFmpeg.SwScale.sws_scale(_pSwsContext, pScaleSrcData, pScaleSrcLineSize, 0,
        _srcFrameInfo.height, pScaleDstData, pScaleDstLineSize);
    if (ret < 0)
    {
        throw new InvalidOperationException("Failed to scale frame: " + ret.ToString());
    }
}
/// <summary>
/// Returns the luma (Y) plane of the most recently processed frame, either as the
/// full (possibly interlaced) frame or as a single de-interleaved field. Copies are
/// cached per field index until the next ProcessImage call clears the cache.
/// </summary>
/// <param name="fieldIndex">0-based field to extract, or -1 for the full interlaced frame.</param>
/// <returns>A YData wrapper around the copied plane bytes.</returns>
public ImageProcessing.YData GetYData(int fieldIndex = -1)
{
    FFmpeg.AVFrame frameInfo = _dstFrameInfo;
    FFmpeg.AVPixelFormat dstPixelFormat = _dstAVPixelFormat[(int)DstPixelFormat];
    if (_srcCodecContext.pix_fmt == dstPixelFormat)
    {
        // No conversion was performed; read straight from the source frame.
        frameInfo = _srcFrameInfo;
    }

    int width = frameInfo.width;
    int height = frameInfo.height;
    int stride = frameInfo.linesize[0];
    // Single-field extraction only applies when a field was requested AND the frame
    // is actually interlaced.
    bool singleField = (fieldIndex != -1) && (frameInfo.interlaced_frame != 0);

    if (singleField)
    {
        // ticks_per_frame is used throughout this class as fields-per-frame (2 when interlaced).
        height /= _srcCodecContext.ticks_per_frame;
    }

    byte[] data;
    if (!_yDataBytes.TryGetValue(fieldIndex, out data))
    {
        if (singleField)
        {
            int dstSize = stride * height;
            data = new byte[dstSize];

            int firstRow = fieldIndex;
            if (frameInfo.top_field_first != 0)
            {
                // NOTE(review): with ticks_per_frame == 2 and fieldIndex == 0 this yields
                // firstRow == 2, which skips the top row pair and makes the final
                // Marshal.Copy read one row past the end of the plane. Suspected
                // off-by-one; intent was probably (ticks_per_frame - 1) - fieldIndex.
                // Preserved as-is pending confirmation of the intended field order.
                firstRow = _srcCodecContext.ticks_per_frame - fieldIndex;
            }

            // De-interleave: copy every ticks_per_frame-th row starting at firstRow.
            IntPtr pFrameBytes = frameInfo.data[0] + (stride * firstRow);
            int srcStride = stride * _srcCodecContext.ticks_per_frame;
            for (int dstOffset = 0; dstOffset < dstSize; dstOffset += stride)
            {
                Marshal.Copy(pFrameBytes, data, dstOffset, stride);
                pFrameBytes += srcStride;
            }
        }
        else
        {
            // Full frame: one contiguous copy of the whole plane (including stride padding).
            data = new byte[stride * frameInfo.height];
            Marshal.Copy(frameInfo.data[0], data, 0, stride * frameInfo.height);
        }
        _yDataBytes.Add(fieldIndex, data);
    }

    return (new ImageProcessing.YData(width, height, stride, data));
}
/// <summary>
/// Returns the most recently processed frame as a GDI+ Bitmap — either the full
/// frame or one de-interleaved field. Only RGB24 and Y destination formats are
/// supported; any other format returns null.
/// </summary>
/// <param name="fieldIndex">0-based field to extract, or -1 for the full interlaced frame.</param>
/// <returns>A 24bpp Bitmap, or null when the destination format is unsupported.</returns>
public Bitmap GetImage(int fieldIndex = -1)
{
    FFmpeg.AVFrame frameInfo = _dstFrameInfo;
    FFmpeg.AVPixelFormat dstPixelFormat = _dstAVPixelFormat[(int)DstPixelFormat];
    if (_srcCodecContext.pix_fmt == dstPixelFormat)
    {
        // No conversion was performed; read straight from the source frame.
        frameInfo = _srcFrameInfo;
    }

    if (DstPixelFormat == PixelFormat.RGB24)
    {
        IntPtr pFrameBytes = frameInfo.data[0];
        int srcStride = frameInfo.linesize[0];

        if ((fieldIndex != -1) && (frameInfo.interlaced_frame != 0))
        {
            // Single field: half the destination height.
            var height = DstSize.Height / _srcCodecContext.ticks_per_frame;
            var image = new System.Drawing.Bitmap(DstSize.Width, height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            var imageData = image.LockBits(new System.Drawing.Rectangle(0, 0, DstSize.Width, height),
                System.Drawing.Imaging.ImageLockMode.WriteOnly, image.PixelFormat);

            int firstRow = fieldIndex;
            if (frameInfo.top_field_first != 0)
            {
                // NOTE(review): same suspected off-by-one as GetYData — fieldIndex 0
                // yields firstRow == ticks_per_frame, skipping the first row pair and
                // reading past the end of the plane on the last row. Probably meant
                // (ticks_per_frame - 1) - fieldIndex; preserved pending confirmation.
                firstRow = _srcCodecContext.ticks_per_frame - fieldIndex;
            }

            // Copy every ticks_per_frame-th source row into consecutive bitmap rows.
            // Row-by-row with a bounded length, because the GDI+ stride (4-byte
            // aligned) need not equal the FFmpeg linesize (often 16/32-byte aligned);
            // the previous single-stride copy skewed the image or overran the
            // LockBits buffer whenever they differed.
            int srcFieldStride = srcStride * _srcCodecContext.ticks_per_frame;
            int rowBytes = Math.Min(srcStride, imageData.Stride);
            IntPtr pSrc = pFrameBytes + (srcStride * firstRow);
            IntPtr pDst = imageData.Scan0;
            for (int row = 0; row < height; row++)
            {
                memcpy(pDst, pSrc, rowBytes);
                pSrc += srcFieldStride;
                pDst += imageData.Stride;
            }
            image.UnlockBits(imageData);
            return (image);
        }
        else
        {
            var image = new System.Drawing.Bitmap(DstSize.Width, DstSize.Height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            var imageData = image.LockBits(new System.Drawing.Rectangle(0, 0, DstSize.Width, DstSize.Height),
                System.Drawing.Imaging.ImageLockMode.WriteOnly, image.PixelFormat);

            // Row-by-row copy for the same stride-mismatch reason as above; the
            // previous bulk memcpy of linesize * height bytes overran the bitmap
            // buffer when linesize exceeded the GDI+ stride. Row count is bounded
            // by both the source frame and the bitmap.
            int rows = Math.Min(frameInfo.height, DstSize.Height);
            int rowBytes = Math.Min(srcStride, imageData.Stride);
            IntPtr pSrc = pFrameBytes;
            IntPtr pDst = imageData.Scan0;
            for (int row = 0; row < rows; row++)
            {
                memcpy(pDst, pSrc, rowBytes);
                pSrc += srcStride;
                pDst += imageData.Stride;
            }
            image.UnlockBits(imageData);
            return (image);
        }
    }
    else if (DstPixelFormat == PixelFormat.Y)
    {
        // Y output: delegate to GetYData and let YData build the bitmap.
        var yData = GetYData(fieldIndex);
        return (yData.GetBitmap());
    }
    return (null);
}
/// <summary>
/// Converts the decoded frame in _pFrameOrig into a PendingFrame containing one
/// FrameField per field (ticks_per_frame fields), populating Y data and/or bitmaps
/// according to the Output* flags, with timing derived from packet DTS values.
/// </summary>
private PendingFrame ProcessFrame()
{
    _frameOrig = (FFmpeg.AVFrame)Marshal.PtrToStructure(_pFrameOrig, typeof(FFmpeg.AVFrame));

    // streams is an AVStream** array; index by the native pointer size. The
    // previous hard-coded element size of 4 read the wrong entry on 64-bit.
    IntPtr pStream = Marshal.ReadIntPtr(_formatContext.streams, _videoStreamIndex * IntPtr.Size);
    var stream = (FFmpeg.AVStream)Marshal.PtrToStructure(pStream, typeof(FFmpeg.AVStream));
    _videoCodecContext = (FFmpeg.AVCodecContext)Marshal.PtrToStructure(stream.codec, typeof(FFmpeg.AVCodecContext));

    // Capture the first frame's DTS and the per-field DTS duration once; all later
    // field timestamps are offsets from these.
    if (_videoFrameFirstDTS == null)
    {
        _videoFrameFirstDTS = _frameOrig.pkt_dts;
    }
    if (_videoFieldDTSDuration == null)
    {
        _videoFieldDTSDuration = _frameOrig.pkt_duration / _videoCodecContext.ticks_per_frame;
    }

    var fieldList = new List<FrameField>();

    //---------- Start YUV Image ----------
    if (OutputYData || OutputYImage)
    {
        _scalerY.ProcessImage(_pFrameOrig, _frameOrig);
    }
    //---------- End YUV Image ----------

    //---------- Start RGB Image ----------
    if (OutputRGBImage && !OutputYImage)
    {
        _scalerRGB.ProcessImage(_pFrameOrig, _frameOrig);
    }
    //---------- End RGB Image ----------

    for (int fieldIndex = 0; fieldIndex < _videoCodecContext.ticks_per_frame; fieldIndex++)
    {
        var field = new FrameField();

        if (OutputYData || OutputYImage)
        {
            field.YData = _scalerY.GetYData(fieldIndex);
            if (OutputYImage)
            {
                field.Image = _scalerY.GetImage(fieldIndex);
            }
        }
        if (OutputRGBImage && !OutputYImage)
        {
            field.Image = _scalerRGB.GetImage(fieldIndex);
        }

        // PTS is relative to the first frame's DTS, advanced one field duration per field.
        field.PTS = (_frameOrig.pkt_dts - _videoFrameFirstDTS.Value) + (fieldIndex * _videoFieldDTSDuration.Value);
        field.Seconds = field.PTS * _videoCodecContext.pkt_timebase.num / (double)_videoCodecContext.pkt_timebase.den;
        field.FieldIndex = fieldIndex;
        field.FieldNumber = (int)(field.PTS / _videoFieldDTSDuration.Value);
        field.FrameNumber = (int)(field.FieldNumber / _videoCodecContext.ticks_per_frame);
        field.FilePosition = _frameOrig.pkt_pos;
        field.AVFrame = _frameOrig;
        fieldList.Add(field);
    }

    var frame = new PendingFrame();
    frame.Fields = fieldList.ToArray();
    return (frame);
}
/// <summary>
/// Sample/test entry point: encodes 250 frames (10 s at 25 fps) of a synthetic
/// YUV420 gradient pattern with the MPEG-1 encoder and writes the raw stream,
/// terminated by an MPEG sequence end code, to a hard-coded output file.
/// </summary>
/// <param name="pathToVideo">Currently unused; output path is hard-coded (see NOTE below).</param>
void Main(String pathToVideo)
{
    FFmpeg.avcodec_init();
    FFmpeg.av_register_all();

    // Find the MPEG-1 video encoder. (The original comment claimed H.264, but
    // CODEC_ID_MPEG1VIDEO is what is actually requested.)
    IntPtr pcodec = FFmpeg.avcodec_find_encoder(FFmpeg.CodecID.CODEC_ID_MPEG1VIDEO);
    if (pcodec == IntPtr.Zero)
    {
        Console.WriteLine("codec not found");
        return;
    }

    IntPtr pcontext = FFmpeg.avcodec_alloc_context();
    IntPtr ppicture = FFmpeg.avcodec_alloc_frame();

    FFmpeg.AVCodecContext context = (FFmpeg.AVCodecContext)Marshal.PtrToStructure(pcontext, typeof(FFmpeg.AVCodecContext));
    // Sample encoding parameters.
    context.bit_rate = 400000;
    // Resolution must be a multiple of two.
    context.width = 352;
    context.height = 288;
    // Frame rate 25/1.
    context.time_base.den = 25;
    context.time_base.num = 1;
    context.gop_size = 10; // emit one intra frame every ten frames
    context.max_b_frames = 1;
    context.pix_fmt = FFmpeg.PixelFormat.PIX_FMT_YUV420P;
    Marshal.StructureToPtr(context, pcontext, false);

    if (FFmpeg.avcodec_open(pcontext, pcodec) < 0)
    {
        Console.WriteLine("could not open codec");
        // Don't leak the native allocations on the early-out path.
        FFmpeg.av_free(pcontext);
        FFmpeg.av_free(ppicture);
        return;
    }

    // Allocate the encode output buffer and one YUV 4:2:0 picture buffer
    // (Y plane = width*height bytes, Cb and Cr a quarter of that each).
    int outbuf_size = 100000;
    byte[] outbuf = new byte[outbuf_size];
    int size = context.width * context.height;
    byte[] picture_buf = new byte[(size * 3) / 2];

    // Pin the managed buffers: their addresses are handed to native code, and
    // UnsafeAddrOfPinnedArrayElement does NOT pin by itself — without a pinned
    // GCHandle the GC could relocate the arrays while FFmpeg holds raw pointers
    // into them.
    GCHandle hPictureBuf = GCHandle.Alloc(picture_buf, GCHandleType.Pinned);
    GCHandle hOutbuf = GCHandle.Alloc(outbuf, GCHandleType.Pinned);
    System.IO.FileStream fstream = null;
    try
    {
        FFmpeg.AVFrame picture = (FFmpeg.AVFrame)Marshal.PtrToStructure(ppicture, typeof(FFmpeg.AVFrame));
        // Plane pointers into the single pinned YUV420 buffer: Y, then Cb, then Cr.
        picture.data[0] = Marshal.UnsafeAddrOfPinnedArrayElement(picture_buf, 0);
        picture.data[1] = Marshal.UnsafeAddrOfPinnedArrayElement(picture_buf, size);
        picture.data[2] = Marshal.UnsafeAddrOfPinnedArrayElement(picture_buf, size + size / 4);
        picture.linesize[0] = context.width;
        picture.linesize[1] = context.width / 2;
        picture.linesize[2] = context.width / 2;
        int data1offset = size;          // start of the Cb plane within picture_buf
        int data2offset = size + size / 4; // start of the Cr plane within picture_buf
        Marshal.StructureToPtr(picture, ppicture, false);

        // NOTE(review): output path is hard-coded and pathToVideo is unused —
        // presumably the parameter was meant to supply this path; confirm before wiring it in.
        fstream = new System.IO.FileStream(@"C:\inetpub\wwwroot\Cache\000001.mp4", System.IO.FileMode.Create);

        int i, out_size = 0, x, y;
        // Encode 250 frames (10 seconds at 25 fps) of a moving gradient test pattern.
        for (i = 0; i < 250; i++)
        {
            // Y plane.
            for (y = 0; y < context.height; y++)
            {
                for (x = 0; x < context.width; x++)
                {
                    picture_buf[y * picture.linesize[0] + x] = (byte)(x + y + i * 3);
                }
            }
            // Cb and Cr planes (quarter resolution).
            for (y = 0; y < context.height / 2; y++)
            {
                for (x = 0; x < context.width / 2; x++)
                {
                    picture_buf[data1offset + y * picture.linesize[1] + x] = (byte)(128 + y + i * 2);
                    picture_buf[data2offset + y * picture.linesize[2] + x] = (byte)(64 + x + i * 5);
                }
            }

            // Encode the image and append the compressed bytes to the file.
            out_size = FFmpeg.avcodec_encode_video(pcontext, Marshal.UnsafeAddrOfPinnedArrayElement(outbuf, 0), outbuf_size, ppicture);
            Console.WriteLine("encoding frame {0:D3} (size={1:D5})", i, out_size);
            fstream.Write(outbuf, 0, out_size);
        }

        // Flush the encoder's delayed frames (pass a null picture until it drains).
        for (; out_size > 0; i++)
        {
            out_size = FFmpeg.avcodec_encode_video(pcontext, Marshal.UnsafeAddrOfPinnedArrayElement(outbuf, 0), outbuf_size, IntPtr.Zero);
            Console.WriteLine("encoding frame {0:D3} (size={1:D5})", i, out_size);
            fstream.Write(outbuf, 0, out_size);
        }

        // MPEG sequence end code so the output is a well-terminated MPEG stream.
        outbuf[0] = 0x00;
        outbuf[1] = 0x00;
        outbuf[2] = 0x01;
        outbuf[3] = 0xb7;
        fstream.Write(outbuf, 0, 4);
    }
    finally
    {
        // Release managed and native resources even if encoding throws.
        if (fstream != null)
        {
            fstream.Close();
        }
        hOutbuf.Free();
        hPictureBuf.Free();
        FFmpeg.avcodec_close(pcontext);
        FFmpeg.av_free(pcontext);
        FFmpeg.av_free(ppicture);
    }
}