/// <summary>
/// Extracts every plane of the current video frame (<c>pFrame</c>) into managed byte arrays,
/// following the same plane-walk as <see cref="ffmpeg.av_image_copy(ref byte_ptrArray4, ref int_array4, ref byte_ptrArray4, int_array4, AVPixelFormat, int, int)"/>.
/// </summary>
/// <returns>
/// One <c>byte[]</c> per plane; for paletted formats the second entry is the 256-entry BGRA palette.
/// </returns>
/// <exception cref="FFmpegException">Thrown when the pixel format is unknown or hardware-accelerated.</exception>
private byte[][] GetVideoData()
{
    unsafe
    {
        List<byte[]> planes = new List<byte[]>();

        AVPixFmtDescriptor* fmtDesc = ffmpeg.av_pix_fmt_desc_get((AVPixelFormat)pFrame->format);

        // Hardware frames keep their pixels in device memory; nothing to copy from here.
        if (fmtDesc == null || (fmtDesc->flags & ffmpeg.AV_PIX_FMT_FLAG_HWACCEL) != 0)
        {
            throw new FFmpegException(FFmpegException.NotSupportFrame);
        }

        bool hasPalette = (fmtDesc->flags & ffmpeg.AV_PIX_FMT_FLAG_PAL) != 0;
        bool pseudoPalette = (fmtDesc->flags & ffmpeg.AV_PIX_FMT_FLAG_PSEUDOPAL) != 0;

        if (hasPalette || pseudoPalette)
        {
            // Paletted data: plane 0 holds the indices, plane 1 (PAL only) the 256 BGRA entries.
            planes.Add(GetVideoPlane((IntPtr)pFrame->data[0], pFrame->linesize[0], pFrame->width, pFrame->height));

            if (hasPalette && pFrame->data[1] != null)
            {
                byte[] palette = new byte[4 * 256];
                Marshal.Copy((IntPtr)pFrame->data[1], palette, 0, palette.Length);
                planes.Add(palette);
            }
        }
        else
        {
            // The number of planes is one past the highest plane index any component maps to.
            int planeCount = 0;
            for (int c = 0; c < fmtDesc->nb_components; c++)
            {
                planeCount = Math.Max(planeCount, fmtDesc->comp[(uint)c].plane + 1);
            }

            for (int p = 0; p < planeCount; p++)
            {
                int byteWidth = ffmpeg.av_image_get_linesize((AVPixelFormat)pFrame->format, pFrame->width, p);
                byteWidth.ThrowExceptionIfError();

                // Chroma planes (1 and 2) are vertically subsampled; round up per the descriptor.
                int planeHeight = (p == 1 || p == 2)
                    ? (int)Math.Ceiling((double)pFrame->height / (1 << fmtDesc->log2_chroma_h))
                    : pFrame->height;

                planes.Add(GetVideoPlane((IntPtr)pFrame->data[(uint)p], pFrame->linesize[(uint)p], byteWidth, planeHeight));
            }
        }

        return planes.ToArray();
    }
}
/// <summary>
/// P/Invoke binding for FFmpeg's <c>av_image_fill_max_pixsteps()</c>: for each of up to 4 planes,
/// computes the maximum pixel step (distance in bytes between two consecutive pixels on the same line)
/// and, in <paramref name="max_pixstep_comps_Array4"/>, the component owning that step, based on the
/// given pixel-format descriptor.
/// </summary>
public static extern void av_image_fill_max_pixsteps(int *max_pixsteps_Array4, int *max_pixstep_comps_Array4, AVPixFmtDescriptor *pixdesc);
/// <summary>
/// P/Invoke binding for FFmpeg's <c>av_pix_fmt_desc_get_id()</c>: returns the <see cref="AVPixelFormat"/>
/// identifier corresponding to the given descriptor, or <c>AV_PIX_FMT_NONE</c> when the pointer is not
/// a valid descriptor.
/// </summary>
public static extern AVPixelFormat av_pix_fmt_desc_get_id(AVPixFmtDescriptor *desc);
/// <summary>
/// P/Invoke binding for FFmpeg's <c>av_pix_fmt_desc_next()</c>: iterates over all registered pixel-format
/// descriptors. Pass <c>null</c> to get the first descriptor; returns <c>null</c> after the last one.
/// </summary>
public static extern AVPixFmtDescriptor *av_pix_fmt_desc_next(AVPixFmtDescriptor *prev);
/// <summary>
/// P/Invoke binding for FFmpeg's <c>av_get_padded_bits_per_pixel()</c>: returns the number of bits per
/// pixel for the described format, including any padding bits.
/// </summary>
public static extern int av_get_padded_bits_per_pixel(AVPixFmtDescriptor *pixdesc);
/// <summary>
/// P/Invoke binding for FFmpeg's <c>av_write_image_line()</c>: writes the values from
/// <paramref name="src"/> into the image line at position (<paramref name="x"/>, <paramref name="y"/>)
/// for component <paramref name="c"/>, covering <paramref name="w"/> pixels, using the layout described
/// by <paramref name="desc"/>.
/// </summary>
public static extern void av_write_image_line(ushort *src, byte **data_Array4, int *linesize_Array4, AVPixFmtDescriptor *desc, int x, int y, int c, int w);
/// <summary>
/// P/Invoke binding for FFmpeg's <c>av_read_image_line()</c>: reads <paramref name="w"/> pixels of
/// component <paramref name="c"/> starting at (<paramref name="x"/>, <paramref name="y"/>) into
/// <paramref name="dst"/>. When <paramref name="read_pal_component"/> is non-zero, paletted formats
/// yield the palette component value instead of the raw palette index.
/// </summary>
public static extern void av_read_image_line(ushort *dst, byte **data_Array4, int *linesize_Array4, AVPixFmtDescriptor *desc, int x, int y, int c, int w, int read_pal_component);
/// <summary>
/// Builds a <see cref="StreamInfo"/> snapshot from an FFmpeg <see cref="AVStream"/>:
/// common fields (codec, timebase in 100ns ticks, duration, bitrate), then video- or
/// audio-specific details, then stream metadata and language.
/// </summary>
/// <param name="st">The demuxed stream to describe. Must be non-null with valid <c>codecpar</c>.</param>
/// <returns>A populated <see cref="StreamInfo"/>.</returns>
public static StreamInfo Get(AVStream *st)
{
    StreamInfo si = new StreamInfo();
    si.Type        = st->codecpar->codec_type;
    si.CodecID     = st->codecpar->codec_id;
    si.CodecName   = avcodec_get_name(st->codecpar->codec_id);
    si.StreamIndex = st->index;
    // Timebase converted to 100-nanosecond ticks (seconds * 10,000,000).
    si.Timebase    = av_q2d(st->time_base) * 10000.0 * 1000.0;
    si.Duration    = (long)(st->duration * si.Timebase);
    si.StartTime   = (st->start_time != AV_NOPTS_VALUE) ? (long)(st->start_time * si.Timebase) : 0;
    si.BitRate     = st->codecpar->bit_rate;

    if (si.Type == AVMEDIA_TYPE_VIDEO)
    {
        si.PixelFormat     = (AVPixelFormat)st->codecpar->format;
        si.PixelFormatStr  = si.PixelFormat.ToString().Replace("AV_PIX_FMT_", "").ToLower();
        si.PixelFormatType = PixelFormatType.Software_Sws;
        si.Width           = st->codecpar->width;
        si.Height          = st->codecpar->height;
        si.FPS             = av_q2d(st->r_frame_rate);

        // Guard against unknown dimensions (0x0) which would make GCD zero and divide by zero.
        var gcd = Utils.GCD(si.Width, si.Height);
        si.AspectRatio = gcd > 0
            ? new AspectRatio(si.Width / gcd, si.Height / gcd)
            : new AspectRatio(si.Width, si.Height);

        if (si.PixelFormat != AVPixelFormat.AV_PIX_FMT_NONE)
        {
            si.ColorRange = st->codecpar->color_range == AVColorRange.AVCOL_RANGE_JPEG ? "FULL" : "LIMITED";

            // Fix: the BT470BG check was previously dead code — the size heuristic that followed
            // unconditionally overwrote it. Chain with else-if so explicit signaling wins.
            if (st->codecpar->color_space == AVColorSpace.AVCOL_SPC_BT470BG)
            {
                si.ColorSpace = "BT601";
            }
            else if (si.Width > 1024 || si.Height >= 600)
            {
                si.ColorSpace = "BT709";  // SD-size heuristic when the container doesn't say
            }
            else
            {
                si.ColorSpace = "BT601";
            }

            AVPixFmtDescriptor *pixFmtDesc = av_pix_fmt_desc_get(si.PixelFormat);
            si.PixelFormatDesc = pixFmtDesc;

            // Copy the component descriptors once instead of calling ToArray() three times.
            var comps = pixFmtDesc->comp.ToArray();
            var comp0 = comps[0];
            var comp1 = comps[1];
            var comp2 = comps[2];

            si.PixelBits = comp0.depth;
            si.IsPlanar  = (pixFmtDesc->flags & AV_PIX_FMT_FLAG_PLANAR) != 0;
            si.IsRGB     = (pixFmtDesc->flags & AV_PIX_FMT_FLAG_RGB) != 0;
            si.Comp0Step = comp0.step;
            si.Comp1Step = comp1.step;
            si.Comp2Step = comp2.step;

            bool isYuv = System.Text.RegularExpressions.Regex.IsMatch(
                si.PixelFormat.ToString(), "YU|YV",
                System.Text.RegularExpressions.RegexOptions.IgnoreCase);

            // YUV planar or packed, 3 components, 8-bit depth each:
            // handled directly by the software path (no semi-planar support there).
            if (isYuv && pixFmtDesc->nb_components == 3 &&
                comp0.depth == 8 && comp1.depth == 8 && comp2.depth == 8)
            {
                si.PixelFormatType = PixelFormatType.Software_Handled;
            }
        }
    }
    else if (si.Type == AVMEDIA_TYPE_AUDIO)
    {
        si.SampleFormat    = (AVSampleFormat)st->codecpar->format;
        si.SampleFormatStr = si.SampleFormat.ToString().Replace("AV_SAMPLE_FMT_", "").ToLower();
        si.SampleRate      = st->codecpar->sample_rate;
        si.ChannelLayout   = st->codecpar->channel_layout;
        si.Channels        = st->codecpar->channels;
        si.Bits            = st->codecpar->bits_per_coded_sample;

        // av_get_channel_layout_string writes a NUL-terminated description into the buffer.
        byte[] buf = new byte[50];
        fixed (byte *bufPtr = buf)
        {
            av_get_channel_layout_string(bufPtr, 50, si.Channels, si.ChannelLayout);
            si.ChannelLayoutStr = Utils.BytePtrToStringUTF8(bufPtr);
        }
    }

    // Walk all metadata entries. Use the indexer (not Add) so duplicate keys —
    // legal in FFmpeg dictionaries — overwrite instead of throwing.
    si.Metadata = new Dictionary<string, string>();
    AVDictionaryEntry *b = null;
    while ((b = av_dict_get(st->metadata, "", b, AV_DICT_IGNORE_SUFFIX)) != null)
    {
        si.Metadata[Utils.BytePtrToStringUTF8(b->key)] = Utils.BytePtrToStringUTF8(b->value);
    }

    foreach (var kv in si.Metadata)
    {
        if (kv.Key.Equals("language", StringComparison.OrdinalIgnoreCase) ||
            kv.Key.Equals("lang", StringComparison.OrdinalIgnoreCase))
        {
            si.Language = kv.Value;
            break;
        }
    }

    return si;
}