Code example #1
 private void Initialize(int width, int height, AVPixelFormat inFormat)
 {
   _initialized = true;
   _pContext = FFmpegInvoke.sws_getContext(width, height, inFormat, width, height, _pixelFormat, FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);
   if (_pContext == null)
     throw new VideoConverterException("Could not initialize the conversion context.");
   _pCurrentFrame = FFmpegInvoke.avcodec_alloc_frame();
   int outputDataSize = FFmpegInvoke.avpicture_get_size(_pixelFormat, width, height);
   _outputData = new byte[outputDataSize];
   // NOTE: avpicture_fill stores pOutputData inside the frame, so the managed
   // array must stay pinned for as long as the frame is used; pinning only for
   // the duration of this call (as here) risks the GC moving the array later.
   fixed (byte* pOutputData = &_outputData[0])
   {
     FFmpegInvoke.avpicture_fill((AVPicture*)_pCurrentFrame, pOutputData, _pixelFormat, width, height);
   }
 }
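The context built in Initialize is only half of the conversion; the actual pixel work happens in sws_scale. A minimal sketch of that step follows, assuming the same FFmpegInvoke binding exposes sws_scale with the usual libswscale signature (the Scale helper itself is hypothetical, not part of the original source):

 private void Scale(byte** srcData, int* srcLinesize, int srcHeight, byte** dstData, int* dstLinesize)
 {
   // Converts one frame through the context created in Initialize.
   int outHeight = FFmpegInvoke.sws_scale(_pContext, srcData, srcLinesize, 0, srcHeight, dstData, dstLinesize);
   if (outHeight <= 0)
     throw new VideoConverterException("sws_scale failed.");
 }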
Code example #2
 public VideoFrame(int width, int height, AVPixelFormat format, int align = 0) : base()
 {
     AllocBuffer(width, height, format, align);
 }
Code example #3
 public static extern int av_image_copy_to_buffer(byte* dst, int dst_size, byte** src_data, int* src_linesize, AVPixelFormat pix_fmt, int width, int height, int align);
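A typical use of av_image_copy_to_buffer is serializing a decoded frame's planes into one flat managed array. A sketch under the assumption that av_image_get_buffer_size is bound alongside it (see code examples #50 and #55) and that srcData/srcLinesize come from a decoded frame:

 public static unsafe byte[] CopyImageToManaged(byte** srcData, int* srcLinesize, AVPixelFormat fmt, int width, int height)
 {
     int size = av_image_get_buffer_size(fmt, width, height, 1);
     var buffer = new byte[size];
     fixed (byte* dst = buffer)
     {
         // Returns the number of bytes written, or a negative AVERROR code.
         int written = av_image_copy_to_buffer(dst, size, srcData, srcLinesize, fmt, width, height, 1);
         if (written < 0)
             throw new InvalidOperationException("av_image_copy_to_buffer failed.");
     }
     return buffer;
 }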
Code example #4
 public static extern int av_image_alloc(byte** pointers, int* linesizes, int w, int h, AVPixelFormat pix_fmt, int align);
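av_image_alloc writes one pointer per plane but allocates a single buffer owned by the first pointer, so only pointers[0] is freed. A usage sketch (assumes av_freep from libavutil is also bound in this wrapper):

 public static unsafe void AllocImageExample(AVPixelFormat fmt, int width, int height)
 {
     byte** pointers = stackalloc byte*[4];
     int* linesizes = stackalloc int[4];
     int size = av_image_alloc(pointers, linesizes, width, height, fmt, 1);
     if (size < 0)
         throw new InvalidOperationException("av_image_alloc failed.");
     // ... fill pointers[0..3] using linesizes[0..3] ...
     av_freep(&pointers[0]); // frees the whole image; the other plane pointers alias it
 }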
Code example #5
 public static extern AVPixelFormat av_find_best_pix_fmt_of_2(AVPixelFormat dst_pix_fmt1, AVPixelFormat dst_pix_fmt2, AVPixelFormat src_pix_fmt, int has_alpha, int* loss_ptr);
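Inside an unsafe block, a call might look like this (illustrative formats, not from the source); the loss bitmask reports what the chosen format sacrifices, e.g. chroma resolution or alpha:

 int loss = 0;
 AVPixelFormat best = av_find_best_pix_fmt_of_2(
     AVPixelFormat.AV_PIX_FMT_YUV420P, AVPixelFormat.AV_PIX_FMT_NV12,
     AVPixelFormat.AV_PIX_FMT_BGRA, 0 /* has_alpha */, &loss);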
Code example #6
 public static extern int av_pix_fmt_get_chroma_sub_sample(AVPixelFormat pix_fmt, int* h_shift, int* v_shift);
Code example #7
 public static extern AVFilterBufferRef* avfilter_get_video_buffer_ref_from_arrays(byte** data, int* linesize, int perms, int w, int h, AVPixelFormat format);
Code example #8
 public static extern AVPixelFormat avcodec_find_best_pix_fmt_of_list(AVPixelFormat* pix_fmt_list, AVPixelFormat src_pix_fmt, int has_alpha, int* loss_ptr);
Code example #9
File: AVCodec.cs Project: zsybupt/SaarFFmpeg
 public static extern void av_picture_copy(AVPicture *dst, AVPicture *src, AVPixelFormat pix_fmt, int width, int height);
Code example #10
File: AVCodec.cs Project: zsybupt/SaarFFmpeg
 public static extern int avpicture_layout(AVPicture *src, AVPixelFormat pix_fmt, int width, int height, byte *dest, int dest_size);
Code example #11
File: AVCodec.cs Project: zsybupt/SaarFFmpeg
 public static extern int avpicture_fill(AVPicture *picture, byte *ptr, AVPixelFormat pix_fmt, int width, int height);
Code example #12
File: AVCodec.cs Project: zsybupt/SaarFFmpeg
 public static extern int avpicture_alloc(AVPicture *picture, AVPixelFormat pix_fmt, int width, int height);
Code example #13
        public unsafe VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat, Size destinationSize, AVPixelFormat destinationPixelFormat)
        {
            this._destinationSize = destinationSize;
            this._pConvertContext = ffmpeg.sws_getContext(sourceSize.Width, sourceSize.Height, sourcePixelFormat, destinationSize.Width, destinationSize.Height, destinationPixelFormat, 1, null, null, null);
            if (this._pConvertContext == null)
            {
                throw new ApplicationException("Could not initialize the conversion context.");
            }
            int convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);

            this._convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            this._dstData     = default(byte_ptrArray4);
            this._dstLinesize = default(int_array4);
            ffmpeg.av_image_fill_arrays(ref this._dstData, ref this._dstLinesize, (byte *)(void *)this._convertedFrameBufferPtr, destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);
        }
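For context, the FFmpeg.AutoGen sample this constructor derives from pairs it with a Convert method and a Dispose that release the buffer and context. Roughly as follows (a sketch; the exact generated array helper types vary by binding version):

         public AVFrame Convert(AVFrame sourceFrame)
         {
             ffmpeg.sws_scale(_pConvertContext, sourceFrame.data, sourceFrame.linesize, 0, sourceFrame.height, _dstData, _dstLinesize);

             var data = new byte_ptrArray8();
             data.UpdateFrom(_dstData);
             var linesize = new int_array8();
             linesize.UpdateFrom(_dstLinesize);

             return new AVFrame { data = data, linesize = linesize, width = _destinationSize.Width, height = _destinationSize.Height };
         }

         public void Dispose()
         {
             Marshal.FreeHGlobal(_convertedFrameBufferPtr);
             ffmpeg.sws_freeContext(_pConvertContext);
         }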
Code example #14
 public static extern int av_image_fill_arrays(ref byte_ptrArray4 dst_data, ref int_array4 dst_linesize, byte *src, AVPixelFormat pix_fmt, int width, int height, int align);
Code example #15
 public void Init(int width, int height, AVPixelFormat format, int align = 0)
 {
     Clear();
     AllocBuffer(width, height, format, align);
 }
Code example #16
 public static extern int av_picture_crop(AVPicture* dst, AVPicture* src, AVPixelFormat pix_fmt, int top_band, int left_band);
Code example #17
 public static extern void avcodec_get_chroma_sub_sample(AVPixelFormat pix_fmt, int* h_shift, int* v_shift);
Code example #18
File: AVCodec.cs Project: zsybupt/SaarFFmpeg
 public static extern int av_picture_crop(AVPicture *dst, AVPicture *src, AVPixelFormat pix_fmt, int top_band, int left_band);
Code example #19
 public static extern int av_opt_set_pixel_fmt(void* obj, String name, AVPixelFormat fmt, int search_flags);
Code example #20
File: AVCodec.cs Project: zsybupt/SaarFFmpeg
 public static extern int av_picture_pad(AVPicture *dst, AVPicture *src, int height, int width, AVPixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright, int *color);
Code example #21
 public static extern String av_get_pix_fmt_string(String buf, int buf_size, AVPixelFormat pix_fmt);
Code example #22
File: AVCodec.cs Project: zsybupt/SaarFFmpeg
 public static extern void avcodec_get_chroma_sub_sample(AVPixelFormat pix_fmt, int *h_shift, int *v_shift);
Code example #23
 public static extern AVPixelFormat av_pix_fmt_swap_endianness(AVPixelFormat pix_fmt);
Code example #24
File: AVCodec.cs Project: zsybupt/SaarFFmpeg
 public static extern uint avcodec_pix_fmt_to_codec_tag(AVPixelFormat pix_fmt);
Code example #25
 public static extern int av_image_fill_linesizes(int* linesizes, AVPixelFormat pix_fmt, int width);
Code example #26
File: AVCodec.cs Project: zsybupt/SaarFFmpeg
 public static extern int avcodec_get_pix_fmt_loss(AVPixelFormat dst_pix_fmt, AVPixelFormat src_pix_fmt, int has_alpha);
Code example #27
 public static extern int av_image_fill_arrays(byte** dst_data, int* dst_linesize, byte* src, AVPixelFormat pix_fmt, int width, int height, int align);
Code example #28
File: AVCodec.cs Project: zsybupt/SaarFFmpeg
 public static extern AVPixelFormat avcodec_find_best_pix_fmt_of_list(AVPixelFormat *pix_fmt_list, AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr);
Code example #29
 public static extern int sws_isSupportedEndiannessConversion(AVPixelFormat pix_fmt);
Code example #30
File: AVCodec.cs Project: zsybupt/SaarFFmpeg
 public static extern AVPixelFormat avcodec_find_best_pix_fmt2(AVPixelFormat dst_pix_fmt1, AVPixelFormat dst_pix_fmt2, AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr);
Code example #31
 public static extern void av_picture_copy(AVPicture* dst, AVPicture* src, AVPixelFormat pix_fmt, int width, int height);
Code example #32
        public static unsafe byte[]? GetThumbnail(FfmpegSettings settings, bool extendedLogging)
        {
            try {
                if (UseNativeBinding)
                {
                    bool isGrayByte = settings.GrayScale == 1;

                    AVHWDeviceType HWDevice = HardwareAccelerationMode switch {
                        FFHardwareAccelerationMode.vdpau => AVHWDeviceType.AV_HWDEVICE_TYPE_VDPAU,
                        FFHardwareAccelerationMode.dxva2 => AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2,
                        FFHardwareAccelerationMode.vaapi => AVHWDeviceType.AV_HWDEVICE_TYPE_VAAPI,
                        FFHardwareAccelerationMode.qsv => AVHWDeviceType.AV_HWDEVICE_TYPE_QSV,
                        FFHardwareAccelerationMode.cuda => AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA,
                        _ => AVHWDeviceType.AV_HWDEVICE_TYPE_NONE
                    };

                    using var vsd = new VideoStreamDecoder(settings.File, HWDevice);
                    if (vsd.PixelFormat < 0 || vsd.PixelFormat >= AVPixelFormat.AV_PIX_FMT_NB)
                    {
                        throw new Exception($"Invalid source pixel format");
                    }

                    Size          sourceSize        = vsd.FrameSize;
                    AVPixelFormat sourcePixelFormat = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE
                                                ? vsd.PixelFormat
                                                : FFmpegHelper.GetHWPixelFormat(HWDevice);
                    Size          destinationSize        = isGrayByte ? new Size(16, 16) : new Size(100, Convert.ToInt32(sourceSize.Height * (100 / (double)sourceSize.Width)));
                    AVPixelFormat destinationPixelFormat = isGrayByte ? AVPixelFormat.AV_PIX_FMT_GRAY8 : AVPixelFormat.AV_PIX_FMT_BGRA;
                    using var vfc =
                              new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat);

                    if (!vsd.TryDecodeFrame(out var frame, settings.Position))
                    {
                        throw new Exception($"Failed decoding frame at {settings.Position}");
                    }
                    AVFrame convertedFrame = vfc.Convert(frame);

                    if (isGrayByte)
                    {
                        int length = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, convertedFrame.width,
                                                                     convertedFrame.height, 1).ThrowExceptionIfError();
                        byte[] data = new byte[length];
                        Marshal.Copy((IntPtr)convertedFrame.data[0], data, 0, length);
                        return(data);
                    }
                    else
                    {
                        int width      = convertedFrame.width;
                        int height     = convertedFrame.height;
                        var totalBytes = width * height * 4;
                        var rgbaBytes  = new byte[totalBytes];
                        int stride     = convertedFrame.linesize[0];
                        if (stride == width * 4)
                        {
                            Marshal.Copy((IntPtr)convertedFrame.data[0], rgbaBytes, 0, totalBytes);
                        }
                        else
                        {
                            var sourceOffset = 0;
                            var destOffset   = 0;
                            var byteWidth    = width * 4;
                            for (var y = 0; y < height; y++)
                            {
                                Marshal.Copy((IntPtr)convertedFrame.data[0] + sourceOffset, rgbaBytes, destOffset, byteWidth);
                                sourceOffset += stride;
                                destOffset   += byteWidth;
                            }
                        }
                        var image = Image.LoadPixelData<SixLabors.ImageSharp.PixelFormats.Bgra32>(rgbaBytes, width, height);
                        using MemoryStream stream = new();
                        image.Save(stream, new SixLabors.ImageSharp.Formats.Jpeg.JpegEncoder());
                        return(stream.ToArray());
                    }
                }
            }
            catch (Exception e) {
                Logger.Instance.Info($"Failed using native FFmpeg binding on '{settings.File}', try switching to process mode. Exception: {e}");
            }


            //https://docs.microsoft.com/en-us/dotnet/csharp/how-to/concatenate-multiple-strings#string-literals
            string ffmpegArguments = $" -hide_banner -loglevel {(extendedLogging ? "error" : "quiet")}" +
                                     $" -y -hwaccel {HardwareAccelerationMode} -ss {settings.Position} -i \"{settings.File}\"" +
                                     $" -t 1 -f {(settings.GrayScale == 1 ? "rawvideo -pix_fmt gray" : "mjpeg")} -vframes 1" +
                                     $" {(settings.GrayScale == 1 ? "-s 16x16" : "-vf scale=100:-1")} {CustomFFArguments} \"-\"";

            using var process = new Process {
                      StartInfo = new ProcessStartInfo {
                          Arguments              = ffmpegArguments,
                          FileName               = FFmpegPath,
                          CreateNoWindow         = true,
                          RedirectStandardInput  = false,
                          RedirectStandardOutput = true,
                          WorkingDirectory       = Path.GetDirectoryName(FFmpegPath)!,
                          RedirectStandardError  = extendedLogging,
                          WindowStyle            = ProcessWindowStyle.Hidden
                      }
                  };
            string errOut = string.Empty;

            byte[]? bytes = null;
            try {
                process.EnableRaisingEvents = true;
                process.Start();
                if (extendedLogging)
                {
                    process.ErrorDataReceived += new DataReceivedEventHandler((sender, e) => {
                        if (e.Data?.Length > 0)
                        {
                            errOut += Environment.NewLine + e.Data;
                        }
                    });
                    process.BeginErrorReadLine();
                }
                using var ms = new MemoryStream();
                process.StandardOutput.BaseStream.CopyTo(ms);

                if (!process.WaitForExit(TimeoutDuration))
                {
                    throw new TimeoutException($"FFmpeg timed out on file: {settings.File}");
                }
                else if (extendedLogging)
                {
                    process.WaitForExit();                     // Because of asynchronous event handlers, see: https://github.com/dotnet/runtime/issues/18789
                }
                if (process.ExitCode != 0)
                {
                    throw new FFInvalidExitCodeException($"FFmpeg exited with: {process.ExitCode}");
                }

                bytes = ms.ToArray();
                if (bytes.Length == 0)
                {
                    bytes = null;                       // Makes subsequent checks easier
                }
                else if (settings.GrayScale == 1 && bytes.Length != 256)
                {
                    bytes   = null;
                    errOut += $"{Environment.NewLine}graybytes length != 256";
                }
            }
            catch (Exception e) {
                errOut += $"{Environment.NewLine}{e.Message}";
                try {
                    if (process.HasExited == false)
                    {
                        process.Kill();
                    }
                }
                catch { }
                bytes = null;
            }
            if (bytes == null || errOut.Length > 0)
            {
                string message = $"{((bytes == null) ? "ERROR: Failed to retrieve" : "WARNING: Problems while retrieving")} {(settings.GrayScale == 1 ? "graybytes" : "thumbnail")} from: {settings.File}";
                if (extendedLogging)
                {
                    message += $":{Environment.NewLine}{FFmpegPath} {ffmpegArguments}";
                }
                Logger.Instance.Info($"{message}{errOut}");
            }
            return(bytes);
        }
Code example #33
 public static extern int av_picture_pad(AVPicture* dst, AVPicture* src, int height, int width, AVPixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright, int* color);
Code example #34
File: MediaEncode.cs Project: sdcb/EmguFFmpeg
 public static MediaEncode CreateVideoEncode(OutFormat oformat, int width, int height, int fps, long bitRate = 0, AVPixelFormat format = AVPixelFormat.AV_PIX_FMT_NONE)
 {
     return(CreateVideoEncode(oformat.VideoCodec, oformat.Flags, width, height, fps, bitRate, format));
 }
Code example #35
 public static extern int avcodec_pix_fmt_to_codec_tag(AVPixelFormat pix_fmt);
Code example #36
File: MediaEncode.cs Project: sdcb/EmguFFmpeg
 /// <summary>
 /// Create and init video encode
 /// </summary>
 /// <param name="videoCodec"></param>
 /// <param name="flags"><see cref="MediaFormat.Flags"/></param>
 /// <param name="width">width pixel, must be greater than 0</param>
 /// <param name="height">height pixel, must be greater than 0</param>
 /// <param name="fps">fps, must be greater than 0</param>
 /// <param name="bitRate">default is auto bit rate, must be greater than or equal to 0</param>
 /// <param name="format">default is first supported pixel format</param>
 /// <returns></returns>
 public static MediaEncode CreateVideoEncode(AVCodecID videoCodec, int flags, int width, int height, int fps, long bitRate = 0, AVPixelFormat format = AVPixelFormat.AV_PIX_FMT_NONE)
 {
     unsafe
     {
         return(CreateEncode(videoCodec, flags, _ =>
         {
             AVCodecContext *pCodecContext = _;
             if (width <= 0 || height <= 0 || fps <= 0 || bitRate < 0)
             {
                 throw new FFmpegException(FFmpegException.NonNegative);
             }
             if (_.SupportedPixelFmts.Count() <= 0)
             {
                 throw new FFmpegException(FFmpegException.NotSupportCodecId);
             }
             if (format == AVPixelFormat.AV_PIX_FMT_NONE)
             {
                 format = _.SupportedPixelFmts[0];
             }
             else if (_.SupportedPixelFmts.Where(__ => __ == format).Count() <= 0)
             {
                 throw new FFmpegException(FFmpegException.NotSupportFormat);
             }
             pCodecContext->width = width;
             pCodecContext->height = height;
             pCodecContext->time_base = new AVRational {
                 num = 1, den = fps
             };
             pCodecContext->pix_fmt = format;
             pCodecContext->bit_rate = bitRate;
         }));
     }
 }
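A hypothetical call for an H.264 stream at 1280x720, 30 fps, with automatic bit rate and the codec's first supported pixel format:

 var encoder = MediaEncode.CreateVideoEncode(AVCodecID.AV_CODEC_ID_H264, 0, 1280, 720, 30);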
Code example #37
 public static extern AVPixelFormat avcodec_default_get_format(AVCodecContext* s, AVPixelFormat* fmt);
Code example #38
        public VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat, Size destinationSize, AVPixelFormat destinationPixelFormat)
        {
            _destinationSize = destinationSize;

            _pConvertContext = ffmpeg.sws_getContext(sourceSize.Width, sourceSize.Height, sourcePixelFormat,
                                                     destinationSize.Width,
                                                     destinationSize.Height, destinationPixelFormat,
                                                     ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (_pConvertContext == null)
            {
                throw new ApplicationException("Could not initialize the conversion context.");
            }

            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);

            _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            _dstData     = new byte_ptrArray4();
            _dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLinesize, (byte *)_convertedFrameBufferPtr, destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);
        }
Code example #39
 public static extern int av_opt_get_pixel_fmt(void* obj, String name, int search_flags, AVPixelFormat* out_fmt);
Code example #40
 public static extern int sws_isSupportedOutput(AVPixelFormat @pix_fmt);
Code example #41
 public static extern String av_get_pix_fmt_name(AVPixelFormat pix_fmt);
Code example #42
 public static extern int sws_isSupportedEndiannessConversion(AVPixelFormat @pix_fmt);
Code example #43
 public static extern AVPixFmtDescriptor* av_pix_fmt_desc_get(AVPixelFormat pix_fmt);
Code example #44
 public static extern SwsContext *sws_getContext(int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter * @srcFilter, SwsFilter * @dstFilter, double * @param);
Code example #45
 public static extern int av_pix_fmt_count_planes(AVPixelFormat pix_fmt);
Code example #46
 private static extern SwsContext *sws_getContext(int srcW, int srcH, AVPixelFormat srcFormat, int dstW, int dstH, AVPixelFormat dstFormat, int flags, SwsFilter *srcFilter, SwsFilter *dstFilter, double *param);
Code example #47
 public static extern int av_get_pix_fmt_loss(AVPixelFormat dst_pix_fmt, AVPixelFormat src_pix_fmt, int has_alpha);
Code example #48
File: FFmpeg.cs Project: polytronicgr/MotionTK
 internal static extern SwsContext *sws_getCachedContext(SwsContext *context, int srcW, int srcH, AVPixelFormat srcFormat, int dstW, int dstH, AVPixelFormat dstFormat, int flags, SwsFilter *srcFilter, SwsFilter *dstFilter, double *param);
Code example #49
 public static extern int av_image_get_linesize(AVPixelFormat pix_fmt, int width, int plane);
Code example #50
File: FFmpeg.cs Project: polytronicgr/MotionTK
 internal static extern int av_image_get_buffer_size(AVPixelFormat pixFmt, int width, int height, int align);
Code example #51
 public static extern int av_image_fill_pointers(byte** data, AVPixelFormat pix_fmt, int height, byte* ptr, int* linesizes);
Code example #52
File: FFmpeg.cs Project: polytronicgr/MotionTK
 internal static extern int av_image_fill_arrays(ref BytePtrArray4 dstData, ref IntArray4 dstLinesize, byte *src, AVPixelFormat pixFmt, int width, int height, int align);
Code example #53
 public static extern void av_image_copy(byte** dst_data, int* dst_linesizes, byte** src_data, int* src_linesizes, AVPixelFormat pix_fmt, int width, int height);
Code example #54
File: MediaWriter.cs Project: rniebecker/iSpy
        public void WriteFrame(Bitmap frame, int level, long msOffset)
        {
            if (!_opened)
            {
                throw new Exception("An audio file was not opened yet.");
            }
            if (ffmpeg.avcodec_is_open(_videoCodecContext) <= 0)
            {
                throw new Exception("codec is not open");
            }

            if ((frame.Width != _videoCodecContext->width) || (frame.Height != _videoCodecContext->height))
            {
                throw new Exception("Bitmap size must be of the same as video size, which was specified on opening video file.");
            }

            BitmapData bitmapData = frame.LockBits(new Rectangle(0, 0, _width, _height), ImageLockMode.ReadOnly,
                                                   (frame.PixelFormat == PixelFormat.Format8bppIndexed) ? PixelFormat.Format8bppIndexed : PixelFormat.Format24bppRgb);

            var ptr         = (sbyte *)bitmapData.Scan0;
            int srcLinesize = bitmapData.Stride;

            if (_swsContext == null)
            {
                AVPixelFormat pfmt = AVPixelFormat.AV_PIX_FMT_BGR24;

                if (frame.PixelFormat == PixelFormat.Format8bppIndexed)
                {
                    pfmt = AVPixelFormat.AV_PIX_FMT_GRAY8;
                }

                _swsContext = ffmpeg.sws_getCachedContext(_swsContext, _videoCodecContext->width, _videoCodecContext->height, pfmt, _videoCodecContext->width, _videoCodecContext->height, _videoCodecContext->pix_fmt, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            }
            int h = ffmpeg.sws_scale(_swsContext, &ptr, &srcLinesize, 0, _videoCodecContext->height, &_videoFrame->data0, _videoFrame->linesize);

            frame.UnlockBits(bitmapData);

            if (h <= 0)
            {
                throw new Exception("Error scaling image");
            }


            if (!_isConstantFramerate)
            {
                var pts = msOffset;
                _videoFrame->pts = pts;
            }
            else
            {
                _videoFrame->pts = _frameNumber;
            }
            _frameNumber++;


            int      ret;
            AVPacket packet = new AVPacket();

            ffmpeg.av_init_packet(&packet);

            if ((_formatContext->oformat->flags & ffmpeg.AVFMT_RAWPICTURE) == ffmpeg.AVFMT_RAWPICTURE)
            {
                packet.flags       |= ffmpeg.AV_PKT_FLAG_KEY;
                packet.stream_index = _videoStream->index;
                packet.data         = _videoFrame->data0;
                packet.size         = sizeof(AVPicture);
                ret = ffmpeg.av_interleaved_write_frame(_formatContext, &packet);
            }
            else
            {
                int gotPacket;
                packet.data = null;
                packet.size = 0;

                ret = ffmpeg.avcodec_encode_video2(_videoCodecContext, &packet, _videoFrame, &gotPacket);
                if (ret < 0)
                {
                    ffmpeg.av_free_packet(&packet);
                    throw new Exception("Error while writing video frame (" + ret + ")");
                }

                if (gotPacket > 0 && packet.size > 0)
                {
                    if ((ulong)packet.pts != ffmpeg.AV_NOPTS_VALUE)
                    {
                        packet.pts = ffmpeg.av_rescale_q(packet.pts, _videoCodecContext->time_base, _videoStream->time_base);
                    }
                    if ((ulong)packet.dts != ffmpeg.AV_NOPTS_VALUE)
                    {
                        packet.dts = ffmpeg.av_rescale_q(packet.dts, _videoCodecContext->time_base, _videoStream->time_base);
                    }

                    packet.stream_index = _videoStream->index;
                    // write the compressed frame to the media file
                    _lastPacket = DateTime.UtcNow;
                    ret         = ffmpeg.av_interleaved_write_frame(_formatContext, &packet);
                }
            }
            ffmpeg.av_free_packet(&packet);

            if (!_isAudio)
            {
                _alertData.Append(string.Format(CultureInfo.InvariantCulture, "{0:0.000},", Math.Min(level, 100)));
            }

            if (level > _maxLevel)
            {
                MaxAlarm = level;
                MaxFrame?.Dispose();
                MaxFrame  = (Bitmap)frame.Clone();
                _maxLevel = level;
            }

            if (ret != 0)
            {
                throw new Exception("Error while writing video frame (" + ret + ")");
            }
        }
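Note that av_init_packet, avcodec_encode_video2, av_free_packet, and AVFMT_RAWPICTURE used above are deprecated or removed in current FFmpeg. On a newer binding the encode loop would use the send/receive API instead; a sketch under that assumption (same fields as above, but not the code path this project ships):

             AVPacket* pkt = ffmpeg.av_packet_alloc();
             try
             {
                 ffmpeg.avcodec_send_frame(_videoCodecContext, _videoFrame);
                 while (ffmpeg.avcodec_receive_packet(_videoCodecContext, pkt) == 0)
                 {
                     ffmpeg.av_packet_rescale_ts(pkt, _videoCodecContext->time_base, _videoStream->time_base);
                     pkt->stream_index = _videoStream->index;
                     ffmpeg.av_interleaved_write_frame(_formatContext, pkt);
                 }
             }
             finally
             {
                 ffmpeg.av_packet_free(&pkt);
             }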
Code example #55
 public static extern int av_image_get_buffer_size(AVPixelFormat pix_fmt, int width, int height, int align);
Code example #56
        private unsafe AVFrame *GetFrame(byte[] bytes, out int width, out int height, out AVPixelFormat pixFormat)
        {
            var webcamFormatContext = ffmpeg.avformat_alloc_context();
            var webcamByteReader    = new ByteReader();
            var bufSize             = 1048576u;
            var webcamBuffer        = ffmpeg.av_malloc(bufSize);

            var webcamAllocContext = ffmpeg.avio_alloc_context((byte *)webcamBuffer, (int)bufSize, 0, null, webcamByteReader.ReadFunc, null, webcamByteReader.SeekFunc);

            if (webcamAllocContext == null)
            {
                throw new NullReferenceException(nameof(webcamAllocContext));
            }

            webcamFormatContext->pb = webcamAllocContext;
            {
                webcamByteReader.Buffer = bytes;
                // Decode image from byte array;
                ffmpeg.avformat_open_input(&webcamFormatContext, "nofile.jpg", null, null).ThrowExceptionIfError();
                ffmpeg.avformat_find_stream_info(webcamFormatContext, null).ThrowExceptionIfError();

                var webcamCodecCtx = webcamFormatContext->streams[0]->codec;

                AVCodec *webcamCodec = ffmpeg.avcodec_find_decoder(webcamCodecCtx->codec_id);

                AVPacket pkt;
                try
                {
                    ffmpeg.avcodec_open2(webcamCodecCtx, webcamCodec, null).ThrowExceptionIfError();
                    var webcamFrame = ffmpeg.av_frame_alloc();
                    while (ffmpeg.av_read_frame(webcamFormatContext, &pkt).ThrowExceptionIfError() >= 0)
                    {
                        if (pkt.stream_index != 0)
                        {
                            continue;
                        }

                        int error;
                        do
                        {
                            ffmpeg.avcodec_send_packet(webcamCodecCtx, &pkt).ThrowExceptionIfError();
                            // Capture the result instead of throwing, so EAGAIN can retry the loop.
                            error = ffmpeg.avcodec_receive_frame(webcamCodecCtx, webcamFrame);
                        } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));

                        error.ThrowExceptionIfError();

                        width     = webcamCodecCtx->width;
                        height    = webcamCodecCtx->height;
                        pixFormat = webcamCodecCtx->pix_fmt;

                        return(webcamFrame);
                    }
                }
                finally
                {
                    GC.KeepAlive(webcamByteReader);
                    ffmpeg.avcodec_close(webcamCodecCtx);
                    ffmpeg.avformat_close_input(&webcamFormatContext);
                    ffmpeg.avformat_free_context(webcamFormatContext);
                    ffmpeg.av_free(webcamAllocContext->buffer); // avio may have reallocated the buffer, so free via the context's pointer
                    ffmpeg.avio_context_free(&webcamAllocContext);
                    ffmpeg.av_packet_unref(&pkt);
                }

                width     = 0;
                height    = 0;
                pixFormat = 0;
                return(null);
            }
        }
Code example #57
 public static extern int sws_isSupportedOutput(AVPixelFormat pix_fmt);
Code example #58
 public static extern int avpicture_get_size(AVPixelFormat pix_fmt, int width, int height);
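avpicture_get_size is deprecated in FFmpeg; with align fixed to 1 it is equivalent to the newer image API:

 int size = av_image_get_buffer_size(pix_fmt, width, height, 1); // modern replacement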
Code example #59
 public static extern SwsContext* sws_getCachedContext(SwsContext* context, int srcW, int srcH, AVPixelFormat srcFormat, int dstW, int dstH, AVPixelFormat dstFormat, int flags, SwsFilter* srcFilter, SwsFilter* dstFilter, double* param);
Code example #60
 public ImageEncode(AVPixelFormat pixelFormat, int width, int height)
 {
     PixelFormat = pixelFormat;
     Width       = width;
     Height      = height;
 }