Example #1
        private static unsafe void SetupLogging()
        {
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_INFO);

            // do not convert to local function
            av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
            {
                if (level > ffmpeg.av_log_get_level())
                {
                    return;
                }

                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                Console.ForegroundColor = ConsoleColor.Yellow;
                Console.Write("0x{0:x2} ({1}) ", level, (LogLevel)level);
                Console.Write(line);
                Console.ResetColor();
            };

            ffmpeg.av_log_set_callback(logCallback);
        }
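The "do not convert to local function" comment matters because the native side only keeps a function pointer: if the managed delegate becomes unreachable, the garbage collector may reclaim it and a later log call can crash the process. A common precaution is to root the delegate in a field; a minimal sketch (the field and method names below are assumptions, not part of the example):

        // Hypothetical field that keeps the delegate alive for the lifetime of the process.
        private static av_log_set_callback_callback _rootedLogCallback;

        private static unsafe void SetupLoggingRooted()
        {
            _rootedLogCallback = (p0, level, format, vl) => { /* format and print the line as in Example #1 */ };
            ffmpeg.av_log_set_callback(_rootedLogCallback);
        }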
Example #2
        public void Initialize()
        {
            try
            {
                Interop.FFmpeg.av_register_all();
                Interop.FFmpeg.avformat_network_init();
                unsafe
                {
                    Interop.FFmpeg.av_log_set_level(FFmpegMacros.AV_LOG_WARNING);
                    av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
                    {
                        if (level > Interop.FFmpeg.av_log_get_level())
                        {
                            return;
                        }

                        const int lineSize    = 1024;
                        var       lineBuffer  = stackalloc byte[lineSize];
                        var       printPrefix = 1;
                        Interop.FFmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                        var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);

                        Logger.Warn(line);
                    };
                    Interop.FFmpeg.av_log_set_callback(logCallback);
                }
            }
            catch (Exception e)
            {
                Logger.Error(e, "Could not load and register FFmpeg library");
                throw new DemuxerException("Could not load and register FFmpeg library", e);
            }
        }
Example #3
        public static unsafe void UseSpecificLogCallback(bool storeLogs = true)
        {
            // Clear any previously stored logs
            if (storeLogs)
            {
                ClearStoredLogs();
            }

            logCallback = (p0, level, format, vl) =>
            {
                if ((!storeLogs) && (level > ffmpeg.av_log_get_level()))
                {
                    return;
                }

                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                Console.Write(line);
                if (storeLogs)
                {
                    storedLogs += line;
                }
            };
            ffmpeg.av_log_set_callback(logCallback);
        }
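Example #3 relies on a logCallback field, a storedLogs string and a ClearStoredLogs method that sit outside the snippet. A minimal sketch of those members (an assumption about the surrounding class, not code from the source):

        // Assumed supporting members for Example #3.
        private static av_log_set_callback_callback logCallback;
        private static string storedLogs = string.Empty;

        public static void ClearStoredLogs()
        {
            storedLogs = string.Empty;
        }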
Example #4
 /// <summary>
 /// Initializes static members of the <see cref="FFInterop"/> class.
 /// </summary>
 static FFInterop()
 {
     unsafe
     {
         FFmpegLogCallback = OnFFmpegMessageLogged;
     }
 }
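Example #4 only wires up the delegate; the OnFFmpegMessageLogged handler itself is not shown. A plausible minimal handler with the av_log_set_callback_callback signature, assuming the same formatting approach as the other examples (this body is a sketch, not the library's actual implementation):

 private static unsafe void OnFFmpegMessageLogged(void* p0, int level, string format, byte* vl)
 {
     if (level > ffmpeg.av_log_get_level()) return;

     const int lineSize    = 1024;
     var       lineBuffer  = stackalloc byte[lineSize];
     var       printPrefix = 1;
     ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
     var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
     Console.Write(line); // or forward to the host application's logger
 }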
Example #5
        static FFmpegContext()
        {
            SetRootPath();

            _logFunc = Log;

            // Redirect log output.
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_MAX_OFFSET);
            ffmpeg.av_log_set_callback(_logFunc);
        }
Example #6
        static unsafe FFmpegSource()
        {
            ffmpeg.av_register_all();
            ffmpeg.avcodec_register_all();
            ffmpeg.avformat_network_init();
            ffmpeg.avdevice_register_all();
            ffmpeg.avfilter_register_all();

            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
            logCallback = new av_log_set_callback_callback(logCall);
            ffmpeg.av_log_set_callback(logCallback);
        }
Example #7
        public FFmpegContext()
        {
            _logFunc = Log;

            // Redirect log output
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_MAX_OFFSET);
            ffmpeg.av_log_set_callback(_logFunc);

            _codec   = ffmpeg.avcodec_find_decoder(AVCodecID.AV_CODEC_ID_H264);
            _context = ffmpeg.avcodec_alloc_context3(_codec);

            ffmpeg.avcodec_open2(_context, _codec, null);

            _packet = ffmpeg.av_packet_alloc();
        }
Example #8
        public static void Init(ILogger logger)
        {
            if (_globalLogger == null)
            {
                unsafe
                {
                    _globalLogger = logger;
                    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
                    {
                        ffmpeg.RootPath = Path.Combine(Environment.CurrentDirectory, "ffmpeg/x86_64");
                    }

                    ffmpeg.av_log_set_level(ffmpeg.AV_LOG_ERROR);
                    // ffmpeg.av_log_set_level(ffmpeg.AV_LOG_MAX_OFFSET);
                    _callback = LogCallback;
                    ffmpeg.av_log_set_callback(_callback);
                }
            }
        }
Example #9
        private unsafe static void SetupLogging()
        {
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE); // 40 == AV_LOG_VERBOSE
            av_log_set_callback_callback logCallback = delegate(void *p0, int level, string format, byte *vl)
            {
                if (level <= ffmpeg.av_log_get_level())
                {
                    const int lineSize    = 1024;
                    byte *    lineBuffer  = stackalloc byte[lineSize];
                    int       printPrefix = 1;
                    ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                    string line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                    Console.ForegroundColor = ConsoleColor.Yellow;
                    Console.Write(line);
                    Console.ResetColor();
                }
            };

            ffmpeg.av_log_set_callback(logCallback);
        }
Example #10
        private static unsafe void SetupLogging()
        {
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);

            // do not convert to local function
            av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
            {
                if (level > ffmpeg.av_log_get_level())
                {
                    return;
                }

                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
            };

            ffmpeg.av_log_set_callback(logCallback);
        }
Example #11
        private unsafe void RegisterFFmpeg()
        {
            // Only load the FFmpeg DLLs once to save RAM
            if (FFmpegRegistered)
            {
                WriteLine("FFmpeg Already Loaded");
                return;
            }

            WriteLine("Loading FFmpeg");
            NativeMethods.RegisterLibrariesSearchPath(FFmpegPath);

            //Load AVCodec Init
            ffmpeg.av_register_all();
            ffmpeg.avcodec_register_all();

            WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

            // Set up logging at an appropriate level; Debug causes playback performance issues
            ffmpeg.av_log_set_level(FFmpegLogLevel);

            //Callback for logging
            LogCallback = (p0, level, format, vl) => {
                if (level > ffmpeg.av_log_get_level())
                {
                    return;
                }

                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                WriteLine(line);
            };
            ffmpeg.av_log_set_callback(LogCallback);

            // Mark FFmpeg as registered so the DLLs are only loaded once
            FFmpegRegistered = true;
        }
Example #12
        private static unsafe void InitFfmpegLog(int level)
        {
            ffmpeg.av_log_set_level(level);
            _logCallback = (p0, l, format, vl) =>
            {
                if (l > ffmpeg.av_log_get_level())
                {
                    return;
                }

                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, l, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);

                if (l == ffmpeg.AV_LOG_ERROR)
                {
                    LastError = line;
                }
                MessageReceived?.Invoke(null, new LogEventArgs(line, l));
            };
            ffmpeg.av_log_set_callback(_logCallback);
        }
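Example #12 assumes a MessageReceived event, a LastError property and a LogEventArgs type that are not part of the snippet. A minimal declaration that would make it compile (only the names come from the example; the shapes are an assumption):

        // Assumed supporting members for Example #12.
        private static av_log_set_callback_callback _logCallback;

        public static event EventHandler<LogEventArgs> MessageReceived;
        public static string LastError { get; private set; }

        public class LogEventArgs : EventArgs
        {
            public LogEventArgs(string message, int level)
            {
                Message = message;
                Level   = level;
            }

            public string Message { get; }
            public int    Level   { get; }
        }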
Example #13
        private static unsafe void Main(string[] args)
        {
            Console.WriteLine(@"Current directory: " + Environment.CurrentDirectory);
            Console.WriteLine(@"Runnung in {0}-bit mode.", Environment.Is64BitProcess ? @"64" : @"32");

            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            ffmpeg.av_register_all();
            ffmpeg.avcodec_register_all();
            ffmpeg.avformat_network_init();

            Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

            // setup logging
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
            av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
            {
                if (level > ffmpeg.av_log_get_level())
                {
                    return;
                }

                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                Console.Write(line);
            };

            ffmpeg.av_log_set_callback(logCallback);

            // decode N frames from url or path

            //string url = @"../../sample_mpeg4.mp4";
            var url = @"http://www.quirksmode.org/html5/videos/big_buck_bunny.mp4";

            var pFormatContext = ffmpeg.avformat_alloc_context();

            if (ffmpeg.avformat_open_input(&pFormatContext, url, null, null) != 0)
            {
                throw new ApplicationException(@"Could not open file.");
            }

            if (ffmpeg.avformat_find_stream_info(pFormatContext, null) != 0)
            {
                throw new ApplicationException(@"Could not find stream info");
            }

            AVStream *pStream = null;

            for (var i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new ApplicationException(@"Could not found video stream.");
            }


            var codecContext = *pStream->codec;

            Console.WriteLine($"codec name: {ffmpeg.avcodec_get_name(codecContext.codec_id)}");

            var width             = codecContext.width;
            var height            = codecContext.height;
            var sourcePixFmt      = codecContext.pix_fmt;
            var codecId           = codecContext.codec_id;
            var destinationPixFmt = AVPixelFormat.AV_PIX_FMT_BGR24;
            var pConvertContext   = ffmpeg.sws_getContext(width, height, sourcePixFmt,
                                                          width, height, destinationPixFmt,
                                                          ffmpeg.SWS_FAST_BILINEAR, null, null, null);

            if (pConvertContext == null)
            {
                throw new ApplicationException(@"Could not initialize the conversion context.");
            }

            var pConvertedFrame          = ffmpeg.av_frame_alloc();
            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, width, height, 1);
            var convertedFrameBufferPtr  = Marshal.AllocHGlobal(convertedFrameBufferSize);
            var dstData     = new byte_ptrArray4();
            var dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)convertedFrameBufferPtr, destinationPixFmt, width, height, 1);

            var pCodec = ffmpeg.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new ApplicationException(@"Unsupported codec.");
            }

            var pCodecContext = &codecContext;

            if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
            }

            if (ffmpeg.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new ApplicationException(@"Could not open codec.");
            }

            var pDecodedFrame = ffmpeg.av_frame_alloc();

            var packet  = new AVPacket();
            var pPacket = &packet;

            ffmpeg.av_init_packet(pPacket);

            var frameNumber = 0;

            while (frameNumber < 200)
            {
                try
                {
                    if (ffmpeg.av_read_frame(pFormatContext, pPacket) < 0)
                    {
                        throw new ApplicationException(@"Could not read frame.");
                    }

                    if (pPacket->stream_index != pStream->index)
                    {
                        continue;
                    }

                    if (ffmpeg.avcodec_send_packet(pCodecContext, pPacket) < 0)
                    {
                        throw new ApplicationException($@"Error while sending packet {frameNumber}.");
                    }

                    if (ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame) < 0)
                    {
                        throw new ApplicationException($@"Error while receiving frame {frameNumber}.");
                    }

                    Console.WriteLine($@"frame: {frameNumber}");

                    ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, height, dstData, dstLinesize);
                }
                finally
                {
                    ffmpeg.av_packet_unref(pPacket);
                    ffmpeg.av_frame_unref(pDecodedFrame);
                }

#if !NETCOREAPP2_0
                using (var bitmap = new System.Drawing.Bitmap(width, height, dstLinesize[0], System.Drawing.Imaging.PixelFormat.Format24bppRgb, convertedFrameBufferPtr))
                    bitmap.Save(@"frame.buffer.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
#endif

                frameNumber++;
            }

            Marshal.FreeHGlobal(convertedFrameBufferPtr);
            ffmpeg.av_free(pConvertedFrame);
            ffmpeg.sws_freeContext(pConvertContext);

            ffmpeg.av_free(pDecodedFrame);
            ffmpeg.avcodec_close(pCodecContext);
            ffmpeg.avformat_close_input(&pFormatContext);
        }
Example #14
        public AudioDecoder(DecoderSettings settings, string path, Stream IO)
        {
            m_settings = settings;

            _path = path;

            m_stream = (IO != null) ? IO : new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);

            switch (Environment.OSVersion.Platform)
            {
            case PlatformID.Win32NT:
            case PlatformID.Win32S:
            case PlatformID.Win32Windows:
                var myPath  = new Uri(typeof(AudioDecoder).Assembly.CodeBase).LocalPath;
                var current = System.IO.Path.GetDirectoryName(myPath);
                var probe   = Environment.Is64BitProcess ? "x64" : "win32";
                while (current != null)
                {
                    var ffmpegDirectory = System.IO.Path.Combine(current, probe);
                    if (Directory.Exists(ffmpegDirectory))
                    {
                        System.Diagnostics.Trace.WriteLine($"FFmpeg binaries found in: {ffmpegDirectory}");
                        RegisterLibrariesSearchPath(ffmpegDirectory);
                        break;
                    }
                    current = Directory.GetParent(current)?.FullName;
                }
                break;
                //case PlatformID.Unix:
                //case PlatformID.MacOSX:
                //    var libraryPath = Environment.GetEnvironmentVariable(LD_LIBRARY_PATH);
                //    RegisterLibrariesSearchPath(libraryPath);
                //    break;
            }

            pkt = ffmpeg.av_packet_alloc();
            if (pkt == null)
            {
                throw new Exception("Unable to initialize the decoder");
            }

            decoded_frame = ffmpeg.av_frame_alloc();
            if (decoded_frame == null)
            {
                throw new Exception("Could not allocate audio frame");
            }

            //ffmpeg.avcodec_register_all();
            ffmpeg.av_register_all();

#if DEBUG
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_DEBUG);

            av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
            {
                if (level > ffmpeg.av_log_get_level())
                {
                    return;
                }

                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                System.Diagnostics.Trace.Write(line);
            };

            ffmpeg.av_log_set_callback(logCallback);
#endif

            m_read_packet_callback = readPacketCallback;
            m_seek_callback        = seekCallback;

            int ret;
            AVFormatContext *new_fmt_ctx = ffmpeg.avformat_alloc_context();
            if (new_fmt_ctx == null)
            {
                throw new Exception("ffmpeg.avformat_alloc_context() failed");
            }

            ulong avio_ctx_buffer_size = 65536;
            void *avio_ctx_buffer      = ffmpeg.av_malloc(avio_ctx_buffer_size);

            AVIOContext *avio_ctx = ffmpeg.avio_alloc_context((byte *)avio_ctx_buffer, (int)avio_ctx_buffer_size,
                                                              0, null, m_read_packet_callback, null, m_seek_callback);
            if (avio_ctx == null)
            {
                ffmpeg.avformat_free_context(new_fmt_ctx);
                throw new Exception("Cannot find stream information");
            }

            new_fmt_ctx->pb = avio_ctx;

            AVInputFormat *fmt = ffmpeg.av_find_input_format(m_settings.Format);
            if (fmt == null)
            {
                ffmpeg.avformat_free_context(new_fmt_ctx);
                throw new Exception($"Cannot find input format ${m_settings.Format}");
            }

            if ((ret = ffmpeg.avformat_open_input(&new_fmt_ctx, null, fmt, null)) < 0)
            {
                ffmpeg.avformat_free_context(new_fmt_ctx);
                ret.ThrowExceptionIfError();
            }

            if ((ret = ffmpeg.avformat_find_stream_info(new_fmt_ctx, null)) < 0)
            {
                ffmpeg.avformat_close_input(&new_fmt_ctx);
                ret.ThrowExceptionIfError();
            }

#if FINDBESTSTREAM
            /* select the audio stream */
            ret = ffmpeg.av_find_best_stream(new_fmt_ctx, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, &dec, 0);
            if (ret < 0)
            {
                ffmpeg.avformat_close_input(&new_fmt_ctx);
                ret.ThrowExceptionIfError();
            }
#endif
            int matching_stream  = -1;
            int matching_streams = 0;
            for (int i = 0; i < (int)new_fmt_ctx->nb_streams; i++)
            {
                AVStream *stream_i = new_fmt_ctx->streams[i];
                if (stream_i->codecpar->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO &&
                    (settings.StreamId == 0 || settings.StreamId == stream_i->id))
                {
                    matching_stream = i;
                    matching_streams++;
                }
            }

            if (matching_streams == 0)
            {
                ffmpeg.avformat_close_input(&new_fmt_ctx);
                throw new Exception("No matching streams");
            }
            if (matching_streams != 1)
            {
                ffmpeg.avformat_close_input(&new_fmt_ctx);
                throw new Exception("More than one stream matches");
            }

            stream = new_fmt_ctx->streams[matching_stream];
            // Duration is unreliable for most codecs.
            //if (stream->duration > 0)
            //    _sampleCount = stream->duration;
            //else
            _sampleCount = -1;

            int bps = stream->codecpar->bits_per_raw_sample != 0 ?
                      stream->codecpar->bits_per_raw_sample :
                      stream->codecpar->bits_per_coded_sample;
            int   channels       = stream->codecpar->channels;
            int   sample_rate    = stream->codecpar->sample_rate;
            ulong channel_layout = stream->codecpar->channel_layout;
            pcm = new AudioPCMConfig(bps, channels, sample_rate, (AudioPCMConfig.SpeakerConfig)channel_layout);

            fmt_ctx = new_fmt_ctx;

            codec = ffmpeg.avcodec_find_decoder(stream->codecpar->codec_id);
            if (codec == null)
            {
                throw new Exception("Codec not found");
            }

            c = ffmpeg.avcodec_alloc_context3(codec);
            if (c == null)
            {
                throw new Exception("Could not allocate audio codec context");
            }
            // ffmpeg.av_opt_set_int(c, "refcounted_frames", 1, 0);
            ffmpeg.avcodec_parameters_to_context(c, stream->codecpar);

            c->request_sample_fmt = AVSampleFormat.AV_SAMPLE_FMT_S32;

            /* open it */
            if (ffmpeg.avcodec_open2(c, null, null) < 0)
            {
                throw new Exception("Could not open codec");
            }

            m_decoded_frame_offset = 0;
            m_decoded_frame_size   = 0;
            _sampleOffset          = 0;
        }
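Example #14 calls ret.ThrowExceptionIfError(), an extension method that is not shown; examples #16 and #17 use a similar GetErrorMessage helper. Both can be built on av_strerror. A minimal sketch (the class name and exception type are assumptions):

        // Assumed helper: turns a negative FFmpeg return code into an exception.
        public static class FFmpegErrorExtensions
        {
            public static unsafe int ThrowExceptionIfError(this int error)
            {
                if (error >= 0)
                {
                    return error;
                }

                const int bufferSize = 1024;
                var       buffer     = stackalloc byte[bufferSize];
                ffmpeg.av_strerror(error, buffer, (ulong)bufferSize);
                var message = Marshal.PtrToStringAnsi((IntPtr)buffer);
                throw new ApplicationException(message);
            }
        }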
Example #15
        //private static unsafe void Main(string[] args)
        //{
        //    Console.WriteLine("Decoding video...");
        //    var frames = DecodeVideo();
        //    Console.WriteLine("Decoded video successfully.");

        //    //Console.WriteLine("Encoding frames...");
        //    //var encoder = new EncodeMultipleBitmaps();
        //    //encoder.video_encode_example(
        //    //    @"ThisIsATest.h264",
        //    //    (int)AVCodecID.AV_CODEC_ID_H264);

        //    //Console.WriteLine("Encoded frames successfully...");

        //}

        private static unsafe List <Bitmap> DecodeVideo()
        {
            List <Bitmap> bitmaps = new List <Bitmap>();

            Console.WriteLine(@"Current directory: " + Environment.CurrentDirectory);
            Console.WriteLine(@"Runnung in {0}-bit mode.", Environment.Is64BitProcess ? @"64" : @"32");

            // register path to ffmpeg
            switch (Environment.OSVersion.Platform)
            {
            case PlatformID.Win32NT:
            case PlatformID.Win32S:
            case PlatformID.Win32Windows:
                var ffmpegPath = $@"../../../../FFmpeg/bin/{(Environment.Is64BitProcess ? @"x64" : @"x86")}";
                InteropHelper.RegisterLibrariesSearchPath(ffmpegPath);
                break;

            case PlatformID.Unix:
            case PlatformID.MacOSX:
                var libraryPath = Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH);
                InteropHelper.RegisterLibrariesSearchPath(libraryPath);
                break;
            }

            ffmpeg.av_register_all();
            ffmpeg.avcodec_register_all();
            ffmpeg.avformat_network_init();

            Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

            // setup logging
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
            av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
            {
                if (level > ffmpeg.av_log_get_level())
                {
                    return;
                }

                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                Console.Write(line);
            };

            ffmpeg.av_log_set_callback(logCallback);

            // decode N frames from url or path

            //string url = @"../../sample_mpeg4.mp4";
            var url = @"http://www.quirksmode.org/html5/videos/big_buck_bunny.mp4";

            var pFormatContext = ffmpeg.avformat_alloc_context();

            if (ffmpeg.avformat_open_input(&pFormatContext, url, null, null) != 0)
            {
                throw new ApplicationException(@"Could not open file");
            }

            if (ffmpeg.avformat_find_stream_info(pFormatContext, null) != 0)
            {
                throw new ApplicationException(@"Could not find stream info");
            }

            AVStream *pStream = null;

            for (var i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new ApplicationException(@"Could not found video stream");
            }


            var codecContext = *pStream->codec;

            Console.WriteLine($"codec name: {ffmpeg.avcodec_get_name(codecContext.codec_id)}");

            var width             = codecContext.width;
            var height            = codecContext.height;
            var sourcePixFmt      = codecContext.pix_fmt;
            var codecId           = codecContext.codec_id;
            var destinationPixFmt = AVPixelFormat.AV_PIX_FMT_BGR24;
            var pConvertContext   = ffmpeg.sws_getContext(width, height, sourcePixFmt,
                                                          width, height, destinationPixFmt,
                                                          ffmpeg.SWS_FAST_BILINEAR, null, null, null);

            if (pConvertContext == null)
            {
                throw new ApplicationException(@"Could not initialize the conversion context");
            }

            var pConvertedFrame          = ffmpeg.av_frame_alloc();
            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, width, height, 1);
            // A full video frame is too large for stackalloc, so allocate the buffer on the unmanaged heap.
            var convertedFrameBufferPtr  = Marshal.AllocHGlobal(convertedFrameBufferSize);
            var dstData     = new byte_ptrArray4();
            var dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)convertedFrameBufferPtr, destinationPixFmt, width, height, 1);

            var pCodec = ffmpeg.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new ApplicationException(@"Unsupported codec");
            }

            // Reusing the codec context obtained from stream info; initially this was:
            // AVCodecContext* pCodecContext = ffmpeg.avcodec_alloc_context3(pCodec); // but that did not work for all codecs
            var pCodecContext = &codecContext;

            if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
            }

            if (ffmpeg.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new ApplicationException(@"Could not open codec");
            }

            var pDecodedFrame = ffmpeg.av_frame_alloc();

            var packet  = new AVPacket();
            var pPacket = &packet;

            ffmpeg.av_init_packet(pPacket);

            var frameNumber = 0;

            //while (frameNumber < 200)
            while (true)
            {
                try
                {
                    if (ffmpeg.av_read_frame(pFormatContext, pPacket) < 0)
                    {
                        //throw new ApplicationException(@"Could not read frame");
                        break;
                    }

                    if (pPacket->stream_index != pStream->index)
                    {
                        continue;
                    }

                    if (ffmpeg.avcodec_send_packet(pCodecContext, pPacket) < 0)
                    {
                        throw new ApplicationException($@"Error while sending packet {frameNumber}");
                    }

                    if (ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame) < 0)
                    {
                        throw new ApplicationException($@"Error while receiving frame {frameNumber}");
                    }

                    Console.WriteLine($@"frame: {frameNumber}");

                    ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, height, dstData, dstLinesize);
                }
                finally
                {
                    ffmpeg.av_packet_unref(pPacket);
                    ffmpeg.av_frame_unref(pDecodedFrame);
                }


                using (var bitmap = new Bitmap(width, height, dstLinesize[0], PixelFormat.Format24bppRgb,
                                               convertedFrameBufferPtr))
                {
                    bitmap.Save($@"frame.buffer{frameNumber}.jpg", ImageFormat.Jpeg);
                    // Keep a copy: the original wraps the reusable conversion buffer, which is freed before returning.
                    bitmaps.Add(new Bitmap(bitmap));
                }

                frameNumber++;
            }

            Marshal.FreeHGlobal(convertedFrameBufferPtr);
            ffmpeg.av_free(pConvertedFrame);
            ffmpeg.sws_freeContext(pConvertContext);

            ffmpeg.av_free(pDecodedFrame);
            ffmpeg.avcodec_close(pCodecContext);
            ffmpeg.avformat_close_input(&pFormatContext);

            return(bitmaps);
        }
Example #16
        public List <NSImage> ProcessWithFFmpeg(string path, int thumbnail_width)
        {
            unsafe {
                // FFmpeg test
                Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

                // setup logging
                ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
                av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
                {
                    if (level > ffmpeg.av_log_get_level())
                    {
                        return;
                    }

                    var lineSize    = 1024;
                    var lineBuffer  = stackalloc byte[lineSize];
                    var printPrefix = 1;
                    ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                    var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                    Console.Write(line);
                };
                ffmpeg.av_log_set_callback(logCallback);

                // decode N frames from url or path

                //string url = @"../../sample_mpeg4.mp4";
                var url = path;

                var pFormatContext = ffmpeg.avformat_alloc_context();

                int error;
                error = ffmpeg.avformat_open_input(&pFormatContext, url, null, null);
                if (error != 0)
                {
                    throw new ApplicationException(GetErrorMessage(error));
                }

                error = ffmpeg.avformat_find_stream_info(pFormatContext, null);
                if (error != 0)
                {
                    throw new ApplicationException(GetErrorMessage(error));
                }

                AVDictionaryEntry *tag = null;
                while ((tag = ffmpeg.av_dict_get(pFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
                {
                    var key   = Marshal.PtrToStringAnsi((IntPtr)tag->key);
                    var value = Marshal.PtrToStringAnsi((IntPtr)tag->value);
                    Console.WriteLine($"{key} = {value}");
                }

                AVStream *pStream     = null;
                int       videoStream = -1;
                for (var i = 0; i < pFormatContext->nb_streams; i++)
                {
                    if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                    {
                        pStream     = pFormatContext->streams[i];
                        videoStream = i;
                        break;
                    }
                }
                if (pStream == null)
                {
                    throw new ApplicationException(@"Could not found video stream.");
                }

                var codecContext = *pStream->codec;

                Console.WriteLine($"codec name: {ffmpeg.avcodec_get_name(codecContext.codec_id)}");

                var width  = codecContext.width;
                var height = codecContext.height;

                // Set the thumbnail size
                int tWidth  = thumbnail_width;
                int tHeight = (int)(thumbnail_width / ((float)width / (float)height));

                Console.WriteLine("thumbnail width is {0} and height is {1}", tWidth, tHeight);

                var sourcePixFmt      = codecContext.pix_fmt;
                var codecId           = codecContext.codec_id;
                var destinationPixFmt = AVPixelFormat.AV_PIX_FMT_RGBA;
                var pConvertContext   = ffmpeg.sws_getContext(width, height, sourcePixFmt,
                                                              tWidth, tHeight, destinationPixFmt,
                                                              ffmpeg.SWS_BILINEAR, null, null, null);
                if (pConvertContext == null)
                {
                    throw new ApplicationException(@"Could not initialize the conversion context.");
                }

                var pConvertedFrame          = ffmpeg.av_frame_alloc();
                var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, tWidth, tHeight, 1);
                var convertedFrameBufferPtr  = Marshal.AllocHGlobal(convertedFrameBufferSize);
                var dstData     = new byte_ptrArray4();
                var dstLinesize = new int_array4();
                ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)convertedFrameBufferPtr, destinationPixFmt, tWidth, tHeight, 1);

                var pCodec = ffmpeg.avcodec_find_decoder(codecId);
                if (pCodec == null)
                {
                    throw new ApplicationException(@"Unsupported codec.");
                }

                var pCodecContext = &codecContext;

                if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
                {
                    pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
                }

                error = ffmpeg.avcodec_open2(pCodecContext, pCodec, null);
                if (error < 0)
                {
                    throw new ApplicationException(GetErrorMessage(error));
                }


                var pDecodedFrame = ffmpeg.av_frame_alloc();

                var packet  = new AVPacket();
                var pPacket = &packet;
                ffmpeg.av_init_packet(pPacket);

                // Calculate Time interval for Frame
                AVRational relation = new AVRational()
                {
                    num = 1,
                    den = ffmpeg.AV_TIME_BASE
                };

                var    frameNumber    = 24;
                long   duration       = ffmpeg.av_rescale_q(pFormatContext->duration, relation, pStream->time_base);
                double interval       = duration / (double)frameNumber;
                var    timebase       = pStream->time_base;
                double timebaseDouble = timebase.num / (double)timebase.den;

                int count      = 0;
                var thumbnails = new List <NSImage>();
                while (count <= frameNumber)
                {
                    long seek_pos = Convert.ToInt64(interval * count + pStream->start_time);

                    ffmpeg.avcodec_flush_buffers(pCodecContext);

                    error = ffmpeg.av_seek_frame(pFormatContext, videoStream, seek_pos, ffmpeg.AVSEEK_FLAG_BACKWARD);
                    if (error < 0)
                    {
                        throw new ApplicationException(GetErrorMessage(error));
                    }

                    ffmpeg.avcodec_flush_buffers(pCodecContext);

                    Console.WriteLine("Frame seek pos {0} {1}", seek_pos, count);

                    while ((error = ffmpeg.av_read_frame(pFormatContext, pPacket)) >= 0)
                    {
                        if (packet.stream_index == videoStream)
                        {
                            Console.WriteLine("Process frame {0}", count);


                            error = ffmpeg.avcodec_send_packet(pCodecContext, pPacket);

                            if (error < 0)
                            {
                                throw new ApplicationException(GetErrorMessage(error));
                            }

                            error = ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame);

                            if (error == ffmpeg.AVERROR(35)) // 35 == EAGAIN on macOS: the decoder needs more input
                            {
                                continue;
                            }
                            if (error < 0)
                            {
                                throw new ApplicationException(GetErrorMessage(error));
                            }

                            Console.WriteLine($@"frame: {count}");

                            ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, height, dstData, dstLinesize);

                            var image = SaveToFile(dstData, tWidth, tHeight, $@"{count}.tiff");

                            thumbnails.Add(image);
                            count++;
                            break;
                        }

                        ffmpeg.av_packet_unref(pPacket);
                        ffmpeg.av_frame_unref(pDecodedFrame);
                    }
                }

                Marshal.FreeHGlobal(convertedFrameBufferPtr);
                ffmpeg.av_free(pConvertedFrame);
                ffmpeg.sws_freeContext(pConvertContext);

                ffmpeg.av_free(pDecodedFrame);
                ffmpeg.avcodec_close(pCodecContext);
                ffmpeg.avformat_close_input(&pFormatContext);

                return(thumbnails);
            }
        }
Example #17
        /// <summary>
        /// Decodes and converts the H.264 packets that are read
        /// </summary>
        /// <param name="show">Callback invoked for each decoded frame</param>
        /// <param name="url">Playback URL; may also be a local file path</param>
        public unsafe void Start(ShowBitmap show, string url)
        {
            CanRun = true;

            Console.WriteLine(@"Current directory: " + Environment.CurrentDirectory);
            Console.WriteLine(@"Runnung in {0}-bit mode.", Environment.Is64BitProcess ? @"64" : @"32");
            //FFmpegDLL目录查找和设置
            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            #region FFmpeg initialization
            // Register the FFmpeg codecs and formats
            ffmpeg.av_register_all();
            ffmpeg.avcodec_register_all();
            ffmpeg.avformat_network_init();

            Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");
            #endregion

            #region FFmpeg logging
            // Set the FFmpeg log level
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
            av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
            {
                if (level > ffmpeg.av_log_get_level())
                {
                    return;
                }

                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                Console.Write(line);
            };
            ffmpeg.av_log_set_callback(logCallback);

            #endregion

            #region FFmpeg conversion setup


            // Allocate the audio/video format context
            var pFormatContext = ffmpeg.avformat_alloc_context();

            int error;

            // Open the stream
            error = ffmpeg.avformat_open_input(&pFormatContext, url, null, null);
            if (error != 0)
            {
                throw new ApplicationException(GetErrorMessage(error));
            }

            // Read the media stream info
            error = ffmpeg.avformat_find_stream_info(pFormatContext, null);
            if (error != 0)
            {
                throw new ApplicationException(GetErrorMessage(error));
            }

            // This just prints some of the container metadata
            AVDictionaryEntry *tag = null;
            while ((tag = ffmpeg.av_dict_get(pFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
            {
                var key   = Marshal.PtrToStringAnsi((IntPtr)tag->key);
                var value = Marshal.PtrToStringAnsi((IntPtr)tag->value);
                Console.WriteLine($"{key} = {value}");
            }

            // Find the stream indices in the format context
            AVStream *pStream = null, aStream;
            for (var i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                }
                else if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
                {
                    aStream = pFormatContext->streams[i];
                }
            }
            if (pStream == null)
            {
                throw new ApplicationException(@"Could not found video stream.");
            }

            // Get the stream's codec context
            var codecContext = *pStream->codec;

            Console.WriteLine($"codec name: {ffmpeg.avcodec_get_name(codecContext.codec_id)}");
            // Get the image width, height and pixel format
            var width        = codecContext.width;
            var height       = codecContext.height;
            var sourcePixFmt = codecContext.pix_fmt;

            // Get the codec ID
            var codecId = codecContext.codec_id;
            // Destination pixel format
            var destinationPixFmt = AVPixelFormat.AV_PIX_FMT_BGR24;


            // Some H.264 streams report AV_PIX_FMT_NONE in codecContext.pix_fmt; treat those as YUV420P
            if (sourcePixFmt == AVPixelFormat.AV_PIX_FMT_NONE && codecId == AVCodecID.AV_CODEC_ID_H264)
            {
                sourcePixFmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
            }

            // Get the SwsContext used for image scaling and pixel-format conversion
            var pConvertContext = ffmpeg.sws_getContext(width, height, sourcePixFmt,
                                                        width, height, destinationPixFmt,
                                                        ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (pConvertContext == null)
            {
                throw new ApplicationException(@"Could not initialize the conversion context.");
            }

            // Allocate a default frame object (AVFrame)
            var pConvertedFrame = ffmpeg.av_frame_alloc();
            // Byte size required for the destination format
            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, width, height, 1);
            // Allocate memory for the destination buffer
            var convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            var dstData     = new byte_ptrArray4();
            var dstLinesize = new int_array4();
            // Fill the image plane pointers and line sizes
            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)convertedFrameBufferPtr, destinationPixFmt, width, height, 1);

            #endregion

            #region FFmpeg decoding
            // Find the decoder for this codec ID
            var pCodec = ffmpeg.avcodec_find_decoder(codecId);
            if (pCodec == null)
            {
                throw new ApplicationException(@"Unsupported codec.");
            }

            var pCodecContext = &codecContext;

            if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
            }

            // Open the codec context (AVCodecContext pCodecContext) with the decoder
            error = ffmpeg.avcodec_open2(pCodecContext, pCodec, null);
            if (error < 0)
            {
                throw new ApplicationException(GetErrorMessage(error));
            }

            // Allocate the decoded frame (AVFrame pDecodedFrame)
            var pDecodedFrame = ffmpeg.av_frame_alloc();

            // Initialize the media packet
            var packet  = new AVPacket();
            var pPacket = &packet;
            ffmpeg.av_init_packet(pPacket);

            var frameNumber = 0;
            while (CanRun)
            {
                try
                {
                    do
                    {
                        // Read one packet of undecoded data
                        error = ffmpeg.av_read_frame(pFormatContext, pPacket);
                        // Console.WriteLine(pPacket->dts);
                        if (error == ffmpeg.AVERROR_EOF)
                        {
                            break;
                        }
                        if (error < 0)
                        {
                            throw new ApplicationException(GetErrorMessage(error));
                        }

                        if (pPacket->stream_index != pStream->index)
                        {
                            continue;
                        }

                        // Send the packet to the decoder
                        error = ffmpeg.avcodec_send_packet(pCodecContext, pPacket);
                        if (error < 0)
                        {
                            throw new ApplicationException(GetErrorMessage(error));
                        }
                        // Receive the decoded frame from the decoder
                        error = ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame);
                    } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN) && CanRun);
                    if (error == ffmpeg.AVERROR_EOF)
                    {
                        break;
                    }
                    if (error < 0)
                    {
                        throw new ApplicationException(GetErrorMessage(error));
                    }

                    if (pPacket->stream_index != pStream->index)
                    {
                        continue;
                    }

                    //Console.WriteLine($@"frame: {frameNumber}");
                    // YUV->RGB
                    ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, height, dstData, dstLinesize);
                }
                finally
                {
                    ffmpeg.av_packet_unref(pPacket);      // release the packet reference
                    ffmpeg.av_frame_unref(pDecodedFrame); // release the decoded-frame reference
                }

                // Wrap the converted buffer in a Bitmap
                var bitmap = new Bitmap(width, height, dstLinesize[0], PixelFormat.Format24bppRgb, convertedFrameBufferPtr);
                // Invoke the callback with the decoded image
                show(bitmap);
                //bitmap.Save(AppDomain.CurrentDomain.BaseDirectory + "\\264\\frame.buffer."+ frameNumber + ".jpg", ImageFormat.Jpeg);

                frameNumber++;
            }
            // Playback finished: pass null to clear the displayed image
            show(null);

            #endregion

            #region Release resources
            Marshal.FreeHGlobal(convertedFrameBufferPtr);
            ffmpeg.av_free(pConvertedFrame);
            ffmpeg.sws_freeContext(pConvertContext);

            ffmpeg.av_free(pDecodedFrame);
            ffmpeg.avcodec_close(pCodecContext);
            ffmpeg.avformat_close_input(&pFormatContext);


            #endregion
        }
Example #18
        public static void UseDefaultLogCallback()
        {
            logCallback = (p0, level, format, vl) => ffmpeg.av_log_default_callback(p0, level, format, vl);

            ffmpeg.av_log_set_callback(logCallback);
        }
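If examples #3 and #18 live on the same helper class, a typical usage pattern is to capture logs around one operation and then restore the default sink. A hypothetical sketch (the class name and the work being wrapped are assumptions):

            FFmpegLogging.UseSpecificLogCallback(storeLogs: true);
            RunSomeFFmpegWork();                  // placeholder for the caller's own FFmpeg calls
            FFmpegLogging.UseDefaultLogCallback();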
Example #19
        public unsafe static bool FromVideo(string srcFile, string outFile)
        {
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);

            // do not convert to local function
            av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
            {
                if (level > ffmpeg.av_log_get_level())
                {
                    return;
                }

                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                Console.ForegroundColor = ConsoleColor.Yellow;
                Console.Write(line);
                Console.ResetColor();
            };

            ffmpeg.av_log_set_callback(logCallback);

            using (var vsd = new VideoStreamDecoder(srcFile))
            {
                Console.WriteLine($"codec name: {vsd.CodecName}");

                var info = vsd.GetContextInfo();
                info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}"));

                var sourceSize             = vsd.FrameSize;
                var sourcePixelFormat      = vsd.PixelFormat;
                var destinationSize        = sourceSize;
                var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;
                using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
                {
                    // Grab the first frame
                    if (vsd.TryDecodeNextFrame(out var frame))
                    {
                        var convertedFrame = vfc.Convert(frame);
                        using (var bitmap = new Bitmap(convertedFrame.width, convertedFrame.height, convertedFrame.linesize[0], PixelFormat.Format24bppRgb, (IntPtr)convertedFrame.data[0]))
                        {
                            bitmap.Save(outFile, ImageFormat.Jpeg);
                            return(true);
                        }
                    }
                    //var frameNumber = 0;
                    //while (vsd.TryDecodeNextFrame(out var frame))
                    //{
                    //    var convertedFrame = vfc.Convert(frame);

                    //    using (var bitmap = new Bitmap(convertedFrame.width, convertedFrame.height, convertedFrame.linesize[0], PixelFormat.Format24bppRgb, (IntPtr)convertedFrame.data[0]))
                    //        bitmap.Save($"frame.{frameNumber:D8}.jpg", ImageFormat.Jpeg);

                    //    Console.WriteLine($"frame: {frameNumber}");
                    //    frameNumber++;
                    //}
                }
            }
            return(false);
        }