Example #1
0
        /// <summary>
        /// Copies every entry of a native dictionary into a managed list of
        /// key/value pairs (empty key + IGNORE_SUFFIX matches all entries).
        /// </summary>
        /// <param name="dict">The native dictionary to enumerate.</param>
        /// <returns>All entries as key/value pairs, in enumeration order.</returns>
        public static IReadOnlyList <KeyValuePair <string, string> > GetKeyValues(AVDictionary *dict)
        {
            var pairs = new List <KeyValuePair <string, string> >();

            // Each call to av_dict_get resumes the walk from the previous entry.
            for (AVDictionaryEntry *entry = ffmpeg.av_dict_get(dict, "", null, (int)DictFlags.AV_DICT_IGNORE_SUFFIX);
                 entry != null;
                 entry = ffmpeg.av_dict_get(dict, "", entry, (int)DictFlags.AV_DICT_IGNORE_SUFFIX))
            {
                pairs.Add((*entry).ToKeyValuePair());
            }

            return pairs;
        }
Example #2
0
        /// <summary>
        /// Returns the values of all entries whose key matches
        /// <paramref name="key"/> under the given flags.
        /// </summary>
        /// <param name="key">The key to look up.</param>
        /// <param name="flags">Native matching flags passed to av_dict_get.</param>
        /// <returns>The matching values; empty when no entry matches.</returns>
        public string[] GetValue(string key, DictFlags flags)
        {
            var matches = new List <string>();
            AVDictionaryEntry *entry = null;

            for (;;)
            {
                // Resume the walk from the previously returned entry.
                entry = ffmpeg.av_dict_get(*ppDictionary, key, entry, (int)flags);
                if (entry == null)
                {
                    break;
                }
                matches.Add((*entry).ToKeyValuePair().Value);
            }

            return matches.ToArray();
        }
Example #3
0
        /// <summary>
        /// Determines whether the dictionary contains an entry with the given
        /// key (case-sensitive match).
        /// </summary>
        /// <param name="key">The key to look up; must not be null.</param>
        /// <returns>true when an entry exists; otherwise false.</returns>
        /// <exception cref="ArgumentNullException">When <paramref name="key"/> is null.</exception>
        public bool ContainsKey(string key)
        {
            // A null key is a caller error, not a "not found" result.
            if (key is null)
            {
                throw new ArgumentNullException(nameof(key));
            }

            // A non-null entry pointer means the key is present.
            return av_dict_get(this, key, null, (int)DictFlags.MatchCase) != null;
        }
Example #4
0
        /// <summary>
        /// Snapshots every entry of a native dictionary into an array of
        /// key/value pairs (empty key + IgnoreSuffix enumerates everything).
        /// </summary>
        /// <param name="dict">The native dictionary to enumerate.</param>
        /// <returns>All entries as an array; empty when the dictionary has none.</returns>
        public static KeyValuePair <string, string>[] GetKeyValues(AVDictionary *dict)
        {
            var result = new List <KeyValuePair <string, string> >();
            AVDictionaryEntry *entry = null;

            while (true)
            {
                entry = ffmpeg.av_dict_get(dict, "", entry, (int)(DictFlags.IgnoreSuffix));
                if (entry == null)
                {
                    break;
                }
                result.Add((*entry).ToKeyValuePair());
            }

            return result.ToArray();
        }
Example #5
0
        /// <summary>
        /// Collects the values of all entries matching <paramref name="key"/>.
        /// </summary>
        /// <param name="key">The key to look up.</param>
        /// <param name="flags">Native read flags.</param>
        /// <param name="values">All matching values; empty when none match.</param>
        /// <returns>true when at least one value was found.</returns>
        public bool TryGetValues(string key, AVDictReadFlags flags, out string[] values)
        {
            var found = new List <string>();
            AVDictionaryEntry *entry = null;

            for (;;)
            {
                // Chain the walk from the previously returned entry pointer.
                entry = (AVDictionaryEntry *)av_dict_get_safe(this, key, (IntPtr)entry, flags);
                if (entry == null)
                {
                    break;
                }
                found.Add((*entry).GetValue());
            }

            values = found.ToArray();
            return values.Length != 0;
        }
Example #6
0
        /// <summary>
        /// Collects the UTF-8 decoded values of all entries matching
        /// <paramref name="key"/>.
        /// </summary>
        /// <param name="key">The key to look up.</param>
        /// <param name="flags">Native matching flags.</param>
        /// <param name="values">All matching values; empty when none match.</param>
        /// <returns>true when at least one value was found.</returns>
        public bool TryGetValues(string key, DictFlags flags, out string[] values)
        {
            var found = new List <string>();

            // DictGet resumes from the previously returned entry until exhausted;
            // IntPtr.Zero starts the walk at the beginning.
            for (AVDictionaryEntry *entry = (AVDictionaryEntry *)DictGet(this, key, IntPtr.Zero, flags);
                 entry != null;
                 entry = (AVDictionaryEntry *)DictGet(this, key, (IntPtr)entry, flags))
            {
                found.Add(((IntPtr)entry->value).PtrToStringUTF8());
            }

            values = found.ToArray();
            return values.Length != 0;
        }
Example #7
0
        /// <summary>
        /// Removes every entry matching <paramref name="key"/> from the dictionary.
        /// </summary>
        /// <param name="key">The key to remove.</param>
        /// <param name="flags">Matching flags; defaults to a case-sensitive match.</param>
        /// <returns>true when at least one entry was removed; otherwise false.</returns>
        public bool Remove(string key, DictFlags flags = DictFlags.AV_DICT_MATCH_CASE)
        {
            int count            = 0;
            AVDictionaryEntry *t = null;

            // Restart the lookup from the beginning (prev = null) on every
            // iteration: Add(key, null, 0) mutates the dictionary, which
            // invalidates any previously returned AVDictionaryEntry pointer.
            // Feeding the stale pointer back into av_dict_get (as the original
            // code did) is undefined behavior on the native side.
            while ((t = ffmpeg.av_dict_get(*ppDictionary, key, null, (int)flags)) != null)
            {
                // Setting a key's value to null deletes the matched entry.
                Add(key, null, 0);
                count++;
            }
            return(count != 0);
        }
        /// <summary>
        /// Reads all metadata entries of the underlying format context into a
        /// managed read-only dictionary.
        /// </summary>
        /// <returns>The metadata key/value pairs; empty when there is none.</returns>
        public unsafe IReadOnlyDictionary <string, string> GetContextInfo()
        {
            AVDictionaryEntry *         tag    = null;
            Dictionary <string, string> result = new Dictionary <string, string>();

            // Use the named constant instead of the magic number 2
            // (ffmpeg.AV_DICT_IGNORE_SUFFIX == 2), matching the other
            // dictionary walkers in this codebase.
            while ((tag = ffmpeg.av_dict_get(this._pFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
            {
                string key   = Marshal.PtrToStringAnsi((IntPtr)(void *)tag->key);
                string value = Marshal.PtrToStringAnsi((IntPtr)(void *)tag->value);

                // Indexer assignment instead of Add: a duplicate metadata key
                // would otherwise throw ArgumentException and abort the read.
                result[key] = value;
            }
            return(result);
        }
Example #9
0
        /// <summary>
        /// Copies a native dictionary into a managed read-only dictionary
        /// (empty key + IGNORE_SUFFIX enumerates every entry).
        /// </summary>
        /// <param name="dict">The native dictionary to copy.</param>
        /// <returns>The managed key/value snapshot.</returns>
        private static IReadOnlyDictionary <string, string> GetDictionary(AVDictionary *dict)
        {
            var entries = new Dictionary <string, string>();

            for (AVDictionaryEntry *entry = ffmpeg.av_dict_get(dict, "", null, ffmpeg.AV_DICT_IGNORE_SUFFIX);
                 entry != null;
                 entry = ffmpeg.av_dict_get(dict, "", entry, ffmpeg.AV_DICT_IGNORE_SUFFIX))
            {
                entries.Add(Marshal.PtrToStringAnsi((IntPtr)entry->key),
                            Marshal.PtrToStringAnsi((IntPtr)entry->value));
            }

            return entries;
        }
Example #10
0
        /// <summary>
        /// Reads all metadata entries of the format context into a managed
        /// read-only dictionary.
        /// </summary>
        /// <returns>The metadata key/value pairs; empty when there is none.</returns>
        public IReadOnlyDictionary <String, String> GetContextInfo()
        {
            var result = new Dictionary <String, String>();

            // Each av_dict_get call resumes the walk from the previous entry.
            for (AVDictionaryEntry *entry = ffmpeg.av_dict_get(_pFormatContext->metadata, "", null, ffmpeg.AV_DICT_IGNORE_SUFFIX);
                 entry != null;
                 entry = ffmpeg.av_dict_get(_pFormatContext->metadata, "", entry, ffmpeg.AV_DICT_IGNORE_SUFFIX))
            {
                result.Add(Marshal.PtrToStringAnsi((IntPtr)entry->key),
                           Marshal.PtrToStringAnsi((IntPtr)entry->value));
            }

            return result;
        }
Example #11
0
        /// <summary>
        /// Attempts a case-sensitive lookup of <paramref name="key"/>.
        /// </summary>
        /// <param name="key">The key to look up.</param>
        /// <param name="value">The entry's value on success; null otherwise.</param>
        /// <returns>true when the key exists; otherwise false.</returns>
        public bool TryGetValue(string key, [NotNullWhen(returnValue: true)] out string value)
        {
            AVDictionaryEntry *entry = av_dict_get(this, key, null, (int)MediaDictionaryReadFlags.CaseSensitive);

            if (entry != null)
            {
                value = Marshal.PtrToStringUTF8((IntPtr)entry->value);
                return(true);
            }

            // Null-forgiving: the NotNullWhen contract only promises a value
            // when the method returns true.
            value = null !;
            return(false);
        }
Example #12
0
        /// <summary>
        /// Attempts a case-sensitive lookup of <paramref name="key"/>.
        /// </summary>
        /// <param name="key">The key to look up.</param>
        /// <param name="value">The entry's value on success; null otherwise.</param>
        /// <returns>true when the key exists; otherwise false.</returns>
        public bool TryGetValue(string key, out string value)
        {
            AVDictionaryEntry *entry = av_dict_get(this, key, null, (int)DictFlags.MatchCase);

            if (entry != null)
            {
                value = ((IntPtr)entry->value).PtrToStringUTF8();
                return(true);
            }

            value = null;
            return(false);
        }
Example #13
0
        /// <summary>
        /// Converts an <see cref="AVDictionary"/> object to an
        /// <see cref="IReadOnlyDictionary{String, String}"/>.
        /// </summary>
        /// <param name="dictionary">
        /// The native dictionary to convert.
        /// </param>
        /// <returns>
        /// An equivalent managed dictionary.
        /// </returns>
        public static unsafe IReadOnlyDictionary <string, string> ToReadOnlyDictionary(AVDictionary *dictionary)
        {
            var values = new Dictionary <string, string>();

            // Empty key + IGNORE_SUFFIX walks every entry of the dictionary.
            for (AVDictionaryEntry *tag = ffmpeg.av_dict_get(dictionary, string.Empty, null, ffmpeg.AV_DICT_IGNORE_SUFFIX);
                 tag != null;
                 tag = ffmpeg.av_dict_get(dictionary, string.Empty, tag, ffmpeg.AV_DICT_IGNORE_SUFFIX))
            {
                string key   = Marshal.PtrToStringAnsi((IntPtr)tag->key);
                string value = Marshal.PtrToStringAnsi((IntPtr)tag->value);

                values.Add(key !, value !);
            }

            return values;
        }
Example #14
0
        /// <summary>
        /// Converts a native dictionary into a managed
        /// <see cref="IDictionary{String, String}"/>.
        /// </summary>
        /// <param name="avDictionary">The native dictionary to convert.</param>
        /// <returns>The managed key/value snapshot.</returns>
        internal unsafe static IDictionary <string, string> DictionaryConvert(AVDictionary *avDictionary)
        {
            IDictionary <string, string> result = new Dictionary <string, string>();
            AVDictionaryEntry *          entry  = null;

            // A plain while loop replaces the original do/while: the null
            // check happens exactly once per entry either way.
            while ((entry = ffmpeg.av_dict_get(avDictionary, string.Empty, entry, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
            {
                result.Add(
                    Marshal.PtrToStringAnsi(new IntPtr(entry->key)),
                    Marshal.PtrToStringAnsi(new IntPtr(entry->value)));
            }

            return result;
        }
Example #15
0
        /// <summary>
        /// Gets or sets the value for <paramref name="key"/> (case-sensitive
        /// on read).
        /// </summary>
        /// <exception cref="KeyNotFoundException">On read, when the key is absent.</exception>
        public string this[string key]
        {
            get
            {
                AVDictionaryEntry *entry = av_dict_get(this, key, null, (int)DictFlags.MatchCase);
                return entry != null
                    ? ((IntPtr)entry->value).PtrToStringUTF8()
                    : throw new KeyNotFoundException(key);
            }
            set
            {
                // av_dict_set may reallocate the native dictionary, so write
                // the (possibly new) pointer back after a successful set.
                AVDictionary *dict = this;
                av_dict_set(&dict, key, value, (int)DictFlags.None).ThrowIfError();
                _handle = dict;
            }
        }
Example #16
0
        /// <summary>
        /// Converts a native dictionary into a managed
        /// <see cref="Dictionary{String, String}"/>.
        /// </summary>
        /// <param name="avDictionary">The native dictionary; may be null.</param>
        /// <returns>The managed snapshot; empty when the input is null or empty.</returns>
        internal static unsafe Dictionary <string, string> ToDictionary(AVDictionary *avDictionary)
        {
            var dictionary = new Dictionary <string, string>();

            // A null native dictionary simply yields an empty managed one.
            if (avDictionary != null)
            {
                AVDictionaryEntry *tag = null;

                while ((tag = ffmpeg.av_dict_get(avDictionary, string.Empty, tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
                {
                    dictionary.Add(Marshal.PtrToStringAnsi((IntPtr)tag->key),
                                   Marshal.PtrToStringAnsi((IntPtr)tag->value));
                }
            }

            return dictionary;
        }
Example #17
0
        /// <summary>
        /// Gets or sets the value for <paramref name="key"/> (case-sensitive
        /// on read).
        /// </summary>
        /// <exception cref="KeyNotFoundException">On read, when the key is absent.</exception>
        public string this[string key]
        {
            get
            {
                AVDictionaryEntry *entry = av_dict_get(this, key, null, (int)MediaDictionaryReadFlags.CaseSensitive);
                return entry != null
                    ? Marshal.PtrToStringUTF8((IntPtr)entry->value)
                    : throw new KeyNotFoundException(key);
            }
            set
            {
                // av_dict_set may reallocate the native dictionary, so store
                // the (possibly new) pointer after a successful set.
                AVDictionary *dict = this;
                av_dict_set(&dict, key, value, (int)MediaDictionarySetFlags.None).ThrowIfError();
                _nativePointer = (IntPtr)dict;
            }
        }
Example #18
0
    /// <summary>
    /// Reads the container-level metadata, normalizing keys to lower case
    /// (invariant culture) so lookups don't depend on the container's spelling.
    /// </summary>
    /// <returns>The metadata key/value pairs; empty when there is none.</returns>
    public Dictionary <string, string> ReadMetadata()
    {
        var metadata = new Dictionary <string, string>();
        AVDictionaryEntry *tag = null;

        while ((tag = ffmpeg.av_dict_get(RawFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
        {
            var key   = Marshal.PtrToStringUTF8((IntPtr)tag->key) !.ToLower(CultureInfo.InvariantCulture);
            var value = Marshal.PtrToStringUTF8((IntPtr)tag->value) !;
            metadata.Add(key, value);
        }

        return metadata;
    }
Example #19
0
        /// <summary>
        /// Demo entry point: opens a media URL with FFmpeg, dumps its metadata,
        /// decodes up to 2000 video frames, converts each to BGR24 and saves it
        /// as a JPEG (each frame overwrites the same file).
        /// </summary>
        /// <param name="args">Unused.</param>
        private static unsafe void Main(string[] args)
        {
            Console.WriteLine(@"Current directory: " + Environment.CurrentDirectory);
            Console.WriteLine(@"Runnung in {0}-bit mode.", Environment.Is64BitProcess ? @"64" : @"32");

            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            // NOTE(review): these register/init calls are deprecated no-ops in
            // FFmpeg 4+ — presumably kept for compatibility with older binaries.
            ffmpeg.av_register_all();
            ffmpeg.avcodec_register_all();
            ffmpeg.avformat_network_init();

            Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

            // setup logging: forward FFmpeg's native log lines to the console.
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
            av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
            {
                if (level > ffmpeg.av_log_get_level())
                {
                    return;
                }

                // Format the native message into a stack buffer, then marshal
                // it into a managed string.
                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                Console.Write(line);
            };

            // NOTE(review): logCallback is a local delegate; if FFmpeg invokes
            // it after Main's locals are collected this could crash — confirm
            // the callback is kept alive for the process lifetime.
            ffmpeg.av_log_set_callback(logCallback);

            // decode N frames from url or path

            //string url = @"../../sample_mpeg4.mp4";
            var url = @"http://www.quirksmode.org/html5/videos/big_buck_bunny.mp4";

            var pFormatContext = ffmpeg.avformat_alloc_context();

            int error;

            // Open the input and read its stream information.
            error = ffmpeg.avformat_open_input(&pFormatContext, url, null, null);
            if (error != 0)
            {
                throw new ApplicationException(GetErrorMessage(error));
            }

            error = ffmpeg.avformat_find_stream_info(pFormatContext, null);
            if (error != 0)
            {
                throw new ApplicationException(GetErrorMessage(error));
            }

            // Print all container-level metadata entries.
            AVDictionaryEntry *tag = null;

            while ((tag = ffmpeg.av_dict_get(pFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
            {
                var key   = Marshal.PtrToStringAnsi((IntPtr)tag->key);
                var value = Marshal.PtrToStringAnsi((IntPtr)tag->value);
                Console.WriteLine($"{key} = {value}");
            }

            // Pick the first video stream.
            AVStream *pStream = null;

            for (var i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new ApplicationException(@"Could not found video stream.");
            }

            // Copy of the stream's codec context; pCodecContext below points
            // at this stack copy, not at the stream's own context.
            var codecContext = *pStream->codec;

            Console.WriteLine($"codec name: {ffmpeg.avcodec_get_name(codecContext.codec_id)}");

            var width             = codecContext.width;
            var height            = codecContext.height;
            var sourcePixFmt      = codecContext.pix_fmt;
            var codecId           = codecContext.codec_id;
            var destinationPixFmt = AVPixelFormat.AV_PIX_FMT_BGR24;
            // Scaler context: same dimensions, source pixel format -> BGR24.
            var pConvertContext   = ffmpeg.sws_getContext(width, height, sourcePixFmt,
                                                          width, height, destinationPixFmt,
                                                          ffmpeg.SWS_FAST_BILINEAR, null, null, null);

            if (pConvertContext == null)
            {
                throw new ApplicationException(@"Could not initialize the conversion context.");
            }

            // Destination buffer for the converted (BGR24) image.
            var pConvertedFrame          = ffmpeg.av_frame_alloc();
            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, width, height, 1);
            var convertedFrameBufferPtr  = Marshal.AllocHGlobal(convertedFrameBufferSize);
            var dstData     = new byte_ptrArray4();
            var dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)convertedFrameBufferPtr, destinationPixFmt, width, height, 1);

            var pCodec = ffmpeg.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new ApplicationException(@"Unsupported codec.");
            }

            var pCodecContext = &codecContext;

            if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
            }

            error = ffmpeg.avcodec_open2(pCodecContext, pCodec, null);
            if (error < 0)
            {
                throw new ApplicationException(GetErrorMessage(error));
            }

            var pDecodedFrame = ffmpeg.av_frame_alloc();

            var packet  = new AVPacket();
            var pPacket = &packet;

            ffmpeg.av_init_packet(pPacket);

            var frameNumber = 0;

            // Decode loop: read packets, feed the decoder, receive frames.
            while (frameNumber < 2000)
            {
                try
                {
                    // Keep reading/sending packets until the decoder produces
                    // a frame (EAGAIN means "send more input").
                    do
                    {
                        error = ffmpeg.av_read_frame(pFormatContext, pPacket);
                        if (error == ffmpeg.AVERROR_EOF)
                        {
                            break;
                        }
                        if (error < 0)
                        {
                            throw new ApplicationException(GetErrorMessage(error));
                        }

                        // Skip packets that belong to other streams.
                        if (pPacket->stream_index != pStream->index)
                        {
                            continue;
                        }

                        error = ffmpeg.avcodec_send_packet(pCodecContext, pPacket);
                        if (error < 0)
                        {
                            throw new ApplicationException(GetErrorMessage(error));
                        }

                        error = ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame);
                    } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
                    if (error == ffmpeg.AVERROR_EOF)
                    {
                        break;
                    }
                    if (error < 0)
                    {
                        throw new ApplicationException(GetErrorMessage(error));
                    }

                    if (pPacket->stream_index != pStream->index)
                    {
                        continue;
                    }

                    Console.WriteLine($@"frame: {frameNumber}");

                    // Convert the decoded frame into the BGR24 buffer.
                    ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, height, dstData, dstLinesize);
                }
                finally
                {
                    // Always release per-iteration packet/frame references.
                    ffmpeg.av_packet_unref(pPacket);
                    ffmpeg.av_frame_unref(pDecodedFrame);
                }

                // Each iteration overwrites the same output file.
                using (var bitmap = new Bitmap(width, height, dstLinesize[0], PixelFormat.Format24bppRgb, convertedFrameBufferPtr))
                    bitmap.Save(@"frame.buffer.jpg", ImageFormat.Jpeg);

                frameNumber++;
            }

            // Teardown of the native resources allocated above.
            Marshal.FreeHGlobal(convertedFrameBufferPtr);
            ffmpeg.av_free(pConvertedFrame);
            ffmpeg.sws_freeContext(pConvertContext);

            ffmpeg.av_free(pDecodedFrame);
            ffmpeg.avcodec_close(pCodecContext);
            ffmpeg.avformat_close_input(&pFormatContext);
        }
Example #20
0
        /// <summary>
        /// Builds a <see cref="StreamInfo"/> snapshot from a native stream:
        /// codec parameters, timing, video/audio specifics and metadata.
        /// </summary>
        /// <param name="st">The native stream to inspect; must be non-null.</param>
        /// <returns>The populated stream information.</returns>
        public static StreamInfo Get(AVStream *st)
        {
            StreamInfo si = new StreamInfo();

            si.Type          = st->codecpar->codec_type;
            si.CodecID       = st->codecpar->codec_id;
            si.CodecName     = avcodec_get_name(st->codecpar->codec_id);
            si.StreamIndex   = st->index;
            // Seconds-per-unit scaled by 1e7 — presumably .NET 100ns ticks; confirm.
            si.Timebase      = av_q2d(st->time_base) * 10000.0 * 1000.0;
            si.DurationTicks = (long)(st->duration * si.Timebase);
            si.StartTime     = (st->start_time != AV_NOPTS_VALUE) ? (long)(st->start_time * si.Timebase) : 0;
            si.BitRate       = st->codecpar->bit_rate;

            if (si.Type == AVMEDIA_TYPE_VIDEO)
            {
                si.PixelFormat  = (AVPixelFormat)Enum.ToObject(typeof(AVPixelFormat), st->codecpar->format);
                si.Width        = st->codecpar->width;
                si.Height       = st->codecpar->height;
                si.FPS          = av_q2d(st->r_frame_rate);
                si.AspectRatio  = st->codecpar->sample_aspect_ratio;
                si.VideoBitRate = st->codecpar->bit_rate;
            }
            else if (si.Type == AVMEDIA_TYPE_AUDIO)
            {
                si.SampleFormat  = (AVSampleFormat)Enum.ToObject(typeof(AVSampleFormat), st->codecpar->format);
                si.SampleRate    = st->codecpar->sample_rate;
                si.ChannelLayout = st->codecpar->channel_layout;
                si.Channels      = st->codecpar->channels;
                si.Bits          = st->codecpar->bits_per_coded_sample;
                si.AudioBitRate  = st->codecpar->bit_rate;

                // Render the channel layout ("stereo", "5.1", ...) into a
                // native buffer and decode it to a managed string.
                byte[] buf = new byte[50];
                fixed(byte *bufPtr = buf)
                {
                    av_get_channel_layout_string(bufPtr, 50, si.Channels, si.ChannelLayout);
                    si.ChannelLayoutStr = Utils.BytePtrToStringUTF8(bufPtr);
                }
            }

            si.Metadata = new Dictionary <string, string>();

            AVDictionaryEntry *b = null;

            while (true)
            {
                b = av_dict_get(st->metadata, "", b, AV_DICT_IGNORE_SUFFIX);
                if (b == null)
                {
                    break;
                }
                si.Metadata.Add(Utils.BytePtrToStringUTF8(b->key), Utils.BytePtrToStringUTF8(b->value));
            }

            foreach (var kv in si.Metadata)
            {
                // Ordinal case-insensitive comparison instead of ToLower():
                // ToLower() is culture-sensitive (e.g. Turkish dotless-I breaks
                // "LANGUAGE") and allocates a new string on every iteration.
                if (kv.Key.Equals("language", StringComparison.OrdinalIgnoreCase) ||
                    kv.Key.Equals("lang", StringComparison.OrdinalIgnoreCase))
                {
                    si.Language = kv.Value;
                    break;
                }
            }

            return(si);
        }
Example #21
0
        /// <summary>
        /// Extracts evenly spaced thumbnails from the video at
        /// <paramref name="path"/> by seeking to 25 positions (0..24), decoding
        /// one frame at each, scaling it to the requested width and saving it
        /// as a TIFF.
        /// </summary>
        /// <param name="path">Path or URL of the video to process.</param>
        /// <param name="thumbnial_width">Target thumbnail width in pixels; height keeps the aspect ratio.</param>
        /// <returns>The generated thumbnail images.</returns>
        public List <NSImage> ProcessWithFFmpeg(string path, int thumbnial_width)
        {
            unsafe {
                // FFmpeg test
                Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

                // setup logging: forward FFmpeg's native log lines to the console.
                ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
                av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
                {
                    if (level > ffmpeg.av_log_get_level())
                    {
                        return;
                    }

                    // Format into a stack buffer, then marshal to a managed string.
                    var lineSize    = 1024;
                    var lineBuffer  = stackalloc byte[lineSize];
                    var printPrefix = 1;
                    ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                    var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                    Console.Write(line);
                };
                ffmpeg.av_log_set_callback(logCallback);

                // decode N frames from url or path

                //string url = @"../../sample_mpeg4.mp4";
                var url = path;

                var pFormatContext = ffmpeg.avformat_alloc_context();

                // Open the input and read its stream information.
                int error;
                error = ffmpeg.avformat_open_input(&pFormatContext, url, null, null);
                if (error != 0)
                {
                    throw new ApplicationException(GetErrorMessage(error));
                }

                error = ffmpeg.avformat_find_stream_info(pFormatContext, null);
                if (error != 0)
                {
                    throw new ApplicationException(GetErrorMessage(error));
                }

                // Print all container-level metadata entries.
                AVDictionaryEntry *tag = null;
                while ((tag = ffmpeg.av_dict_get(pFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
                {
                    var key   = Marshal.PtrToStringAnsi((IntPtr)tag->key);
                    var value = Marshal.PtrToStringAnsi((IntPtr)tag->value);
                    Console.WriteLine($"{key} = {value}");
                }

                // Pick the first video stream and remember its index for seeking.
                AVStream *pStream    = null;
                int       videoSteam = -1;
                for (var i = 0; i < pFormatContext->nb_streams; i++)
                {
                    if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                    {
                        pStream    = pFormatContext->streams[i];
                        videoSteam = i;
                        break;
                    }
                }
                if (pStream == null)
                {
                    throw new ApplicationException(@"Could not found video stream.");
                }

                // Copy of the stream's codec context; pCodecContext below
                // points at this stack copy, not the stream's own context.
                var codecContext = *pStream->codec;

                Console.WriteLine($"codec name: {ffmpeg.avcodec_get_name(codecContext.codec_id)}");

                var width  = codecContext.width;
                var height = codecContext.height;

                // Set Thumbnail Size: keep the source aspect ratio.
                int tWidth  = thumbnial_width;
                int tHeight = (int)(thumbnial_width / ((float)width / (float)height));

                Console.WriteLine("thumbnail width is {0} and height is {1}", tWidth, tHeight);

                var sourcePixFmt      = codecContext.pix_fmt;
                var codecId           = codecContext.codec_id;
                var destinationPixFmt = AVPixelFormat.AV_PIX_FMT_RGBA;
                // Scaler: source size/pixel format -> thumbnail size, RGBA.
                var pConvertContext   = ffmpeg.sws_getContext(width, height, sourcePixFmt,
                                                              tWidth, tHeight, destinationPixFmt,
                                                              ffmpeg.SWS_BILINEAR, null, null, null);
                if (pConvertContext == null)
                {
                    throw new ApplicationException(@"Could not initialize the conversion context.");
                }

                // Destination buffer for the converted (RGBA) thumbnail image.
                var pConvertedFrame          = ffmpeg.av_frame_alloc();
                var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, tWidth, tHeight, 1);
                var convertedFrameBufferPtr  = Marshal.AllocHGlobal(convertedFrameBufferSize);
                var dstData     = new byte_ptrArray4();
                var dstLinesize = new int_array4();
                ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)convertedFrameBufferPtr, destinationPixFmt, tWidth, tHeight, 1);

                var pCodec = ffmpeg.avcodec_find_decoder(codecId);
                if (pCodec == null)
                {
                    throw new ApplicationException(@"Unsupported codec.");
                }

                var pCodecContext = &codecContext;

                if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
                {
                    pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
                }

                error = ffmpeg.avcodec_open2(pCodecContext, pCodec, null);
                if (error < 0)
                {
                    throw new ApplicationException(GetErrorMessage(error));
                }


                var pDecodedFrame = ffmpeg.av_frame_alloc();

                var packet  = new AVPacket();
                var pPacket = &packet;
                ffmpeg.av_init_packet(pPacket);

                // Calculate Time interval for Frame: convert the container
                // duration (AV_TIME_BASE units) into the stream's time base.
                AVRational relation = new AVRational()
                {
                    num = 1,
                    den = ffmpeg.AV_TIME_BASE
                };

                var    frameNumber    = 24;
                long   duration       = ffmpeg.av_rescale_q(pFormatContext->duration, relation, pStream->time_base);
                double interval       = duration / (double)frameNumber;
                var    timebase       = pStream->time_base;
                // NOTE(review): integer division — this is 0 whenever num < den;
                // the value is unused below, so it's harmless but misleading.
                double timebaseDouble = timebase.num / timebase.den;

                int count      = 0;
                var thumbnails = new List <NSImage>();
                // One thumbnail per seek position (count = 0..frameNumber).
                while (count <= frameNumber)
                {
                    long seek_pos = Convert.ToInt64(interval * count + pStream->start_time);

                    ffmpeg.avcodec_flush_buffers(pCodecContext);

                    // Seek backwards to the nearest keyframe before seek_pos.
                    error = ffmpeg.av_seek_frame(pFormatContext, videoSteam, seek_pos, ffmpeg.AVSEEK_FLAG_BACKWARD);
                    if (error < 0)
                    {
                        throw new ApplicationException(GetErrorMessage(error));
                    }

                    ffmpeg.avcodec_flush_buffers(pCodecContext);

                    Console.WriteLine("Frame seek pos {0} {1}", seek_pos, count);

                    // Read packets until one video frame is decoded for this position.
                    while ((error = ffmpeg.av_read_frame(pFormatContext, pPacket)) >= 0)
                    {
                        if (packet.stream_index == videoSteam)
                        {
                            Console.WriteLine("Process frame {0}", count);


                            error = ffmpeg.avcodec_send_packet(pCodecContext, pPacket);

                            if (error < 0)
                            {
                                throw new ApplicationException(GetErrorMessage(error));
                            }

                            error = ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame);

                            // AVERROR(35): 35 is EAGAIN on this platform —
                            // the decoder needs more input; read the next packet.
                            if (error == ffmpeg.AVERROR(35))
                            {
                                continue;
                            }
                            if (error < 0)
                            {
                                throw new ApplicationException(GetErrorMessage(error));
                            }

                            Console.WriteLine($@"frame: {count}");

                            // Scale the decoded frame into the RGBA thumbnail buffer.
                            ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, height, dstData, dstLinesize);

                            var image = SaveToFile(dstData, tWidth, tHeight, $@"{count}.tiff");

                            thumbnails.Add(image);
                            count++;
                            break;
                        }

                        // NOTE(review): packets taken through the video branch
                        // above are never unref'd (the break skips this) —
                        // looks like a per-frame packet leak; confirm.
                        ffmpeg.av_packet_unref(pPacket);
                        ffmpeg.av_frame_unref(pDecodedFrame);
                    }
                }

                // Teardown of the native resources allocated above.
                Marshal.FreeHGlobal(convertedFrameBufferPtr);
                ffmpeg.av_free(pConvertedFrame);
                ffmpeg.sws_freeContext(pConvertContext);

                ffmpeg.av_free(pDecodedFrame);
                ffmpeg.avcodec_close(pCodecContext);
                ffmpeg.avformat_close_input(&pFormatContext);

                return(thumbnails);
            }
        }
Example #22
0
        /// <summary>
        /// 对读取的264数据包进行解码和转换
        /// </summary>
        /// <param name="show">解码完成回调函数</param>
        /// <param name="url">播放地址,也可以是本地文件地址</param>
        public unsafe void Start(ShowBitmap show, string url)
        {
            CanRun = true;

            Console.WriteLine(@"Current directory: " + Environment.CurrentDirectory);
            Console.WriteLine(@"Runnung in {0}-bit mode.", Environment.Is64BitProcess ? @"64" : @"32");
            //FFmpegDLL目录查找和设置
            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            #region ffmpeg 初始化
            // 初始化注册ffmpeg相关的编码器
            ffmpeg.av_register_all();
            ffmpeg.avcodec_register_all();
            ffmpeg.avformat_network_init();

            Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");
            #endregion

            #region ffmpeg 日志
            // 设置记录ffmpeg日志级别
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
            av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
            {
                if (level > ffmpeg.av_log_get_level())
                {
                    return;
                }

                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                Console.Write(line);
            };
            ffmpeg.av_log_set_callback(logCallback);

            #endregion

            #region ffmpeg 转码


            // 分配音视频格式上下文
            var pFormatContext = ffmpeg.avformat_alloc_context();

            int error;

            //打开流
            error = ffmpeg.avformat_open_input(&pFormatContext, url, null, null);
            if (error != 0)
            {
                throw new ApplicationException(GetErrorMessage(error));
            }

            // 读取媒体流信息
            error = ffmpeg.avformat_find_stream_info(pFormatContext, null);
            if (error != 0)
            {
                throw new ApplicationException(GetErrorMessage(error));
            }

            // 这里只是为了打印些视频参数
            AVDictionaryEntry *tag = null;
            while ((tag = ffmpeg.av_dict_get(pFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
            {
                var key   = Marshal.PtrToStringAnsi((IntPtr)tag->key);
                var value = Marshal.PtrToStringAnsi((IntPtr)tag->value);
                Console.WriteLine($"{key} = {value}");
            }

            // 从格式化上下文获取流索引
            AVStream *pStream = null, aStream;
            for (var i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                }
                else if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
                {
                    aStream = pFormatContext->streams[i];
                }
            }
            if (pStream == null)
            {
                throw new ApplicationException(@"Could not found video stream.");
            }

            // 获取流的编码器上下文
            var codecContext = *pStream->codec;

            Console.WriteLine($"codec name: {ffmpeg.avcodec_get_name(codecContext.codec_id)}");
            // 获取图像的宽、高及像素格式
            var width        = codecContext.width;
            var height       = codecContext.height;
            var sourcePixFmt = codecContext.pix_fmt;

            // 得到编码器ID
            var codecId = codecContext.codec_id;
            // 目标像素格式
            var destinationPixFmt = AVPixelFormat.AV_PIX_FMT_BGR24;


            // 某些264格式codecContext.pix_fmt获取到的格式是AV_PIX_FMT_NONE 统一都认为是YUV420P
            if (sourcePixFmt == AVPixelFormat.AV_PIX_FMT_NONE && codecId == AVCodecID.AV_CODEC_ID_H264)
            {
                sourcePixFmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
            }

            // 得到SwsContext对象:用于图像的缩放和转换操作
            var pConvertContext = ffmpeg.sws_getContext(width, height, sourcePixFmt,
                                                        width, height, destinationPixFmt,
                                                        ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (pConvertContext == null)
            {
                throw new ApplicationException(@"Could not initialize the conversion context.");
            }

            //分配一个默认的帧对象:AVFrame
            var pConvertedFrame = ffmpeg.av_frame_alloc();
            // 目标媒体格式需要的字节长度
            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, width, height, 1);
            // 分配目标媒体格式内存使用
            var convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            var dstData     = new byte_ptrArray4();
            var dstLinesize = new int_array4();
            // 设置图像填充参数
            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)convertedFrameBufferPtr, destinationPixFmt, width, height, 1);

            #endregion

            #region ffmpeg 解码
            // 根据编码器ID获取对应的解码器
            var pCodec = ffmpeg.avcodec_find_decoder(codecId);
            if (pCodec == null)
            {
                throw new ApplicationException(@"Unsupported codec.");
            }

            var pCodecContext = &codecContext;

            if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
            }

            // 通过解码器打开解码器上下文:AVCodecContext pCodecContext
            error = ffmpeg.avcodec_open2(pCodecContext, pCodec, null);
            if (error < 0)
            {
                throw new ApplicationException(GetErrorMessage(error));
            }

            // 分配解码帧对象:AVFrame pDecodedFrame
            var pDecodedFrame = ffmpeg.av_frame_alloc();

            // 初始化媒体数据包
            var packet  = new AVPacket();
            var pPacket = &packet;
            ffmpeg.av_init_packet(pPacket);

            var frameNumber = 0;
            while (CanRun)
            {
                try
                {
                    do
                    {
                        // 读取一帧未解码数据
                        error = ffmpeg.av_read_frame(pFormatContext, pPacket);
                        // Console.WriteLine(pPacket->dts);
                        if (error == ffmpeg.AVERROR_EOF)
                        {
                            break;
                        }
                        if (error < 0)
                        {
                            throw new ApplicationException(GetErrorMessage(error));
                        }

                        if (pPacket->stream_index != pStream->index)
                        {
                            continue;
                        }

                        // 解码
                        error = ffmpeg.avcodec_send_packet(pCodecContext, pPacket);
                        if (error < 0)
                        {
                            throw new ApplicationException(GetErrorMessage(error));
                        }
                        // 解码输出解码数据
                        error = ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame);
                    } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN) && CanRun);
                    if (error == ffmpeg.AVERROR_EOF)
                    {
                        break;
                    }
                    if (error < 0)
                    {
                        throw new ApplicationException(GetErrorMessage(error));
                    }

                    if (pPacket->stream_index != pStream->index)
                    {
                        continue;
                    }

                    //Console.WriteLine($@"frame: {frameNumber}");
                    // YUV->RGB
                    ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, height, dstData, dstLinesize);
                }
                finally
                {
                    ffmpeg.av_packet_unref(pPacket);      //释放数据包对象引用
                    ffmpeg.av_frame_unref(pDecodedFrame); //释放解码帧对象引用
                }

                // 封装Bitmap图片
                var bitmap = new Bitmap(width, height, dstLinesize[0], PixelFormat.Format24bppRgb, convertedFrameBufferPtr);
                // 回调
                show(bitmap);
                //bitmap.Save(AppDomain.CurrentDomain.BaseDirectory + "\\264\\frame.buffer."+ frameNumber + ".jpg", ImageFormat.Jpeg);

                frameNumber++;
            }
            //播放完置空播放图片
            show(null);

            #endregion

            #region 释放资源
            Marshal.FreeHGlobal(convertedFrameBufferPtr);
            ffmpeg.av_free(pConvertedFrame);
            ffmpeg.sws_freeContext(pConvertContext);

            ffmpeg.av_free(pDecodedFrame);
            ffmpeg.avcodec_close(pCodecContext);
            ffmpeg.avformat_close_input(&pFormatContext);


            #endregion
        }
 /// <summary>
 /// Initializes a new instance of the <see cref="FFDictionaryEntry"/> class,
 /// wrapping the supplied native dictionary entry.
 /// </summary>
 /// <param name="entryPointer">The unmanaged <c>AVDictionaryEntry</c> pointer to wrap.</param>
 public FFDictionaryEntry(AVDictionaryEntry *entryPointer)
 {
     this.Pointer = entryPointer;
 }
 /// <summary>
 /// Initializes a new instance of the <see cref="FFDictionaryEntry"/> class,
 /// storing the native entry pointer as an <see cref="IntPtr"/>.
 /// </summary>
 /// <param name="entryPointer">The unmanaged <c>AVDictionaryEntry</c> pointer to wrap.</param>
 public FFDictionaryEntry(AVDictionaryEntry *entryPointer)
 {
     m_Pointer = (IntPtr)entryPointer;
 }
Example #25
0
        /// <summary>
        /// Builds a <see cref="StreamInfo"/> snapshot from a native <c>AVStream</c>:
        /// codec identity, timing, video/audio specifics and metadata tags.
        /// </summary>
        /// <param name="st">Pointer to the native stream; must not be null.</param>
        /// <returns>The populated stream information.</returns>
        public static StreamInfo Get(AVStream *st)
        {
            StreamInfo si = new StreamInfo();

            si.Type        = st->codecpar->codec_type;
            si.CodecID     = st->codecpar->codec_id;
            si.CodecName   = avcodec_get_name(st->codecpar->codec_id);
            si.StreamIndex = st->index;
            // Timebase expressed in 100ns ticks: seconds-per-unit * 10^7.
            si.Timebase    = av_q2d(st->time_base) * 10000.0 * 1000.0;
            si.Duration    = (long)(st->duration * si.Timebase);
            si.StartTime   = (st->start_time != AV_NOPTS_VALUE) ? (long)(st->start_time * si.Timebase) : 0;
            si.BitRate     = st->codecpar->bit_rate;

            if (si.Type == AVMEDIA_TYPE_VIDEO)
            {
                si.PixelFormat     = (AVPixelFormat)Enum.ToObject(typeof(AVPixelFormat), st->codecpar->format);
                si.PixelFormatStr  = si.PixelFormat.ToString().Replace("AV_PIX_FMT_", "").ToLower();
                si.PixelFormatType = PixelFormatType.Software_Sws;

                si.Width  = st->codecpar->width;
                si.Height = st->codecpar->height;
                si.FPS    = av_q2d(st->r_frame_rate);
                var gcd = Utils.GCD(si.Width, si.Height);
                si.AspectRatio = new AspectRatio(si.Width / gcd, si.Height / gcd);

                if (si.PixelFormat != AVPixelFormat.AV_PIX_FMT_NONE)
                {
                    si.ColorRange = st->codecpar->color_range == AVColorRange.AVCOL_RANGE_JPEG ? "FULL" : "LIMITED";

                    // FIX: these were two independent ifs, so the explicit
                    // BT470BG -> BT601 mapping was always overwritten by the
                    // resolution-based heuristic below. Chain them instead.
                    if (st->codecpar->color_space == AVColorSpace.AVCOL_SPC_BT470BG)
                    {
                        si.ColorSpace = "BT601";
                    }
                    else if (si.Width > 1024 || si.Height >= 600)
                    {
                        si.ColorSpace = "BT709";
                    }
                    else
                    {
                        si.ColorSpace = "BT601";
                    }

                    // si.PixelFormat is already an AVPixelFormat; the previous
                    // Enum.ToObject round-trip was redundant.
                    AVPixFmtDescriptor *pixFmtDesc = av_pix_fmt_desc_get(si.PixelFormat);
                    si.PixelFormatDesc = pixFmtDesc;
                    // Materialize the component descriptors once instead of three times.
                    var comps = pixFmtDesc->comp.ToArray();
                    var comp0 = comps[0];
                    var comp1 = comps[1];
                    var comp2 = comps[2];

                    si.PixelBits = comp0.depth;
                    si.IsPlanar  = (pixFmtDesc->flags & AV_PIX_FMT_FLAG_PLANAR) != 0;
                    si.IsRGB     = (pixFmtDesc->flags & AV_PIX_FMT_FLAG_RGB) != 0;

                    si.Comp0Step = comp0.step;
                    si.Comp1Step = comp1.step;
                    si.Comp2Step = comp2.step;

                    bool isYuv = System.Text.RegularExpressions.Regex.IsMatch(si.PixelFormat.ToString(), "YU|YV", System.Text.RegularExpressions.RegexOptions.IgnoreCase);

                    // YUV Planar or Packed with half U/V (No Semi-Planar Support for Software)
                    if (isYuv && pixFmtDesc->nb_components == 3 &&
                        (comp0.depth == 8 && comp1.depth == 8 && comp2.depth == 8))
                    {
                        si.PixelFormatType = PixelFormatType.Software_Handled;
                    }
                }
            }
            else if (si.Type == AVMEDIA_TYPE_AUDIO)
            {
                si.SampleFormat    = (AVSampleFormat)Enum.ToObject(typeof(AVSampleFormat), st->codecpar->format);
                si.SampleFormatStr = si.SampleFormat.ToString().Replace("AV_SAMPLE_FMT_", "").ToLower();
                si.SampleRate      = st->codecpar->sample_rate;
                si.ChannelLayout   = st->codecpar->channel_layout;
                si.Channels        = st->codecpar->channels;
                si.Bits            = st->codecpar->bits_per_coded_sample;

                // av_get_channel_layout_string writes a NUL-terminated description
                // (e.g. "stereo") into the supplied buffer.
                byte[] buf = new byte[50];
                fixed(byte *bufPtr = buf)
                {
                    av_get_channel_layout_string(bufPtr, 50, si.Channels, si.ChannelLayout);
                    si.ChannelLayoutStr = Utils.BytePtrToStringUTF8(bufPtr);
                }
            }

            si.Metadata = new Dictionary <string, string>();

            AVDictionaryEntry *b = null;

            // Copy every stream-level tag. FIX: use the indexer so the duplicate
            // keys AV_DICT_IGNORE_SUFFIX can yield overwrite instead of throwing
            // from Dictionary.Add (last value wins).
            while ((b = av_dict_get(st->metadata, "", b, AV_DICT_IGNORE_SUFFIX)) != null)
            {
                si.Metadata[Utils.BytePtrToStringUTF8(b->key)] = Utils.BytePtrToStringUTF8(b->value);
            }

            // First language-ish tag wins.
            foreach (var kv in si.Metadata)
            {
                if (kv.Key.ToLower() == "language" || kv.Key.ToLower() == "lang")
                {
                    si.Language = kv.Value; break;
                }
            }

            return(si);
        }
Example #26
0
        /// <summary>
        /// Sets up the FFmpeg decode pipeline over the custom IO stream: allocates the
        /// AVIO/format contexts, locates the first audio stream, opens its decoder,
        /// configures the software resampler and reads container-level metadata tags.
        /// </summary>
        /// <exception cref="FormatException">
        /// Thrown when any FFmpeg allocation, probe, decoder or resampler setup step fails.
        /// </exception>
        private unsafe void Ffmpeg_Initialize()
        {
            var inputBuffer = (byte *)ffmpeg.av_malloc((ulong)fsStreamSize);

            avioRead = Read;
            avioSeek = Seek;

            ff.ioContext = ffmpeg.avio_alloc_context(inputBuffer, fsStreamSize, 0, null, avioRead, null, avioSeek);

            // FIX: compare the pointer against null instead of casting it to int,
            // which truncates 64-bit pointers and could mis-detect success/failure.
            if (ff.ioContext == null)
            {
                throw new FormatException("FFMPEG: Unable to allocate IO stream context.");
            }

            ff.format_context         = ffmpeg.avformat_alloc_context();
            ff.format_context->pb     = ff.ioContext;
            ff.format_context->flags |= ffmpeg.AVFMT_FLAG_CUSTOM_IO;

            fixed(AVFormatContext **fmt2 = &ff.format_context)
            {
                if (ffmpeg.avformat_open_input(fmt2, "", null, null) != 0)
                {
                    throw new FormatException("FFMPEG: Could not open media stream.");
                }
            }

            if (ffmpeg.avformat_find_stream_info(ff.format_context, null) < 0)
            {
                throw new FormatException("FFMPEG: Could not retrieve stream info from IO stream");
            }

            // Find the index of the first audio stream
            this.stream_index = -1;
            for (int i = 0; i < ff.format_context->nb_streams; i++)
            {
                if (ff.format_context->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
                {
                    stream_index = i;
                    break;
                }
            }
            if (stream_index == -1)
            {
                throw new FormatException("FFMPEG: Could not retrieve audio stream from IO stream.");
            }

            ff.av_stream = ff.format_context->streams[stream_index];
            ff.av_codec  = ff.av_stream->codec;

            if (ffmpeg.avcodec_open2(ff.av_codec, ffmpeg.avcodec_find_decoder(ff.av_codec->codec_id), null) < 0)
            {
                // FIX: the message previously lacked the '$' prefix, so the literal
                // text "{stream_index}" was emitted instead of the stream number.
                throw new FormatException($"FFMPEG: Failed to open decoder for stream #{stream_index} in IO stream.");
            }

            // Fixes SWR @ 0x2192200] Input channel count and layout are unset error.
            if (ff.av_codec->channel_layout == 0)
            {
                ff.av_codec->channel_layout = (ulong)ffmpeg.av_get_default_channel_layout(ff.av_codec->channels);
            }

            ff.av_codec->request_channel_layout = (ulong)ffmpeg.av_get_default_channel_layout(ff.av_codec->channels);
            ff.av_codec->request_sample_fmt     = _DESIRED_SAMPLE_FORMAT;

            SetAudioFormat();

            ff.swr_context = ffmpeg.swr_alloc_set_opts(null,
                                                       ffmpeg.av_get_default_channel_layout(_DESIRED_CHANNEL_COUNT),
                                                       _DESIRED_SAMPLE_FORMAT,
                                                       _DESIRED_SAMPLE_RATE,
                                                       (long)ff.av_codec->channel_layout,
                                                       ff.av_codec->sample_fmt,
                                                       ff.av_codec->sample_rate,
                                                       0,
                                                       null);

            ffmpeg.swr_init(ff.swr_context);

            if (ffmpeg.swr_is_initialized(ff.swr_context) == 0)
            {
                throw new FormatException($"FFMPEG: Resampler has not been properly initialized");
            }

            ff.av_src_packet = ffmpeg.av_packet_alloc();
            ff.av_src_frame  = ffmpeg.av_frame_alloc();

            // Scratch buffer sized for one second of interleaved 16-bit audio.
            this.tempSampleBuf = new byte[(int)(_audioFormat.SampleRate * _audioFormat.Channels * 2)];

            this._slidestream = new CircularBuffer(tempSampleBuf.Length);

            AVDictionaryEntry *tag = null;

            this._audioMetaData      = new AudioMetadata();
            _audioMetaData.ExtraData = new Dictionary <string, string>();

            // Walk every container-level metadata tag and map the well-known ones
            // onto the strongly-typed metadata properties.
            do
            {
                tag = ffmpeg.av_dict_get(ff.format_context->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX);

                if (tag == null)
                {
                    break;
                }

                // FIX: FFmpeg tag strings are byte strings; PtrToStringAuto decodes
                // them as UTF-16 on Windows and garbles every key/value. Use Ansi
                // (consistent with the rest of the file) and guard against null.
                var key = Marshal.PtrToStringAnsi((IntPtr)tag->key) ?? string.Empty;
                var val = Marshal.PtrToStringAnsi((IntPtr)tag->value) ?? string.Empty;

                switch (key.ToLowerInvariant().Trim())
                {
                case "title":
                    _audioMetaData.Title = val;
                    break;

                case "artist":
                case "artists":
                case "author":
                case "composer":
                    if (_audioMetaData.Artists is null)
                    {
                        _audioMetaData.Artists = new List <string>();
                    }

                    _audioMetaData.Artists.AddRange(val.Split(',', StringSplitOptions.RemoveEmptyEntries).Select(x => x.Trim()));
                    break;

                case "album":
                    _audioMetaData.Album = val;
                    break;

                case "genre":
                    if (_audioMetaData.Genre is null)
                    {
                        _audioMetaData.Genre = new List <string>();
                    }

                    _audioMetaData.Genre.AddRange(val.Split(',', StringSplitOptions.RemoveEmptyEntries).Select(x => x.Trim()));
                    break;

                case "year":
                    _audioMetaData.Year = val;
                    break;

                default:
                    // FIX: duplicate tag keys (possible with AV_DICT_IGNORE_SUFFIX)
                    // used to throw from Dictionary.Add; the last value now wins.
                    _audioMetaData.ExtraData[key] = val;
                    break;
                }
            } while (true);

            if (_audioMetaData.Artists != null)
            {
                // De-duplicate while keeping first-occurrence order
                // (Distinct() is equivalent to the previous GroupBy/First pair).
                _audioMetaData.Artists = _audioMetaData.Artists.Distinct().ToList();
            }
        }