Example #1
        public MediaFilterContext AddFilter(MediaFilter filter, MediaDictionary options, string contextName = null)
        {
            AVFilterContext *p = ffmpeg.avfilter_graph_alloc_filter(pFilterGraph, filter, contextName);

            ffmpeg.avfilter_init_dict(p, options).ThrowExceptionIfError();
            return CreateAndUpdate(p);
        }
Example #2
 internal MediaFilterContext(AVFilterContext *p)
 {
     if (p == null)
     {
         throw new FFmpegException(FFmpegException.NullReference);
     }
     pFilterContext = p;
     Filter         = new MediaFilter(p->filter);
 }
Example #3
 public MediaFilterContext(IntPtr pAVFilterContext)
 {
     if (pAVFilterContext == IntPtr.Zero)
     {
         throw new FFmpegException(FFmpegException.NullReference);
     }
     pFilterContext = (AVFilterContext *)pAVFilterContext;
     Filter         = new MediaFilter(pFilterContext->filter);
 }
Example #4
        internal FilterContext(AVFilterContext *pointer)
        {
            if (pointer == null)
            {
                throw new ArgumentNullException(nameof(pointer));
            }

            Pointer = pointer;
        }
Example #5
        public MediaFilterContext AddFilter(MediaFilter filter, Action<MediaFilterContext> options, string contextName = null)
        {
            AVFilterContext *p = ffmpeg.avfilter_graph_alloc_filter(pFilterGraph, filter, contextName);

            if (options != null)
            {
                options.Invoke(new MediaFilterContext(p));
            }
            ffmpeg.avfilter_init_str(p, null).ThrowExceptionIfError();
            return CreateAndUpdate(p);
        }
Example #6
        public void Build()
        {
            AVFilterContext *formatCtx = AddFilter("format", string.Empty, string.Format("pix_fmts={0}", (int)AVPixelFormat.AV_PIX_FMT_BGRA));

            ffmpeg.avfilter_link(lastFilter, 0, formatCtx, 0);
            ffmpeg.avfilter_link(formatCtx, 0, buffersinkCtx, 0);
            if (ffmpeg.avfilter_graph_config(filterGraph, null) < 0)
            {
                throw new Exception("avfilter_graph_config\n");
            }
        }
Example #7
        private MediaFilterContext CreateAndUpdate(AVFilterContext *pFilterContext)
        {
            MediaFilterContext filterContext = new MediaFilterContext(pFilterContext);

            if (filterContext.NbInputs == 0)
            {
                inputs.Add(filterContext);
            }
            else if (filterContext.NbOutputs == 0)
            {
                outputs.Add(filterContext);
            }
            return filterContext;
        }
Example #8
        private AVFilterGraph *init_filter_graph(AVFormatContext *format, AVCodecContext *codec, int audio_stream_index, AVFilterContext **buffersrc_ctx, AVFilterContext **buffersink_ctx)
        {
            // create graph
            var filter_graph = ffmpeg.avfilter_graph_alloc();

            // add input filter
            var abuffersrc = ffmpeg.avfilter_get_by_name("abuffer");
            var args       = string.Format("sample_fmt={0}:channel_layout={1}:sample_rate={2}:time_base={3}/{4}",
                                           (int)codec->sample_fmt,
                                           codec->channel_layout,
                                           codec->sample_rate,
                                           format->streams[audio_stream_index]->time_base.num,
                                           format->streams[audio_stream_index]->time_base.den);

            ffmpeg.avfilter_graph_create_filter(buffersrc_ctx, abuffersrc, "IN", args, null, filter_graph);

            // add output filter
            var abuffersink = ffmpeg.avfilter_get_by_name("abuffersink");

            ffmpeg.avfilter_graph_create_filter(buffersink_ctx, abuffersink, "OUT", "", null, filter_graph);

            AVFilterContext *_filter_ctx = null;

            for (var i = 0; i < filtersAudio.Count; i++)
            {
                var filter = ffmpeg.avfilter_get_by_name(filtersAudio[i].name);
                AVFilterContext *filter_ctx;
                ffmpeg.avfilter_graph_create_filter(&filter_ctx, filter, (filtersAudio[i].name + filtersAudio[i].key).ToUpper(), filtersAudio[i].args, null, filter_graph);

                if (i == 0)
                {
                    ffmpeg.avfilter_link(*buffersrc_ctx, 0, filter_ctx, 0);
                }
                if (_filter_ctx != null)
                {
                    ffmpeg.avfilter_link(_filter_ctx, 0, filter_ctx, 0);
                }
                if (i == filtersAudio.Count - 1)
                {
                    ffmpeg.avfilter_link(filter_ctx, 0, *buffersink_ctx, 0);
                }

                _filter_ctx = filter_ctx;
            }
            ffmpeg.avfilter_graph_config(filter_graph, null);

            return filter_graph;
        }
Example #9
        AVFilterContext *AddFilter(string filterName, string instanceName, string instanceParam)
        {
            AVFilterContext *newFilter = null;

            if (string.IsNullOrEmpty(instanceName))
            {
                filterLen++;
                instanceName = string.Format("filterinstance{0}", filterLen);
            }

            if (ffmpeg.avfilter_graph_create_filter(&newFilter, ffmpeg.avfilter_get_by_name(filterName), instanceName, instanceParam, null, filterGraph) < 0)
            {
                throw new Exception("Cannot create buffer source");
            }
            return newFilter;
        }
Example #10
    public void Build()
    {
        _rawFilterGraph = ffmpeg.avfilter_graph_alloc();
        if (_rawFilterGraph == null)
        {
            throw new FFmpegException("Unable to create filter graph.");
        }

        var aBuffer = ffmpeg.avfilter_get_by_name("abuffer");

        if (aBuffer == null)
        {
            throw new FFmpegException("Could not find the abuffer filter.");
        }

        _src = ffmpeg.avfilter_graph_alloc_filter(_rawFilterGraph, aBuffer, "src");
        if (_src == null)
        {
            throw new FFmpegException("Could not allocate the abuffer instance.");
        }

        ffmpeg.av_opt_set(_src, "channel_layout", _sourceStream.ChannelLayout!, ffmpeg.AV_OPT_SEARCH_CHILDREN);
        ffmpeg.av_opt_set(
            _src,
            "sample_fmt",
            ffmpeg.av_get_sample_fmt_name(_sourceStream.SampleFormat),
            ffmpeg.AV_OPT_SEARCH_CHILDREN);
        ffmpeg.av_opt_set_q(_src, "time_base", _sourceStream.TimeBase, ffmpeg.AV_OPT_SEARCH_CHILDREN);
        ffmpeg.av_opt_set_int(_src, "sample_rate", _sourceStream.SampleRate, ffmpeg.AV_OPT_SEARCH_CHILDREN);

        ffmpeg.avfilter_init_str(_src, null).ThrowExceptionIfError();

        var aBufferSink = ffmpeg.avfilter_get_by_name("abuffersink");

        if (aBufferSink == null)
        {
            throw new FFmpegException("Could not find the abuffersink filter.");
        }

        _dst = ffmpeg.avfilter_graph_alloc_filter(_rawFilterGraph, aBufferSink, "dst");
        ffmpeg.avfilter_init_str(_dst, null).ThrowExceptionIfError();

        LinkFilterGraph(_rawFilterGraph, _src, _dst);

        ffmpeg.avfilter_graph_config(_rawFilterGraph, null).ThrowExceptionIfError();
    }
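
For context, a minimal sketch (not part of the original class) of how the graph built above could be driven once Build() has run: push a decoded frame into the abuffer source (_src) and drain filtered frames from the abuffersink (_dst). The callback signature and method name are illustrative.

    // Hypothetical companion to Build(): run one decoded frame through the
    // configured graph and hand each filtered frame to the caller.
    public unsafe void Filter(AVFrame *decodedFrame, Action<IntPtr> onFilteredFrame)
    {
        ffmpeg.av_buffersrc_add_frame(_src, decodedFrame).ThrowExceptionIfError();

        var filtered = ffmpeg.av_frame_alloc();
        try
        {
            while (true)
            {
                int ret = ffmpeg.av_buffersink_get_frame(_dst, filtered);
                if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN) || ret == ffmpeg.AVERROR_EOF)
                {
                    break; // the sink needs more input, or the stream is finished
                }

                ret.ThrowExceptionIfError();
                onFilteredFrame((IntPtr)filtered);
                ffmpeg.av_frame_unref(filtered);
            }
        }
        finally
        {
            ffmpeg.av_frame_free(&filtered);
        }
    }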
Example #11
        public void AddText(string text, int x, int y)
        {
            int fsize = avFrame->width / 20;

            if (fsize < 18)
            {
                fsize = 18;
            }
            int bordersize = fsize / 20;

            if (bordersize < 1)
            {
                bordersize = 1;
            }
            AVFilterContext *drawCtx = AddFilter("drawtext", null, string.Format("borderw={0}:bordercolor=black:fontfile=simsun.ttc:fontcolor=white:fontsize={1}:x={2}:y={3}:text={4}", bordersize, fsize, x, y, text));

            ffmpeg.avfilter_link(lastFilter, 0, drawCtx, 0);
            lastFilter = drawCtx;
        }
Example #12
        public void AddLogo(string path)
        {
            var logoCtx = AddFilter("movie", null, string.Format("filename={0}", path));
            int ow      = avFrame->width / 5;

            if (ow < 10)
            {
                ow = 10;
            }
            var logoScale = AddFilter("scale", null, string.Format("w={0}:h=ow*ih/iw", ow));

            ffmpeg.avfilter_link(logoCtx, 0, logoScale, 0);
            var overCtx = AddFilter("overlay", null, "main_w-overlay_w-10:main_h-overlay_h-10");

            ffmpeg.avfilter_link(lastFilter, 0, overCtx, 0);
            ffmpeg.avfilter_link(logoScale, 0, overCtx, 1);
            lastFilter = overCtx;
        }
Example #13
        public VideoFlipperConverter(int width, int height, AVPixelFormat inputPixelFormat, StreamerSettings settings)
        {
            string filters    = $"buffer=width={width}:height={height}:pix_fmt={(int)inputPixelFormat}:time_base=1/1:pixel_aspect=1/1 [in]; [out] buffersink;[in] format=pix_fmts=0 [in1];";
            int    inputCount = 1;

            if (settings.FlipY)
            {
                filters += $"[in{inputCount}] vflip [in{++inputCount}];";
            }
            if (settings.FlipX)
            {
                filters += $"[in{inputCount}] hflip [in{++inputCount}];";
            }

            filters += $"[in{inputCount}] copy [out]";
            AVFilterInOut *gis = null;
            AVFilterInOut *gos = null;

            _filterGraph = ffmpeg.avfilter_graph_alloc();
            ffmpeg.avfilter_graph_parse2(_filterGraph, filters, &gis, &gos).ThrowExceptionIfError();
            ffmpeg.avfilter_graph_config(_filterGraph, null).ThrowExceptionIfError();

            _filterSourceContext = ffmpeg.avfilter_graph_get_filter(_filterGraph, "Parsed_buffer_0");
            _filterSinkContext   = ffmpeg.avfilter_graph_get_filter(_filterGraph, "Parsed_buffersink_1");
            if (_filterSourceContext == null || _filterSinkContext == null)
            {
                throw new Exception("Failed to create filter sinks");
            }

            _flippedFrame = ffmpeg.av_frame_alloc();
            var flippedFrameBuffer = (byte *)ffmpeg.av_malloc((ulong)ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_YUV420P, width, height, 1));
            var dataArr            = new byte_ptrArray4();

            dataArr.UpdateFrom(_flippedFrame->data);
            var linesizeArr = new int_array4();

            linesizeArr.UpdateFrom(_flippedFrame->linesize);
            ffmpeg.av_image_fill_arrays(ref dataArr, ref linesizeArr, flippedFrameBuffer, AVPixelFormat.AV_PIX_FMT_YUV420P, width, height, 1);
            _flippedFrame->data.UpdateFrom(dataArr);
            _flippedFrame->linesize.UpdateFrom(linesizeArr);
        }
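
A hedged sketch (not shown in the original snippet) of how the parsed graph above might be driven; outputFrame is assumed to be an allocated, unreferenced AVFrame owned by the caller.

        // Hypothetical usage of the graph parsed in the constructor: push a
        // frame into "Parsed_buffer_0" and pull the flipped result from the sink.
        public unsafe int FlipInto(AVFrame *sourceFrame, AVFrame *outputFrame)
        {
            int ret = ffmpeg.av_buffersrc_add_frame(_filterSourceContext, sourceFrame);
            if (ret < 0)
            {
                return ret;
            }

            // outputFrame should be clean (freshly allocated or unreferenced);
            // the sink moves its frame data into it.
            return ffmpeg.av_buffersink_get_frame(_filterSinkContext, outputFrame);
        }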
Example #14
        public FFmpegFilter(AVStream *stream, System.Drawing.Size vsize)
        {
            var p     = stream->codecpar;
            var frate = stream->r_frame_rate;

            filterLen    = 0;
            filterGraph  = ffmpeg.avfilter_graph_alloc();
            buffersrcCtx = AddFilter("buffer", "in", string.Format("video_size={0}x{1}:pix_fmt={2}:time_base={3}/{4}",
                                                                   p->width, p->height, p->format, frate.num, frate.den));
            buffersinkCtx   = AddFilter("buffersink", "out", string.Empty);
            avFrame         = ffmpeg.av_frame_alloc();
            avFrame->width  = vsize.Width;
            avFrame->height = vsize.Height;
            AVFilterContext *srcScale = AddFilter("scale", string.Empty, string.Format("w={0}:h={1}", avFrame->width, avFrame->height));

            ffmpeg.avfilter_link(buffersrcCtx, 0, srcScale, 0);
            lastFilter = srcScale;

            convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_BGRA, avFrame->width, avFrame->height, 1);
            convertedFrameBufferPtr  = Marshal.AllocHGlobal(convertedFrameBufferSize);
        }
Example #15
    public override unsafe void LinkFilterGraph(AVFilterGraph *filterGraph, AVFilterContext *src, AVFilterContext *dst)
    {
        var aformatFilter = ffmpeg.avfilter_get_by_name("aformat");

        if (aformatFilter == null)
        {
            throw new FFmpegException("Could not find aformat filter");
        }

        AVFilterContext *aformat;

        ffmpeg.avfilter_graph_create_filter(
            &aformat,
            aformatFilter,
            "aformat",
            _args,
            null,
            filterGraph)
        .ThrowExceptionIfError();

        ffmpeg.avfilter_link(src, 0, aformat, 0);
        ffmpeg.avfilter_link(aformat, 0, dst, 0);
    }
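
For reference, a hedged sketch of what the _args string passed to the aformat filter above usually looks like; the concrete sample format, rate, and layout here are only illustrative.

    // Sketch: building an aformat argument string like the _args used above.
    // Option names (sample_fmts, sample_rates, channel_layouts) come from the
    // aformat filter documentation; the values are an example only.
    string aformatArgs = string.Format(
        "sample_fmts={0}:sample_rates={1}:channel_layouts=stereo",
        ffmpeg.av_get_sample_fmt_name(AVSampleFormat.AV_SAMPLE_FMT_S16),
        44100);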
Example #16
 public static extern int avfilter_init_dict(AVFilterContext * @ctx, AVDictionary ** @options);
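
A minimal sketch (not taken from any snippet above) of driving this binding directly: allocate a filter context in an existing graph, collect its options in an AVDictionary, and let avfilter_init_dict consume the entries it recognizes. This is the raw equivalent of the wrapper in Example #1; the method and instance names are illustrative.

 // Sketch: allocate a "scale" filter in an existing graph and initialise it
 // from an AVDictionary of options.
 public static unsafe AVFilterContext *AllocScaleFilter(AVFilterGraph *graph, int width, int height)
 {
     AVDictionary *opts = null;
     ffmpeg.av_dict_set(&opts, "w", width.ToString(), 0);
     ffmpeg.av_dict_set(&opts, "h", height.ToString(), 0);

     AVFilterContext *ctx = ffmpeg.avfilter_graph_alloc_filter(graph, ffmpeg.avfilter_get_by_name("scale"), "scale0");

     // avfilter_init_dict removes the entries it applied; anything left in
     // opts afterwards was not recognised by the filter.
     int ret = ffmpeg.avfilter_init_dict(ctx, &opts);
     ffmpeg.av_dict_free(&opts);

     return ret < 0 ? null : ctx;
 }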
Example #17
 public static extern int avfilter_graph_add_filter(AVFilterGraph * @graphctx, AVFilterContext * @filter);
Example #18
        public void ReadSamples(string inputAudio, Func<byte[], int, bool> readSampleCallback)
        {
            if (readSampleCallback == null) return;

            const int EAGAIN = 11;

            var brk = false;
            var packet = new AVPacket();

            try
            {
                int ret = open_input(inputAudio);
                if (ret < 0)
                {
                    return;
                }

                byte[] buffer = null, tbuffer = null;

                while (true)
                {
                    ffmpeg.av_init_packet(&packet);

                    if (_audioCodecContext != null && buffer == null)
                    {
                        buffer = new byte[_audioCodecContext->sample_rate * 2];
                        tbuffer = new byte[_audioCodecContext->sample_rate * 2];
                    }

                    ret = ffmpeg.av_read_frame(fmt_ctx, &packet);
                    if (ret < 0)
                    {
                        break;
                    }

                    if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT)
                    {
                        break;
                    }

                    if (packet.stream_index == audio_stream_index)
                    {
                        var s = 0;
                        fixed (byte** outPtrs = new byte*[32])
                        {
                            fixed (byte* bPtr = &tbuffer[0])
                            {
                                outPtrs[0] = bPtr;

                                AVFrame* _af = null;
                                var af = ffmpeg.av_frame_alloc();
                                var ff = ffmpeg.av_frame_alloc();

                                ffmpeg.avcodec_send_packet(_audioCodecContext, &packet);
                                do
                                {
                                    ret = ffmpeg.avcodec_receive_frame(_audioCodecContext, af);
                                    if (ret == 0)
                                    {
                                        if (filter_graph != null)
                                        {
                                            // add the frame into the filter graph
                                            ffmpeg.av_buffersrc_add_frame(buffersrc_ctx, af);

                                            // get the frame out from the filter graph
                                            ret = ffmpeg.av_buffersink_get_frame(buffersink_ctx, ff);

                                            if (ret == -EAGAIN)
                                                break;

                                            _af = ff;
                                        }
                                        else
                                        {
                                            _af = af;
                                        }

                                        fixed (byte** datptr = _af->data.ToArray())
                                        {
                                            var numSamplesOut = ffmpeg.swr_convert(_swrContext,
                                                outPtrs,
                                                _audioCodecContext->sample_rate,
                                                datptr,
                                                _af->nb_samples);

                                            if (numSamplesOut > 0)
                                            {
                                                var l = numSamplesOut * 2 * channels;
                                                Buffer.BlockCopy(tbuffer, 0, buffer, s, l);
                                                s += l;
                                            }
                                            else
                                            {
                                                ret = numSamplesOut; //(error)
                                            }
                                        }

                                        if (_af->decode_error_flags > 0) break;
                                    }

                                } while (ret == 0);
                                ffmpeg.av_frame_free(&ff);
                                ffmpeg.av_frame_free(&af);

                                if (s > 0)
                                {
                                    var ba = new byte[s];
                                    Buffer.BlockCopy(buffer, 0, ba, 0, s);

                                    if (readSampleCallback(ba, s))
                                    {
                                        brk = true;
                                        break;
                                    }
                                }
                            }
                        }
                    }
                    ffmpeg.av_packet_unref(&packet);

                    if (ret == -EAGAIN)
                    {
                        Thread.Sleep(10);
                    }

                    if (brk) break;
                }
            }
            catch (Exception)
            {
                // Rethrow without resetting the original stack trace.
                throw;
            }
            finally
            {
                if (_audioCodecContext != null)
                {
                    ffmpeg.avcodec_close(_audioCodecContext);
                }

                if (fmt_ctx != null)
                {
                    fixed (AVFormatContext** at = &fmt_ctx)
                    {
                        ffmpeg.avformat_close_input(at);
                    }
                }

                fmt_ctx = null;
                _audioCodecContext = null;
                _audioStream = null;

                if (_swrContext != null)
                {
                    fixed (SwrContext** s = &_swrContext)
                    {
                        ffmpeg.swr_free(s);
                    }

                    _swrContext = null;
                }

                if (filter_graph != null)
                {
                    fixed (AVFilterGraph** f = &filter_graph)
                    {
                        ffmpeg.avfilter_graph_free(f);
                    }

                    filter_graph = null;
                    buffersink_ctx = null;
                    buffersrc_ctx = null;

                    filtersAudio.Clear();
                }
            }
        }
Example #19
        private void InitializeFilterGraph(AVFrame *frame)
        {
            // References: https://www.ffmpeg.org/doxygen/2.0/doc_2examples_2filtering_audio_8c-example.html
            const string SourceFilterName     = "abuffer";
            const string SourceFilterInstance = "audio_buffer";
            const string SinkFilterName       = "abuffersink";
            const string SinkFilterInstance   = "audio_buffersink";

            // Get a snapshot of the FilterString
            var filterString = FilterString;

            // For empty filter strings ensure filtegraph is destroyed
            if (string.IsNullOrWhiteSpace(filterString))
            {
                DestroyFilterGraph();
                return;
            }

            // Recreate the filtergraph if we have to
            if (filterString != AppliedFilterString)
            {
                DestroyFilterGraph();
            }

            // Ensure the filtergraph is compatible with the frame
            var filterArguments = ComputeFilterArguments(frame);

            if (filterArguments != CurrentFilterArguments)
            {
                DestroyFilterGraph();
            }
            else
            {
                return;
            }

            FilterGraph = ffmpeg.avfilter_graph_alloc();
            RC.Current.Add(FilterGraph);

            try
            {
                AVFilterContext *sourceFilterRef = null;
                AVFilterContext *sinkFilterRef   = null;

                var result = ffmpeg.avfilter_graph_create_filter(
                    &sourceFilterRef, ffmpeg.avfilter_get_by_name(SourceFilterName), SourceFilterInstance, filterArguments, null, FilterGraph);
                if (result != 0)
                {
                    throw new MediaContainerException(
                              $"{nameof(ffmpeg.avfilter_graph_create_filter)} ({SourceFilterInstance}) failed. Error {result}: {FFInterop.DecodeMessage(result)}");
                }

                result = ffmpeg.avfilter_graph_create_filter(
                    &sinkFilterRef, ffmpeg.avfilter_get_by_name(SinkFilterName), SinkFilterInstance, null, null, FilterGraph);
                if (result != 0)
                {
                    throw new MediaContainerException(
                              $"{nameof(ffmpeg.avfilter_graph_create_filter)} ({SinkFilterInstance}) failed. Error {result}: {FFInterop.DecodeMessage(result)}");
                }

                SourceFilter = sourceFilterRef;
                SinkFilter   = sinkFilterRef;

                if (string.IsNullOrWhiteSpace(filterString))
                {
                    result = ffmpeg.avfilter_link(SourceFilter, 0, SinkFilter, 0);
                    if (result != 0)
                    {
                        throw new MediaContainerException($"{nameof(ffmpeg.avfilter_link)} failed. Error {result}: {FFInterop.DecodeMessage(result)}");
                    }
                }
                else
                {
                    var initFilterCount = FilterGraph->nb_filters;

                    SourceOutput             = ffmpeg.avfilter_inout_alloc();
                    SourceOutput->name       = ffmpeg.av_strdup("in");
                    SourceOutput->filter_ctx = SourceFilter;
                    SourceOutput->pad_idx    = 0;
                    SourceOutput->next       = null;

                    SinkInput             = ffmpeg.avfilter_inout_alloc();
                    SinkInput->name       = ffmpeg.av_strdup("out");
                    SinkInput->filter_ctx = SinkFilter;
                    SinkInput->pad_idx    = 0;
                    SinkInput->next       = null;

                    result = ffmpeg.avfilter_graph_parse(FilterGraph, filterString, SinkInput, SourceOutput, null);
                    if (result != 0)
                    {
                        throw new MediaContainerException($"{nameof(ffmpeg.avfilter_graph_parse)} failed. Error {result}: {FFInterop.DecodeMessage(result)}");
                    }

                    // Reorder the filters to ensure that inputs of the custom filters are merged first
                    for (var i = 0; i < FilterGraph->nb_filters - initFilterCount; i++)
                    {
                        var sourceAddress = FilterGraph->filters[i];
                        var targetAddress = FilterGraph->filters[i + initFilterCount];
                        FilterGraph->filters[i] = targetAddress;
                        FilterGraph->filters[i + initFilterCount] = sourceAddress;
                    }
                }

                result = ffmpeg.avfilter_graph_config(FilterGraph, null);
                if (result != 0)
                {
                    throw new MediaContainerException($"{nameof(ffmpeg.avfilter_graph_config)} failed. Error {result}: {FFInterop.DecodeMessage(result)}");
                }
            }
            catch (Exception ex)
            {
                this.LogError(Aspects.Component, $"Audio filter graph could not be built: {filterString}.", ex);
                DestroyFilterGraph();
            }
            finally
            {
                CurrentFilterArguments = filterArguments;
                AppliedFilterString    = filterString;
            }
        }
Example #20
 public static extern int av_buffersink_get_samples(AVFilterContext * @ctx, AVFrame * @frame, int @nb_samples);
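
A hedged sketch of the typical use of this call: pulling audio from a sink in fixed-size chunks (for example to match an encoder's frame_size). The callback and variable names are illustrative.

 // Sketch: drain an audio buffersink in chunks of exactly frameSize samples
 // (only the final chunk may be shorter).
 public static unsafe void DrainInFixedChunks(AVFilterContext *sink, int frameSize, Action<IntPtr> onChunk)
 {
     var frame = ffmpeg.av_frame_alloc();
     try
     {
         while (ffmpeg.av_buffersink_get_samples(sink, frame, frameSize) >= 0)
         {
             onChunk((IntPtr)frame);
             ffmpeg.av_frame_unref(frame);
         }
     }
     finally
     {
         ffmpeg.av_frame_free(&frame);
     }
 }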
Example #21
 public static extern void avfilter_free(AVFilterContext * @filter);
Example #22
 public static extern AVRational av_buffersink_get_frame_rate(AVFilterContext * @ctx);
Example #23
 public static extern int av_buffersink_get_frame(AVFilterContext * @ctx, AVFrame * @frame);
Example #24
 public static extern int av_buffersink_get_frame_flags(AVFilterContext * @ctx, AVFrame * @frame, int @flags);
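
A brief sketch, assuming the binding also exposes the AV_BUFFERSINK_FLAG_PEEK constant from buffersink.h: peeking at the next filtered frame without consuming it.

 // Sketch: look at the next frame in the sink but leave it queued, so a
 // later av_buffersink_get_frame call still returns it.
 public static unsafe int PeekNextFrame(AVFilterContext *sink, AVFrame *frame)
 {
     return ffmpeg.av_buffersink_get_frame_flags(sink, frame, ffmpeg.AV_BUFFERSINK_FLAG_PEEK);
 }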
Example #25
 public static extern void av_buffersink_set_frame_size(AVFilterContext * @ctx, uint @frame_size);
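
A hedged sketch of the usual pairing with an encoder that needs fixed-size frames (e.g. AAC); encoderContext and sink are assumed to exist, and the graph should already be configured.

 // Sketch: after avfilter_graph_config, ask the audio sink to emit frames
 // sized for the encoder unless the codec accepts variable frame sizes.
 public static unsafe void MatchEncoderFrameSize(AVFilterContext *sink, AVCodecContext *encoderContext)
 {
     if ((encoderContext->codec->capabilities & ffmpeg.AV_CODEC_CAP_VARIABLE_FRAME_SIZE) == 0)
     {
         ffmpeg.av_buffersink_set_frame_size(sink, (uint)encoderContext->frame_size);
     }
 }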
Example #26
 public static extern int av_buffersrc_add_frame_flags(AVFilterContext * @buffer_src, AVFrame * @frame, int @flags);
Example #27
 public static extern int av_buffersrc_add_frame(AVFilterContext * @ctx, AVFrame * @frame);
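
One detail worth a sketch: passing null marks end-of-stream on a buffer source, after which the sink eventually reports AVERROR_EOF. The src and sink contexts and the ThrowExceptionIfError helper are assumed from the surrounding examples.

 // Sketch: flush the graph by signalling EOF on the source, then drain
 // everything that is still buffered downstream.
 public static unsafe void FlushGraph(AVFilterContext *src, AVFilterContext *sink, Action<IntPtr> onFrame)
 {
     // A null frame tells the buffer/abuffer source that no more input follows.
     ffmpeg.av_buffersrc_add_frame(src, null).ThrowExceptionIfError();

     var frame = ffmpeg.av_frame_alloc();
     try
     {
         while (ffmpeg.av_buffersink_get_frame(sink, frame) >= 0)
         {
             onFrame((IntPtr)frame);
             ffmpeg.av_frame_unref(frame);
         }
     }
     finally
     {
         ffmpeg.av_frame_free(&frame);
     }
 }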
Example #28
 public static extern int av_buffersrc_parameters_set(AVFilterContext * @ctx, AVBufferSrcParameters * @param);
Example #29
 public static extern uint av_buffersrc_get_nb_failed_requests(AVFilterContext * @buffer_src);
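
A small sketch of the usual interpretation of this counter: a non-zero value means the graph asked this source for a frame and none was available, which is a cue to decode and push more input.

 // Sketch: use the failed-request counter to decide when to feed the source.
 public static unsafe bool GraphWantsMoreInput(AVFilterContext *bufferSrc)
 {
     return ffmpeg.av_buffersrc_get_nb_failed_requests(bufferSrc) > 0;
 }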
Example #30
 public static extern int avfilter_insert_filter(AVFilterLink * @link, AVFilterContext * @filt, uint @filt_srcpad_idx, uint @filt_dstpad_idx);
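
Finally, a hedged sketch of splicing a new filter into an already-linked pair of contexts; the volume filter and the pad indices 0/0 are illustrative, and a graph that was already configured needs avfilter_graph_config to be run again afterwards.

 // Sketch: create a new filter context and insert it on the link leaving
 // srcCtx's first output pad, between srcCtx and whatever it fed before.
 public static unsafe void InsertAfter(AVFilterGraph *graph, AVFilterContext *srcCtx)
 {
     AVFilterContext *volumeCtx;
     ffmpeg.avfilter_graph_create_filter(&volumeCtx, ffmpeg.avfilter_get_by_name("volume"), "vol", "volume=0.5", null, graph)
           .ThrowExceptionIfError();

     // srcCtx->outputs[0] is the existing link; the new filter is wired in
     // using its input pad 0 and output pad 0.
     ffmpeg.avfilter_insert_filter(srcCtx->outputs[0], volumeCtx, 0, 0).ThrowExceptionIfError();
 }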