Example 1
        /// <summary>
        /// Allocates a software-scaler context that converts frames from the
        /// <paramref name="source"/> video format to the <paramref name="target"/>
        /// video format using the requested scaling algorithm.
        /// </summary>
        /// <param name="source">Input frame dimensions and pixel format.</param>
        /// <param name="target">Output frame dimensions and pixel format.</param>
        /// <param name="flags">Scaling algorithm / quality flags passed to libswscale.</param>
        /// <returns>A managed wrapper around the native sws context.</returns>
        public static SwsContext Create(VideoFormatInfo source, VideoFormatInfo target, SwsFlags flags)
        {
            var handle = ffmpeg.sws_getContext(
                source.Width,
                source.Height,
                source.PixelFormat.ToAVFormat(),
                target.Width,
                target.Height,
                target.PixelFormat.ToAVFormat(),
                flags: (int)flags,
                srcFilter: null,
                dstFilter: null,
                param: null);

            return new SwsContext(handle);
        }
Example 2
        /// <summary>
        /// Creates a "buffer" source filter configured for the given video format
        /// and registers it with this graph's native filter graph.
        /// </summary>
        /// <param name="format">Video format describing the frames that will be fed in.</param>
        /// <returns>The newly created source filter context, named "in".</returns>
        public FilterContext AddSource(VideoFormatInfo format)
        {
            AVFilterContext* context;

            var pixelFormatName = ffmpeg.av_get_pix_fmt_name(format.PixelFormat.ToAVFormat());

            // Argument string in the form libavfilter's "buffer" source expects.
            var args =
                $"video_size={format.Width}x{format.Height}" +
                $":pix_fmt={pixelFormatName}" +
                $":time_base={format.TimeBase.Numerator}/{format.TimeBase.Denominator}" +
                $":pixel_aspect={format.AspectRatio.Numerator}/{format.AspectRatio.Denominator}";

            var bufferFilter = Filter.FromName("buffer");

            ffmpeg.avfilter_graph_create_filter(&context, bufferFilter.Pointer, "in", args, null, pointer).EnsureSuccess();

            return new FilterContext(context);
        }
Example 3
        /// <summary>
        /// Builds a filter graph that connects a buffer source (fed with decoded
        /// frames) to a buffer sink (constrained to the encoder's output format),
        /// optionally routing the stream through a custom filter specification.
        /// </summary>
        /// <param name="decoder">Decoder providing the input stream format; must be a video or audio decoder.</param>
        /// <param name="encoder">Encoder whose format settings constrain the sink.</param>
        /// <param name="filterSpecification">
        /// Optional libavfilter graph description (e.g. "scale=320:240"); when null,
        /// the source is linked directly to the sink.
        /// </param>
        /// <returns>The initialized filter graph.</returns>
        /// <exception cref="ArgumentNullException">decoder or encoder is null.</exception>
        /// <exception cref="InvalidOperationException">The audio decoder has an unknown channel layout.</exception>
        /// <exception cref="NotSupportedException">The decoder is neither a video nor an audio decoder.</exception>
        public static FilterGraph Create(
            Codec decoder,
            Codec encoder,
            string filterSpecification)
        {
            if (decoder is null)
            {
                throw new ArgumentNullException(nameof(decoder));
            }
            if (encoder is null)
            {
                throw new ArgumentNullException(nameof(encoder));
            }

            FilterContext bufferSource = default;
            FilterContext bufferSink   = default;

            var graph = new FilterGraph();

            if (decoder is VideoDecoder)
            {
                var format = new VideoFormatInfo(
                    decoder.Context.PixelFormat,
                    decoder.Context.Width,
                    decoder.Context.Height,
                    decoder.Context.TimeBase,
                    decoder.Context.AspectRatio
                    );

                bufferSource = graph.AddSource(format);

                bufferSink = graph.AddSink(Filter.FromName("buffersink"));

                // Constrain the sink to emit frames in the encoder's pixel format.
                bufferSink.SetOption("pix_fmts", (int)encoder.Context.PixelFormat.ToAVFormat());
            }
            else if (decoder is AudioDecoder audioDecoder)
            {
                if (audioDecoder.Context.ChannelLayout == ChannelLayout.Unknown)
                {
                    // A concrete channel layout is required to describe the buffer source.
                    throw new InvalidOperationException("Invalid ChannelLayout: " + audioDecoder.Context.ChannelLayout);

                    // decoder.Context.ChannelLayout = (ChannelLayout)ffmpeg.av_get_default_channel_layout(decoder.Context.ChannelCount);
                }

                bufferSource = graph.AddSource(new AudioFormatInfo(
                                                   sampleFormat: audioDecoder.Context.SampleFormat,
                                                   sampleRate: audioDecoder.Context.SampleRate,
                                                   channelCount: audioDecoder.Context.ChannelCount,
                                                   channelLayout: audioDecoder.Context.ChannelLayout
                                                   ));

                bufferSink = graph.AddSink(Filter.FromName("abuffersink"));

                // $"aresample={resampledAudioFormat.SampleRate},aformat=sample_fmts={sampleFormat}:channel_layouts=stereo,asetnsamples=n=1024:p=0"

                // var sampleFormatName = ffmpeg.av_get_sample_fmt_name(encoder.Context.SampleFormat.ToAVFormat());

                // Constrain the sink to emit samples the encoder can consume.
                bufferSink.SetOption("sample_fmts", (int)encoder.Context.SampleFormat.ToAVFormat());
                bufferSink.SetOption("channel_layouts", (ulong)encoder.Context.ChannelLayout);
                bufferSink.SetOption("sample_rates", encoder.Context.SampleRate);
            }
            else
            {
                throw new NotSupportedException("Invalid codec: expected a video or audio decoder.");
            }

            // Note: the naming follows FFmpeg's convention — the graph's "outputs"
            // are the free output pads of the source ("in"), and its "inputs" are
            // the free input pads of the sink ("out"), as seen from the filter
            // specification that will be parsed between them.
            graph.Outputs = new FilterInOut("in", bufferSource);
            graph.Inputs  = new FilterInOut("out", bufferSink);

            graph.bufferSource = bufferSource;
            graph.bufferSink   = bufferSink;

            if (filterSpecification != null)
            {
                graph.Parse(filterSpecification);
            }
            else
            {
                // No custom filters: link the source straight to the sink.
                // Check the result, consistent with the other ffmpeg calls
                // in this type (previously the return value was ignored).
                ffmpeg.avfilter_link(bufferSource.Pointer, 0, bufferSink.Pointer, 0).EnsureSuccess();
            }

            graph.Initialize();

            return graph;
        }
Example 4
 /// <summary>
 /// Initializes a resampler that converts frames from <paramref name="source"/>
 /// to <paramref name="target"/> using the given libswscale scaling flags.
 /// </summary>
 /// <param name="source">Input frame format; must not be null.</param>
 /// <param name="target">Output frame format; must not be null.</param>
 /// <param name="flags">Scaling algorithm; defaults to fast bilinear.</param>
 /// <exception cref="ArgumentNullException">source or target is null.</exception>
 public VideoResampler(VideoFormatInfo source, VideoFormatInfo target, SwsFlags flags = SwsFlags.FastBilinear)
 {
     if (source is null)
     {
         throw new ArgumentNullException(nameof(source));
     }
     if (target is null)
     {
         throw new ArgumentNullException(nameof(target));
     }

     Source  = source;
     Target  = target;
     Context = SwsContext.Create(source, target, flags);
 }
Example 5
 /// <summary>
 /// Computes the number of bytes required to store one image of this format,
 /// with each line padded to the given byte alignment.
 /// </summary>
 /// <param name="format">Video format providing pixel format and dimensions.</param>
 /// <param name="align">Line-size alignment in bytes (e.g. 1 for tightly packed).</param>
 /// <returns>Required buffer size in bytes.</returns>
 public static int GetBufferSize(this VideoFormatInfo format, int align)
     => ffmpeg.av_image_get_buffer_size(format.PixelFormat.ToAVFormat(), format.Width, format.Height, align);
Example 6
 /// <summary>
 /// Gets the per-plane line sizes (strides) for this format at the given
 /// alignment, delegating to the (pixelFormat, width, align) overload.
 /// </summary>
 /// <param name="format">Video format providing pixel format and width.</param>
 /// <param name="align">Line-size alignment in bytes.</param>
 /// <returns>One stride per image plane.</returns>
 public static int[] GetStrides(this VideoFormatInfo format, int align)
     => GetStrides(format.PixelFormat, format.Width, align);
Example 7
 /// <summary>
 /// Initializes a frame from a video format descriptor, delegating to the
 /// (pixelFormat, width, height) constructor.
 /// </summary>
 /// <param name="format">Format supplying the frame's pixel format and dimensions.</param>
 public VideoFrame(VideoFormatInfo format)
     : this(format.PixelFormat, format.Width, format.Height)
 {
 }