Example No. 1
0
 /// <summary>
 /// Builds and runs the generic host: systemd integration, trace-level logging
 /// to both a file (under the configured output folder) and the console, and
 /// DI registrations for the status provider and the background worker.
 /// Blocks until the host shuts down.
 /// </summary>
 private static void RunApp(StreamerSettings streamerSettings)
 {
     var builder = Host.CreateDefaultBuilder()
         .UseSystemd()
         .ConfigureLogging((context, logging) =>
         {
             logging.SetMinimumLevel(LogLevel.Trace);
             logging.AddFile(Path.Combine(streamerSettings.OutputFolder, "app.log"));
             logging.AddConsole(options => options.Format = ConsoleLoggerFormat.Systemd);
         })
         .ConfigureServices((context, services) =>
         {
             services.AddSingleton(streamerSettings);
             services.AddSingleton<DuetWifiStatusProvider>();
             services.AddHostedService<Worker>();
         });

     builder.Build().Run();
 }
Example No. 2
0
        /// <summary>
        /// Wires up the worker: initializes FFmpeg, picks a camera source
        /// (an MJPEG HTTP stream when WebCamUrl is set, otherwise a V4L2
        /// device when WebCamDevice is set), and keeps the status provider
        /// and settings for later use.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown when neither WebCamUrl nor WebCamDevice is configured.
        /// </exception>
        public Worker(ILoggerFactory loggerFactory, DuetWifiStatusProvider statusProvider, StreamerSettings streamerSettings)
        {
            FFMpegSetup.Init(loggerFactory.CreateLogger<FFMpegSetup>());
            _logger = loggerFactory.CreateLogger<Worker>();
            // Structured logging template instead of string interpolation (CA2254).
            _logger.LogInformation("Starting with settings: {StreamerSettings}", streamerSettings);
            _statusProvider = statusProvider;
            if (streamerSettings.WebCamUrl != null)
            {
                _webCamera = new MJpegStreamWebCameraProvider(loggerFactory.CreateLogger<MJpegStreamWebCameraProvider>(), streamerSettings);
            }
            else if (streamerSettings.WebCamDevice != null)
            {
                _webCamera = new Video4LinuxWebCameraProvider(loggerFactory.CreateLogger<Video4LinuxWebCameraProvider>(), streamerSettings);
            }
            else
            {
                // The single-string ArgumentOutOfRangeException ctor treats its argument
                // as a *parameter name*, which mangled the original diagnostic; use the
                // (paramName, message) overload so the text surfaces as the message.
                throw new ArgumentOutOfRangeException(nameof(streamerSettings), "Either webcamUrl or webcamDevice should be enabled");
            }

            _streamerSettings = streamerSettings;
        }
        /// <summary>
        /// Sets up an MP4 muxer with a single h264 video stream writing to
        /// <paramref name="filename"/>. Prefers the hardware encoder
        /// ("h264_omx", e.g. Raspberry Pi) and falls back to software
        /// "libx264" when it is unavailable. Frames are routed through a
        /// <see cref="VideoFlipperConverter"/> built for the webcam's pixel format.
        /// </summary>
        /// <exception cref="InvalidOperationException">Thrown when no h264 encoder is available.</exception>
        public VideoWriter(ILogger logger, string filename, int width, int height, AVPixelFormat webcamPixelFormat, StreamerSettings settings)
        {
            _logger = logger;
            _fps    = settings.FPS;

            _videoFlipperConverter = new VideoFlipperConverter(width, height, webcamPixelFormat, settings);

            _h264Codec = ffmpeg.avcodec_find_encoder_by_name("h264_omx");
            if (_h264Codec == null)
            {
                // Falling back is degraded-but-working operation, so log as a warning
                // (the original message was also garbled English).
                _logger.LogWarning("Hardware-accelerated h264 encoder not found, falling back to the software encoder.");
                _h264Codec = ffmpeg.avcodec_find_encoder_by_name("libx264");
            }

            if (_h264Codec == null)
            {
                throw new InvalidOperationException("Codec not found.");
            }

            fixed(AVFormatContext **occ = &_h264AvFormatContext)
            {
                AVOutputFormat *fmt = ffmpeg.av_guess_format("mp4", null, null);

                // Check the return codes here too — the original silently ignored
                // failures from these two calls while checking all the others.
                ffmpeg.avformat_alloc_output_context2(occ, fmt, null, null).ThrowExceptionIfError();
                _h264Stream = ffmpeg.avformat_new_stream(_h264AvFormatContext, _h264Codec);
                _h264Stream->codec->width     = width;
                _h264Stream->codec->height    = height;
                _h264Stream->codec->time_base = new AVRational {
                    num = 1, den = _fps
                };
                _h264Stream->codec->pix_fmt  = AVPixelFormat.AV_PIX_FMT_YUV420P;
                _h264Stream->codec->bit_rate = 1_500_000;
                // "preset" only affects libx264; h264_omx ignores it.
                ffmpeg.av_opt_set(_h264Stream->codec->priv_data, "preset", "veryslow", 0);

                if ((_h264AvFormatContext->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) != 0) // Some formats require a global header.
                {
                    _h264Stream->codec->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
                }
                ffmpeg.avcodec_open2(_h264Stream->codec, _h264Codec, null).ThrowExceptionIfError();
                _h264Stream->time_base = new AVRational()
                {
                    num = 1, den = _fps
                };

                ffmpeg.avio_open(&_h264AvFormatContext->pb, filename, ffmpeg.AVIO_FLAG_WRITE).ThrowExceptionIfError();
                ffmpeg.avformat_write_header(_h264AvFormatContext, null).ThrowExceptionIfError();
            }
        }
        /// <summary>
        /// Builds an FFmpeg filter graph that converts incoming frames to pixel
        /// format 0 and optionally flips them vertically and/or horizontally
        /// according to <paramref name="settings"/>, then allocates the output
        /// frame buffer the flipped frames are written into.
        /// </summary>
        /// <exception cref="Exception">Thrown when the source/sink filter contexts cannot be located.</exception>
        public VideoFlipperConverter(int width, int height, AVPixelFormat inputPixelFormat, StreamerSettings settings)
        {
            // Graph text: buffer source [in] -> buffersink [out], with [in] first
            // forced through format=pix_fmts=0 into [in1].
            // NOTE(review): pix_fmts=0 presumably means AV_PIX_FMT_YUV420P (enum
            // value 0), matching the YUV420P buffer allocated below — confirm.
            string filters    = $"buffer=width={width}:height={height}:pix_fmt={(int)inputPixelFormat}:time_base=1/1:pixel_aspect=1/1 [in]; [out] buffersink;[in] format=pix_fmts=0 [in1];";
            int    inputCount = 1;

            // Each enabled flip appends a stage, relabeling [inN] -> [inN+1] so the
            // stages chain in order: format -> (vflip) -> (hflip) -> copy.
            if (settings.FlipY)
            {
                filters += $"[in{inputCount}] vflip [in{++inputCount}];";
            }
            if (settings.FlipX)
            {
                filters += $"[in{inputCount}] hflip [in{++inputCount}];";
            }

            // Final copy stage connects the last label to the sink's [out] pad.
            filters += $"[in{inputCount}] copy [out]";
            AVFilterInOut *gis = null;
            AVFilterInOut *gos = null;

            _filterGraph = ffmpeg.avfilter_graph_alloc();
            ffmpeg.avfilter_graph_parse2(_filterGraph, filters, &gis, &gos).ThrowExceptionIfError();
            ffmpeg.avfilter_graph_config(_filterGraph, null).ThrowExceptionIfError();

            // avfilter_graph_parse2 names instances "Parsed_<filter>_<index>" in
            // parse order; buffer is declared first (0) and buffersink second (1).
            _filterSourceContext = ffmpeg.avfilter_graph_get_filter(_filterGraph, "Parsed_buffer_0");
            _filterSinkContext   = ffmpeg.avfilter_graph_get_filter(_filterGraph, "Parsed_buffersink_1");
            if (_filterSourceContext == null || _filterSinkContext == null)
            {
                throw new Exception("Failed to create filter sinks");
            }

            // Allocate a YUV420P destination frame: raw buffer first, then point the
            // frame's data/linesize arrays at it. The UpdateFrom round-trips copy the
            // frame's fixed-size arrays into managed wrappers for av_image_fill_arrays
            // and back again.
            _flippedFrame = ffmpeg.av_frame_alloc();
            var flippedFrameBuffer = (byte *)ffmpeg.av_malloc((ulong)ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_YUV420P, width, height, 1));
            var dataArr            = new byte_ptrArray4();

            dataArr.UpdateFrom(_flippedFrame->data);
            var linesizeArr = new int_array4();

            linesizeArr.UpdateFrom(_flippedFrame->linesize);
            ffmpeg.av_image_fill_arrays(ref dataArr, ref linesizeArr, flippedFrameBuffer, AVPixelFormat.AV_PIX_FMT_YUV420P, width, height, 1);
            _flippedFrame->data.UpdateFrom(dataArr);
            _flippedFrame->linesize.UpdateFrom(linesizeArr);
        }
        /// <summary>
        /// Opens the Video4Linux2 capture device named by settings.WebCamDevice
        /// through FFmpeg, locates its best video stream, and opens the matching
        /// decoder. Capture size is fixed at 1280x720 and the requested frame
        /// rate at 15 fps (passed to the v4l2 demuxer as open options).
        /// </summary>
        /// <exception cref="Exception">Thrown on allocation failure, missing video stream, or missing decoder.</exception>
        public unsafe Video4LinuxWebCameraProvider(ILogger <Video4LinuxWebCameraProvider> logger, StreamerSettings settings)
        {
            _logger   = logger;
            _settings = settings;
            Width     = 1280;
            Height    = 720;

            AVDictionary *options = null;

            var ifmt = ffmpeg.av_find_input_format("v4l2");

            if (ifmt == null)
            {
                // Deliberately best-effort (the original commented out the throw):
                // avformat_open_input can still probe the device with a null format,
                // so surface the problem instead of silently ignoring it.
                _logger.LogWarning("v4l2 input format not found; letting FFmpeg probe the device instead.");
            }

            _fmtCtx = ffmpeg.avformat_alloc_context();
            if (_fmtCtx == null)
            {
                throw new Exception("Cannot allocate input format (Out of memory?)");
            }

            // Enable non-blocking mode
            _fmtCtx->flags |= ffmpeg.AVFMT_FLAG_NONBLOCK;

            // framerate needs to be set before opening the v4l2 device
            ffmpeg.av_dict_set(&options, "framerate", "15", 0);
            // This will not work if the camera does not support h264. In that case
            // remove this line. I wrote this for Raspberry Pi where the camera driver
            // can stream h264.
            // ffmpeg.av_dict_set(&options, "input_format", "h264", 0);
            ffmpeg.av_dict_set(&options, "video_size", Width + "x" + Height, 0);

            // open input file, and allocate format context
            fixed(AVFormatContext **fmtCtxAddr = &_fmtCtx)
            ffmpeg.avformat_open_input(fmtCtxAddr, settings.WebCamDevice, ifmt, &options).ThrowExceptionIfError();

            // retrieve stream information
            ffmpeg.avformat_find_stream_info(_fmtCtx, null).ThrowExceptionIfError();

            AVDictionary *opts = null;

            var ret1 = ffmpeg.av_find_best_stream(_fmtCtx, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, null, 0);

            ret1.ThrowExceptionIfError();
            _videoStreamIdx = ret1;
            var st = _fmtCtx->streams[_videoStreamIdx];

            // Validate BEFORE dereferencing — the original checked for null only
            // after st->codec had already been read.
            if (st == null)
            {
                throw new Exception("Could not find video stream in the input, aborting");
            }

            // find decoder for the stream
            var decCtx = st->codec;
            var dec    = ffmpeg.avcodec_find_decoder(decCtx->codec_id);

            if (dec == null)
            {
                throw new Exception("Failed to find video codec");
            }

            // Init the decoders, with or without reference counting
            ffmpeg.av_dict_set(&opts, "refcounted_frames", "1", 0);
            ffmpeg.avcodec_open2(decCtx, dec, &opts).ThrowExceptionIfError();

            _videoDecCtx = decCtx;

            // dump input information to stderr
            ffmpeg.av_dump_format(_fmtCtx, 0, settings.WebCamDevice, 0);

            _frame = ffmpeg.av_frame_alloc();

            _pkt       = ffmpeg.av_packet_alloc();
            _pkt->data = null;
            _pkt->size = 0;
        }
 /// <summary>
 /// Creates the provider with its logger, settings, and a dedicated
 /// <see cref="HttpClient"/> reused for the provider's lifetime.
 /// </summary>
 public MJpegStreamWebCameraProvider(ILogger <MJpegStreamWebCameraProvider> logger, StreamerSettings settings)
 {
     _httpClient = new HttpClient();
     _settings = settings;
     _logger = logger;
 }
 /// <summary>
 /// Creates the provider with its settings, logger, and a dedicated
 /// <see cref="HttpClient"/> reused for the provider's lifetime.
 /// </summary>
 public DuetWifiStatusProvider(StreamerSettings settings, ILogger <DuetWifiStatusProvider> logger)
 {
     _httpClient = new HttpClient();
     _logger = logger;
     _settings = settings;
 }