Example #1
        public void Should_Return_Watermark(
            bool alignAudio,
            bool deinterlace,
            bool intermittent,
            WatermarkLocation location,
            bool scaled,
            int opacity,
            string expectedVideoFilter,
            string expectedAudioLabel,
            string expectedVideoLabel)
        {
            var watermark = new ChannelWatermark
            {
                Mode = intermittent
                    ? ChannelWatermarkMode.Intermittent
                    : ChannelWatermarkMode.Permanent,
                DurationSeconds         = intermittent ? 15 : 0,
                FrequencyMinutes        = intermittent ? 10 : 0,
                Location                = location,
                Size                    = scaled ? WatermarkSize.Scaled : WatermarkSize.ActualSize,
                WidthPercent            = scaled ? 20 : 0,
                Opacity                 = opacity,
                HorizontalMarginPercent = 7,
                VerticalMarginPercent   = 5
            };

            Option<List<FadePoint>> maybeFadePoints = watermark.Mode == ChannelWatermarkMode.Intermittent
                ? Some(
                    WatermarkCalculator.CalculateFadePoints(
                        new DateTimeOffset(2022, 01, 31, 12, 25, 0, TimeSpan.FromHours(-5)),
                        TimeSpan.Zero,
                        TimeSpan.FromMinutes(55),
                        TimeSpan.Zero,
                        watermark.FrequencyMinutes,
                        watermark.DurationSeconds))
                : None;

            FFmpegComplexFilterBuilder builder = new FFmpegComplexFilterBuilder()
                .WithWatermark(
                    Some(watermark),
                    maybeFadePoints,
                    new Resolution { Width = 1920, Height = 1080 },
                    None)
                .WithDeinterlace(deinterlace)
                .WithAlignedAudio(alignAudio ? Some(TimeSpan.FromMinutes(55)) : None);

            Option<FFmpegComplexFilter> result = builder.Build(false, 0, 0, 0, 1, false);

            result.IsSome.Should().BeTrue();
            result.IfSome(
                filter =>
                {
                    filter.ComplexFilter.Should().Be(expectedVideoFilter);
                    filter.AudioLabel.Should().Be(expectedAudioLabel);
                    filter.VideoLabel.Should().Be(expectedVideoLabel);
                });
        }
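
For contrast, here is a minimal usage sketch (not part of the original test) that drives the same builder without a watermark. Whether Build still produces a filter in that case depends on the implementation, so the sketch only inspects the result when one is returned; the Build arguments simply mirror the call above.

    FFmpegComplexFilterBuilder builder = new FFmpegComplexFilterBuilder()
        .WithDeinterlace(true)
        .WithAlignedAudio(Some(TimeSpan.FromMinutes(55)));

    // same positional arguments as the test above; the filter is only inspected when Build returns Some
    Option<FFmpegComplexFilter> result = builder.Build(false, 0, 0, 0, 1, false);
    result.IfSome(filter => Console.WriteLine(filter.ComplexFilter));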
Example #2
    public void EntireVideoBetweenWatermarks_ShouldReturn_EmptyFadePointList()
    {
        List<FadePoint> actual = WatermarkCalculator.CalculateFadePoints(
            new DateTimeOffset(2022, 01, 31, 13, 34, 00, TimeSpan.FromHours(-5)),
            TimeSpan.Zero,
            TimeSpan.FromMinutes(5),
            None,
            15,
            10);

        actual.Should().HaveCount(0);
    }
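
As a hedged complement (not in the original source), the same calculator called with the 55-minute window and 10-minute frequency from Example #1 should produce at least one fade point, since several watermark appearances fall inside that window. The exact count depends on the implementation, so this sketch, with a hypothetical test name, only asserts that the list is non-empty.

    public void LongerVideo_ShouldReturn_FadePoints()
    {
        // parameters taken from Example #1; only "non-empty" is assumed, not the exact number of fade points
        List<FadePoint> actual = WatermarkCalculator.CalculateFadePoints(
            new DateTimeOffset(2022, 01, 31, 12, 25, 0, TimeSpan.FromHours(-5)),
            TimeSpan.Zero,
            TimeSpan.FromMinutes(55),
            TimeSpan.Zero,
            10,
            15);

        actual.Should().NotBeEmpty();
    }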
Example #3
    public async Task<Command> ForPlayoutItem(
        string ffmpegPath,
        string ffprobePath,
        bool saveReports,
        Channel channel,
        MediaVersion videoVersion,
        MediaVersion audioVersion,
        string videoPath,
        string audioPath,
        List<Subtitle> subtitles,
        string preferredAudioLanguage,
        string preferredSubtitleLanguage,
        ChannelSubtitleMode subtitleMode,
        DateTimeOffset start,
        DateTimeOffset finish,
        DateTimeOffset now,
        Option<ChannelWatermark> playoutItemWatermark,
        Option<ChannelWatermark> globalWatermark,
        VaapiDriver vaapiDriver,
        string vaapiDevice,
        bool hlsRealtime,
        FillerKind fillerKind,
        TimeSpan inPoint,
        TimeSpan outPoint,
        long ptsOffset,
        Option<int> targetFramerate)
    {
        MediaStream videoStream = await _ffmpegStreamSelector.SelectVideoStream(videoVersion);
        Option<MediaStream> maybeAudioStream =
            await _ffmpegStreamSelector.SelectAudioStream(
                audioVersion,
                channel.StreamingMode,
                channel.Number,
                preferredAudioLanguage);
        Option<Subtitle> maybeSubtitle =
            await _ffmpegStreamSelector.SelectSubtitleStream(
                videoVersion,
                subtitles,
                channel.StreamingMode,
                channel.Number,
                preferredSubtitleLanguage,
                subtitleMode);

        FFmpegPlaybackSettings playbackSettings = _playbackSettingsCalculator.CalculateSettings(
            channel.StreamingMode,
            channel.FFmpegProfile,
            videoVersion,
            videoStream,
            maybeAudioStream,
            start,
            now,
            inPoint,
            outPoint,
            hlsRealtime,
            targetFramerate);

        Option<WatermarkOptions> watermarkOptions =
            await _ffmpegProcessService.GetWatermarkOptions(
                ffprobePath,
                channel,
                playoutItemWatermark,
                globalWatermark,
                videoVersion,
                None,
                None);

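        // fade points are only needed for intermittent watermarks, which fade in and out on a fixed schedule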
        Option<List<FadePoint>> maybeFadePoints = watermarkOptions
            .Map(o => o.Watermark)
            .Flatten()
            .Where(wm => wm.Mode == ChannelWatermarkMode.Intermittent)
            .Map(
                wm =>
                    WatermarkCalculator.CalculateFadePoints(
                        start,
                        inPoint,
                        outPoint,
                        playbackSettings.StreamSeek,
                        wm.FrequencyMinutes,
                        wm.DurationSeconds));

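        // map the profile's audio format onto the ffmpeg pipeline's audio format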
        string audioFormat = playbackSettings.AudioFormat switch
        {
            FFmpegProfileAudioFormat.Aac => AudioFormat.Aac,
            FFmpegProfileAudioFormat.Ac3 => AudioFormat.Ac3,
            FFmpegProfileAudioFormat.Copy => AudioFormat.Copy,
            _ => throw new ArgumentOutOfRangeException($"unexpected audio format {playbackSettings.AudioFormat}")
        };

        var audioState = new AudioState(
            audioFormat,
            playbackSettings.AudioChannels,
            playbackSettings.AudioBitrate,
            playbackSettings.AudioBufferSize,
            playbackSettings.AudioSampleRate,
            videoPath == audioPath ? playbackSettings.AudioDuration : Option<TimeSpan>.None,
            playbackSettings.NormalizeLoudness);

        var ffmpegVideoStream = new VideoStream(
            videoStream.Index,
            videoStream.Codec,
            AvailablePixelFormats.ForPixelFormat(videoStream.PixelFormat, _logger),
            new FrameSize(videoVersion.Width, videoVersion.Height),
            videoVersion.RFrameRate,
            videoPath != audioPath); // still image when paths are different

        var videoInputFile = new VideoInputFile(videoPath, new List<VideoStream> { ffmpegVideoStream });

        Option<AudioInputFile> audioInputFile = maybeAudioStream.Map(
            audioStream =>
            {
                var ffmpegAudioStream = new AudioStream(audioStream.Index, audioStream.Codec, audioStream.Channels);
                return new AudioInputFile(audioPath, new List<AudioStream> { ffmpegAudioStream }, audioState);
            });

        Option<SubtitleInputFile> subtitleInputFile = maybeSubtitle.Map<Option<SubtitleInputFile>>(
            subtitle =>
            {
                if (!subtitle.IsImage && subtitle.SubtitleKind == SubtitleKind.Embedded && !subtitle.IsExtracted)
                {
                    _logger.LogWarning("Subtitles are not yet available for this item");
                    return None;
                }

                var ffmpegSubtitleStream = new ErsatzTV.FFmpeg.MediaStream(
                    subtitle.IsImage ? subtitle.StreamIndex : 0,
                    subtitle.Codec,
                    StreamKind.Video);

                string path = subtitle.IsImage
                    ? videoPath
                    : Path.Combine(FileSystemLayout.SubtitleCacheFolder, subtitle.Path);

                return new SubtitleInputFile(
                    path,
                    new List<ErsatzTV.FFmpeg.MediaStream> { ffmpegSubtitleStream },
                    false);

                // TODO: figure out HLS direct
                // channel.StreamingMode == StreamingMode.HttpLiveStreamingDirect);
            }).Flatten();

        Option<WatermarkInputFile> watermarkInputFile = GetWatermarkInputFile(watermarkOptions, maybeFadePoints);

        string videoFormat = playbackSettings.VideoFormat switch
        {
            FFmpegProfileVideoFormat.Hevc => VideoFormat.Hevc,
            FFmpegProfileVideoFormat.H264 => VideoFormat.H264,
            FFmpegProfileVideoFormat.Mpeg2Video => VideoFormat.Mpeg2Video,
            FFmpegProfileVideoFormat.Copy => VideoFormat.Copy,
            _ => throw new ArgumentOutOfRangeException($"unexpected video format {playbackSettings.VideoFormat}")
        };

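        // map the configured hardware acceleration kind onto the ffmpeg pipeline's acceleration mode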
        HardwareAccelerationMode hwAccel = playbackSettings.HardwareAcceleration switch
        {
            HardwareAccelerationKind.Nvenc => HardwareAccelerationMode.Nvenc,
            HardwareAccelerationKind.Qsv => HardwareAccelerationMode.Qsv,
            HardwareAccelerationKind.Vaapi => HardwareAccelerationMode.Vaapi,
            HardwareAccelerationKind.VideoToolbox => HardwareAccelerationMode.VideoToolbox,
            _ => HardwareAccelerationMode.None
        };

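        // HLS segmenter channels write a playlist and numbered segment files; all other modes stream MPEG-TS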
        OutputFormatKind outputFormat = channel.StreamingMode == StreamingMode.HttpLiveStreamingSegmenter
            ? OutputFormatKind.Hls
            : OutputFormatKind.MpegTs;

        Option<string> hlsPlaylistPath = outputFormat == OutputFormatKind.Hls
            ? Path.Combine(FileSystemLayout.TranscodeFolder, channel.Number, "live.m3u8")
            : Option<string>.None;

        Option<string> hlsSegmentTemplate = outputFormat == OutputFormatKind.Hls
            ? Path.Combine(FileSystemLayout.TranscodeFolder, channel.Number, "live%06d.ts")
            : Option<string>.None;

        // normalize songs to yuv420p
        Option<IPixelFormat> desiredPixelFormat =
            videoPath == audioPath ? ffmpegVideoStream.PixelFormat : new PixelFormatYuv420P();

        var desiredState = new FrameState(
            playbackSettings.RealtimeOutput,
            false, // TODO: fallback filler needs to loop
            videoFormat,
            desiredPixelFormat,
            await playbackSettings.ScaledSize.Map(ss => new FrameSize(ss.Width, ss.Height))
                .IfNoneAsync(new FrameSize(videoVersion.Width, videoVersion.Height)),
            new FrameSize(channel.FFmpegProfile.Resolution.Width, channel.FFmpegProfile.Resolution.Height),
            playbackSettings.FrameRate,
            playbackSettings.VideoBitrate,
            playbackSettings.VideoBufferSize,
            playbackSettings.VideoTrackTimeScale,
            playbackSettings.Deinterlace);

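        // overall ffmpeg run state: reporting, hardware acceleration, seek/duration, metadata, and output targets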
        var ffmpegState = new FFmpegState(
            saveReports,
            hwAccel,
            VaapiDriverName(hwAccel, vaapiDriver),
            VaapiDeviceName(hwAccel, vaapiDevice),
            playbackSettings.StreamSeek,
            finish - now,
            channel.StreamingMode != StreamingMode.HttpLiveStreamingDirect,
            "ErsatzTV",
            channel.Name,
            maybeAudioStream.Map(s => Optional(s.Language)).Flatten(),
            outputFormat,
            hlsPlaylistPath,
            hlsSegmentTemplate,
            ptsOffset);

        _logger.LogDebug("FFmpeg desired state {FrameState}", desiredState);

        var pipelineBuilder = new PipelineBuilder(
            videoInputFile,
            audioInputFile,
            watermarkInputFile,
            subtitleInputFile,
            FileSystemLayout.FFmpegReportsFolder,
            FileSystemLayout.FontsCacheFolder,
            _logger);

        FFmpegPipeline pipeline = pipelineBuilder.Build(ffmpegState, desiredState);

        return GetCommand(ffmpegPath, videoInputFile, audioInputFile, watermarkInputFile, None, pipeline);
    }