Example #1: ConcatChannel
    public Command ConcatChannel(string ffmpegPath, bool saveReports, Channel channel, string scheme, string host)
    {
        var resolution = new FrameSize(channel.FFmpegProfile.Resolution.Width, channel.FFmpegProfile.Resolution.Height);

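        // concat playback reads a playlist served by this application at its /ffmpeg/concat endpoint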
        var concatInputFile = new ConcatInputFile(
            $"http://localhost:{Settings.ListenPort}/ffmpeg/concat/{channel.Number}",
            resolution);

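        // concat has no direct video/audio/watermark/subtitle input files, so all four are None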
        var pipelineBuilder = new PipelineBuilder(
            None,
            None,
            None,
            None,
            FileSystemLayout.FFmpegReportsFolder,
            FileSystemLayout.FontsCacheFolder,
            _logger);

        FFmpegPipeline pipeline = pipelineBuilder.Concat(
            concatInputFile,
            FFmpegState.Concat(saveReports, channel.Name));

        return GetCommand(ffmpegPath, None, None, None, concatInputFile, pipeline);
    }
Example #2: ForPlayoutItem
    public async Task<Command> ForPlayoutItem(
        string ffmpegPath,
        string ffprobePath,
        bool saveReports,
        Channel channel,
        MediaVersion videoVersion,
        MediaVersion audioVersion,
        string videoPath,
        string audioPath,
        List<Subtitle> subtitles,
        string preferredAudioLanguage,
        string preferredSubtitleLanguage,
        ChannelSubtitleMode subtitleMode,
        DateTimeOffset start,
        DateTimeOffset finish,
        DateTimeOffset now,
        Option<ChannelWatermark> playoutItemWatermark,
        Option<ChannelWatermark> globalWatermark,
        VaapiDriver vaapiDriver,
        string vaapiDevice,
        bool hlsRealtime,
        FillerKind fillerKind,
        TimeSpan inPoint,
        TimeSpan outPoint,
        long ptsOffset,
        Option<int> targetFramerate)
    {
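        // select the video stream, and optionally audio and subtitle streams, for this item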
        MediaStream videoStream = await _ffmpegStreamSelector.SelectVideoStream(videoVersion);
        Option<MediaStream> maybeAudioStream =
            await _ffmpegStreamSelector.SelectAudioStream(
                audioVersion,
                channel.StreamingMode,
                channel.Number,
                preferredAudioLanguage);
        Option<Subtitle> maybeSubtitle =
            await _ffmpegStreamSelector.SelectSubtitleStream(
                videoVersion,
                subtitles,
                channel.StreamingMode,
                channel.Number,
                preferredSubtitleLanguage,
                subtitleMode);

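        // calculate playback settings (seek, scaling, bitrates, frame rate) for this item and channel profile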
        FFmpegPlaybackSettings playbackSettings = _playbackSettingsCalculator.CalculateSettings(
            channel.StreamingMode,
            channel.FFmpegProfile,
            videoVersion,
            videoStream,
            maybeAudioStream,
            start,
            now,
            inPoint,
            outPoint,
            hlsRealtime,
            targetFramerate);

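        // resolve watermark options from the playout item and global watermarks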
        Option<WatermarkOptions> watermarkOptions =
            await _ffmpegProcessService.GetWatermarkOptions(
                ffprobePath,
                channel,
                playoutItemWatermark,
                globalWatermark,
                videoVersion,
                None,
                None);

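        // intermittent watermarks fade in and out, so pre-calculate their fade points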
        Option<List<FadePoint>> maybeFadePoints = watermarkOptions
            .Map(o => o.Watermark)
            .Flatten()
            .Where(wm => wm.Mode == ChannelWatermarkMode.Intermittent)
            .Map(
                wm =>
                    WatermarkCalculator.CalculateFadePoints(
                        start,
                        inPoint,
                        outPoint,
                        playbackSettings.StreamSeek,
                        wm.FrequencyMinutes,
                        wm.DurationSeconds));

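        // translate the profile's audio format into the ffmpeg audio format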
        string audioFormat = playbackSettings.AudioFormat switch
        {
            FFmpegProfileAudioFormat.Aac => AudioFormat.Aac,
            FFmpegProfileAudioFormat.Ac3 => AudioFormat.Ac3,
            FFmpegProfileAudioFormat.Copy => AudioFormat.Copy,
            _ => throw new ArgumentOutOfRangeException($"unexpected audio format {playbackSettings.VideoFormat}")
        };

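        // desired audio output state: format, channels, bitrate, buffer size, sample rate, loudness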
        var audioState = new AudioState(
            audioFormat,
            playbackSettings.AudioChannels,
            playbackSettings.AudioBitrate,
            playbackSettings.AudioBufferSize,
            playbackSettings.AudioSampleRate,
            videoPath == audioPath ? playbackSettings.AudioDuration : Option<TimeSpan>.None,
            playbackSettings.NormalizeLoudness);

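        // model the ffmpeg inputs: video is required; audio and subtitles are optional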
        var ffmpegVideoStream = new VideoStream(
            videoStream.Index,
            videoStream.Codec,
            AvailablePixelFormats.ForPixelFormat(videoStream.PixelFormat, _logger),
            new FrameSize(videoVersion.Width, videoVersion.Height),
            videoVersion.RFrameRate,
            videoPath != audioPath); // still image when paths are different

        var videoInputFile = new VideoInputFile(videoPath, new List<VideoStream> { ffmpegVideoStream });

        Option<AudioInputFile> audioInputFile = maybeAudioStream.Map(
            audioStream =>
            {
                var ffmpegAudioStream = new AudioStream(audioStream.Index, audioStream.Codec, audioStream.Channels);
                return new AudioInputFile(audioPath, new List<AudioStream> { ffmpegAudioStream }, audioState);
            });

        Option<SubtitleInputFile> subtitleInputFile = maybeSubtitle.Map<Option<SubtitleInputFile>>(
            subtitle =>
            {
                if (!subtitle.IsImage && subtitle.SubtitleKind == SubtitleKind.Embedded && !subtitle.IsExtracted)
                {
                    _logger.LogWarning("Subtitles are not yet available for this item");
                    return None;
                }

                var ffmpegSubtitleStream = new ErsatzTV.FFmpeg.MediaStream(
                    subtitle.IsImage ? subtitle.StreamIndex : 0,
                    subtitle.Codec,
                    StreamKind.Video);

                string path = subtitle.IsImage
                    ? videoPath
                    : Path.Combine(FileSystemLayout.SubtitleCacheFolder, subtitle.Path);

                return new SubtitleInputFile(
                    path,
                    new List<ErsatzTV.FFmpeg.MediaStream> { ffmpegSubtitleStream },
                    // TODO: figure out HLS direct
                    // channel.StreamingMode == StreamingMode.HttpLiveStreamingDirect
                    false);
            }).Flatten();

        Option<WatermarkInputFile> watermarkInputFile = GetWatermarkInputFile(watermarkOptions, maybeFadePoints);

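        // translate the profile's video format into the ffmpeg video format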
        string videoFormat = playbackSettings.VideoFormat switch
        {
            FFmpegProfileVideoFormat.Hevc => VideoFormat.Hevc,
            FFmpegProfileVideoFormat.H264 => VideoFormat.H264,
            FFmpegProfileVideoFormat.Mpeg2Video => VideoFormat.Mpeg2Video,
            FFmpegProfileVideoFormat.Copy => VideoFormat.Copy,
            _ => throw new ArgumentOutOfRangeException($"unexpected video format {playbackSettings.VideoFormat}")
        };

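        // translate the configured hardware acceleration kind; anything unrecognized falls back to software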
        HardwareAccelerationMode hwAccel = playbackSettings.HardwareAcceleration switch
        {
            HardwareAccelerationKind.Nvenc => HardwareAccelerationMode.Nvenc,
            HardwareAccelerationKind.Qsv => HardwareAccelerationMode.Qsv,
            HardwareAccelerationKind.Vaapi => HardwareAccelerationMode.Vaapi,
            HardwareAccelerationKind.VideoToolbox => HardwareAccelerationMode.VideoToolbox,
            _ => HardwareAccelerationMode.None
        };

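        // HLS segmenter channels get a playlist and segment template; all other modes emit MPEG-TS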
        OutputFormatKind outputFormat = channel.StreamingMode == StreamingMode.HttpLiveStreamingSegmenter
            ? OutputFormatKind.Hls
            : OutputFormatKind.MpegTs;

        Option<string> hlsPlaylistPath = outputFormat == OutputFormatKind.Hls
            ? Path.Combine(FileSystemLayout.TranscodeFolder, channel.Number, "live.m3u8")
            : Option<string>.None;

        Option<string> hlsSegmentTemplate = outputFormat == OutputFormatKind.Hls
            ? Path.Combine(FileSystemLayout.TranscodeFolder, channel.Number, "live%06d.ts")
            : Option<string>.None;

        // normalize songs to yuv420p
        Option<IPixelFormat> desiredPixelFormat =
            videoPath == audioPath ? ffmpegVideoStream.PixelFormat : new PixelFormatYuv420P();

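        // the desired frame state describes the target video output for the pipeline builder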
        var desiredState = new FrameState(
            playbackSettings.RealtimeOutput,
            false, // TODO: fallback filler needs to loop
            videoFormat,
            desiredPixelFormat,
            await playbackSettings.ScaledSize.Map(ss => new FrameSize(ss.Width, ss.Height))
                .IfNoneAsync(new FrameSize(videoVersion.Width, videoVersion.Height)),
            new FrameSize(channel.FFmpegProfile.Resolution.Width, channel.FFmpegProfile.Resolution.Height),
            playbackSettings.FrameRate,
            playbackSettings.VideoBitrate,
            playbackSettings.VideoBufferSize,
            playbackSettings.VideoTrackTimeScale,
            playbackSettings.Deinterlace);

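        // process-level ffmpeg state: reports, acceleration, seek/duration, stream metadata, output format, PTS offset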
        var ffmpegState = new FFmpegState(
            saveReports,
            hwAccel,
            VaapiDriverName(hwAccel, vaapiDriver),
            VaapiDeviceName(hwAccel, vaapiDevice),
            playbackSettings.StreamSeek,
            finish - now,
            channel.StreamingMode != StreamingMode.HttpLiveStreamingDirect,
            "ErsatzTV",
            channel.Name,
            maybeAudioStream.Map(s => Optional(s.Language)).Flatten(),
            outputFormat,
            hlsPlaylistPath,
            hlsSegmentTemplate,
            ptsOffset);

        _logger.LogDebug("FFmpeg desired state {FrameState}", desiredState);

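        // build the full filter and encoder pipeline from the inputs and the desired state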
        var pipelineBuilder = new PipelineBuilder(
            videoInputFile,
            audioInputFile,
            watermarkInputFile,
            subtitleInputFile,
            FileSystemLayout.FFmpegReportsFolder,
            FileSystemLayout.FontsCacheFolder,
            _logger);

        FFmpegPipeline pipeline = pipelineBuilder.Build(ffmpegState, desiredState);

        return GetCommand(ffmpegPath, videoInputFile, audioInputFile, watermarkInputFile, None, pipeline);
    }
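
Both examples lean on the LanguageExt Option type (Map, Where, Flatten, IfNone/IfNoneAsync, Optional) instead of null checks. As a point of reference only, here is a minimal, self-contained sketch of that pattern; the WatermarkMode enum and Watermark record below are hypothetical stand-ins for illustration, not the real ErsatzTV types.

    // Minimal sketch only: these types are stand-ins, not ErsatzTV types.
    using System;
    using LanguageExt;
    using static LanguageExt.Prelude;

    public enum WatermarkMode { Permanent, Intermittent }

    public record Watermark(WatermarkMode Mode, int FrequencyMinutes);

    public static class OptionPatternSketch
    {
        public static void Main()
        {
            Option<Watermark> maybeWatermark = Some(new Watermark(WatermarkMode.Intermittent, 15));

            // Where filters inside the Option; Map projects it (mirrors maybeFadePoints above)
            Option<string> schedule = maybeWatermark
                .Where(wm => wm.Mode == WatermarkMode.Intermittent)
                .Map(wm => $"fade every {wm.FrequencyMinutes} minutes");

            // a Map whose projection can itself return None yields Option<Option<T>>;
            // Flatten collapses it (mirrors the subtitleInputFile lambda above)
            Option<int> frequency = maybeWatermark
                .Map(wm => wm.FrequencyMinutes > 0 ? Some(wm.FrequencyMinutes) : Option<int>.None)
                .Flatten();

            Console.WriteLine(schedule.IfNone("no schedule"));
            Console.WriteLine(frequency.IfNone(0));
        }
    }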