Example #1
        private async Task AcquireResources(EncodingJob state, CancellationToken cancellationToken)
        {
            if (state.VideoType == VideoType.Iso && state.IsoType.HasValue && IsoManager.CanMount(state.MediaPath))
            {
                state.IsoMount = await IsoManager.Mount(state.MediaPath, cancellationToken).ConfigureAwait(false);
            }

            if (state.MediaSource.RequiresOpening)
            {
                var liveStreamResponse = await MediaSourceManager.OpenLiveStream(new LiveStreamRequest
                {
                    OpenToken = state.MediaSource.OpenToken
                }, false, cancellationToken).ConfigureAwait(false);

                AttachMediaStreamInfo(state, liveStreamResponse.MediaSource, state.Options);

                if (state.IsVideoRequest)
                {
                    EncodingJobFactory.TryStreamCopy(state, state.Options);
                }
            }

            if (state.MediaSource.BufferMs.HasValue)
            {
                await Task.Delay(state.MediaSource.BufferMs.Value, cancellationToken).ConfigureAwait(false);
            }
        }
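
For orientation, a sketch of how this method might be driven is below; the call site and the "cancellationTokenSource" name are assumptions for illustration, not taken from the source. The point is that resources (ISO mounts, opened live streams) are acquired before the ffmpeg arguments are built, and the optional BufferMs delay gives a freshly opened live stream a moment to buffer before encoding starts.

        // Hypothetical call site (names and ordering are assumptions):
        // await AcquireResources(state, cancellationTokenSource.Token).ConfigureAwait(false);
        // var arguments = GetCommandLineArguments(state);   // see Example #2
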
Example #2
        protected override string GetCommandLineArguments(EncodingJob state)
        {
            // Get the output codec name
            var videoCodec = EncodingJobFactory.GetVideoEncoder(state, GetEncodingOptions());

            var format   = string.Empty;
            var keyFrame = string.Empty;

            if (string.Equals(Path.GetExtension(state.OutputFilePath), ".mp4", StringComparison.OrdinalIgnoreCase) &&
                state.Options.Context == EncodingContext.Streaming)
            {
                // Comparison: https://github.com/jansmolders86/mediacenterjs/blob/master/lib/transcoding/desktop.js
                format = " -f mp4 -movflags frag_keyframe+empty_moov";
            }

            var threads = GetNumberOfThreads(state, string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase));

            var inputModifier = GetInputModifier(state);

            return(string.Format("{0} {1}{2} {3} {4} -map_metadata -1 -threads {5} {6}{7} -y \"{8}\"",
                                 inputModifier,
                                 GetInputArgument(state),
                                 keyFrame,
                                 GetMapArgs(state),
                                 GetVideoArguments(state, videoCodec),
                                 threads,
                                 GetAudioArguments(state),
                                 format,
                                 state.OutputFilePath
                                 ).Trim());
        }
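
To make the format string easier to follow, here is an illustrative rendering of the kind of argument string this method assembles for a streamed MP4. All paths, thread counts and bitrates are invented, and the portions produced by GetMapArgs, GetVideoArguments and GetAudioArguments are abbreviated.

        // Placeholder layout:
        //   {inputModifier} {input}{keyFrame} {map} {video} -map_metadata -1 -threads {n} {audio}{format} -y "{output}"
        //
        // Illustrative result (values invented):
        //   -i "/media/movie.mkv" -map 0:0 -map 0:1
        //   -codec:v:0 libx264 -preset superfast -crf 23
        //   -map_metadata -1 -threads 2
        //   -codec:a:0 aac -ac 2 -ab 128000
        //   -f mp4 -movflags frag_keyframe+empty_moov -y "/transcodes/output.mp4"
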
Example #3
        /// <summary>
        /// Gets the input argument.
        /// </summary>
        /// <param name="state">The state.</param>
        /// <returns>System.String.</returns>
        protected string GetInputArgument(EncodingJob state)
        {
            var arg = string.Format("-i {0}", GetInputPathArgument(state));

            if (state.SubtitleStream != null && state.Options.SubtitleMethod == SubtitleDeliveryMethod.Encode)
            {
                if (state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream)
                {
                    if (state.VideoStream != null && state.VideoStream.Width.HasValue)
                    {
                        // This is hacky but not sure how to get the exact subtitle resolution
                        double height = state.VideoStream.Width.Value;
                        height /= 16;
                        height *= 9;

                        arg += string.Format(" -canvas_size {0}:{1}", state.VideoStream.Width.Value.ToString(CultureInfo.InvariantCulture), Convert.ToInt32(height).ToString(CultureInfo.InvariantCulture));
                    }
                    arg += " -i \"" + state.SubtitleStream.Path + "\"";
                }
            }

            if (state.IsVideoRequest)
            {
                var encodingOptions = GetEncodingOptions();
                var videoEncoder    = EncodingJobFactory.GetVideoEncoder(MediaEncoder, state, encodingOptions);
                if (videoEncoder.IndexOf("vaapi", StringComparison.OrdinalIgnoreCase) != -1)
                {
                    arg = "-hwaccel vaapi -hwaccel_output_format vaapi -vaapi_device " + encodingOptions.VaapiDevice + " " + arg;
                }
            }

            return(arg.Trim());
        }
Example #4
        private async Task AcquireResources(EncodingJob state, CancellationToken cancellationToken)
        {
            if (state.VideoType == VideoType.Iso && state.IsoType.HasValue && IsoManager.CanMount(state.MediaPath))
            {
                state.IsoMount = await IsoManager.Mount(state.MediaPath, cancellationToken).ConfigureAwait(false);
            }

            if (string.IsNullOrEmpty(state.MediaPath))
            {
                var checkCodecs = false;

                if (string.Equals(state.ItemType, typeof(LiveTvChannel).Name))
                {
                    var streamInfo = await LiveTvManager.GetChannelStream(state.Options.ItemId, cancellationToken).ConfigureAwait(false);

                    state.LiveTvStreamId = streamInfo.Id;

                    state.MediaPath     = streamInfo.Path;
                    state.InputProtocol = streamInfo.Protocol;

                    await Task.Delay(1500, cancellationToken).ConfigureAwait(false);

                    AttachMediaStreamInfo(state, streamInfo, state.Options);
                    checkCodecs = true;
                }

                else if (string.Equals(state.ItemType, typeof(LiveTvVideoRecording).Name) ||
                         string.Equals(state.ItemType, typeof(LiveTvAudioRecording).Name))
                {
                    var streamInfo = await LiveTvManager.GetRecordingStream(state.Options.ItemId, cancellationToken).ConfigureAwait(false);

                    state.LiveTvStreamId = streamInfo.Id;

                    state.MediaPath     = streamInfo.Path;
                    state.InputProtocol = streamInfo.Protocol;

                    await Task.Delay(1500, cancellationToken).ConfigureAwait(false);

                    AttachMediaStreamInfo(state, streamInfo, state.Options);
                    checkCodecs = true;
                }

                if (state.IsVideoRequest && checkCodecs)
                {
                    if (state.VideoStream != null && EncodingJobFactory.CanStreamCopyVideo(state.Options, state.VideoStream))
                    {
                        state.OutputVideoCodec = "copy";
                    }

                    if (state.AudioStream != null && EncodingJobFactory.CanStreamCopyAudio(state.Options, state.AudioStream, state.SupportedAudioCodecs))
                    {
                        state.OutputAudioCodec = "copy";
                    }
                }
            }
        }
Example #5
        /// <summary>
        /// Gets the input argument.
        /// </summary>
        /// <param name="state">The state.</param>
        /// <returns>System.String.</returns>
        protected string GetInputArgument(EncodingJob state)
        {
            var arg = string.Format("-i {0}", GetInputPathArgument(state));

            if (state.SubtitleStream != null && state.Options.SubtitleMethod == SubtitleDeliveryMethod.Encode)
            {
                if (state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream)
                {
                    if (state.VideoStream != null && state.VideoStream.Width.HasValue)
                    {
                        // This is hacky but not sure how to get the exact subtitle resolution
                        double height = state.VideoStream.Width.Value;
                        height /= 16;
                        height *= 9;

                        arg += string.Format(" -canvas_size {0}:{1}", state.VideoStream.Width.Value.ToString(CultureInfo.InvariantCulture), Convert.ToInt32(height).ToString(CultureInfo.InvariantCulture));
                    }

                    var subtitlePath = state.SubtitleStream.Path;

                    if (string.Equals(Path.GetExtension(subtitlePath), ".sub", StringComparison.OrdinalIgnoreCase))
                    {
                        var idxFile = Path.ChangeExtension(subtitlePath, ".idx");
                        if (FileSystem.FileExists(idxFile))
                        {
                            subtitlePath = idxFile;
                        }
                    }

                    arg += " -i \"" + subtitlePath + "\"";
                }
            }

            if (state.IsVideoRequest)
            {
                var encodingOptions = GetEncodingOptions();
                var videoEncoder    = EncodingJobFactory.GetVideoEncoder(MediaEncoder, state, encodingOptions);
                if (videoEncoder.IndexOf("vaapi", StringComparison.OrdinalIgnoreCase) != -1)
                {
                    var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream && state.Options.SubtitleMethod == SubtitleDeliveryMethod.Encode;
                    var hwOutputFormat   = "vaapi";

                    if (hasGraphicalSubs)
                    {
                        hwOutputFormat = "yuv420p";
                    }

                    arg = "-hwaccel vaapi -hwaccel_output_format " + hwOutputFormat + " -vaapi_device " + encodingOptions.VaapiDevice + " " + arg;
                }
            }

            return(arg.Trim());
        }
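
As an illustration of the branches above: assuming a 1920-pixel-wide video, an external VobSub subtitle being burned in, and a VAAPI encoder, the method would produce something along these lines. The device path and file names are made up; the 16:9 assumption in the canvas-size hack yields 1920 / 16 * 9 = 1080, and the graphical subtitle forces the hwaccel output format to yuv420p.

        // Illustrative output only (assumed: 1920px-wide video, external .sub/.idx subtitle, VAAPI encoder):
        //   -hwaccel vaapi -hwaccel_output_format yuv420p -vaapi_device /dev/dri/renderD128
        //   -i "/media/movie.mkv" -canvas_size 1920:1080 -i "/media/movie.idx"
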
Example #6
        private void AttachMediaStreamInfo(EncodingJob state,
                                           ChannelMediaInfo mediaInfo,
                                           EncodingJobOptions videoRequest)
        {
            var mediaSource = mediaInfo.ToMediaSource();

            state.InputProtocol              = mediaSource.Protocol;
            state.MediaPath                  = mediaSource.Path;
            state.RunTimeTicks               = mediaSource.RunTimeTicks;
            state.RemoteHttpHeaders          = mediaSource.RequiredHttpHeaders;
            state.InputBitrate               = mediaSource.Bitrate;
            state.InputFileSize              = mediaSource.Size;
            state.ReadInputAtNativeFramerate = mediaSource.ReadAtNativeFramerate;

            if (state.ReadInputAtNativeFramerate)
            {
                state.OutputAudioSync = "1000";
                state.InputVideoSync  = "-1";
                state.InputAudioSync  = "1";
            }

            EncodingJobFactory.AttachMediaStreamInfo(state, mediaSource.MediaStreams, videoRequest);
        }
Example #7
        /// <summary>
        /// Gets audio arguments to pass to ffmpeg
        /// </summary>
        /// <param name="state">The state.</param>
        /// <returns>System.String.</returns>
        private string GetAudioArguments(EncodingJob state)
        {
            // If the video doesn't have an audio stream, there is nothing to encode; return an empty argument string.
            if (state.AudioStream == null && state.VideoStream != null)
            {
                return(string.Empty);
            }

            // Get the output codec name
            var codec = EncodingJobFactory.GetAudioEncoder(state);

            var args = "-codec:a:0 " + codec;

            if (string.Equals(codec, "copy", StringComparison.OrdinalIgnoreCase))
            {
                return(args);
            }

            // Add the number of audio channels
            var channels = state.OutputAudioChannels;

            if (channels.HasValue)
            {
                args += " -ac " + channels.Value;
            }

            var bitrate = state.OutputAudioBitrate;

            if (bitrate.HasValue)
            {
                args += " -ab " + bitrate.Value.ToString(UsCulture);
            }

            args += " " + GetAudioFilterParam(state, false);

            return(args);
        }
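
For reference, with an AAC target, two output channels and a 128 kbps bitrate, the returned string would look roughly like the first line below; the trailing filter portion comes from GetAudioFilterParam and is omitted here, and a stream-copyable source short-circuits to just the codec flag.

        // Illustrative only:  -codec:a:0 aac -ac 2 -ab 128000 <audio filter args>
        // Stream copy case:   -codec:a:0 copy
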
Example #8
        private void AttachMediaStreamInfo(EncodingJob state,
                                           MediaSourceInfo mediaSource,
                                           EncodingJobOptions videoRequest)
        {
            EncodingJobFactory.AttachMediaStreamInfo(state, mediaSource, videoRequest);
        }
Example #9
        /// <summary>
        /// Gets the video quality arguments (preset, rate control, level, etc.) to specify on the command line
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="videoCodec">The video codec.</param>
        /// <returns>System.String.</returns>
        protected string GetVideoQualityParam(EncodingJob state, string videoCodec)
        {
            var param = string.Empty;

            var isVc1 = state.VideoStream != null &&
                        string.Equals(state.VideoStream.Codec, "vc1", StringComparison.OrdinalIgnoreCase);

            if (string.Equals(videoCodec, "libx264", StringComparison.OrdinalIgnoreCase))
            {
                param = "-preset superfast";

                param += " -crf 23";
            }

            else if (string.Equals(videoCodec, "libx265", StringComparison.OrdinalIgnoreCase))
            {
                param = "-preset fast";

                param += " -crf 28";
            }

            // h264 (h264_qsv)
            else if (string.Equals(videoCodec, "h264_qsv", StringComparison.OrdinalIgnoreCase))
            {
                param = "-preset 7 -look_ahead 0";
            }

            // h264 (libnvenc)
            else if (string.Equals(videoCodec, "libnvenc", StringComparison.OrdinalIgnoreCase))
            {
                param = "-preset high-performance";
            }

            // webm
            else if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
            {
                // Values 0-3, 0 being highest quality but slower
                var profileScore = 0;

                var crf  = "10";
                var qmin = "0";
                var qmax = "50";

                if (isVc1)
                {
                    profileScore++;
                }

                // Max of 2
                profileScore = Math.Min(profileScore, 2);

                // http://www.webmproject.org/docs/encoder-parameters/
                param = string.Format("-speed 16 -quality good -profile:v {0} -slices 8 -crf {1} -qmin {2} -qmax {3}",
                                      profileScore.ToString(UsCulture),
                                      crf,
                                      qmin,
                                      qmax);
            }

            else if (string.Equals(videoCodec, "mpeg4", StringComparison.OrdinalIgnoreCase))
            {
                param = "-mbd rd -flags +mv4+aic -trellis 2 -cmp 2 -subcmp 2 -bf 2";
            }

            // asf/wmv
            else if (string.Equals(videoCodec, "wmv2", StringComparison.OrdinalIgnoreCase))
            {
                param = "-qmin 2";
            }

            else if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
            {
                param = "-mbd 2";
            }

            param += GetVideoBitrateParam(state, videoCodec);

            var framerate = GetFramerateParam(state);

            if (framerate.HasValue)
            {
                param += string.Format(" -r {0}", framerate.Value.ToString(UsCulture));
            }

            if (!string.IsNullOrEmpty(state.OutputVideoSync))
            {
                param += " -vsync " + state.OutputVideoSync;
            }

            if (!string.IsNullOrEmpty(state.Options.Profile))
            {
                param += " -profile:v " + state.Options.Profile;
            }

            var levelString = state.Options.Level.HasValue ? state.Options.Level.Value.ToString(CultureInfo.InvariantCulture) : null;

            if (!string.IsNullOrEmpty(levelString))
            {
                var h264Encoder = EncodingJobFactory.GetH264Encoder(state, GetEncodingOptions());

                // h264_qsv and libnvenc expect levels to be expressed as a decimal. libx264 supports decimal and non-decimal format
                if (String.Equals(h264Encoder, "h264_qsv", StringComparison.OrdinalIgnoreCase) || String.Equals(h264Encoder, "libnvenc", StringComparison.OrdinalIgnoreCase))
                {
                    switch (levelString)
                    {
                    case "30":
                        param += " -level 3";
                        break;

                    case "31":
                        param += " -level 3.1";
                        break;

                    case "32":
                        param += " -level 3.2";
                        break;

                    case "40":
                        param += " -level 4";
                        break;

                    case "41":
                        param += " -level 4.1";
                        break;

                    case "42":
                        param += " -level 4.2";
                        break;

                    case "50":
                        param += " -level 5";
                        break;

                    case "51":
                        param += " -level 5.1";
                        break;

                    case "52":
                        param += " -level 5.2";
                        break;

                    default:
                        param += " -level " + levelString;
                        break;
                    }
                }
                else
                {
                    param += " -level " + levelString;
                }
            }

            return("-pix_fmt yuv420p " + param);
        }
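
Putting the branches together: a libx264 request with a "high" profile and a level string of "41" (i.e. level 4.1) would come out roughly as shown below; the rate-control and framerate pieces contributed by GetVideoBitrateParam and GetFramerateParam are invented here. Only h264_qsv and libnvenc get the decimal level rewrite; libx264 receives the level string exactly as supplied.

        // Illustrative only (libx264, profile "high", level string "41"; bitrate/framerate values invented):
        //   -pix_fmt yuv420p -preset superfast -crf 23 <bitrate args> -r 23.976 -profile:v high -level 41
        //
        // With h264_qsv or libnvenc the same request would instead end with "-level 4.1".
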