Example #1
        protected override string GetCommandLineArguments(EncodingJob job)
        {
            var audioTranscodeParams = new List<string>();

            var bitrate = job.OutputAudioBitrate;

            if (bitrate.HasValue)
            {
                audioTranscodeParams.Add("-ab " + bitrate.Value.ToString(UsCulture));
            }

            if (job.OutputAudioChannels.HasValue)
            {
                audioTranscodeParams.Add("-ac " + job.OutputAudioChannels.Value.ToString(UsCulture));
            }

            if (job.OutputAudioSampleRate.HasValue)
            {
                audioTranscodeParams.Add("-ar " + job.OutputAudioSampleRate.Value.ToString(UsCulture));
            }

            var threads = GetNumberOfThreads(job, false);

            var inputModifier = GetInputModifier(job);

            return string.Format("{0} {1} -threads {2}{3} {4} -id3v2_version 3 -write_id3v1 1 -y \"{5}\"",
                inputModifier,
                GetInputArgument(job),
                threads,
                " -vn",
                string.Join(" ", audioTranscodeParams.ToArray()),
                job.OutputFilePath).Trim();
        }
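
As a point of reference, here is a standalone sketch (not taken from the project) that plugs invented values into the same format string to show the shape of the ffmpeg arguments this method produces:

    // Standalone sketch: invented values substituted into the format string above.
    using System;

    class AudioArgsSketch
    {
        static void Main()
        {
            var args = string.Format("{0} {1} -threads {2}{3} {4} -id3v2_version 3 -write_id3v1 1 -y \"{5}\"",
                "-fflags +genpts",              // hypothetical input modifier
                "-i \"song.flac\"",             // hypothetical input argument
                2,                              // thread count
                " -vn",                         // audio-only: drop any video stream
                "-ab 192000 -ac 2 -ar 44100",   // accumulated audio transcode params
                "song.mp3").Trim();

            Console.WriteLine(args);
            // -fflags +genpts -i "song.flac" -threads 2 -vn -ab 192000 -ac 2 -ar 44100 -id3v2_version 3 -write_id3v1 1 -y "song.mp3"
        }
    }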
Example #2
        protected override string GetOutputFileExtension(EncodingJob state)
        {
            var ext = base.GetOutputFileExtension(state);

            if (!string.IsNullOrEmpty(ext))
            {
                return ext;
            }

            var audioCodec = state.Options.AudioCodec;

            if (string.Equals("aac", audioCodec, StringComparison.OrdinalIgnoreCase))
            {
                return ".aac";
            }
            if (string.Equals("mp3", audioCodec, StringComparison.OrdinalIgnoreCase))
            {
                return ".mp3";
            }
            if (string.Equals("vorbis", audioCodec, StringComparison.OrdinalIgnoreCase))
            {
                return ".ogg";
            }
            if (string.Equals("wma", audioCodec, StringComparison.OrdinalIgnoreCase))
            {
                return ".wma";
            }

            return null;
        }
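
The chain of string.Equals checks could equally be expressed as a case-insensitive lookup table; a minimal sketch (not the project's code) that behaves the same way:

    // Sketch: an equivalent case-insensitive codec-to-extension lookup.
    using System;
    using System.Collections.Generic;

    static class AudioExtensionMap
    {
        private static readonly Dictionary<string, string> Map =
            new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
            {
                { "aac", ".aac" },
                { "mp3", ".mp3" },
                { "vorbis", ".ogg" },
                { "wma", ".wma" }
            };

        public static string GetExtension(string audioCodec)
        {
            string ext;
            return audioCodec != null && Map.TryGetValue(audioCodec, out ext) ? ext : null;
        }
    }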
Example #3
        protected override string GetCommandLineArguments(EncodingJob state)
        {
            // Get the output codec name
            var videoCodec = EncodingJobFactory.GetVideoEncoder(state, GetEncodingOptions());

            var format = string.Empty;
            var keyFrame = string.Empty;

            if (string.Equals(Path.GetExtension(state.OutputFilePath), ".mp4", StringComparison.OrdinalIgnoreCase) &&
                state.Options.Context == EncodingContext.Streaming)
            {
                // Comparison: https://github.com/jansmolders86/mediacenterjs/blob/master/lib/transcoding/desktop.js
                format = " -f mp4 -movflags frag_keyframe+empty_moov";
            }

            var threads = GetNumberOfThreads(state, string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase));

            var inputModifier = GetInputModifier(state);

            return string.Format("{0} {1}{2} {3} {4} -map_metadata -1 -threads {5} {6}{7} -y \"{8}\"",
                inputModifier,
                GetInputArgument(state),
                keyFrame,
                GetMapArgs(state),
                GetVideoArguments(state, videoCodec),
                threads,
                GetAudioArguments(state),
                format,
                state.OutputFilePath
                ).Trim();
        }
Example #4
        protected override Task<string> GetCommandLineArguments(EncodingJob state)
        {
            var audioTranscodeParams = new List<string>();

            var bitrate = state.OutputAudioBitrate;

            if (bitrate.HasValue)
            {
                audioTranscodeParams.Add("-ab " + bitrate.Value.ToString(UsCulture));
            }

            if (state.OutputAudioChannels.HasValue)
            {
                audioTranscodeParams.Add("-ac " + state.OutputAudioChannels.Value.ToString(UsCulture));
            }

            // opus will fail on 44100
            if (!string.Equals(state.OutputAudioCodec, "opus", StringComparison.OrdinalIgnoreCase))
            {
                if (state.OutputAudioSampleRate.HasValue)
                {
                    audioTranscodeParams.Add("-ar " + state.OutputAudioSampleRate.Value.ToString(UsCulture));
                }
            }

            var threads = GetNumberOfThreads(state, false);

            var inputModifier = GetInputModifier(state);

            var albumCoverInput = string.Empty;
            var mapArgs = string.Empty;
            var metadata = string.Empty;
            var vn = string.Empty;

            if (!string.IsNullOrWhiteSpace(state.AlbumCoverPath))
            {
                albumCoverInput = " -i \"" + state.AlbumCoverPath + "\"";
                mapArgs = " -map 0:a -map 1:v -c:v copy";
                metadata = " -metadata:s:v title=\"Album cover\" -metadata:s:v comment=\"Cover(Front)\"";
            }
            else
            {
                vn = " -vn";
            }

            var result = string.Format("{0} {1}{6}{7} -threads {2}{3} {4} -id3v2_version 3 -write_id3v1 1{8} -y \"{5}\"",
                inputModifier,
                GetInputArgument(state),
                threads,
                vn,
                string.Join(" ", audioTranscodeParams.ToArray()),
                state.OutputFilePath,
                albumCoverInput,
                mapArgs,
                metadata).Trim();

            return Task.FromResult(result);
        }
Example #5
        /// <summary>
        /// Gets video arguments to pass to ffmpeg
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="videoCodec">The video codec.</param>
        /// <returns>System.String.</returns>
        private string GetVideoArguments(EncodingJob state, string videoCodec)
        {
            var args = "-codec:v:0 " + videoCodec;

            if (state.EnableMpegtsM2TsMode)
            {
                args += " -mpegts_m2ts_mode 1";
            }

            var isOutputMkv = string.Equals(state.Options.OutputContainer, "mkv", StringComparison.OrdinalIgnoreCase);

            if (state.RunTimeTicks.HasValue)
            {
                //args += " -copyts -avoid_negative_ts disabled -start_at_zero";
            }

            if (string.Equals(videoCodec, "copy", StringComparison.OrdinalIgnoreCase))
            {
                if (state.VideoStream != null && IsH264(state.VideoStream) &&
                    (string.Equals(state.Options.OutputContainer, "ts", StringComparison.OrdinalIgnoreCase) || isOutputMkv))
                {
                    args += " -bsf:v h264_mp4toannexb";
                }

                return args;
            }

            var keyFrameArg = string.Format(" -force_key_frames expr:gte(t,n_forced*{0})",
                5.ToString(UsCulture));

            args += keyFrameArg;

            var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream;

            // Add resolution params, if specified
            if (!hasGraphicalSubs)
            {
                args += GetOutputSizeParam(state, videoCodec);
            }

            var qualityParam = GetVideoQualityParam(state, videoCodec);

            if (!string.IsNullOrEmpty(qualityParam))
            {
                args += " " + qualityParam.Trim();
            }

            // This is for internal graphical subs
            if (hasGraphicalSubs)
            {
                args += GetGraphicalSubtitleParam(state, videoCodec);
            }

            return args;
        }
Example #6
        /// <summary>
        /// Gets video arguments to pass to ffmpeg
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="codec">The video codec.</param>
        /// <returns>System.String.</returns>
        private string GetVideoArguments(EncodingJob state, string codec)
        {
            var args = "-codec:v:0 " + codec;

            if (state.EnableMpegtsM2TsMode)
            {
                args += " -mpegts_m2ts_mode 1";
            }

            // See if we can save some CPU cycles by avoiding encoding
            if (string.Equals(codec, "copy", StringComparison.OrdinalIgnoreCase))
            {
                return state.VideoStream != null && IsH264(state.VideoStream) && string.Equals(state.Options.OutputContainer, "ts", StringComparison.OrdinalIgnoreCase) ?
                    args + " -bsf:v h264_mp4toannexb" :
                    args;
            }

            if (state.Options.Context == EncodingContext.Streaming)
            {
                var keyFrameArg = string.Format(" -force_key_frames expr:gte(t,n_forced*{0})",
                    5.ToString(UsCulture));

                args += keyFrameArg;
            }

            var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream;

            // Add resolution params, if specified
            if (!hasGraphicalSubs)
            {
                args += GetOutputSizeParam(state, codec);
            }

            var qualityParam = GetVideoQualityParam(state, codec, false);

            if (!string.IsNullOrEmpty(qualityParam))
            {
                args += " " + qualityParam.Trim();
            }

            // This is for internal graphical subs
            if (hasGraphicalSubs)
            {
                args += GetGraphicalSubtitleParam(state, codec);
            }

            return args;
        }
Example #7
        public async void StartStreamingLog(EncodingJob transcodingJob, Stream source, Stream target)
        {
            try
            {
                using (var reader = new StreamReader(source))
                {
                    while (!reader.EndOfStream)
                    {
                        var line = await reader.ReadLineAsync().ConfigureAwait(false);

                        ParseLogLine(line, transcodingJob);

                        var bytes = Encoding.UTF8.GetBytes(Environment.NewLine + line);

                        await target.WriteAsync(bytes, 0, bytes.Length).ConfigureAwait(false);
                    }
                }
            }
            catch (Exception ex)
            {
                _logger.ErrorException("Error reading ffmpeg log", ex);
            }
        }
Example #8
 private void AttachMediaStreamInfo(EncodingJob state,
   MediaSourceInfo mediaSource,
   EncodingJobOptions videoRequest)
 {
     EncodingJobFactory.AttachMediaStreamInfo(state, mediaSource, videoRequest);
 }
Example #9
        /// <summary>
        /// Gets the text subtitle param.
        /// </summary>
        /// <param name="state">The state.</param>
        /// <returns>System.String.</returns>
        protected string GetTextSubtitleParam(EncodingJob state)
        {
            var seconds = Math.Round(TimeSpan.FromTicks(state.Options.StartTimeTicks ?? 0).TotalSeconds);

            if (state.SubtitleStream.IsExternal)
            {
                var subtitlePath = state.SubtitleStream.Path;

                var charsetParam = string.Empty;

                if (!string.IsNullOrEmpty(state.SubtitleStream.Language))
                {
                    var charenc = SubtitleEncoder.GetSubtitleFileCharacterSet(subtitlePath, state.MediaSource.Protocol, CancellationToken.None).Result;

                    if (!string.IsNullOrEmpty(charenc))
                    {
                        charsetParam = ":charenc=" + charenc;
                    }
                }

                // TODO: Perhaps also use original_size=1920x800 ??
                return string.Format("subtitles=filename='{0}'{1},setpts=PTS -{2}/TB",
                    MediaEncoder.EscapeSubtitleFilterPath(subtitlePath),
                    charsetParam,
                    seconds.ToString(UsCulture));
            }

            return string.Format("subtitles='{0}:si={1}',setpts=PTS -{2}/TB",
                MediaEncoder.EscapeSubtitleFilterPath(state.MediaPath),
                state.InternalSubtitleStreamOffset.ToString(UsCulture),
                seconds.ToString(UsCulture));
        }
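
To make the external-subtitle branch concrete, here is a standalone sketch with an invented path, character set, and start offset (in the real method the path goes through MediaEncoder.EscapeSubtitleFilterPath first):

    // Sketch: the shape of the filter built for an external subtitle file.
    using System;

    class SubtitleFilterSketch
    {
        static void Main()
        {
            var escapedPath = "/subs/movie.srt";     // assume nothing needs escaping
            var charsetParam = ":charenc=UTF-8";     // detected character set, if any
            var seconds = 90;                        // rounded start-time offset

            Console.WriteLine("subtitles=filename='{0}'{1},setpts=PTS-{2}/TB",
                escapedPath, charsetParam, seconds);
            // subtitles=filename='/subs/movie.srt':charenc=UTF-8,setpts=PTS-90/TB
        }
    }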
Example #10
        private string GetInputPathArgument(EncodingJob job)
        {
            var protocol = job.InputProtocol;

            var inputPath = new[] { job.MediaPath };

            if (job.IsInputVideo)
            {
                if (!(job.VideoType == VideoType.Iso && job.IsoMount == null))
                {
                    inputPath = MediaEncoderHelpers.GetInputArgument(FileSystem, job.MediaPath, job.InputProtocol, job.IsoMount, job.PlayableStreamFileNames);
                }
            }

            return MediaEncoder.GetInputArgument(inputPath, protocol);
        }
Example #11
 private void AttachMediaStreamInfo(EncodingJob state,
   MediaSourceInfo mediaSource,
   EncodingJobOptions videoRequest)
 {
     EncodingJobFactory.AttachMediaStreamInfo(state, mediaSource, videoRequest);
 }
Example #12
        /// <summary>
        /// Gets the hardware video decoder argument to use for the input, if any
        /// </summary>
        /// <param name="state">The state.</param>
        /// <returns>System.String.</returns>
        protected string GetVideoDecoder(EncodingJob state)
        {
            if (string.Equals(GetEncodingOptions().HardwareVideoDecoder, "qsv", StringComparison.OrdinalIgnoreCase))
            {
                if (state.VideoStream != null && !string.IsNullOrWhiteSpace(state.VideoStream.Codec))
                {
                    switch (state.VideoStream.Codec.ToLower())
                    {
                        case "avc":
                        case "h264":
                            if (MediaEncoder.SupportsDecoder("h264_qsv"))
                            {
                                return "-c:v h264_qsv ";
                            }
                            break;
                        case "mpeg2video":
                            if (MediaEncoder.SupportsDecoder("mpeg2_qsv"))
                            {
                                return "-c:v mpeg2_qsv ";
                            }
                            break;
                        case "vc1":
                            if (MediaEncoder.SupportsDecoder("vc1_qsv"))
                            {
                                return "-c:v vc1_qsv ";
                            }
                            break;
                    }
                }
            }

            // leave blank so ffmpeg will decide
            return null;
        }
Example #13
        /// <summary>
        /// Gets the probe size argument.
        /// </summary>
        /// <param name="job">The job.</param>
        /// <returns>System.String.</returns>
        private string GetProbeSizeArgument(EncodingJob job)
        {
            if (job.PlayableStreamFileNames.Count > 0)
            {
                return MediaEncoder.GetProbeSizeArgument(job.PlayableStreamFileNames.ToArray(), job.InputProtocol);
            }

            return MediaEncoder.GetProbeSizeArgument(new[] { job.MediaPath }, job.InputProtocol);
        }
Example #14
 private void OnTranscodeBeginning(EncodingJob job)
 {
     job.ReportTranscodingProgress(null, null, null, null, null);
 }
Example #15
 /// <summary>
 /// Gets the number of threads.
 /// </summary>
 /// <returns>System.Int32.</returns>
 protected int GetNumberOfThreads(EncodingJob job, bool isWebm)
 {
     return job.Options.CpuCoreLimit ?? 0;
 }
Example #16
        /// <summary>
        /// Processes the exited.
        /// </summary>
        /// <param name="process">The process.</param>
        /// <param name="job">The job.</param>
        private void OnFfMpegProcessExited(IProcess process, EncodingJob job)
        {
            job.HasExited = true;

            Logger.LogDebug("Disposing stream resources");
            job.Dispose();

            var isSuccessful = false;

            try
            {
                var exitCode = process.ExitCode;
                Logger.LogInformation("FFMpeg exited with code {0}", exitCode);

                isSuccessful = exitCode == 0;
            }
            catch (Exception ex)
            {
                Logger.LogError(ex, "FFMpeg exited with an error.");
            }

            if (isSuccessful && !job.IsCancelled)
            {
                job.TaskCompletionSource.TrySetResult(true);
            }
            else if (job.IsCancelled)
            {
                try
                {
                    DeleteFiles(job);
                }
                catch
                {
                }
                try
                {
                    job.TaskCompletionSource.TrySetException(new OperationCanceledException());
                }
                catch
                {
                }
            }
            else
            {
                try
                {
                    DeleteFiles(job);
                }
                catch
                {
                }
                try
                {
                    job.TaskCompletionSource.TrySetException(new Exception("Encoding failed"));
                }
                catch
                {
                }
            }

            // This causes on exited to be called twice:
            //try
            //{
            //    // Dispose the process
            //    process.Dispose();
            //}
            //catch (Exception ex)
            //{
            //    Logger.LogError("Error disposing ffmpeg.", ex);
            //}
        }
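
The TaskCompletionSource that this handler resolves is what the encoding caller ultimately awaits. A stripped-down sketch of that pattern in isolation (invented names, not the project's code):

    // Sketch: the exit handler resolves a TaskCompletionSource the caller is awaiting.
    using System;
    using System.Threading.Tasks;

    class CompletionSketch
    {
        static async Task Main()
        {
            var completion = new TaskCompletionSource<bool>();

            // Stand-in for OnFfMpegProcessExited deciding success or failure.
            void OnExited(int exitCode)
            {
                if (exitCode == 0)
                    completion.TrySetResult(true);
                else
                    completion.TrySetException(new Exception("Encoding failed"));
            }

            OnExited(0);
            await completion.Task;   // caller side: resumes once the handler resolves the job
            Console.WriteLine("encode finished");
        }
    }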
Example #17
 protected virtual void DeleteFiles(EncodingJob job)
 {
     FileSystem.DeleteFile(job.OutputFilePath);
 }
Example #18
        internal static void AttachMediaSourceInfo(EncodingJob state,
                                                   MediaSourceInfo mediaSource,
                                                   EncodingJobOptions videoRequest)
        {
            state.MediaPath         = mediaSource.Path;
            state.InputProtocol     = mediaSource.Protocol;
            state.InputContainer    = mediaSource.Container;
            state.InputFileSize     = mediaSource.Size;
            state.InputBitrate      = mediaSource.Bitrate;
            state.RunTimeTicks      = mediaSource.RunTimeTicks;
            state.RemoteHttpHeaders = mediaSource.RequiredHttpHeaders;

            if (mediaSource.VideoType.HasValue)
            {
                state.VideoType = mediaSource.VideoType.Value;
            }

            state.IsoType = mediaSource.IsoType;

            state.PlayableStreamFileNames = mediaSource.PlayableStreamFileNames.ToList();

            if (mediaSource.Timestamp.HasValue)
            {
                state.InputTimestamp = mediaSource.Timestamp.Value;
            }

            state.ReadInputAtNativeFramerate = mediaSource.ReadAtNativeFramerate;

            if (state.ReadInputAtNativeFramerate ||
                mediaSource.Protocol == MediaProtocol.File && string.Equals(mediaSource.Container, "wtv", StringComparison.OrdinalIgnoreCase))
            {
                state.OutputAudioSync = "1000";
                state.InputVideoSync  = "-1";
                state.InputAudioSync  = "1";
            }

            if (string.Equals(mediaSource.Container, "wma", StringComparison.OrdinalIgnoreCase))
            {
                // Seeing some stuttering when transcoding wma to audio-only HLS
                state.InputAudioSync = "1";
            }

            var mediaStreams = mediaSource.MediaStreams;

            if (videoRequest != null)
            {
                if (string.IsNullOrEmpty(videoRequest.VideoCodec))
                {
                    videoRequest.VideoCodec = InferVideoCodec(videoRequest.OutputContainer);
                }

                state.VideoStream    = GetMediaStream(mediaStreams, videoRequest.VideoStreamIndex, MediaStreamType.Video);
                state.SubtitleStream = GetMediaStream(mediaStreams, videoRequest.SubtitleStreamIndex, MediaStreamType.Subtitle, false);
                state.AudioStream    = GetMediaStream(mediaStreams, videoRequest.AudioStreamIndex, MediaStreamType.Audio);

                if (state.SubtitleStream != null && !state.SubtitleStream.IsExternal)
                {
                    state.InternalSubtitleStreamOffset = mediaStreams.Where(i => i.Type == MediaStreamType.Subtitle && !i.IsExternal).ToList().IndexOf(state.SubtitleStream);
                }

                if (state.VideoStream != null && state.VideoStream.IsInterlaced)
                {
                    state.DeInterlace = true;
                }

                EnforceResolutionLimit(state, videoRequest);
            }
            else
            {
                state.AudioStream = GetMediaStream(mediaStreams, null, MediaStreamType.Audio, true);
            }

            state.MediaSource = mediaSource;
        }
Example #19
        /// <summary>
        /// If we're going to put a fixed size on the command line, this will calculate it
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="outputVideoCodec">The output video codec.</param>
        /// <param name="allowTimeStampCopy">if set to <c>true</c> [allow time stamp copy].</param>
        /// <returns>System.String.</returns>
        protected string GetOutputSizeParam(EncodingJob state,
            string outputVideoCodec,
            bool allowTimeStampCopy = true)
        {
            // http://sonnati.wordpress.com/2012/10/19/ffmpeg-the-swiss-army-knife-of-internet-streaming-part-vi/

            var request = state.Options;

            var filters = new List<string>();

            if (state.DeInterlace)
            {
                filters.Add("yadif=0:-1:0");
            }

            // If fixed dimensions were supplied
            if (request.Width.HasValue && request.Height.HasValue)
            {
                var widthParam = request.Width.Value.ToString(UsCulture);
                var heightParam = request.Height.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=trunc({0}/2)*2:trunc({1}/2)*2", widthParam, heightParam));
            }

            // If max dimensions were supplied, scale down to fit within them while preserving the display aspect ratio, rounding to even dimensions
            else if (request.MaxWidth.HasValue && request.MaxHeight.HasValue)
            {
                var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);
                var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=trunc(min(iw\\,{0})/2)*2:trunc(min((iw/dar)\\,{1})/2)*2", maxWidthParam, maxHeightParam));
            }

            // If a fixed width was requested
            else if (request.Width.HasValue)
            {
                var widthParam = request.Width.Value.ToString(UsCulture);

                filters.Add(string.Format("scale={0}:trunc(ow/a/2)*2", widthParam));
            }

            // If a fixed height was requested
            else if (request.Height.HasValue)
            {
                var heightParam = request.Height.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=trunc(oh*a*2)/2:{0}", heightParam));
            }

            // If a max width was requested
            else if (request.MaxWidth.HasValue && (!request.MaxHeight.HasValue || state.VideoStream == null))
            {
                var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=min(iw\\,{0}):trunc(ow/dar/2)*2", maxWidthParam));
            }

            // If a max height was requested
            else if (request.MaxHeight.HasValue && (!request.MaxWidth.HasValue || state.VideoStream == null))
            {
                var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=trunc(oh*a*2)/2:min(ih\\,{0})", maxHeightParam));
            }

            else if (request.MaxWidth.HasValue ||
                request.MaxHeight.HasValue ||
                request.Width.HasValue ||
                request.Height.HasValue)
            {
                if (state.VideoStream != null)
                {
                    // Need to perform calculations manually

                    // Try to account for bad media info
                    var currentHeight = state.VideoStream.Height ?? request.MaxHeight ?? request.Height ?? 0;
                    var currentWidth = state.VideoStream.Width ?? request.MaxWidth ?? request.Width ?? 0;

                    var outputSize = DrawingUtils.Resize(currentWidth, currentHeight, request.Width, request.Height, request.MaxWidth, request.MaxHeight);

                    var manualWidthParam = outputSize.Width.ToString(UsCulture);
                    var manualHeightParam = outputSize.Height.ToString(UsCulture);

                    filters.Add(string.Format("scale=trunc({0}/2)*2:trunc({1}/2)*2", manualWidthParam, manualHeightParam));
                }
            }

            var output = string.Empty;

            if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream)
            {
                var subParam = GetTextSubtitleParam(state);

                filters.Add(subParam);

                if (allowTimeStampCopy)
                {
                    output += " -copyts";
                }
            }

            if (filters.Count > 0)
            {
                output += string.Format(" -vf \"{0}\"", string.Join(",", filters.ToArray()));
            }

            return output;
        }
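
A worked illustration of the max-dimension branch: with deinterlacing enabled and MaxWidth/MaxHeight of 1280x720, the filter chain comes out as below (standalone sketch with invented values):

    // Sketch: composing the -vf argument for the max-dimension case above.
    using System;
    using System.Collections.Generic;

    class OutputSizeSketch
    {
        static void Main()
        {
            var filters = new List<string> { "yadif=0:-1:0" };   // deinterlace requested

            filters.Add(string.Format(
                "scale=trunc(min(iw\\,{0})/2)*2:trunc(min((iw/dar)\\,{1})/2)*2", 1280, 720));

            Console.WriteLine(" -vf \"" + string.Join(",", filters) + "\"");
            //  -vf "yadif=0:-1:0,scale=trunc(min(iw\,1280)/2)*2:trunc(min((iw/dar)\,720)/2)*2"
        }
    }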
Example #20
        /// <summary>
        /// Gets the video bitrate to specify on the command line
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="videoCodec">The video codec.</param>
        /// <param name="isHls">if set to <c>true</c> [is HLS].</param>
        /// <returns>System.String.</returns>
        protected string GetVideoQualityParam(EncodingJob state, string videoCodec, bool isHls)
        {
            var param = string.Empty;

            var isVc1 = state.VideoStream != null &&
                string.Equals(state.VideoStream.Codec, "vc1", StringComparison.OrdinalIgnoreCase);

            var qualitySetting = GetQualitySetting();

            if (string.Equals(videoCodec, "libx264", StringComparison.OrdinalIgnoreCase))
            {
                param = "-preset superfast";

                switch (qualitySetting)
                {
                    case EncodingQuality.HighSpeed:
                        param += " -crf 28";
                        break;
                    case EncodingQuality.HighQuality:
                        param += " -crf 25";
                        break;
                    case EncodingQuality.MaxQuality:
                        param += " -crf 21";
                        break;
                }
            }

            else if (string.Equals(videoCodec, "libx265", StringComparison.OrdinalIgnoreCase))
            {
                param = "-preset fast";

                switch (qualitySetting)
                {
                    case EncodingQuality.HighSpeed:
                        param += " -crf 28";
                        break;
                    case EncodingQuality.HighQuality:
                        param += " -crf 25";
                        break;
                    case EncodingQuality.MaxQuality:
                        param += " -crf 21";
                        break;
                }
            }

            // webm
            else if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
            {
                // Values 0-3, 0 being highest quality but slower
                var profileScore = 0;

                string crf;
                var qmin = "0";
                var qmax = "50";

                switch (qualitySetting)
                {
                    case EncodingQuality.HighSpeed:
                        crf = "10";
                        break;
                    case EncodingQuality.HighQuality:
                        crf = "6";
                        break;
                    case EncodingQuality.MaxQuality:
                        crf = "4";
                        break;
                    default:
                        throw new ArgumentException("Unrecognized quality setting");
                }

                if (isVc1)
                {
                    profileScore++;
                }

                // Max of 2
                profileScore = Math.Min(profileScore, 2);

                // http://www.webmproject.org/docs/encoder-parameters/
                param = string.Format("-speed 16 -quality good -profile:v {0} -slices 8 -crf {1} -qmin {2} -qmax {3}",
                    profileScore.ToString(UsCulture),
                    crf,
                    qmin,
                    qmax);
            }

            else if (string.Equals(videoCodec, "mpeg4", StringComparison.OrdinalIgnoreCase))
            {
                param = "-mbd rd -flags +mv4+aic -trellis 2 -cmp 2 -subcmp 2 -bf 2";
            }

            // asf/wmv
            else if (string.Equals(videoCodec, "wmv2", StringComparison.OrdinalIgnoreCase))
            {
                param = "-qmin 2";
            }

            else if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
            {
                param = "-mbd 2";
            }

            param += GetVideoBitrateParam(state, videoCodec, isHls);

            var framerate = GetFramerateParam(state);
            if (framerate.HasValue)
            {
                param += string.Format(" -r {0}", framerate.Value.ToString(UsCulture));
            }

            if (!string.IsNullOrEmpty(state.OutputVideoSync))
            {
                param += " -vsync " + state.OutputVideoSync;
            }

            if (!string.IsNullOrEmpty(state.Options.Profile))
            {
                param += " -profile:v " + state.Options.Profile;
            }

            if (state.Options.Level.HasValue)
            {
                param += " -level " + state.Options.Level.Value.ToString(UsCulture);
            }

            return "-pix_fmt yuv420p " + param;
        }
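
For orientation, a standalone sketch of how the libx264 branch above composes with the later additions; the bitrate piece is an invented stand-in for GetVideoBitrateParam, which is not shown here:

    // Sketch: invented values showing how the libx264 HighSpeed branch accumulates.
    using System;

    class QualityParamSketch
    {
        static void Main()
        {
            var param = "-preset superfast -crf 28";        // EncodingQuality.HighSpeed
            param += " -b:v 3000000";                       // pretend GetVideoBitrateParam output
            param += " -r 30";                              // frame-rate cap applied
            param += " -profile:v high -level 41";          // requested profile and level

            Console.WriteLine("-pix_fmt yuv420p " + param);
        }
    }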
Example #21
        internal static void TryStreamCopy(EncodingJob state,
            EncodingJobOptions videoRequest)
        {
            if (state.IsVideoRequest)
            {
                if (state.VideoStream != null && CanStreamCopyVideo(videoRequest, state.VideoStream))
                {
                    state.OutputVideoCodec = "copy";
                }

                if (state.AudioStream != null && CanStreamCopyAudio(videoRequest, state.AudioStream, state.SupportedAudioCodecs))
                {
                    state.OutputAudioCodec = "copy";
                }
            }
        }
Example #22
 protected abstract string GetCommandLineArguments(EncodingJob job);
Example #23
        public async Task<EncodingJob> CreateJob(EncodingJobOptions options, bool isVideoRequest, IProgress<double> progress, CancellationToken cancellationToken)
        {
            var request = options;

            if (string.IsNullOrEmpty(request.AudioCodec))
            {
                request.AudioCodec = InferAudioCodec(request.OutputContainer);
            }

            var state = new EncodingJob(_logger, _mediaSourceManager)
            {
                Options        = options,
                IsVideoRequest = isVideoRequest,
                Progress       = progress
            };

            if (!string.IsNullOrWhiteSpace(request.AudioCodec))
            {
                state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
                request.AudioCodec         = state.SupportedAudioCodecs.FirstOrDefault();
            }

            var item = _libraryManager.GetItemById(request.ItemId);

            state.ItemType = item.GetType().Name;

            state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);

            var mediaSources = await _mediaSourceManager.GetPlayackMediaSources(request.ItemId, null, false, new[] { MediaType.Audio, MediaType.Video }, cancellationToken).ConfigureAwait(false);

            var mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
               ? mediaSources.First()
               : mediaSources.First(i => string.Equals(i.Id, request.MediaSourceId));

            AttachMediaStreamInfo(state, mediaSource, options);

            state.OutputAudioBitrate    = GetAudioBitrateParam(request, state.AudioStream);
            state.OutputAudioSampleRate = request.AudioSampleRate;

            state.OutputAudioCodec = GetAudioCodec(request);

            state.OutputAudioChannels = GetNumAudioChannelsParam(request, state.AudioStream, state.OutputAudioCodec);

            if (isVideoRequest)
            {
                state.OutputVideoCodec   = GetVideoCodec(request);
                state.OutputVideoBitrate = GetVideoBitrateParamValue(request, state.VideoStream);

                if (state.OutputVideoBitrate.HasValue)
                {
                    var resolution = ResolutionNormalizer.Normalize(state.OutputVideoBitrate.Value,
                                                                    state.OutputVideoCodec,
                                                                    request.MaxWidth,
                                                                    request.MaxHeight);

                    request.MaxWidth  = resolution.MaxWidth;
                    request.MaxHeight = resolution.MaxHeight;
                }
            }

            ApplyDeviceProfileSettings(state);

            TryStreamCopy(state, request);

            return state;
        }
Example #24
        public async Task<EncodingJob> CreateJob(EncodingJobOptions options, EncodingHelper encodingHelper, bool isVideoRequest, IProgress<double> progress, CancellationToken cancellationToken)
        {
            var request = options;

            if (string.IsNullOrEmpty(request.AudioCodec))
            {
                request.AudioCodec = InferAudioCodec(request.OutputContainer);
            }

            var state = new EncodingJob(_logger, _mediaSourceManager)
            {
                Options        = options,
                IsVideoRequest = isVideoRequest,
                Progress       = progress
            };

            if (!string.IsNullOrWhiteSpace(request.VideoCodec))
            {
                state.SupportedVideoCodecs = request.VideoCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
                request.VideoCodec         = state.SupportedVideoCodecs.FirstOrDefault();
            }

            if (!string.IsNullOrWhiteSpace(request.AudioCodec))
            {
                state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
                request.AudioCodec         = state.SupportedAudioCodecs.FirstOrDefault();
            }

            if (!string.IsNullOrWhiteSpace(request.SubtitleCodec))
            {
                state.SupportedSubtitleCodecs = request.SubtitleCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
                request.SubtitleCodec         = state.SupportedSubtitleCodecs.FirstOrDefault(i => _mediaEncoder.CanEncodeToSubtitleCodec(i))
                                                ?? state.SupportedSubtitleCodecs.FirstOrDefault();
            }

            var item = _libraryManager.GetItemById(request.ItemId);

            state.ItemType = item.GetType().Name;

            state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);

            var primaryImage = item.GetImageInfo(ImageType.Primary, 0) ??
                               item.Parents.Select(i => i.GetImageInfo(ImageType.Primary, 0)).FirstOrDefault(i => i != null);

            if (primaryImage != null)
            {
                state.AlbumCoverPath = primaryImage.Path;
            }

            var mediaSources = await _mediaSourceManager.GetPlayackMediaSources(request.ItemId, null, false, new[] { MediaType.Audio, MediaType.Video }, cancellationToken).ConfigureAwait(false);

            var mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
               ? mediaSources.First()
               : mediaSources.First(i => string.Equals(i.Id, request.MediaSourceId));

            var videoRequest = state.Options;

            encodingHelper.AttachMediaSourceInfo(state, mediaSource, null);

            //var container = Path.GetExtension(state.RequestedUrl);

            //if (string.IsNullOrEmpty(container))
            //{
            //    container = request.Static ?
            //        state.InputContainer :
            //        (Path.GetExtension(GetOutputFilePath(state)) ?? string.Empty).TrimStart('.');
            //}

            //state.OutputContainer = (container ?? string.Empty).TrimStart('.');

            state.OutputAudioBitrate    = encodingHelper.GetAudioBitrateParam(state.Options, state.AudioStream);
            state.OutputAudioSampleRate = request.AudioSampleRate;

            state.OutputAudioCodec = state.Options.AudioCodec;

            state.OutputAudioChannels = encodingHelper.GetNumAudioChannelsParam(state.Options, state.AudioStream, state.OutputAudioCodec);

            if (videoRequest != null)
            {
                state.OutputVideoCodec   = state.Options.VideoCodec;
                state.OutputVideoBitrate = encodingHelper.GetVideoBitrateParamValue(state.Options, state.VideoStream, state.OutputVideoCodec);

                if (state.OutputVideoBitrate.HasValue)
                {
                    var resolution = ResolutionNormalizer.Normalize(
                        state.VideoStream == null ? (int?)null : state.VideoStream.BitRate,
                        state.OutputVideoBitrate.Value,
                        state.VideoStream == null ? null : state.VideoStream.Codec,
                        state.OutputVideoCodec,
                        videoRequest.MaxWidth,
                        videoRequest.MaxHeight);

                    videoRequest.MaxWidth  = resolution.MaxWidth;
                    videoRequest.MaxHeight = resolution.MaxHeight;
                }
            }

            ApplyDeviceProfileSettings(state);

            if (videoRequest != null)
            {
                encodingHelper.TryStreamCopy(state);
            }

            //state.OutputFilePath = GetOutputFilePath(state);

            return state;
        }
Example #25
        protected string GetInputModifier(EncodingJob job, bool genPts = true)
        {
            var inputModifier = string.Empty;

            var probeSize = GetProbeSizeArgument(job);
            inputModifier += " " + probeSize;
            inputModifier = inputModifier.Trim();

            var userAgentParam = GetUserAgentParam(job);

            if (!string.IsNullOrWhiteSpace(userAgentParam))
            {
                inputModifier += " " + userAgentParam;
            }

            inputModifier = inputModifier.Trim();

            inputModifier += " " + GetFastSeekCommandLineParameter(job.Options);
            inputModifier = inputModifier.Trim();

            if (job.IsVideoRequest && genPts)
            {
                inputModifier += " -fflags +genpts";
            }

            if (!string.IsNullOrEmpty(job.InputAudioSync))
            {
                inputModifier += " -async " + job.InputAudioSync;
            }

            if (!string.IsNullOrEmpty(job.InputVideoSync))
            {
                inputModifier += " -vsync " + job.InputVideoSync;
            }

            if (job.ReadInputAtNativeFramerate)
            {
                inputModifier += " -re";
            }

            var videoDecoder = GetVideoDecoder(job);
            if (!string.IsNullOrWhiteSpace(videoDecoder))
            {
                inputModifier += " " + videoDecoder;
            }

            return inputModifier;
        }
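
Putting the input-modifier pieces together for a hypothetical local video job, with each helper's output stubbed by an invented value:

    // Sketch: how the input modifier accumulates (all values invented).
    using System;

    class InputModifierSketch
    {
        static void Main()
        {
            var inputModifier = "-probesize 1000000";   // pretend GetProbeSizeArgument output
            inputModifier += " -ss 00:00:30";           // pretend fast-seek parameter
            inputModifier += " -fflags +genpts";        // video request with genPts = true
            inputModifier += " -c:v h264_qsv";          // pretend GetVideoDecoder chose QSV

            Console.WriteLine(inputModifier.Trim());
            // -probesize 1000000 -ss 00:00:30 -fflags +genpts -c:v h264_qsv
        }
    }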
Example #26
        /// <summary>
        /// Gets the video bitrate to specify on the command line
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="videoEncoder">The video codec.</param>
        /// <returns>System.String.</returns>
        protected string GetVideoQualityParam(EncodingJob state, string videoEncoder)
        {
            var param = string.Empty;

            var isVc1 = state.VideoStream != null &&
                        string.Equals(state.VideoStream.Codec, "vc1", StringComparison.OrdinalIgnoreCase);

            if (string.Equals(videoEncoder, "libx264", StringComparison.OrdinalIgnoreCase))
            {
                param = "-preset superfast";

                param += " -crf 23";
            }

            else if (string.Equals(videoEncoder, "libx265", StringComparison.OrdinalIgnoreCase))
            {
                param = "-preset fast";

                param += " -crf 28";
            }

            // h264 (h264_qsv)
            else if (string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase))
            {
                param = "-preset 7 -look_ahead 0";
            }

            // h264 (h264_nvenc)
            else if (string.Equals(videoEncoder, "h264_nvenc", StringComparison.OrdinalIgnoreCase))
            {
                param = "-preset llhq";
            }

            // webm
            else if (string.Equals(videoEncoder, "libvpx", StringComparison.OrdinalIgnoreCase))
            {
                // Values 0-3, 0 being highest quality but slower
                var profileScore = 0;

                string crf;
                var    qmin = "0";
                var    qmax = "50";

                crf = "10";

                if (isVc1)
                {
                    profileScore++;
                }

                // Max of 2
                profileScore = Math.Min(profileScore, 2);

                // http://www.webmproject.org/docs/encoder-parameters/
                param = string.Format("-speed 16 -quality good -profile:v {0} -slices 8 -crf {1} -qmin {2} -qmax {3}",
                                      profileScore.ToString(UsCulture),
                                      crf,
                                      qmin,
                                      qmax);
            }

            else if (string.Equals(videoEncoder, "mpeg4", StringComparison.OrdinalIgnoreCase))
            {
                param = "-mbd rd -flags +mv4+aic -trellis 2 -cmp 2 -subcmp 2 -bf 2";
            }

            // asf/wmv
            else if (string.Equals(videoEncoder, "wmv2", StringComparison.OrdinalIgnoreCase))
            {
                param = "-qmin 2";
            }

            else if (string.Equals(videoEncoder, "msmpeg4", StringComparison.OrdinalIgnoreCase))
            {
                param = "-mbd 2";
            }

            param += GetVideoBitrateParam(state, videoEncoder);

            var framerate = GetFramerateParam(state);

            if (framerate.HasValue)
            {
                param += string.Format(" -r {0}", framerate.Value.ToString(UsCulture));
            }

            if (!string.IsNullOrEmpty(state.OutputVideoSync))
            {
                param += " -vsync " + state.OutputVideoSync;
            }

            if (!string.IsNullOrEmpty(state.Options.Profile))
            {
                if (!string.Equals(videoEncoder, "h264_omx", StringComparison.OrdinalIgnoreCase) &&
                    !string.Equals(videoEncoder, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
                {
                    // not supported by h264_omx or h264_vaapi
                    param += " -profile:v " + state.Options.Profile;
                }
            }

            var levelString = state.Options.Level.HasValue ? state.Options.Level.Value.ToString(CultureInfo.InvariantCulture) : null;

            if (!string.IsNullOrEmpty(levelString))
            {
                levelString = NormalizeTranscodingLevel(state.OutputVideoCodec, levelString);

                // h264_qsv and h264_nvenc expect levels to be expressed as a decimal. libx264 supports decimal and non-decimal format
                // also needed for libx264 due to https://trac.ffmpeg.org/ticket/3307
                if (string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase) ||
                    string.Equals(videoEncoder, "h264_nvenc", StringComparison.OrdinalIgnoreCase) ||
                    string.Equals(videoEncoder, "libx264", StringComparison.OrdinalIgnoreCase))
                {
                    switch (levelString)
                    {
                    case "30":
                        param += " -level 3.0";
                        break;

                    case "31":
                        param += " -level 3.1";
                        break;

                    case "32":
                        param += " -level 3.2";
                        break;

                    case "40":
                        param += " -level 4.0";
                        break;

                    case "41":
                        param += " -level 4.1";
                        break;

                    case "42":
                        param += " -level 4.2";
                        break;

                    case "50":
                        param += " -level 5.0";
                        break;

                    case "51":
                        param += " -level 5.1";
                        break;

                    case "52":
                        param += " -level 5.2";
                        break;

                    default:
                        param += " -level " + levelString;
                        break;
                    }
                }
                else if (!string.Equals(videoEncoder, "h264_omx", StringComparison.OrdinalIgnoreCase))
                {
                    param += " -level " + levelString;
                }
            }

            if (!string.Equals(videoEncoder, "h264_omx", StringComparison.OrdinalIgnoreCase) &&
                !string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase) &&
                !string.Equals(videoEncoder, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
            {
                param = "-pix_fmt yuv420p " + param;
            }

            return param;
        }
Example #27
        private string GetUserAgentParam(EncodingJob job)
        {
            string useragent = null;

            job.RemoteHttpHeaders.TryGetValue("User-Agent", out useragent);

            if (!string.IsNullOrWhiteSpace(useragent))
            {
                return "-user-agent \"" + useragent + "\"";
            }

            return string.Empty;
        }
Example #28
 protected abstract Task<string> GetCommandLineArguments(EncodingJob job);
Example #29
        /// <summary>
        /// Gets the input argument.
        /// </summary>
        /// <param name="job">The job.</param>
        /// <returns>System.String.</returns>
        protected string GetInputArgument(EncodingJob job)
        {
            var arg = "-i " + GetInputPathArgument(job);

            if (job.SubtitleStream != null)
            {
                if (job.SubtitleStream.IsExternal && !job.SubtitleStream.IsTextSubtitleStream)
                {
                    arg += " -i \"" + job.SubtitleStream.Path + "\"";
                }
            }

            return arg;
        }
Example #30
        /// <summary>
        /// Gets the video bitrate to specify on the command line
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="videoCodec">The video codec.</param>
        /// <param name="isHls">if set to <c>true</c> [is HLS].</param>
        /// <returns>System.String.</returns>
        protected string GetVideoQualityParam(EncodingJob state, string videoCodec, bool isHls)
        {
            var param = string.Empty;

            var isVc1 = state.VideoStream != null &&
                string.Equals(state.VideoStream.Codec, "vc1", StringComparison.OrdinalIgnoreCase);

            if (string.Equals(videoCodec, "libx264", StringComparison.OrdinalIgnoreCase))
            {
                param = "-preset superfast";

                param += " -crf 28";
            }

            else if (string.Equals(videoCodec, "libx265", StringComparison.OrdinalIgnoreCase))
            {
                param = "-preset fast";

                param += " -crf 28";
            }

            // webm
            else if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
            {
                // Values 0-3, 0 being highest quality but slower
                var profileScore = 0;

                string crf;
                var qmin = "0";
                var qmax = "50";

                crf = "10";

                if (isVc1)
                {
                    profileScore++;
                }

                // Max of 2
                profileScore = Math.Min(profileScore, 2);

                // http://www.webmproject.org/docs/encoder-parameters/
                param = string.Format("-speed 16 -quality good -profile:v {0} -slices 8 -crf {1} -qmin {2} -qmax {3}",
                    profileScore.ToString(UsCulture),
                    crf,
                    qmin,
                    qmax);
            }

            else if (string.Equals(videoCodec, "mpeg4", StringComparison.OrdinalIgnoreCase))
            {
                param = "-mbd rd -flags +mv4+aic -trellis 2 -cmp 2 -subcmp 2 -bf 2";
            }

            // asf/wmv
            else if (string.Equals(videoCodec, "wmv2", StringComparison.OrdinalIgnoreCase))
            {
                param = "-qmin 2";
            }

            else if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
            {
                param = "-mbd 2";
            }

            param += GetVideoBitrateParam(state, videoCodec, isHls);

            var framerate = GetFramerateParam(state);
            if (framerate.HasValue)
            {
                param += string.Format(" -r {0}", framerate.Value.ToString(UsCulture));
            }

            if (!string.IsNullOrEmpty(state.OutputVideoSync))
            {
                param += " -vsync " + state.OutputVideoSync;
            }

            if (!string.IsNullOrEmpty(state.Options.Profile))
            {
                param += " -profile:v " + state.Options.Profile;
            }

            if (state.Options.Level.HasValue)
            {
                param += " -level " + state.Options.Level.Value.ToString(UsCulture);
            }

            return "-pix_fmt yuv420p " + param;
        }
Example #31
        private async Task AcquireResources(EncodingJob state, CancellationToken cancellationToken)
        {
            if (state.VideoType == VideoType.Iso && state.IsoType.HasValue && IsoManager.CanMount(state.MediaPath))
            {
                state.IsoMount = await IsoManager.Mount(state.MediaPath, cancellationToken).ConfigureAwait(false);
            }

            if (state.MediaSource.RequiresOpening && string.IsNullOrWhiteSpace(state.LiveStreamId))
            {
                var liveStreamResponse = await MediaSourceManager.OpenLiveStream(new LiveStreamRequest
                {
                    OpenToken = state.MediaSource.OpenToken

                }, false, cancellationToken).ConfigureAwait(false);

                AttachMediaStreamInfo(state, liveStreamResponse.MediaSource, state.Options);

                if (state.IsVideoRequest)
                {
                    EncodingJobFactory.TryStreamCopy(state, state.Options);
                }
            }

            if (state.MediaSource.BufferMs.HasValue)
            {
                await Task.Delay(state.MediaSource.BufferMs.Value, cancellationToken).ConfigureAwait(false);
            }
        }
Example #32
        /// <summary>
        /// Gets the map args.
        /// </summary>
        /// <param name="state">The state.</param>
        /// <returns>System.String.</returns>
        protected virtual string GetMapArgs(EncodingJob state)
        {
            // If we don't have known media info
            // If input is video, use -sn to drop subtitles
            // Otherwise just return empty
            if (state.VideoStream == null && state.AudioStream == null)
            {
                return state.IsInputVideo ? "-sn" : string.Empty;
            }

            // We have media info, but we don't know the stream indexes
            if (state.VideoStream != null && state.VideoStream.Index == -1)
            {
                return "-sn";
            }

            // We have media info, but we don't know the stream indexes
            if (state.AudioStream != null && state.AudioStream.Index == -1)
            {
                return state.IsInputVideo ? "-sn" : string.Empty;
            }

            var args = string.Empty;

            if (state.VideoStream != null)
            {
                args += string.Format("-map 0:{0}", state.VideoStream.Index);
            }
            else
            {
                args += "-map -0:v";
            }

            if (state.AudioStream != null)
            {
                args += string.Format(" -map 0:{0}", state.AudioStream.Index);
            }

            else
            {
                args += " -map -0:a";
            }

            if (state.SubtitleStream == null)
            {
                args += " -map -0:s";
            }
            else if (state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream)
            {
                args += " -map 1:0 -sn";
            }

            return args;
        }
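
Worked through with invented stream indexes: a file with video at index 0, audio at index 1, and no subtitle stream produces "-map 0:0 -map 0:1 -map -0:s". A standalone sketch of the composition:

    // Sketch: composing map arguments for an invented stream layout.
    using System;

    class MapArgsSketch
    {
        static void Main()
        {
            int videoIndex = 0, audioIndex = 1;
            bool hasSubtitleStream = false;

            var args = string.Format("-map 0:{0}", videoIndex)
                     + string.Format(" -map 0:{0}", audioIndex)
                     + (hasSubtitleStream ? string.Empty : " -map -0:s");

            Console.WriteLine(args);   // -map 0:0 -map 0:1 -map -0:s
        }
    }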
Example #33
        /// <summary>
        /// Gets the internal graphical subtitle param.
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="outputVideoCodec">The output video codec.</param>
        /// <returns>System.String.</returns>
        protected string GetGraphicalSubtitleParam(EncodingJob state, string outputVideoCodec)
        {
            var outputSizeParam = string.Empty;

            var request = state.Options;

            // Add resolution params, if specified
            if (request.Width.HasValue || request.Height.HasValue || request.MaxHeight.HasValue || request.MaxWidth.HasValue)
            {
                outputSizeParam = GetOutputSizeParam(state, outputVideoCodec).TrimEnd('"');
                outputSizeParam = "," + outputSizeParam.Substring(outputSizeParam.IndexOf("scale", StringComparison.OrdinalIgnoreCase));
            }

            var videoSizeParam = string.Empty;

            if (state.VideoStream != null && state.VideoStream.Width.HasValue && state.VideoStream.Height.HasValue)
            {
                videoSizeParam = string.Format(",scale={0}:{1}", state.VideoStream.Width.Value.ToString(UsCulture), state.VideoStream.Height.Value.ToString(UsCulture));
            }

            var mapPrefix = state.SubtitleStream.IsExternal ?
                1 :
                0;

            var subtitleStreamIndex = state.SubtitleStream.IsExternal
                ? 0
                : state.SubtitleStream.Index;

            return string.Format(" -filter_complex \"[{0}:{1}]format=yuva444p{4},lut=u=128:v=128:y=gammaval(.3)[sub] ; [0:{2}] [sub] overlay{3}\"",
                mapPrefix.ToString(UsCulture),
                subtitleStreamIndex.ToString(UsCulture),
                state.VideoStream.Index.ToString(UsCulture),
                outputSizeParam,
                videoSizeParam);
        }
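A hedged example of the resulting filter graph: assuming an external image-based subtitle (mapped as a second input), a 1920x1080 source whose video stream is at index 0, and no resize request, the format string above expands to roughly:

    -filter_complex "[1:0]format=yuva444p,scale=1920:1080,lut=u=128:v=128:y=gammaval(.3)[sub] ; [0:0] [sub] overlay"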
Example no. 34
        /// <summary>
        /// Gets the number of threads.
        /// </summary>
        /// <returns>System.Int32.</returns>
        protected int GetNumberOfThreads(EncodingJob job, bool isWebm)
        {
            return job.Options.CpuCoreLimit ?? 0;
        }
Example no. 35
        protected double? GetFramerateParam(EncodingJob state)
        {
            if (state.Options.Framerate.HasValue)
            {
                return state.Options.Framerate.Value;
            }

            var maxrate = state.Options.MaxFramerate;

            if (maxrate.HasValue && state.VideoStream != null)
            {
                var contentRate = state.VideoStream.AverageFrameRate ?? state.VideoStream.RealFrameRate;

                if (contentRate.HasValue && contentRate.Value > maxrate.Value)
                {
                    return maxrate;
                }
            }

            return null;
        }
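A quick worked example: with MaxFramerate=30 requested and a source stream averaging 59.94 fps, the method returns 30; with a 23.976 fps source it returns null, since the maximum only applies when the content exceeds it.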
Example no. 36
        /// <summary>
        /// Enforces the resolution limit.
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="videoRequest">The video request.</param>
        private static void EnforceResolutionLimit(EncodingJob state, EncodingJobOptions videoRequest)
        {
            // Switch the incoming params to be ceilings rather than fixed values
            videoRequest.MaxWidth = videoRequest.MaxWidth ?? videoRequest.Width;
            videoRequest.MaxHeight = videoRequest.MaxHeight ?? videoRequest.Height;

            videoRequest.Width = null;
            videoRequest.Height = null;
        }
Example no. 37
        /// <summary>
        /// If we're going to put a fixed size on the command line, this will calculate it
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="outputVideoCodec">The output video codec.</param>
        /// <param name="allowTimeStampCopy">if set to <c>true</c> [allow time stamp copy].</param>
        /// <returns>System.String.</returns>
        protected string GetOutputSizeParam(EncodingJob state,
            string outputVideoCodec,
            bool allowTimeStampCopy = true)
        {
            // http://sonnati.wordpress.com/2012/10/19/ffmpeg-the-swiss-army-knife-of-internet-streaming-part-vi/

            var request = state.Options;

            var filters = new List<string>();

            if (state.DeInterlace)
            {
                filters.Add("yadif=0:-1:0");
            }

            // If fixed dimensions were supplied
            if (request.Width.HasValue && request.Height.HasValue)
            {
                var widthParam = request.Width.Value.ToString(UsCulture);
                var heightParam = request.Height.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=trunc({0}/2)*2:trunc({1}/2)*2", widthParam, heightParam));
            }

            // If max dimensions were supplied, pick the largest even output size that fits within both the input dimensions and the requested maximums, preserving the display aspect ratio
            else if (request.MaxWidth.HasValue && request.MaxHeight.HasValue)
            {
                var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);
                var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=trunc(min(max(iw\\,ih*dar)\\,min({0}\\,{1}*dar))/2)*2:trunc(min(max(iw/dar\\,ih)\\,min({0}/dar\\,{1}))/2)*2", maxWidthParam, maxHeightParam));
            }

            // If a fixed width was requested
            else if (request.Width.HasValue)
            {
                var widthParam = request.Width.Value.ToString(UsCulture);

                filters.Add(string.Format("scale={0}:trunc(ow/a/2)*2", widthParam));
            }

            // If a fixed height was requested
            else if (request.Height.HasValue)
            {
                var heightParam = request.Height.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=trunc(oh*a/2)*2:{0}", heightParam));
            }

            // If a max width was requested
            else if (request.MaxWidth.HasValue)
            {
                var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=min(iw\\,{0}):trunc(ow/dar/2)*2", maxWidthParam));
            }

            // If a max height was requested
            else if (request.MaxHeight.HasValue)
            {
                var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=trunc(oh*a/2)*2:min(ih\\,{0})", maxHeightParam));
            }

            var output = string.Empty;

            if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream)
            {
                var subParam = GetTextSubtitleParam(state);

                filters.Add(subParam);

                if (allowTimeStampCopy)
                {
                    output += " -copyts";
                }
            }

            if (filters.Count > 0)
            {
                output += string.Format(" -vf \"{0}\"", string.Join(",", filters.ToArray()));
            }

            return output;
        }
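To make the scale expressions concrete, a hedged example: for a request carrying only MaxWidth=1280 and MaxHeight=720, with no deinterlacing and no burned-in text subtitles, the returned value is roughly:

    -vf "scale=trunc(min(max(iw\,ih*dar)\,min(1280\,720*dar))/2)*2:trunc(min(max(iw/dar\,ih)\,min(1280/dar\,720))/2)*2"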
Example no. 38
        protected string GetVideoBitrateParam(EncodingJob state, string videoCodec, bool isHls)
        {
            var bitrate = state.OutputVideoBitrate;

            if (bitrate.HasValue)
            {
                var hasFixedResolution = state.Options.HasFixedResolution;

                if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
                {
                    if (hasFixedResolution)
                    {
                        return string.Format(" -minrate:v ({0}*.90) -maxrate:v ({0}*1.10) -bufsize:v {0} -b:v {0}", bitrate.Value.ToString(UsCulture));
                    }

                    // With vpx when crf is used, b:v becomes a max rate
                    // https://trac.ffmpeg.org/wiki/vpxEncodingGuide. With higher-bitrate source files -b:v causes judder, so limit the bitrate but don't let it "saturate"; constrain it upward only, not downward.
                    return string.Format(" -maxrate:v {0} -bufsize:v ({0}*2) -b:v {0}", bitrate.Value.ToString(UsCulture));
                }

                if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
                {
                    return string.Format(" -b:v {0}", bitrate.Value.ToString(UsCulture));
                }

                // H264
                if (hasFixedResolution)
                {
                    if (isHls)
                    {
                        return string.Format(" -b:v {0} -maxrate ({0}*.80) -bufsize {0}", bitrate.Value.ToString(UsCulture));
                    }

                    return string.Format(" -b:v {0}", bitrate.Value.ToString(UsCulture));
                }

                return string.Format(" -maxrate {0} -bufsize {1}",
                    bitrate.Value.ToString(UsCulture),
                    (bitrate.Value * 2).ToString(UsCulture));
            }

            return string.Empty;
        }
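As an illustration of the branches above (the bitrate value is hypothetical), a 3,000,000 bps target with no fixed resolution yields:

    libvpx:  -maxrate:v 3000000 -bufsize:v (3000000*2) -b:v 3000000
    h264:    -maxrate 3000000 -bufsize 6000000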
Example no. 39
        protected string GetAudioFilterParam(EncodingJob state, bool isHls)
        {
            var volParam = string.Empty;
            var audioSampleRate = string.Empty;

            var channels = state.OutputAudioChannels;

            // Boost the volume by the configured down-mix amount when going from 6+ channels down to stereo
            if (channels.HasValue && channels.Value <= 2)
            {
                if (state.AudioStream != null && state.AudioStream.Channels.HasValue && state.AudioStream.Channels.Value > 5)
                {
                    volParam = ",volume=" + GetEncodingOptions().DownMixAudioBoost.ToString(UsCulture);
                }
            }

            if (state.OutputAudioSampleRate.HasValue)
            {
                audioSampleRate = state.OutputAudioSampleRate.Value + ":";
            }

            var adelay = isHls ? "adelay=1," : string.Empty;

            var pts = string.Empty;

            if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream)
            {
                var seconds = TimeSpan.FromTicks(state.Options.StartTimeTicks ?? 0).TotalSeconds;

                pts = string.Format(",asetpts=PTS-{0}/TB", Math.Round(seconds).ToString(UsCulture));
            }

            return string.Format("-af \"{0}aresample={1}async={4}{2}{3}\"",

                adelay,
                audioSampleRate,
                volParam,
                pts,
                state.OutputAudioSync);
        }
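A hedged example of the resulting audio filter (the down-mix boost and OutputAudioSync values come from configuration and are assumed here): downmixing a 6-channel source to stereo at 48 kHz outside of HLS, with a boost of 2 and an audio sync of 1, produces:

    -af "aresample=48000:async=1,volume=2"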
Example no. 40
        public async Task<EncodingJob> CreateJob(EncodingJobOptions options, bool isVideoRequest, IProgress<double> progress, CancellationToken cancellationToken)
        {
            var request = options;

            if (string.IsNullOrEmpty(request.AudioCodec))
            {
                request.AudioCodec = InferAudioCodec(request.OutputContainer);
            }

            var state = new EncodingJob(_logger, _liveTvManager)
            {
                Options        = options,
                IsVideoRequest = isVideoRequest,
                Progress       = progress
            };

            if (!string.IsNullOrWhiteSpace(request.AudioCodec))
            {
                state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
                request.AudioCodec         = state.SupportedAudioCodecs.FirstOrDefault();
            }

            var item = _libraryManager.GetItemById(request.ItemId);

            List<MediaStream> mediaStreams = null;

            state.ItemType = item.GetType().Name;

            if (item is ILiveTvRecording)
            {
                var recording = await _liveTvManager.GetInternalRecording(request.ItemId, cancellationToken).ConfigureAwait(false);

                state.VideoType    = VideoType.VideoFile;
                state.IsInputVideo = string.Equals(recording.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);

                var path     = recording.RecordingInfo.Path;
                var mediaUrl = recording.RecordingInfo.Url;

                var source = string.IsNullOrEmpty(request.MediaSourceId)
                    ? recording.GetMediaSources(false).First()
                    : recording.GetMediaSources(false).First(i => string.Equals(i.Id, request.MediaSourceId));

                mediaStreams = source.MediaStreams;

                // Just to prevent this from being null and causing other methods to fail
                state.MediaPath = string.Empty;

                if (!string.IsNullOrEmpty(path))
                {
                    state.MediaPath     = path;
                    state.InputProtocol = MediaProtocol.File;
                }
                else if (!string.IsNullOrEmpty(mediaUrl))
                {
                    state.MediaPath     = mediaUrl;
                    state.InputProtocol = MediaProtocol.Http;
                }

                state.RunTimeTicks               = recording.RunTimeTicks;
                state.DeInterlace                = true;
                state.OutputAudioSync            = "1000";
                state.InputVideoSync             = "-1";
                state.InputAudioSync             = "1";
                state.InputContainer             = recording.Container;
                state.ReadInputAtNativeFramerate = source.ReadAtNativeFramerate;
            }
            else if (item is LiveTvChannel)
            {
                var channel = _liveTvManager.GetInternalChannel(request.ItemId);

                state.VideoType    = VideoType.VideoFile;
                state.IsInputVideo = string.Equals(channel.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
                mediaStreams       = new List<MediaStream>();

                state.DeInterlace = true;

                // Just to prevent this from being null and causing other methods to fail
                state.MediaPath = string.Empty;
            }
            else if (item is IChannelMediaItem)
            {
                var mediaSource = await GetChannelMediaInfo(request.ItemId, request.MediaSourceId, cancellationToken).ConfigureAwait(false);

                state.IsInputVideo               = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
                state.InputProtocol              = mediaSource.Protocol;
                state.MediaPath                  = mediaSource.Path;
                state.RunTimeTicks               = item.RunTimeTicks;
                state.RemoteHttpHeaders          = mediaSource.RequiredHttpHeaders;
                state.InputBitrate               = mediaSource.Bitrate;
                state.InputFileSize              = mediaSource.Size;
                state.ReadInputAtNativeFramerate = mediaSource.ReadAtNativeFramerate;
                mediaStreams = mediaSource.MediaStreams;
            }
            else
            {
                var hasMediaSources = (IHasMediaSources)item;
                var mediaSource     = string.IsNullOrEmpty(request.MediaSourceId)
                    ? hasMediaSources.GetMediaSources(false).First()
                    : hasMediaSources.GetMediaSources(false).First(i => string.Equals(i.Id, request.MediaSourceId));

                mediaStreams = mediaSource.MediaStreams;

                state.MediaPath                  = mediaSource.Path;
                state.InputProtocol              = mediaSource.Protocol;
                state.InputContainer             = mediaSource.Container;
                state.InputFileSize              = mediaSource.Size;
                state.InputBitrate               = mediaSource.Bitrate;
                state.ReadInputAtNativeFramerate = mediaSource.ReadAtNativeFramerate;

                var video = item as Video;

                if (video != null)
                {
                    state.IsInputVideo = true;

                    if (mediaSource.VideoType.HasValue)
                    {
                        state.VideoType = mediaSource.VideoType.Value;
                    }

                    state.IsoType = mediaSource.IsoType;

                    state.PlayableStreamFileNames = mediaSource.PlayableStreamFileNames.ToList();

                    if (mediaSource.Timestamp.HasValue)
                    {
                        state.InputTimestamp = mediaSource.Timestamp.Value;
                    }
                }

                state.RunTimeTicks = mediaSource.RunTimeTicks;
            }

            AttachMediaStreamInfo(state, mediaStreams, request);

            state.OutputAudioBitrate    = GetAudioBitrateParam(request, state.AudioStream);
            state.OutputAudioSampleRate = request.AudioSampleRate;

            state.OutputAudioCodec = GetAudioCodec(request);

            state.OutputAudioChannels = GetNumAudioChannelsParam(request, state.AudioStream, state.OutputAudioCodec);

            if (isVideoRequest)
            {
                state.OutputVideoCodec   = GetVideoCodec(request);
                state.OutputVideoBitrate = GetVideoBitrateParamValue(request, state.VideoStream);

                if (state.OutputVideoBitrate.HasValue)
                {
                    var resolution = ResolutionNormalizer.Normalize(state.OutputVideoBitrate.Value,
                                                                    state.OutputVideoCodec,
                                                                    request.MaxWidth,
                                                                    request.MaxHeight);

                    request.MaxWidth  = resolution.MaxWidth;
                    request.MaxHeight = resolution.MaxHeight;
                }
            }

            ApplyDeviceProfileSettings(state);

            if (isVideoRequest)
            {
                if (state.VideoStream != null && CanStreamCopyVideo(request, state.VideoStream))
                {
                    state.OutputVideoCodec = "copy";
                }

                if (state.AudioStream != null && CanStreamCopyAudio(request, state.AudioStream, state.SupportedAudioCodecs))
                {
                    state.OutputAudioCodec = "copy";
                }
            }

            return state;
        }
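A rough, hypothetical call site for the factory method above; the variable names are invented and the option properties are assumed to be settable through an object initializer purely for illustration:

    // Hypothetical usage sketch (itemId and factory are assumed to exist in the caller)
    var progress = new Progress<double>(p => Console.WriteLine("{0:0.0}%", p));

    var options = new EncodingJobOptions
    {
        ItemId = itemId,
        AudioCodec = "aac",
        MaxWidth = 1280,
        MaxHeight = 720
    };

    // Passing true for isVideoRequest exercises the video codec/bitrate/resolution negotiation above
    var job = await factory.CreateJob(options, true, progress, CancellationToken.None).ConfigureAwait(false);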
Example no. 41
        internal static void AttachMediaStreamInfo(EncodingJob state,
            MediaSourceInfo mediaSource,
            EncodingJobOptions videoRequest)
        {
            state.MediaPath = mediaSource.Path;
            state.InputProtocol = mediaSource.Protocol;
            state.InputContainer = mediaSource.Container;
            state.InputFileSize = mediaSource.Size;
            state.InputBitrate = mediaSource.Bitrate;
            state.RunTimeTicks = mediaSource.RunTimeTicks;
            state.RemoteHttpHeaders = mediaSource.RequiredHttpHeaders;

            if (mediaSource.ReadAtNativeFramerate)
            {
                state.ReadInputAtNativeFramerate = true;
            }

            if (mediaSource.VideoType.HasValue)
            {
                state.VideoType = mediaSource.VideoType.Value;
            }

            state.IsoType = mediaSource.IsoType;

            state.PlayableStreamFileNames = mediaSource.PlayableStreamFileNames.ToList();

            if (mediaSource.Timestamp.HasValue)
            {
                state.InputTimestamp = mediaSource.Timestamp.Value;
            }

            if (state.ReadInputAtNativeFramerate ||
                (mediaSource.Protocol == MediaProtocol.File && string.Equals(mediaSource.Container, "wtv", StringComparison.OrdinalIgnoreCase)))
            {
                state.OutputAudioSync = "1000";
                state.InputVideoSync = "-1";
                state.InputAudioSync = "1";
            }

            var mediaStreams = mediaSource.MediaStreams;

            if (videoRequest != null)
            {
                if (string.IsNullOrEmpty(videoRequest.VideoCodec))
                {
                    videoRequest.VideoCodec = InferVideoCodec(videoRequest.OutputContainer);
                }

                state.VideoStream = GetMediaStream(mediaStreams, videoRequest.VideoStreamIndex, MediaStreamType.Video);
                state.SubtitleStream = GetMediaStream(mediaStreams, videoRequest.SubtitleStreamIndex, MediaStreamType.Subtitle, false);
                state.AudioStream = GetMediaStream(mediaStreams, videoRequest.AudioStreamIndex, MediaStreamType.Audio);

                if (state.SubtitleStream != null && !state.SubtitleStream.IsExternal)
                {
                    state.InternalSubtitleStreamOffset = mediaStreams.Where(i => i.Type == MediaStreamType.Subtitle && !i.IsExternal).ToList().IndexOf(state.SubtitleStream);
                }

                if (state.VideoStream != null && state.VideoStream.IsInterlaced)
                {
                    state.DeInterlace = true;
                }

                EnforceResolutionLimit(state, videoRequest);
            }
            else
            {
                state.AudioStream = GetMediaStream(mediaStreams, null, MediaStreamType.Audio, true);
            }

            state.MediaSource = mediaSource;
        }
Example no. 42
        /// <summary>
        /// If we're going to put a fixed size on the command line, this will calculate it
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="outputVideoCodec">The output video codec.</param>
        /// <param name="allowTimeStampCopy">if set to <c>true</c> [allow time stamp copy].</param>
        /// <returns>System.String.</returns>
        protected async Task<string> GetOutputSizeParam(EncodingJob state,
                                                         string outputVideoCodec,
                                                         bool allowTimeStampCopy = true)
        {
            // http://sonnati.wordpress.com/2012/10/19/ffmpeg-the-swiss-army-knife-of-internet-streaming-part-vi/

            var request = state.Options;

            var filters = new List<string>();

            if (string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
            {
                filters.Add("format=nv12|vaapi");
                filters.Add("hwupload");
            }
            else if (state.DeInterlace && !string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
            {
                filters.Add("yadif=0:-1:0");
            }

            if (string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
            {
                // Work around vaapi's reduced scaling features
                var scaler = "scale_vaapi";

                // Given the input dimensions (inputWidth, inputHeight), determine the output dimensions
                // (outputWidth, outputHeight). The user may request precise output dimensions or maximum
                // output dimensions. Output dimensions are guaranteed to be even.
                decimal inputWidth    = Convert.ToDecimal(state.VideoStream.Width);
                decimal inputHeight   = Convert.ToDecimal(state.VideoStream.Height);
                decimal outputWidth   = request.Width.HasValue ? Convert.ToDecimal(request.Width.Value) : inputWidth;
                decimal outputHeight  = request.Height.HasValue ? Convert.ToDecimal(request.Height.Value) : inputHeight;
                decimal maximumWidth  = request.MaxWidth.HasValue ? Convert.ToDecimal(request.MaxWidth.Value) : outputWidth;
                decimal maximumHeight = request.MaxHeight.HasValue ? Convert.ToDecimal(request.MaxHeight.Value) : outputHeight;

                if (outputWidth > maximumWidth || outputHeight > maximumHeight)
                {
                    var scale = Math.Min(maximumWidth / outputWidth, maximumHeight / outputHeight);
                    outputWidth  = Math.Min(maximumWidth, Math.Truncate(outputWidth * scale));
                    outputHeight = Math.Min(maximumHeight, Math.Truncate(outputHeight * scale));
                }

                outputWidth  = 2 * Math.Truncate(outputWidth / 2);
                outputHeight = 2 * Math.Truncate(outputHeight / 2);

                if (outputWidth != inputWidth || outputHeight != inputHeight)
                {
                    filters.Add(string.Format("{0}=w={1}:h={2}", scaler, outputWidth.ToString(UsCulture), outputHeight.ToString(UsCulture)));
                }
            }
            else
            {
                // If fixed dimensions were supplied
                if (request.Width.HasValue && request.Height.HasValue)
                {
                    var widthParam  = request.Width.Value.ToString(UsCulture);
                    var heightParam = request.Height.Value.ToString(UsCulture);

                    filters.Add(string.Format("scale=trunc({0}/2)*2:trunc({1}/2)*2", widthParam, heightParam));
                }

                // If max dimensions were supplied, pick the largest even output size that fits within both the input dimensions and the requested maximums, preserving the display aspect ratio
                else if (request.MaxWidth.HasValue && request.MaxHeight.HasValue)
                {
                    var maxWidthParam  = request.MaxWidth.Value.ToString(UsCulture);
                    var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);

                    filters.Add(string.Format("scale=trunc(min(max(iw\\,ih*dar)\\,min({0}\\,{1}*dar))/2)*2:trunc(min(max(iw/dar\\,ih)\\,min({0}/dar\\,{1}))/2)*2", maxWidthParam, maxHeightParam));
                }

                // If a fixed width was requested
                else if (request.Width.HasValue)
                {
                    var widthParam = request.Width.Value.ToString(UsCulture);

                    filters.Add(string.Format("scale={0}:trunc(ow/a/2)*2", widthParam));
                }

                // If a fixed height was requested
                else if (request.Height.HasValue)
                {
                    var heightParam = request.Height.Value.ToString(UsCulture);

                    filters.Add(string.Format("scale=trunc(oh*a/2)*2:{0}", heightParam));
                }

                // If a max width was requested
                else if (request.MaxWidth.HasValue)
                {
                    var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);

                    filters.Add(string.Format("scale=trunc(min(max(iw\\,ih*dar)\\,{0})/2)*2:trunc(ow/dar/2)*2", maxWidthParam));
                }

                // If a max height was requested
                else if (request.MaxHeight.HasValue)
                {
                    var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);

                    filters.Add(string.Format("scale=trunc(oh*a/2)*2:min(max(iw/dar\\,ih)\\,{0})", maxHeightParam));
                }
            }

            var output = string.Empty;

            if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream && state.Options.SubtitleMethod == SubtitleDeliveryMethod.Encode)
            {
                var subParam = await GetTextSubtitleParam(state).ConfigureAwait(false);

                filters.Add(subParam);

                if (allowTimeStampCopy)
                {
                    output += " -copyts";
                }
            }

            if (filters.Count > 0)
            {
                output += string.Format(" -vf \"{0}\"", string.Join(",", filters.ToArray()));
            }

            return output;
        }
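To illustrate the VAAPI branch with a hand-computed (hedged) example: for a 1920x1080 source with MaxWidth=1280 and MaxHeight=720 requested, the even-dimension math above lands on 1280x720, so the output contains:

    -vf "format=nv12|vaapi,hwupload,scale_vaapi=w=1280:h=720"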
Example no. 43
        public async Task<EncodingJob> CreateJob(EncodingJobOptions options, bool isVideoRequest, IProgress<double> progress, CancellationToken cancellationToken)
        {
            var request = options;

            if (string.IsNullOrEmpty(request.AudioCodec))
            {
                request.AudioCodec = InferAudioCodec(request.OutputContainer);
            }

            var state = new EncodingJob(_logger, _mediaSourceManager)
            {
                Options = options,
                IsVideoRequest = isVideoRequest,
                Progress = progress
            };

            if (!string.IsNullOrWhiteSpace(request.AudioCodec))
            {
                state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
                request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault();
            }

            var item = _libraryManager.GetItemById(request.ItemId);
            state.ItemType = item.GetType().Name;

            state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);

            var mediaSources = await _mediaSourceManager.GetPlayackMediaSources(request.ItemId, null, false, new[] { MediaType.Audio, MediaType.Video }, cancellationToken).ConfigureAwait(false);

            var mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
               ? mediaSources.First()
               : mediaSources.First(i => string.Equals(i.Id, request.MediaSourceId));

            AttachMediaStreamInfo(state, mediaSource, options);

            state.OutputAudioBitrate = GetAudioBitrateParam(request, state.AudioStream);
            state.OutputAudioSampleRate = request.AudioSampleRate;

            state.OutputAudioCodec = GetAudioCodec(request);

            state.OutputAudioChannels = GetNumAudioChannelsParam(request, state.AudioStream, state.OutputAudioCodec);

            if (isVideoRequest)
            {
                state.OutputVideoCodec = GetVideoCodec(request);
                state.OutputVideoBitrate = GetVideoBitrateParamValue(request, state.VideoStream);

                if (state.OutputVideoBitrate.HasValue)
                {
                    var resolution = ResolutionNormalizer.Normalize(state.OutputVideoBitrate.Value,
                        state.OutputVideoCodec,
                        request.MaxWidth,
                        request.MaxHeight);

                    request.MaxWidth = resolution.MaxWidth;
                    request.MaxHeight = resolution.MaxHeight;
                }
            }

            ApplyDeviceProfileSettings(state);

            TryStreamCopy(state, request);

            return state;
        }
Example no. 44
        protected virtual string GetOutputFileExtension(EncodingJob state)
        {
            if (!string.IsNullOrWhiteSpace(state.Options.OutputContainer))
            {
                return "." + state.Options.OutputContainer;
            }

            return null;
        }
Example no. 45
        private void ApplyDeviceProfileSettings(EncodingJob state)
        {
            var profile = state.Options.DeviceProfile;

            if (profile == null)
            {
                // Don't use settings from the default profile. 
                // Only use a specific profile if it was requested.
                return;
            }

            var audioCodec = state.ActualOutputAudioCodec;

            var videoCodec = state.ActualOutputVideoCodec;
            var outputContainer = state.Options.OutputContainer;

            var mediaProfile = !state.IsVideoRequest ?
                profile.GetAudioMediaProfile(outputContainer, audioCodec, state.OutputAudioChannels, state.OutputAudioBitrate) :
                profile.GetVideoMediaProfile(outputContainer,
                audioCodec,
                videoCodec,
                state.OutputWidth,
                state.OutputHeight,
                state.TargetVideoBitDepth,
                state.OutputVideoBitrate,
                state.TargetVideoProfile,
                state.TargetVideoLevel,
                state.TargetFramerate,
                state.TargetPacketLength,
                state.TargetTimestamp,
                state.IsTargetAnamorphic,
                state.IsTargetCabac,
                state.TargetRefFrames,
                state.TargetVideoStreamCount,
                state.TargetAudioStreamCount,
                state.TargetVideoCodecTag);

            if (mediaProfile != null)
            {
                state.MimeType = mediaProfile.MimeType;
            }

            var transcodingProfile = !state.IsVideoRequest ?
                profile.GetAudioTranscodingProfile(outputContainer, audioCodec) :
                profile.GetVideoTranscodingProfile(outputContainer, audioCodec, videoCodec);

            if (transcodingProfile != null)
            {
                state.EstimateContentLength = transcodingProfile.EstimateContentLength;
                state.EnableMpegtsM2TsMode = transcodingProfile.EnableMpegtsM2TsMode;
                state.TranscodeSeekInfo = transcodingProfile.TranscodeSeekInfo;
            }
        }
Example no. 46
        /// <summary>
        /// If we're going to put a fixed size on the command line, this will calculate it
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="outputVideoCodec">The output video codec.</param>
        /// <param name="allowTimeStampCopy">if set to <c>true</c> [allow time stamp copy].</param>
        /// <returns>System.String.</returns>
        protected string GetOutputSizeParam(EncodingJob state,
                                            string outputVideoCodec,
                                            bool allowTimeStampCopy = true)
        {
            // http://sonnati.wordpress.com/2012/10/19/ffmpeg-the-swiss-army-knife-of-internet-streaming-part-vi/

            var request = state.Options;

            var filters = new List<string>();

            if (state.DeInterlace)
            {
                filters.Add("yadif=0:-1:0");
            }

            // If fixed dimensions were supplied
            if (request.Width.HasValue && request.Height.HasValue)
            {
                var widthParam  = request.Width.Value.ToString(UsCulture);
                var heightParam = request.Height.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=trunc({0}/2)*2:trunc({1}/2)*2", widthParam, heightParam));
            }

            // If max dimensions were supplied, pick the largest even output size that fits within both the input dimensions and the requested maximums, preserving the display aspect ratio
            else if (request.MaxWidth.HasValue && request.MaxHeight.HasValue)
            {
                var maxWidthParam  = request.MaxWidth.Value.ToString(UsCulture);
                var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=trunc(min(max(iw\\,ih*dar)\\,min({0}\\,{1}*dar))/2)*2:trunc(min(max(iw/dar\\,ih)\\,min({0}/dar\\,{1}))/2)*2", maxWidthParam, maxHeightParam));
            }

            // If a fixed width was requested
            else if (request.Width.HasValue)
            {
                var widthParam = request.Width.Value.ToString(UsCulture);

                filters.Add(string.Format("scale={0}:trunc(ow/a/2)*2", widthParam));
            }

            // If a fixed height was requested
            else if (request.Height.HasValue)
            {
                var heightParam = request.Height.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=trunc(oh*a/2)*2:{0}", heightParam));
            }

            // If a max width was requested
            else if (request.MaxWidth.HasValue)
            {
                var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=min(iw\\,{0}):trunc(ow/dar/2)*2", maxWidthParam));
            }

            // If a max height was requested
            else if (request.MaxHeight.HasValue)
            {
                var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);

                filters.Add(string.Format("scale=trunc(oh*a/2)*2:min(ih\\,{0})", maxHeightParam));
            }

            var output = string.Empty;

            if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream)
            {
                var subParam = GetTextSubtitleParam(state);

                filters.Add(subParam);

                if (allowTimeStampCopy)
                {
                    output += " -copyts";
                }
            }

            if (filters.Count > 0)
            {
                output += string.Format(" -vf \"{0}\"", string.Join(",", filters.ToArray()));
            }

            return output;
        }