Example #1
        public static int GetNumberOfThreads(InternalEncodingTask state, bool isWebm)
        {
            // Use more when this is true. -re will keep cpu usage under control
            if (state.ReadInputAtNativeFramerate)
            {
                if (isWebm)
                {
                    return(Math.Max(Environment.ProcessorCount - 1, 2));
                }

                return(0);
            }

            // Webm: http://www.webmproject.org/docs/encoder-parameters/
            // The decoder will usually automatically use an appropriate number of threads according to how many cores are available but it can only use multiple threads
            // for the coefficient data if the encoder selected --token-parts > 0 at encode time.

            switch (state.QualitySetting)
            {
            case EncodingQuality.HighSpeed:
                return(2);

            case EncodingQuality.HighQuality:
                return(2);

            case EncodingQuality.MaxQuality:
                return(isWebm ? Math.Max(Environment.ProcessorCount - 1, 2) : 0);

            default:
                throw new Exception("Unrecognized MediaEncodingQuality value.");
            }
        }
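In practice, the value returned here just feeds ffmpeg's -threads option. A minimal sketch of such a caller, kept in the same EncodingUtils style (GetThreadsArgument is a hypothetical helper, not part of the original source, and needs using System.Globalization for CultureInfo):

        // Hypothetical helper: turns the computed thread count into an ffmpeg argument fragment.
        // A value of 0 tells ffmpeg to choose the thread count itself.
        public static string GetThreadsArgument(InternalEncodingTask state, bool isWebm)
        {
            var threads = GetNumberOfThreads(state, isWebm);
            return "-threads " + threads.ToString(CultureInfo.InvariantCulture);
        }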
Example #2
        private string GetOutputModifier(InternalEncodingTask task)
        {
            var options = task.Request;

            var audioTranscodeParams = new List <string>
            {
                "-threads " + EncodingUtils.GetNumberOfThreads(task, false).ToString(_usCulture),
                "-vn"
            };

            var bitrate = EncodingUtils.GetAudioBitrateParam(task);

            if (bitrate.HasValue)
            {
                audioTranscodeParams.Add("-ab " + bitrate.Value.ToString(_usCulture));
            }

            var channels = EncodingUtils.GetNumAudioChannelsParam(options, task.AudioStream);

            if (channels.HasValue)
            {
                audioTranscodeParams.Add("-ac " + channels.Value);
            }

            if (options.AudioSampleRate.HasValue)
            {
                audioTranscodeParams.Add("-ar " + options.AudioSampleRate.Value);
            }

            return(string.Join(" ", audioTranscodeParams.ToArray()));
        }
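With a hypothetical request asking for a 128000 bps bitrate, 2 audio channels and a 44100 Hz sample rate, the joined parameter list above would come out roughly as follows (the exact thread count depends on the quality setting and CPU):

        -threads 2 -vn -ab 128000 -ac 2 -ar 44100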
Example #3
        private string GetOutputModifier(InternalEncodingTask task)
        {
            var options = task.Request;

            var audioTranscodeParams = new List<string>
            {
                "-threads " + EncodingUtils.GetNumberOfThreads(task, false).ToString(_usCulture),
                "-vn"
            };

            var bitrate = EncodingUtils.GetAudioBitrateParam(task);

            if (bitrate.HasValue)
            {
                audioTranscodeParams.Add("-ab " + bitrate.Value.ToString(_usCulture));
            }

            var channels = EncodingUtils.GetNumAudioChannelsParam(options, task.AudioStream);

            if (channels.HasValue)
            {
                audioTranscodeParams.Add("-ac " + channels.Value);
            }

            if (options.AudioSampleRate.HasValue)
            {
                audioTranscodeParams.Add("-ar " + options.AudioSampleRate.Value);
            }

            return string.Join(" ", audioTranscodeParams.ToArray());
        }
Example #4
        private string GetArguments(InternalEncodingTask task, string mountedPath)
        {
            var options = task.Request;

            return(string.Format("{0} -i {1} {2} -id3v2_version 3 -write_id3v1 1 \"{3}\"",
                                 GetInputModifier(task),
                                 GetInputArgument(task),
                                 GetOutputModifier(task),
                                 options.OutputPath).Trim());
        }
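Combined, the argument string handed to ffmpeg takes the shape below; the bracketed pieces stand for the strings produced by GetInputModifier and GetInputArgument (covered in other examples), and the output path is purely illustrative:

        {input modifier} -i {input argument} -threads 2 -vn -ab 128000 -ac 2 -ar 44100 -id3v2_version 3 -write_id3v1 1 "C:\transcodes\output.mp3"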
Example #5
        private string GetArguments(InternalEncodingTask task, string mountedPath)
        {
            var options = task.Request;

            return string.Format("{0} -i {1} {2} -id3v2_version 3 -write_id3v1 1 \"{3}\"",
                GetInputModifier(task),
                GetInputArgument(task),
                GetOutputModifier(task),
                options.OutputPath).Trim();
        }
Example #6
        public static int? GetAudioBitrateParam(InternalEncodingTask task)
        {
            if (task.Request.AudioBitRate.HasValue)
            {
                // Make sure we don't request a bitrate higher than the source
                var currentBitrate = task.AudioStream == null ? task.Request.AudioBitRate.Value : task.AudioStream.BitRate ?? task.Request.AudioBitRate.Value;

                return(Math.Min(currentBitrate, task.Request.AudioBitRate.Value));
            }

            return(null);
        }
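The intent is simply to cap the requested bitrate at what the source actually carries. A small worked illustration with hypothetical numbers (not part of the original source):

        // Source audio stream reports 192000 bps, client requests 320000 bps:
        //   Math.Min(192000, 320000) == 192000, so the source bitrate is used.
        // If the stream reports no bitrate (BitRate == null), the requested 320000 is used unchanged.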
Example #7
        private void ApplyDeviceProfileSettings(InternalEncodingTask state)
        {
            var profile = state.Request.DeviceProfile;

            if (profile == null)
            {
                // Don't use settings from the default profile.
                // Only use a specific profile if it was requested.
                return;
            }

            var container = state.Request.Container;

            var audioCodec = state.Request.AudioCodec;

            if (string.Equals(audioCodec, "copy", StringComparison.OrdinalIgnoreCase) && state.AudioStream != null)
            {
                audioCodec = state.AudioStream.Codec;
            }

            var videoCodec = state.VideoRequest == null ? null : state.VideoRequest.VideoCodec;

            if (string.Equals(videoCodec, "copy", StringComparison.OrdinalIgnoreCase) && state.VideoStream != null)
            {
                videoCodec = state.VideoStream.Codec;
            }

            //var mediaProfile = state.VideoRequest == null ?
            //    profile.GetAudioMediaProfile(container, audioCodec) :
            //    profile.GetVideoMediaProfile(container, audioCodec, videoCodec, state.AudioStream, state.VideoStream);

            //if (mediaProfile != null)
            //{
            //    state.MimeType = mediaProfile.MimeType;
            //    state.OrgPn = mediaProfile.OrgPn;
            //}

            //var transcodingProfile = state.VideoRequest == null ?
            //    profile.GetAudioTranscodingProfile(container, audioCodec) :
            //    profile.GetVideoTranscodingProfile(container, audioCodec, videoCodec);

            //if (transcodingProfile != null)
            //{
            //    //state.EstimateContentLength = transcodingProfile.EstimateContentLength;
            //    state.EnableMpegtsM2TsMode = transcodingProfile.EnableMpegtsM2TsMode;
            //    //state.TranscodeSeekInfo = transcodingProfile.TranscodeSeekInfo;

            //    if (state.VideoRequest != null && string.IsNullOrWhiteSpace(state.VideoRequest.VideoProfile))
            //    {
            //        state.VideoRequest.VideoProfile = transcodingProfile.VideoProfile;
            //    }
            //}
        }
Example #8
        private static string GetCommonInputModifier(InternalEncodingTask options)
        {
            var inputModifier = string.Empty;

            if (options.EnableDebugLogging)
            {
                inputModifier += "-loglevel debug";
            }

            var probeSize = GetProbeSizeArgument(options.InputVideoType.HasValue && options.InputVideoType.Value == VideoType.Dvd);

            inputModifier += " " + probeSize;
            inputModifier  = inputModifier.Trim();

            if (!string.IsNullOrWhiteSpace(options.UserAgent))
            {
                inputModifier += " -user-agent \"" + options.UserAgent + "\"";
            }

            inputModifier += " " + GetFastSeekValue(options.Request);
            inputModifier  = inputModifier.Trim();

            if (!string.IsNullOrEmpty(options.InputFormat))
            {
                inputModifier += " -f " + options.InputFormat;
            }

            if (!string.IsNullOrEmpty(options.InputAudioCodec))
            {
                inputModifier += " -acodec " + options.InputAudioCodec;
            }

            if (!string.IsNullOrEmpty(options.InputAudioSync))
            {
                inputModifier += " -async " + options.InputAudioSync;
            }

            if (options.ReadInputAtNativeFramerate)
            {
                inputModifier += " -re";
            }

            return(inputModifier);
        }
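As an illustration, for a hypothetical task with debug logging enabled, a custom user agent, an mpegts input format, an aac input codec, an audio sync value of 1 and native-framerate reading turned on, the assembled modifier would look roughly like this (the probe-size and fast-seek fragments come from helpers not shown here):

        -loglevel debug {probe size} -user-agent "CustomAgent/1.0" {fast seek} -f mpegts -acodec aac -async 1 -re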
Example #9
        public static string GetInputModifier(InternalEncodingTask options)
        {
            var inputModifier = GetCommonInputModifier(options);

            //if (state.VideoRequest != null)
            //{
            //    inputModifier += " -fflags genpts";
            //}

            //if (!string.IsNullOrEmpty(state.InputVideoCodec))
            //{
            //    inputModifier += " -vcodec " + state.InputVideoCodec;
            //}

            //if (!string.IsNullOrEmpty(state.InputVideoSync))
            //{
            //    inputModifier += " -vsync " + state.InputVideoSync;
            //}

            return inputModifier;
        }
Example #10
        public static string GetInputModifier(InternalEncodingTask options)
        {
            var inputModifier = GetCommonInputModifier(options);

            //if (state.VideoRequest != null)
            //{
            //    inputModifier += " -fflags genpts";
            //}

            //if (!string.IsNullOrEmpty(state.InputVideoCodec))
            //{
            //    inputModifier += " -vcodec " + state.InputVideoCodec;
            //}

            //if (!string.IsNullOrEmpty(state.InputVideoSync))
            //{
            //    inputModifier += " -vsync " + state.InputVideoSync;
            //}

            return(inputModifier);
        }
Example #11
        public static int? GetAudioBitrateParam(InternalEncodingTask task)
        {
            if (task.Request.AudioBitRate.HasValue)
            {
                // Make sure we don't request a bitrate higher than the source
                var currentBitrate = task.AudioStream == null ? task.Request.AudioBitRate.Value : task.AudioStream.BitRate ?? task.Request.AudioBitRate.Value;

                return Math.Min(currentBitrate, task.Request.AudioBitRate.Value);
            }

            return null;
        }
Example #12
        public async Task Start(InternalEncodingTask task, Func <InternalEncodingTask, string, string> argumentsFactory)
        {
            _task = task;
            if (!File.Exists(_ffmpegPath))
            {
                throw new InvalidOperationException("ffmpeg was not found at " + _ffmpegPath);
            }

            Directory.CreateDirectory(Path.GetDirectoryName(task.Request.OutputPath));

            string mountedPath = null;

            if (task.InputVideoType.HasValue && task.InputVideoType == VideoType.Iso && task.IsoType.HasValue)
            {
                if (_isoManager.CanMount(task.MediaPath))
                {
                    _isoMount = await _isoManager.Mount(task.MediaPath, CancellationToken.None).ConfigureAwait(false);

                    mountedPath = _isoMount.MountedPath;
                }
            }

            var process = new Process
            {
                StartInfo = new ProcessStartInfo
                {
                    CreateNoWindow  = true,
                    UseShellExecute = false,

                    // Must consume both stdout and stderr or deadlocks may occur
                    RedirectStandardOutput = true,
                    RedirectStandardError  = true,

                    FileName         = _ffmpegPath,
                    WorkingDirectory = Path.GetDirectoryName(_ffmpegPath),
                    Arguments        = argumentsFactory(task, mountedPath),

                    WindowStyle = ProcessWindowStyle.Hidden,
                    ErrorDialog = false
                },

                EnableRaisingEvents = true
            };

            _logger.Info(process.StartInfo.FileName + " " + process.StartInfo.Arguments);

            var logFilePath = Path.Combine(_appPaths.LogDirectoryPath, "ffmpeg-" + task.Id + ".txt");

            Directory.CreateDirectory(Path.GetDirectoryName(logFilePath));

            // FFMpeg writes debug/error info to stderr. This is useful when debugging so let's put it in the log directory.
            _logFileStream = _fileSystem.GetFileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, true);

            process.Exited += process_Exited;

            try
            {
                process.Start();
            }
            catch (Exception ex)
            {
                _logger.ErrorException("Error starting ffmpeg", ex);

                task.OnError();

                DisposeLogFileStream();

                process.Dispose();

                throw;
            }

            task.OnBegin();

            // MUST read both stdout and stderr asynchronously or a deadlock may occur
            process.BeginOutputReadLine();

#pragma warning disable 4014
            // Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback
            process.StandardError.BaseStream.CopyToAsync(_logFileStream);
#pragma warning restore 4014
        }
Example #13
        public async Task<InternalEncodingTask> Create(EncodingOptions request, CancellationToken cancellationToken)
        {
            ValidateInput(request);

            var state = new InternalEncodingTask
            {
                Request = request
            };

            var item = string.IsNullOrEmpty(request.MediaSourceId) ?
                _libraryManager.GetItemById(new Guid(request.ItemId)) :
                _libraryManager.GetItemById(new Guid(request.MediaSourceId));

            if (item is ILiveTvRecording)
            {
                var recording = await _liveTvManager.GetInternalRecording(request.ItemId, cancellationToken).ConfigureAwait(false);

                if (string.Equals(recording.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase))
                {
                    state.InputVideoType = VideoType.VideoFile;
                }

                var path = recording.RecordingInfo.Path;
                var mediaUrl = recording.RecordingInfo.Url;

                if (string.IsNullOrWhiteSpace(path) && string.IsNullOrWhiteSpace(mediaUrl))
                {
                    var streamInfo = await _liveTvManager.GetRecordingStream(request.ItemId, cancellationToken).ConfigureAwait(false);

                    state.LiveTvStreamId = streamInfo.Id;

                    path = streamInfo.Path;
                    mediaUrl = streamInfo.Url;
                }

                if (!string.IsNullOrEmpty(path) && File.Exists(path))
                {
                    state.MediaPath = path;
                    state.IsInputRemote = false;
                }
                else if (!string.IsNullOrEmpty(mediaUrl))
                {
                    state.MediaPath = mediaUrl;
                    state.IsInputRemote = true;
                }

                state.InputRunTimeTicks = recording.RunTimeTicks;
                if (recording.RecordingInfo.Status == RecordingStatus.InProgress && !state.IsInputRemote)
                {
                    await Task.Delay(1000, cancellationToken).ConfigureAwait(false);
                }

                state.ReadInputAtNativeFramerate = recording.RecordingInfo.Status == RecordingStatus.InProgress;
                state.AudioSync = "1000";
                state.DeInterlace = true;
                state.InputVideoSync = "-1";
                state.InputAudioSync = "1";
            }
            else if (item is LiveTvChannel)
            {
                var channel = _liveTvManager.GetInternalChannel(request.ItemId);

                if (string.Equals(channel.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase))
                {
                    state.InputVideoType = VideoType.VideoFile;
                }

                var streamInfo = await _liveTvManager.GetChannelStream(request.ItemId, cancellationToken).ConfigureAwait(false);

                state.LiveTvStreamId = streamInfo.Id;

                if (!string.IsNullOrEmpty(streamInfo.Path) && File.Exists(streamInfo.Path))
                {
                    state.MediaPath = streamInfo.Path;
                    state.IsInputRemote = false;

                    await Task.Delay(1000, cancellationToken).ConfigureAwait(false);
                }
                else if (!string.IsNullOrEmpty(streamInfo.Url))
                {
                    state.MediaPath = streamInfo.Url;
                    state.IsInputRemote = true;
                }

                state.ReadInputAtNativeFramerate = true;
                state.AudioSync = "1000";
                state.DeInterlace = true;
                state.InputVideoSync = "-1";
                state.InputAudioSync = "1";
            }
            else
            {
                state.MediaPath = item.Path;
                state.IsInputRemote = item.LocationType == LocationType.Remote;

                var video = item as Video;

                if (video != null)
                {
                    state.InputVideoType = video.VideoType;
                    state.IsoType = video.IsoType;

                    state.StreamFileNames = video.PlayableStreamFileNames.ToList();
                }

                state.InputRunTimeTicks = item.RunTimeTicks;
            }

            var videoRequest = request as VideoEncodingOptions;

            var mediaStreams = _itemRepo.GetMediaStreams(new MediaStreamQuery
            {
                ItemId = item.Id

            }).ToList();

            if (videoRequest != null)
            {
                state.VideoStream = GetMediaStream(mediaStreams, videoRequest.VideoStreamIndex, MediaStreamType.Video);
                state.SubtitleStream = GetMediaStream(mediaStreams, videoRequest.SubtitleStreamIndex, MediaStreamType.Subtitle, false);
                state.AudioStream = GetMediaStream(mediaStreams, videoRequest.AudioStreamIndex, MediaStreamType.Audio);

                if (state.VideoStream != null && state.VideoStream.IsInterlaced)
                {
                    state.DeInterlace = true;
                }
            }
            else
            {
                state.AudioStream = GetMediaStream(mediaStreams, null, MediaStreamType.Audio, true);
            }

            state.HasMediaStreams = mediaStreams.Count > 0;

            state.SegmentLength = state.ReadInputAtNativeFramerate ? 5 : 10;
            state.HlsListSize = state.ReadInputAtNativeFramerate ? 100 : 1440;

            state.QualitySetting = GetQualitySetting();

            ApplyDeviceProfileSettings(state);

            return state;
        }
Example #14
 private string GetInputArgument(InternalEncodingTask task)
 {
     return(EncodingUtils.GetInputArgument(new List <string> {
         task.MediaPath
     }, task.IsInputRemote));
 }
Example #15
 private string GetInputModifier(InternalEncodingTask task)
 {
     return(EncodingUtils.GetInputModifier(task));
 }
Example #16
 public static string GetAudioInputModifier(InternalEncodingTask options)
 {
     return(GetCommonInputModifier(options));
 }
Example #17
 public Task BeginEncoding(InternalEncodingTask task)
 {
     return new FFMpegProcess(_ffmpegPath, _logger, _fileSystem, _appPaths, _isoManager, _liveTvManager).Start(task, GetArguments);
 }
Example #18
        public async Task <InternalEncodingTask> Create(EncodingOptions request, CancellationToken cancellationToken)
        {
            ValidateInput(request);

            var state = new InternalEncodingTask
            {
                Request = request
            };

            var item = string.IsNullOrEmpty(request.MediaSourceId) ?
                       _libraryManager.GetItemById(new Guid(request.ItemId)) :
                       _libraryManager.GetItemById(new Guid(request.MediaSourceId));

            if (item is ILiveTvRecording)
            {
                var recording = await _liveTvManager.GetInternalRecording(request.ItemId, cancellationToken).ConfigureAwait(false);

                if (string.Equals(recording.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase))
                {
                    state.InputVideoType = VideoType.VideoFile;
                }

                var path     = recording.RecordingInfo.Path;
                var mediaUrl = recording.RecordingInfo.Url;

                if (string.IsNullOrWhiteSpace(path) && string.IsNullOrWhiteSpace(mediaUrl))
                {
                    var streamInfo = await _liveTvManager.GetRecordingStream(request.ItemId, cancellationToken).ConfigureAwait(false);

                    state.LiveTvStreamId = streamInfo.Id;

                    path     = streamInfo.Path;
                    mediaUrl = streamInfo.Url;
                }

                if (!string.IsNullOrEmpty(path) && File.Exists(path))
                {
                    state.MediaPath     = path;
                    state.IsInputRemote = false;
                }
                else if (!string.IsNullOrEmpty(mediaUrl))
                {
                    state.MediaPath     = mediaUrl;
                    state.IsInputRemote = true;
                }

                state.InputRunTimeTicks = recording.RunTimeTicks;
                if (recording.RecordingInfo.Status == RecordingStatus.InProgress && !state.IsInputRemote)
                {
                    await Task.Delay(1000, cancellationToken).ConfigureAwait(false);
                }

                state.ReadInputAtNativeFramerate = recording.RecordingInfo.Status == RecordingStatus.InProgress;
                state.AudioSync      = "1000";
                state.DeInterlace    = true;
                state.InputVideoSync = "-1";
                state.InputAudioSync = "1";
            }
            else if (item is LiveTvChannel)
            {
                var channel = _liveTvManager.GetInternalChannel(request.ItemId);

                if (string.Equals(channel.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase))
                {
                    state.InputVideoType = VideoType.VideoFile;
                }

                var streamInfo = await _liveTvManager.GetChannelStream(request.ItemId, cancellationToken).ConfigureAwait(false);

                state.LiveTvStreamId = streamInfo.Id;

                if (!string.IsNullOrEmpty(streamInfo.Path) && File.Exists(streamInfo.Path))
                {
                    state.MediaPath     = streamInfo.Path;
                    state.IsInputRemote = false;

                    await Task.Delay(1000, cancellationToken).ConfigureAwait(false);
                }
                else if (!string.IsNullOrEmpty(streamInfo.Url))
                {
                    state.MediaPath     = streamInfo.Url;
                    state.IsInputRemote = true;
                }

                state.ReadInputAtNativeFramerate = true;
                state.AudioSync      = "1000";
                state.DeInterlace    = true;
                state.InputVideoSync = "-1";
                state.InputAudioSync = "1";
            }
            else
            {
                state.MediaPath     = item.Path;
                state.IsInputRemote = item.LocationType == LocationType.Remote;

                var video = item as Video;

                if (video != null)
                {
                    state.InputVideoType = video.VideoType;
                    state.IsoType        = video.IsoType;

                    state.StreamFileNames = video.PlayableStreamFileNames.ToList();
                }

                state.InputRunTimeTicks = item.RunTimeTicks;
            }

            var videoRequest = request as VideoEncodingOptions;

            var mediaStreams = _itemRepo.GetMediaStreams(new MediaStreamQuery
            {
                ItemId = item.Id
            }).ToList();

            if (videoRequest != null)
            {
                state.VideoStream    = GetMediaStream(mediaStreams, videoRequest.VideoStreamIndex, MediaStreamType.Video);
                state.SubtitleStream = GetMediaStream(mediaStreams, videoRequest.SubtitleStreamIndex, MediaStreamType.Subtitle, false);
                state.AudioStream    = GetMediaStream(mediaStreams, videoRequest.AudioStreamIndex, MediaStreamType.Audio);

                if (state.VideoStream != null && state.VideoStream.IsInterlaced)
                {
                    state.DeInterlace = true;
                }
            }
            else
            {
                state.AudioStream = GetMediaStream(mediaStreams, null, MediaStreamType.Audio, true);
            }

            state.HasMediaStreams = mediaStreams.Count > 0;

            state.SegmentLength = state.ReadInputAtNativeFramerate ? 5 : 10;
            state.HlsListSize   = state.ReadInputAtNativeFramerate ? 100 : 1440;

            state.QualitySetting = GetQualitySetting();

            ApplyDeviceProfileSettings(state);

            return(state);
        }
Example #19
        private void ApplyDeviceProfileSettings(InternalEncodingTask state)
        {
            var profile = state.Request.DeviceProfile;

            if (profile == null)
            {
                // Don't use settings from the default profile. 
                // Only use a specific profile if it was requested.
                return;
            }

            var container = state.Request.Container;

            var audioCodec = state.Request.AudioCodec;

            if (string.Equals(audioCodec, "copy", StringComparison.OrdinalIgnoreCase) && state.AudioStream != null)
            {
                audioCodec = state.AudioStream.Codec;
            }

            var videoCodec = state.VideoRequest == null ? null : state.VideoRequest.VideoCodec;

            if (string.Equals(videoCodec, "copy", StringComparison.OrdinalIgnoreCase) && state.VideoStream != null)
            {
                videoCodec = state.VideoStream.Codec;
            }

            var mediaProfile = state.VideoRequest == null ?
                profile.GetAudioMediaProfile(container, audioCodec, state.AudioStream) :
                profile.GetVideoMediaProfile(container, audioCodec, videoCodec, state.AudioStream, state.VideoStream);

            if (mediaProfile != null)
            {
                state.MimeType = mediaProfile.MimeType;
                state.OrgPn = mediaProfile.OrgPn;
            }

            var transcodingProfile = state.VideoRequest == null ?
                profile.GetAudioTranscodingProfile(container, audioCodec) :
                profile.GetVideoTranscodingProfile(container, audioCodec, videoCodec);

            if (transcodingProfile != null)
            {
                //state.EstimateContentLength = transcodingProfile.EstimateContentLength;
                state.EnableMpegtsM2TsMode = transcodingProfile.EnableMpegtsM2TsMode;
                //state.TranscodeSeekInfo = transcodingProfile.TranscodeSeekInfo;

                if (state.VideoRequest != null && string.IsNullOrWhiteSpace(state.VideoRequest.VideoProfile))
                {
                    state.VideoRequest.VideoProfile = transcodingProfile.VideoProfile;
                }
            }
        }
Example #20
 private string GetInputArgument(InternalEncodingTask task)
 {
     return EncodingUtils.GetInputArgument(new List<string> { task.MediaPath }, task.IsInputRemote);
 }
Example #21
 private string GetInputModifier(InternalEncodingTask task)
 {
     return EncodingUtils.GetInputModifier(task);
 }
Example #22
        public static int GetNumberOfThreads(InternalEncodingTask state, bool isWebm)
        {
            // Use more when this is true. -re will keep cpu usage under control
            if (state.ReadInputAtNativeFramerate)
            {
                if (isWebm)
                {
                    return Math.Max(Environment.ProcessorCount - 1, 2);
                }

                return 0;
            }

            // Webm: http://www.webmproject.org/docs/encoder-parameters/
            // The decoder will usually automatically use an appropriate number of threads according to how many cores are available but it can only use multiple threads 
            // for the coefficient data if the encoder selected --token-parts > 0 at encode time.

            switch (state.QualitySetting)
            {
                case EncodingQuality.HighSpeed:
                    return 2;
                case EncodingQuality.HighQuality:
                    return isWebm ? Math.Max(Environment.ProcessorCount - 1, 2) : 0;
                case EncodingQuality.MaxQuality:
                    return isWebm ? Math.Max(Environment.ProcessorCount - 1, 2) : 0;
                default:
                    throw new Exception("Unrecognized MediaEncodingQuality value.");
            }
        }
Example #23
 public static string GetAudioInputModifier(InternalEncodingTask options)
 {
     return GetCommonInputModifier(options);
 }
Example #24
 public Task BeginEncoding(InternalEncodingTask task)
 {
     return(new FFMpegProcess(_ffmpegPath, _logger, _fileSystem, _appPaths, _isoManager, _liveTvManager).Start(task, GetArguments));
 }
Example #25
        private static string GetCommonInputModifier(InternalEncodingTask options)
        {
            var inputModifier = string.Empty;

            if (options.EnableDebugLogging)
            {
                inputModifier += "-loglevel debug";
            }

            var probeSize = GetProbeSizeArgument(options.InputVideoType.HasValue && options.InputVideoType.Value == VideoType.Dvd);
            inputModifier += " " + probeSize;
            inputModifier = inputModifier.Trim();

            if (!string.IsNullOrWhiteSpace(options.UserAgent))
            {
                inputModifier += " -user-agent \"" + options.UserAgent + "\"";
            }

            inputModifier += " " + GetFastSeekValue(options.Request);
            inputModifier = inputModifier.Trim();

            if (!string.IsNullOrEmpty(options.InputFormat))
            {
                inputModifier += " -f " + options.InputFormat;
            }

            if (!string.IsNullOrEmpty(options.InputAudioCodec))
            {
                inputModifier += " -acodec " + options.InputAudioCodec;
            }

            if (!string.IsNullOrEmpty(options.InputAudioSync))
            {
                inputModifier += " -async " + options.InputAudioSync;
            }

            if (options.ReadInputAtNativeFramerate)
            {
                inputModifier += " -re";
            }

            return inputModifier;
        }
Example #26
        public async Task Start(InternalEncodingTask task, Func<InternalEncodingTask,string,string> argumentsFactory)
        {
            _task = task;
            if (!File.Exists(_ffmpegPath))
            {
                throw new InvalidOperationException("ffmpeg was not found at " + _ffmpegPath);
            }

            Directory.CreateDirectory(Path.GetDirectoryName(task.Request.OutputPath));

            string mountedPath = null;
            if (task.InputVideoType.HasValue && task.InputVideoType == VideoType.Iso && task.IsoType.HasValue)
            {
                if (_isoManager.CanMount(task.MediaPath))
                {
                    _isoMount = await _isoManager.Mount(task.MediaPath, CancellationToken.None).ConfigureAwait(false);
                    mountedPath = _isoMount.MountedPath;
                }
            }
            
            var process = new Process
            {
                StartInfo = new ProcessStartInfo
                {
                    CreateNoWindow = true,
                    UseShellExecute = false,

                    // Must consume both stdout and stderr or deadlocks may occur
                    RedirectStandardOutput = true,
                    RedirectStandardError = true,

                    FileName = _ffmpegPath,
                    WorkingDirectory = Path.GetDirectoryName(_ffmpegPath),
                    Arguments = argumentsFactory(task, mountedPath),

                    WindowStyle = ProcessWindowStyle.Hidden,
                    ErrorDialog = false
                },

                EnableRaisingEvents = true
            };

            _logger.Info(process.StartInfo.FileName + " " + process.StartInfo.Arguments);

            var logFilePath = Path.Combine(_appPaths.LogDirectoryPath, "ffmpeg-" + task.Id + ".txt");
            Directory.CreateDirectory(Path.GetDirectoryName(logFilePath));

            // FFMpeg writes debug/error info to stderr. This is useful when debugging so let's put it in the log directory.
            _logFileStream = _fileSystem.GetFileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, true);

            process.Exited += process_Exited;

            try
            {
                process.Start();
            }
            catch (Exception ex)
            {
                _logger.ErrorException("Error starting ffmpeg", ex);

                task.OnError();

                DisposeLogFileStream();

                process.Dispose();

                throw;
            }

            task.OnBegin();

            // MUST read both stdout and stderr asynchronously or a deadlock may occur
            process.BeginOutputReadLine();

#pragma warning disable 4014
            // Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback
            process.StandardError.BaseStream.CopyToAsync(_logFileStream);
#pragma warning restore 4014
        }
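The #pragma 4014 block above deliberately leaves CopyToAsync un-awaited: awaiting it would keep Start blocked until ffmpeg exits, and the caller could no longer kill the process when the user stops playback. A standalone sketch of the same fire-and-forget pattern with a generic process and a hypothetical log path (not the original implementation; assumes System.Diagnostics, System.IO and System.Threading.Tasks):

        // Minimal sketch: start a process, drain stdout, and copy stderr to a log file
        // without awaiting the copy, so the caller keeps control of the process lifetime.
        public static Process StartWithStderrLog(string fileName, string arguments, string logPath)
        {
            var process = new Process
            {
                StartInfo = new ProcessStartInfo
                {
                    FileName = fileName,
                    Arguments = arguments,
                    UseShellExecute = false,

                    // Both streams must be consumed or the child process may deadlock on a full pipe.
                    RedirectStandardOutput = true,
                    RedirectStandardError = true,

                    CreateNoWindow = true
                },
                EnableRaisingEvents = true
            };

            process.Start();
            process.BeginOutputReadLine(); // read and discard stdout asynchronously

            // Stream disposal is omitted for brevity; a real caller would close it when the process exits.
            var logStream = new FileStream(logPath, FileMode.Create, FileAccess.Write, FileShare.Read);

            // Deliberately not awaited: the copy keeps running for as long as the process writes to stderr.
            _ = process.StandardError.BaseStream.CopyToAsync(logStream);

            return process;
        }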