/// <summary>
/// Gets the number of audio channels to specify on the command line
/// </summary>
/// <param name="request">The request.</param>
/// <param name="audioStream">The audio stream.</param>
/// <param name="outputAudioCodec">The output audio codec.</param>
/// <returns>System.Nullable{System.Int32}.</returns>
private int? GetNumAudioChannelsParam(EncodingJobOptions request, MediaStream audioStream, string outputAudioCodec)
{
    var codec = outputAudioCodec ?? string.Empty;

    // wmav2 currently only supports two channel output
    if (audioStream != null
        && audioStream.Channels > 2
        && codec.IndexOf("wma", StringComparison.OrdinalIgnoreCase) != -1)
    {
        return 2;
    }

    if (!request.MaxAudioChannels.HasValue)
    {
        return request.AudioChannels;
    }

    if (audioStream != null && audioStream.Channels.HasValue)
    {
        // Never request more channels than the source actually has
        return Math.Min(request.MaxAudioChannels.Value, audioStream.Channels.Value);
    }

    // If we don't have any media info then limit it to 5 to prevent encoding errors due to asking for too many channels
    return Math.Min(request.MaxAudioChannels.Value, 5);
}
/// <summary>
/// Gets the name of the output video codec
/// </summary>
/// <param name="request">The request.</param>
/// <returns>System.String.</returns>
private string GetVideoCodec(EncodingJobOptions request)
{
    var requested = request.VideoCodec;

    // Well-known codec names mapped to the ffmpeg encoder that produces them
    var encoderMap = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
    {
        { "h264", "libx264" },
        { "h265", "libx265" },
        { "hevc", "libx265" },
        { "vpx", "libvpx" },
        { "wmv", "wmv2" },
        { "theora", "libtheora" }
    };

    string encoder;
    if (requested != null && encoderMap.TryGetValue(requested, out encoder))
    {
        return encoder;
    }

    // Unknown codec: pass it through lower-cased
    return (requested ?? string.Empty).ToLower();
}
/// <summary>
/// Gets the video bitrate to specify on the command line, capped at the
/// source bitrate unless the output is being upscaled.
/// </summary>
/// <param name="request">The request.</param>
/// <param name="videoStream">The source video stream (may be null).</param>
/// <returns>The bitrate to request, or null when none was requested.</returns>
private int? GetVideoBitrateParamValue(EncodingJobOptions request, MediaStream videoStream)
{
    var bitrate = request.VideoBitRate;

    if (videoStream == null)
    {
        return bitrate;
    }

    // Upscaling if either requested dimension exceeds the source dimension
    var upscaling = (request.Height.HasValue && videoStream.Height.HasValue && request.Height.Value > videoStream.Height.Value)
        || (request.Width.HasValue && videoStream.Width.HasValue && request.Width.Value > videoStream.Width.Value);

    // Don't allow bitrate increases unless upscaling
    if (!upscaling && bitrate.HasValue && videoStream.BitRate.HasValue)
    {
        bitrate = Math.Min(bitrate.Value, videoStream.BitRate.Value);
    }

    return bitrate;
}
/// <summary>
/// Selects the video/audio/subtitle streams for the job from the given stream list
/// and records related state (deinterlace flag, subtitle offset, resolution limits).
/// </summary>
/// <param name="state">The encoding job state to populate.</param>
/// <param name="mediaStreams">All streams of the media source.</param>
/// <param name="videoRequest">The video request options, or null for audio-only.</param>
internal static void AttachMediaStreamInfo(EncodingJob state, List <MediaStream> mediaStreams, EncodingJobOptions videoRequest)
{
    if (videoRequest == null)
    {
        // Audio-only: just pick the default audio stream
        state.AudioStream = GetMediaStream(mediaStreams, null, MediaStreamType.Audio, true);
    }
    else
    {
        if (string.IsNullOrEmpty(videoRequest.VideoCodec))
        {
            videoRequest.VideoCodec = InferVideoCodec(videoRequest.OutputContainer);
        }

        state.VideoStream = GetMediaStream(mediaStreams, videoRequest.VideoStreamIndex, MediaStreamType.Video);
        state.SubtitleStream = GetMediaStream(mediaStreams, videoRequest.SubtitleStreamIndex, MediaStreamType.Subtitle, false);
        state.AudioStream = GetMediaStream(mediaStreams, videoRequest.AudioStreamIndex, MediaStreamType.Audio);

        if (state.SubtitleStream != null && !state.SubtitleStream.IsExternal)
        {
            // Index of the chosen subtitle stream among embedded subtitle streams only
            var embeddedSubtitles = mediaStreams
                .Where(i => i.Type == MediaStreamType.Subtitle && !i.IsExternal)
                .ToList();
            state.InternalSubtitleStreamOffset = embeddedSubtitles.IndexOf(state.SubtitleStream);
        }

        if (state.VideoStream != null && state.VideoStream.IsInterlaced)
        {
            state.DeInterlace = true;
        }

        EnforceResolutionLimit(state, videoRequest);
    }

    state.AllMediaStreams = mediaStreams;
}
/// <summary>
/// Enforces the resolution limit.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="videoRequest">The video request.</param>
private static void EnforceResolutionLimit(EncodingJob state, EncodingJobOptions videoRequest)
{
    // Treat the requested Width/Height as ceilings instead of exact output dimensions
    if (!videoRequest.MaxWidth.HasValue)
    {
        videoRequest.MaxWidth = videoRequest.Width;
    }

    if (!videoRequest.MaxHeight.HasValue)
    {
        videoRequest.MaxHeight = videoRequest.Height;
    }

    videoRequest.Width = null;
    videoRequest.Height = null;
}
/// <summary>
/// Gets the number of audio channels to specify on the command line
/// </summary>
/// <param name="request">The request.</param>
/// <param name="audioStream">The audio stream.</param>
/// <param name="outputAudioCodec">The output audio codec.</param>
/// <returns>System.Nullable{System.Int32}.</returns>
private int? GetNumAudioChannelsParam(EncodingJobOptions request, MediaStream audioStream, string outputAudioCodec)
{
    // Treat a missing or non-positive source channel count as unknown
    int? inputChannels = audioStream == null ? null : audioStream.Channels;
    if (inputChannels <= 0)
    {
        inputChannels = null;
    }

    var codec = outputAudioCodec ?? string.Empty;

    int? encoderLimit;
    if (codec.IndexOf("wma", StringComparison.OrdinalIgnoreCase) != -1)
    {
        // wmav2 currently only supports two channel output
        encoderLimit = 2;
    }
    else if (codec.IndexOf("mp3", StringComparison.OrdinalIgnoreCase) != -1)
    {
        // libmp3lame currently only supports two channel output
        encoderLimit = 2;
    }
    else
    {
        // Cap at 6 to prevent encoding errors due to asking for too many channels
        encoderLimit = 6;
    }

    var isTranscodingAudio = !string.Equals(codec, "copy", StringComparison.OrdinalIgnoreCase);

    // Start from the transcoding-specific limit, then the generic request limits
    int? channels = isTranscodingAudio ? request.TranscodingMaxAudioChannels : null;
    channels = channels ?? request.MaxAudioChannels ?? request.AudioChannels;

    // Never ask for more channels than the source provides
    if (inputChannels.HasValue)
    {
        channels = channels.HasValue ? Math.Min(channels.Value, inputChannels.Value) : inputChannels.Value;
    }

    // Apply the encoder's own channel limit when transcoding
    if (isTranscodingAudio && encoderLimit.HasValue)
    {
        channels = channels.HasValue ? Math.Min(channels.Value, encoderLimit.Value) : encoderLimit.Value;
    }

    return channels ?? request.AudioChannels;
}
/// <summary>
/// Gets the fast seek command line parameter.
/// </summary>
/// <param name="request">The request.</param>
/// <returns>System.String.</returns>
/// <value>The fast seek command line parameter.</value>
protected string GetFastSeekCommandLineParameter(EncodingJobOptions request)
{
    var startTicks = request.StartTimeTicks ?? 0;

    // Only emit -ss when an actual (positive) start position was requested
    return startTicks > 0
        ? string.Format("-ss {0}", MediaEncoder.GetTimeParameter(startTicks))
        : string.Empty;
}
/// <summary>
/// Gets the fast seek command line parameter.
/// </summary>
/// <param name="options">The options.</param>
/// <returns>System.String.</returns>
/// <value>The fast seek command line parameter.</value>
protected string GetFastSeekCommandLineParameter(EncodingJobOptions options)
{
    var startTicks = options.StartTimeTicks;

    // No start position requested: no seek argument
    if (!startTicks.HasValue || startTicks.Value <= 0)
    {
        return string.Empty;
    }

    return string.Format("-ss {0}", MediaEncoder.GetTimeParameter(startTicks.Value));
}
/// <summary>
/// Gets the audio bitrate to specify on the command line.
/// </summary>
/// <param name="request">The request.</param>
/// <param name="audioStream">The audio stream (currently unused; see note below).</param>
/// <returns>The requested audio bitrate, or null if none was requested.</returns>
private int? GetAudioBitrateParam(EncodingJobOptions request, MediaStream audioStream)
{
    // NOTE: capping the requested bitrate at the source bitrate
    // (Math.Min(audioStream.BitRate, request.AudioBitRate)) was deliberately
    // disabled here; the requested value is returned as-is. The audioStream
    // parameter is kept so the signature stays compatible with callers.
    return request.AudioBitRate;
}
/// <summary>
/// Determines whether the audio stream can be copied directly (without re-encoding)
/// while still satisfying the constraints in the request.
/// </summary>
/// <param name="request">The request.</param>
/// <param name="audioStream">The source audio stream.</param>
/// <param name="supportedAudioCodecs">Codecs the client accepts.</param>
/// <returns>True if direct stream copy is allowed; otherwise false.</returns>
internal static bool CanStreamCopyAudio(EncodingJobOptions request, MediaStream audioStream, List <string> supportedAudioCodecs)
{
    // Source and target codecs must match
    if (string.IsNullOrEmpty(audioStream.Codec)
        || !supportedAudioCodecs.Contains(audioStream.Codec, StringComparer.OrdinalIgnoreCase))
    {
        return false;
    }

    // Audio bitrate must be known and fall within the requested value
    if (request.AudioBitRate.HasValue)
    {
        var sourceBitrate = audioStream.BitRate;
        if (!sourceBitrate.HasValue || sourceBitrate.Value <= 0 || sourceBitrate.Value > request.AudioBitRate.Value)
        {
            return false;
        }
    }

    // Channel count must be known and fall within the requested value
    var maxChannels = request.AudioChannels ?? request.MaxAudioChannels;
    if (maxChannels.HasValue)
    {
        var sourceChannels = audioStream.Channels;
        if (!sourceChannels.HasValue || sourceChannels.Value <= 0 || sourceChannels.Value > maxChannels.Value)
        {
            return false;
        }
    }

    // Sample rate must be known and fall within the requested value
    if (request.AudioSampleRate.HasValue)
    {
        var sourceSampleRate = audioStream.SampleRate;
        if (!sourceSampleRate.HasValue || sourceSampleRate.Value <= 0 || sourceSampleRate.Value > request.AudioSampleRate.Value)
        {
            return false;
        }
    }

    // All constraints satisfied; defer to the auto-stream-copy setting
    return request.EnableAutoStreamCopy;
}
/// <summary>
/// Switches the output codecs to "copy" when the source streams already
/// satisfy all constraints in the request.
/// </summary>
/// <param name="state">The encoding job state.</param>
/// <param name="videoRequest">The video request options.</param>
internal static void TryStreamCopy(EncodingJob state, EncodingJobOptions videoRequest)
{
    if (!state.IsVideoRequest)
    {
        return;
    }

    var video = state.VideoStream;
    if (video != null && CanStreamCopyVideo(videoRequest, video))
    {
        state.OutputVideoCodec = "copy";
    }

    var audio = state.AudioStream;
    if (audio != null && CanStreamCopyAudio(videoRequest, audio, state.SupportedAudioCodecs))
    {
        state.OutputAudioCodec = "copy";
    }
}
/// <summary>
/// Encodes a video using the supplied options and waits for the job to complete.
/// </summary>
/// <param name="options">The encoding options for the job.</param>
/// <param name="progress">Reports encoding progress.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The path of the encoded output file.</returns>
public async Task <string> EncodeVideo(EncodingJobOptions options, IProgress <double> progress, CancellationToken cancellationToken)
{
    // Start the ffmpeg-backed encoder; Start returns once the job is running
    var job = await new VideoEncoder(this, _logger, ConfigurationManager, FileSystem, IsoManager, LibraryManager, SessionManager, SubtitleEncoder(), MediaSourceManager())
        .Start(options, progress, cancellationToken).ConfigureAwait(false);

    // Block until the encoding process has finished
    await job.TaskCompletionSource.Task.ConfigureAwait(false);

    return(job.OutputFilePath);
}
/// <summary>
/// Gets the video bitrate to specify on the command line, capped at the source
/// bitrate (unless upscaling) and scaled for the codec conversion.
/// </summary>
/// <param name="request">The request.</param>
/// <param name="videoStream">The source video stream (may be null).</param>
/// <param name="outputVideoCodec">The output video codec.</param>
/// <returns>The bitrate to request, or null when none was requested.</returns>
private int? GetVideoBitrateParamValue(EncodingJobOptions request, MediaStream videoStream, string outputVideoCodec)
{
    var bitrate = request.VideoBitRate;

    if (videoStream != null)
    {
        // Upscaling if either requested dimension exceeds the source dimension
        var upscaling = (request.Height.HasValue && videoStream.Height.HasValue && request.Height.Value > videoStream.Height.Value)
            || (request.Width.HasValue && videoStream.Width.HasValue && request.Width.Value > videoStream.Width.Value);

        // Don't allow bitrate increases unless upscaling
        if (!upscaling && bitrate.HasValue && videoStream.BitRate.HasValue)
        {
            bitrate = Math.Min(bitrate.Value, videoStream.BitRate.Value);
        }
    }

    if (bitrate.HasValue)
    {
        // Adjust the bitrate for the input -> output codec conversion
        var inputVideoCodec = videoStream == null ? null : videoStream.Codec;
        bitrate = ResolutionNormalizer.ScaleBitrate(bitrate.Value, inputVideoCodec, outputVideoCodec);

        // If a max bitrate was requested, don't let the scaled bitrate exceed it
        if (request.VideoBitRate.HasValue)
        {
            bitrate = Math.Min(bitrate.Value, request.VideoBitRate.Value);
        }
    }

    return bitrate;
}
/// <summary>
/// Gets the name of the output audio codec
/// </summary>
/// <param name="request">The request.</param>
/// <returns>System.String.</returns>
private string GetAudioCodec(EncodingJobOptions request)
{
    var requested = request.AudioCodec;

    // Well-known codec names mapped to their ffmpeg encoder arguments
    var encoderMap = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
    {
        { "aac", "aac -strict experimental" },
        { "mp3", "libmp3lame" },
        { "vorbis", "libvorbis" },
        { "wma", "wmav2" }
    };

    string encoder;
    if (requested != null && encoderMap.TryGetValue(requested, out encoder))
    {
        return encoder;
    }

    // Unknown codec: pass it through lower-cased
    return (requested ?? string.Empty).ToLower();
}
/// <summary>
/// Copies input details from a channel media source onto the job state and
/// delegates stream selection to the shared factory implementation.
/// </summary>
/// <param name="state">The encoding job state to populate.</param>
/// <param name="mediaInfo">The channel media info.</param>
/// <param name="videoRequest">The video request options, or null for audio-only.</param>
private void AttachMediaStreamInfo(EncodingJob state, ChannelMediaInfo mediaInfo, EncodingJobOptions videoRequest)
{
    var source = mediaInfo.ToMediaSource();

    state.InputProtocol = source.Protocol;
    state.MediaPath = source.Path;
    state.RunTimeTicks = source.RunTimeTicks;
    state.RemoteHttpHeaders = source.RequiredHttpHeaders;
    state.InputBitrate = source.Bitrate;
    state.InputFileSize = source.Size;
    state.ReadInputAtNativeFramerate = source.ReadAtNativeFramerate;

    if (state.ReadInputAtNativeFramerate)
    {
        // Sync settings used whenever input is read at its native framerate
        state.OutputAudioSync = "1000";
        state.InputVideoSync = "-1";
        state.InputAudioSync = "1";
    }

    EncodingJobFactory.AttachMediaStreamInfo(state, source.MediaStreams, videoRequest);
}
/// <summary>
/// Gets the number of audio channels to specify on the command line
/// </summary>
/// <param name="request">The request.</param>
/// <param name="audioStream">The audio stream.</param>
/// <param name="outputAudioCodec">The output audio codec.</param>
/// <returns>System.Nullable{System.Int32}.</returns>
private int?GetNumAudioChannelsParam(EncodingJobOptions request, MediaStream audioStream, string outputAudioCodec)
{
    // Treat a missing or non-positive source channel count as unknown
    var inputChannels = audioStream == null ? null : audioStream.Channels;

    if (inputChannels <= 0)
    {
        inputChannels = null;
    }

    var codec = outputAudioCodec ?? string.Empty;

    if (codec.IndexOf("wma", StringComparison.OrdinalIgnoreCase) != -1)
    {
        // wmav2 currently only supports two channel output
        return(Math.Min(2, inputChannels ?? 2));
    }

    if (request.MaxAudioChannels.HasValue)
    {
        // libmp3lame only supports two channel output; otherwise cap at 6
        var channelLimit = codec.IndexOf("mp3", StringComparison.OrdinalIgnoreCase) != -1
            ? 2
            : 6;

        if (inputChannels.HasValue)
        {
            // Never ask for more channels than the source provides
            channelLimit = Math.Min(channelLimit, inputChannels.Value);
        }

        // If we don't have any media info then limit it to 6 to prevent encoding errors due to asking for too many channels
        return(Math.Min(request.MaxAudioChannels.Value, channelLimit));
    }

    return(request.AudioChannels);
}
/// <summary>
/// Gets the working directory to use when launching the encoding process.
/// </summary>
/// <param name="options">The options.</param>
/// <returns>The working directory path, or null to use the process default.</returns>
protected virtual string GetWorkingDirectory(EncodingJobOptions options)
{
    // Base implementation has no specific working directory; subclasses may override
    return(null);
}
/// <summary>
/// Attaches media stream information from the media source to the encoding job state.
/// </summary>
/// <param name="state">The encoding job state to populate.</param>
/// <param name="mediaSource">The media source.</param>
/// <param name="videoRequest">The video request options, or null for audio-only.</param>
private void AttachMediaStreamInfo(EncodingJob state, MediaSourceInfo mediaSource, EncodingJobOptions videoRequest)
{
    // Delegate to the shared factory implementation
    EncodingJobFactory.AttachMediaStreamInfo(state, mediaSource, videoRequest);
}
/// <summary>
/// Creates an encoding job for the given options, resolving the media source via
/// the library and computing the output audio/video parameters with the helper.
/// </summary>
/// <param name="options">The encoding options.</param>
/// <param name="encodingHelper">Helper used to compute codec/bitrate/channel parameters.</param>
/// <param name="isVideoRequest">Whether the job outputs video (vs. audio only).</param>
/// <param name="progress">Progress reporter for the job.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The populated encoding job.</returns>
public async Task <EncodingJob> CreateJob(EncodingJobOptions options, EncodingHelper encodingHelper, bool isVideoRequest, IProgress <double> progress, CancellationToken cancellationToken)
{
    var request = options;

    // Fall back to a codec inferred from the container when none was requested
    if (string.IsNullOrEmpty(request.AudioCodec))
    {
        request.AudioCodec = InferAudioCodec(request.Container);
    }

    var state = new EncodingJob(_logger, _mediaSourceManager)
    {
        Options = options,
        IsVideoRequest = isVideoRequest,
        Progress = progress
    };

    // Codec options may be comma-separated preference lists; keep the full list
    // and promote the first entry to the primary codec
    if (!string.IsNullOrWhiteSpace(request.VideoCodec))
    {
        state.SupportedVideoCodecs = request.VideoCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToArray();
        request.VideoCodec = state.SupportedVideoCodecs.FirstOrDefault();
    }

    if (!string.IsNullOrWhiteSpace(request.AudioCodec))
    {
        state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToArray();
        request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault();
    }

    if (!string.IsNullOrWhiteSpace(request.SubtitleCodec))
    {
        state.SupportedSubtitleCodecs = request.SubtitleCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToArray();
        // Prefer the first subtitle codec the encoder can actually produce
        request.SubtitleCodec = state.SupportedSubtitleCodecs.FirstOrDefault(i => _mediaEncoder.CanEncodeToSubtitleCodec(i))
            ?? state.SupportedSubtitleCodecs.FirstOrDefault();
    }

    var item = _libraryManager.GetItemById(request.Id);

    state.ItemType = item.GetType().Name;
    state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);

    // TODO
    // var primaryImage = item.GetImageInfo(ImageType.Primary, 0) ??
    //                 item.Parents.Select(i => i.GetImageInfo(ImageType.Primary, 0)).FirstOrDefault(i => i != null);
    // if (primaryImage != null)
    // {
    //     state.AlbumCoverPath = primaryImage.Path;
    // }

    // TODO network path substition useful ?
    var mediaSources = await _mediaSourceManager.GetPlayackMediaSources(item, null, true, true, cancellationToken).ConfigureAwait(false);

    // Pick the explicitly-requested media source, or the first one
    var mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
        ? mediaSources.First()
        : mediaSources.First(i => string.Equals(i.Id, request.MediaSourceId));

    var videoRequest = state.Options;

    encodingHelper.AttachMediaSourceInfo(state, mediaSource, null);

    //var container = Path.GetExtension(state.RequestedUrl);

    //if (string.IsNullOrEmpty(container))
    //{
    //    container = request.Static ?
    //        state.InputContainer :
    //        (Path.GetExtension(GetOutputFilePath(state)) ?? string.Empty).TrimStart('.');
    //}

    //state.OutputContainer = (container ?? string.Empty).TrimStart('.');

    state.OutputAudioBitrate = encodingHelper.GetAudioBitrateParam(state.Options, state.AudioStream);
    state.OutputAudioCodec = state.Options.AudioCodec;

    state.OutputAudioChannels = encodingHelper.GetNumAudioChannelsParam(state, state.AudioStream, state.OutputAudioCodec);

    if (videoRequest != null)
    {
        state.OutputVideoCodec = state.Options.VideoCodec;
        state.OutputVideoBitrate = encodingHelper.GetVideoBitrateParamValue(state.Options, state.VideoStream, state.OutputVideoCodec);

        if (state.OutputVideoBitrate.HasValue)
        {
            // Scale the requested max resolution to match the output bitrate
            var resolution = ResolutionNormalizer.Normalize(
                state.VideoStream == null ? (int?)null : state.VideoStream.BitRate,
                state.VideoStream == null ? (int?)null : state.VideoStream.Width,
                state.VideoStream == null ? (int?)null : state.VideoStream.Height,
                state.OutputVideoBitrate.Value,
                state.VideoStream == null ? null : state.VideoStream.Codec,
                state.OutputVideoCodec,
                videoRequest.MaxWidth,
                videoRequest.MaxHeight);

            videoRequest.MaxWidth = resolution.MaxWidth;
            videoRequest.MaxHeight = resolution.MaxHeight;
        }
    }

    ApplyDeviceProfileSettings(state);

    if (videoRequest != null)
    {
        // Switch to stream copy when the source already satisfies all constraints
        encodingHelper.TryStreamCopy(state);
    }

    //state.OutputFilePath = GetOutputFilePath(state);

    return(state);
}
/// <summary>
/// Determines whether the video stream can be copied directly (without re-encoding)
/// while still satisfying the constraints in the request.
/// </summary>
/// <param name="request">The request.</param>
/// <param name="videoStream">The source video stream.</param>
/// <returns>True if direct stream copy is allowed; otherwise false.</returns>
internal static bool CanStreamCopyVideo(EncodingJobOptions request, MediaStream videoStream)
{
    // Interlaced sources are never stream-copied (they get deinterlaced instead —
    // see AttachMediaStreamInfo, which sets DeInterlace for interlaced streams)
    if (videoStream.IsInterlaced)
    {
        return(false);
    }

    // NOTE(review): anamorphic sources are rejected outright — presumably because
    // they need scaling on output; confirm against the playback pipeline
    if (videoStream.IsAnamorphic ?? false)
    {
        return(false);
    }

    // Can't stream copy if we're burning in subtitles
    if (request.SubtitleStreamIndex.HasValue)
    {
        if (request.SubtitleMethod == SubtitleDeliveryMethod.Encode)
        {
            return(false);
        }
    }

    // Source and target codecs must match
    if (!string.Equals(request.VideoCodec, videoStream.Codec, StringComparison.OrdinalIgnoreCase))
    {
        return(false);
    }

    // If client is requesting a specific video profile, it must match the source
    if (!string.IsNullOrEmpty(request.Profile))
    {
        // Unknown source profile: assume incompatible
        if (string.IsNullOrEmpty(videoStream.Profile))
        {
            return(false);
        }

        if (!string.Equals(request.Profile, videoStream.Profile, StringComparison.OrdinalIgnoreCase))
        {
            // Allow a lower-scored (more compatible) source profile;
            // reject higher or unrecognized (-1) ones
            var currentScore = GetVideoProfileScore(videoStream.Profile);
            var requestedScore = GetVideoProfileScore(request.Profile);

            if (currentScore == -1 || currentScore > requestedScore)
            {
                return(false);
            }
        }
    }

    // Video width must fall within requested value
    if (request.MaxWidth.HasValue)
    {
        if (!videoStream.Width.HasValue || videoStream.Width.Value > request.MaxWidth.Value)
        {
            return(false);
        }
    }

    // Video height must fall within requested value
    if (request.MaxHeight.HasValue)
    {
        if (!videoStream.Height.HasValue || videoStream.Height.Value > request.MaxHeight.Value)
        {
            return(false);
        }
    }

    // Video framerate must fall within requested value
    var requestedFramerate = request.MaxFramerate ?? request.Framerate;
    if (requestedFramerate.HasValue)
    {
        var videoFrameRate = videoStream.AverageFrameRate ?? videoStream.RealFrameRate;

        if (!videoFrameRate.HasValue || videoFrameRate.Value > requestedFramerate.Value)
        {
            return(false);
        }
    }

    // Video bitrate must fall within requested value
    if (request.VideoBitRate.HasValue)
    {
        if (!videoStream.BitRate.HasValue || videoStream.BitRate.Value > request.VideoBitRate.Value)
        {
            return(false);
        }
    }

    // Bit depth may only be exceeded when it is actually known
    if (request.MaxVideoBitDepth.HasValue)
    {
        if (videoStream.BitDepth.HasValue && videoStream.BitDepth.Value > request.MaxVideoBitDepth.Value)
        {
            return(false);
        }
    }

    // Reference frame count may only be exceeded when it is actually known
    if (request.MaxRefFrames.HasValue)
    {
        if (videoStream.RefFrames.HasValue && videoStream.RefFrames.Value > request.MaxRefFrames.Value)
        {
            return(false);
        }
    }

    // If a specific level was requested, the source must match or be less than
    if (request.Level.HasValue)
    {
        if (!videoStream.Level.HasValue)
        {
            return(false);
        }

        if (videoStream.Level.Value > request.Level.Value)
        {
            return(false);
        }
    }

    // All constraints satisfied; defer to the auto-stream-copy setting
    return(request.EnableAutoStreamCopy);
}
/// <summary>
/// Creates an encoding job for the given options, resolving the media source
/// and computing the output audio/video parameters.
/// </summary>
/// <param name="options">The encoding options.</param>
/// <param name="isVideoRequest">Whether the job outputs video (vs. audio only).</param>
/// <param name="progress">Progress reporter for the job.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The populated encoding job.</returns>
public async Task <EncodingJob> CreateJob(EncodingJobOptions options, bool isVideoRequest, IProgress <double> progress, CancellationToken cancellationToken)
{
    var request = options;

    // Fall back to a codec inferred from the output container when none was requested
    if (string.IsNullOrEmpty(request.AudioCodec))
    {
        request.AudioCodec = InferAudioCodec(request.OutputContainer);
    }

    var state = new EncodingJob(_logger, _mediaSourceManager)
    {
        Options = options,
        IsVideoRequest = isVideoRequest,
        Progress = progress
    };

    // The audio codec option may be a comma-separated preference list; keep the
    // full list and promote the first entry to the primary codec
    if (!string.IsNullOrWhiteSpace(request.AudioCodec))
    {
        state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
        request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault();
    }

    var item = _libraryManager.GetItemById(request.ItemId);

    state.ItemType = item.GetType().Name;
    state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);

    var mediaSources = await _mediaSourceManager.GetPlayackMediaSources(request.ItemId, null, false, new[] { MediaType.Audio, MediaType.Video }, cancellationToken).ConfigureAwait(false);

    // Pick the explicitly-requested media source, or the first one
    var mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
        ? mediaSources.First()
        : mediaSources.First(i => string.Equals(i.Id, request.MediaSourceId));

    var videoRequest = state.Options;

    AttachMediaSourceInfo(state, mediaSource, videoRequest);

    //var container = Path.GetExtension(state.RequestedUrl);

    //if (string.IsNullOrEmpty(container))
    //{
    //    container = request.Static ?
    //        state.InputContainer :
    //        (Path.GetExtension(GetOutputFilePath(state)) ?? string.Empty).TrimStart('.');
    //}

    //state.OutputContainer = (container ?? string.Empty).TrimStart('.');

    state.OutputAudioBitrate = GetAudioBitrateParam(state.Options, state.AudioStream);
    state.OutputAudioSampleRate = request.AudioSampleRate;

    state.OutputAudioCodec = state.Options.AudioCodec;

    state.OutputAudioChannels = GetNumAudioChannelsParam(state.Options, state.AudioStream, state.OutputAudioCodec);

    if (videoRequest != null)
    {
        state.OutputVideoCodec = state.Options.VideoCodec;
        state.OutputVideoBitrate = GetVideoBitrateParamValue(state.Options, state.VideoStream);

        if (state.OutputVideoBitrate.HasValue)
        {
            // Scale the requested max resolution to match the output bitrate
            var resolution = ResolutionNormalizer.Normalize(
                state.VideoStream == null ? (int?)null : state.VideoStream.BitRate,
                state.OutputVideoBitrate.Value,
                state.VideoStream == null ? null : state.VideoStream.Codec,
                state.OutputVideoCodec,
                videoRequest.MaxWidth,
                videoRequest.MaxHeight);

            videoRequest.MaxWidth = resolution.MaxWidth;
            videoRequest.MaxHeight = resolution.MaxHeight;
        }
    }

    ApplyDeviceProfileSettings(state);

    if (videoRequest != null)
    {
        // Switch to stream copy when the source already satisfies all constraints
        TryStreamCopy(state, videoRequest);
    }

    //state.OutputFilePath = GetOutputFilePath(state);

    return(state);
}
/// <summary>
/// Creates an encoding job, with special handling for live TV recordings and
/// live TV channels in addition to regular library items.
/// </summary>
/// <param name="options">The encoding options.</param>
/// <param name="isVideoRequest">Whether the job outputs video (vs. audio only).</param>
/// <param name="progress">Progress reporter for the job.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The populated encoding job.</returns>
public async Task <EncodingJob> CreateJob(EncodingJobOptions options, bool isVideoRequest, IProgress <double> progress, CancellationToken cancellationToken)
{
    var request = options;

    // Fall back to a codec inferred from the output container when none was requested
    if (string.IsNullOrEmpty(request.AudioCodec))
    {
        request.AudioCodec = InferAudioCodec(request.OutputContainer);
    }

    var state = new EncodingJob(_logger, _liveTvManager)
    {
        Options = options,
        IsVideoRequest = isVideoRequest,
        Progress = progress
    };

    // The audio codec option may be a comma-separated preference list; keep the
    // full list and promote the first entry to the primary codec
    if (!string.IsNullOrWhiteSpace(request.AudioCodec))
    {
        state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
        request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault();
    }

    var item = _libraryManager.GetItemById(request.ItemId);

    List <MediaStream> mediaStreams = null;

    state.ItemType = item.GetType().Name;

    if (item is ILiveTvRecording)
    {
        // Live TV recording: the input may be a local file or an HTTP url
        var recording = await _liveTvManager.GetInternalRecording(request.ItemId, cancellationToken).ConfigureAwait(false);

        state.VideoType = VideoType.VideoFile;
        state.IsInputVideo = string.Equals(recording.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);

        var path = recording.RecordingInfo.Path;
        var mediaUrl = recording.RecordingInfo.Url;

        var source = string.IsNullOrEmpty(request.MediaSourceId)
            ? recording.GetMediaSources(false).First()
            : _mediaSourceManager.GetStaticMediaSource(recording, request.MediaSourceId, false);

        mediaStreams = source.MediaStreams;

        // Just to prevent this from being null and causing other methods to fail
        state.MediaPath = string.Empty;

        if (!string.IsNullOrEmpty(path))
        {
            state.MediaPath = path;
            state.InputProtocol = MediaProtocol.File;
        }
        else if (!string.IsNullOrEmpty(mediaUrl))
        {
            state.MediaPath = mediaUrl;
            state.InputProtocol = MediaProtocol.Http;
        }

        state.RunTimeTicks = recording.RunTimeTicks;
        state.DeInterlace = true;
        state.OutputAudioSync = "1000";
        state.InputVideoSync = "-1";
        state.InputAudioSync = "1";
        state.InputContainer = recording.Container;
        state.ReadInputAtNativeFramerate = source.ReadAtNativeFramerate;
    }
    else if (item is LiveTvChannel)
    {
        // Live channel: no stream info is available up front
        var channel = _liveTvManager.GetInternalChannel(request.ItemId);

        state.VideoType = VideoType.VideoFile;
        state.IsInputVideo = string.Equals(channel.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
        mediaStreams = new List <MediaStream>();

        state.DeInterlace = true;

        // Just to prevent this from being null and causing other methods to fail
        state.MediaPath = string.Empty;
    }
    else
    {
        // Regular library item: resolve its media source
        var mediaSources = await _mediaSourceManager.GetPlayackMediaSources(request.ItemId, false, cancellationToken).ConfigureAwait(false);

        var mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
            ? mediaSources.First()
            : mediaSources.First(i => string.Equals(i.Id, request.MediaSourceId));

        mediaStreams = mediaSource.MediaStreams;

        state.MediaPath = mediaSource.Path;
        state.InputProtocol = mediaSource.Protocol;
        state.InputContainer = mediaSource.Container;
        state.InputFileSize = mediaSource.Size;
        state.InputBitrate = mediaSource.Bitrate;
        state.ReadInputAtNativeFramerate = mediaSource.ReadAtNativeFramerate;
        state.RunTimeTicks = mediaSource.RunTimeTicks;
        state.RemoteHttpHeaders = mediaSource.RequiredHttpHeaders;

        var video = item as Video;

        if (video != null)
        {
            state.IsInputVideo = true;

            if (mediaSource.VideoType.HasValue)
            {
                state.VideoType = mediaSource.VideoType.Value;
            }

            state.IsoType = mediaSource.IsoType;

            state.PlayableStreamFileNames = mediaSource.PlayableStreamFileNames.ToList();

            if (mediaSource.Timestamp.HasValue)
            {
                state.InputTimestamp = mediaSource.Timestamp.Value;
            }
        }

        state.RunTimeTicks = mediaSource.RunTimeTicks;
    }

    AttachMediaStreamInfo(state, mediaStreams, request);

    state.OutputAudioBitrate = GetAudioBitrateParam(request, state.AudioStream);
    state.OutputAudioSampleRate = request.AudioSampleRate;

    state.OutputAudioCodec = GetAudioCodec(request);

    state.OutputAudioChannels = GetNumAudioChannelsParam(request, state.AudioStream, state.OutputAudioCodec);

    if (isVideoRequest)
    {
        state.OutputVideoCodec = GetVideoCodec(request);
        state.OutputVideoBitrate = GetVideoBitrateParamValue(request, state.VideoStream);

        if (state.OutputVideoBitrate.HasValue)
        {
            // Scale the requested max resolution to match the output bitrate
            var resolution = ResolutionNormalizer.Normalize(state.OutputVideoBitrate.Value,
                state.OutputVideoCodec,
                request.MaxWidth,
                request.MaxHeight);

            request.MaxWidth = resolution.MaxWidth;
            request.MaxHeight = resolution.MaxHeight;
        }
    }

    ApplyDeviceProfileSettings(state);

    if (isVideoRequest)
    {
        // Switch to stream copy when the source already satisfies all constraints
        if (state.VideoStream != null && CanStreamCopyVideo(request, state.VideoStream))
        {
            state.OutputVideoCodec = "copy";
        }

        if (state.AudioStream != null && CanStreamCopyAudio(request, state.AudioStream, state.SupportedAudioCodecs))
        {
            state.OutputAudioCodec = "copy";
        }
    }

    return(state);
}
/// <summary>
/// Copies input details from the media source onto the job state, applies
/// container-specific sync settings, and selects the streams to use.
/// </summary>
/// <param name="state">The encoding job state to populate.</param>
/// <param name="mediaSource">The resolved media source.</param>
/// <param name="videoRequest">The video request options, or null for audio-only.</param>
internal static void AttachMediaSourceInfo(EncodingJob state, MediaSourceInfo mediaSource, EncodingJobOptions videoRequest)
{
    // NOTE: the original implementation assigned MediaPath, InputProtocol,
    // InputBitrate, InputFileSize, RunTimeTicks and RemoteHttpHeaders twice
    // with identical values; the redundant second set has been removed.
    state.MediaPath = mediaSource.Path;
    state.InputProtocol = mediaSource.Protocol;
    state.InputContainer = mediaSource.Container;
    state.InputFileSize = mediaSource.Size;
    state.InputBitrate = mediaSource.Bitrate;
    state.RunTimeTicks = mediaSource.RunTimeTicks;
    state.RemoteHttpHeaders = mediaSource.RequiredHttpHeaders;
    state.ReadInputAtNativeFramerate = mediaSource.ReadAtNativeFramerate;

    if (mediaSource.VideoType.HasValue)
    {
        state.VideoType = mediaSource.VideoType.Value;
    }

    state.IsoType = mediaSource.IsoType;

    state.PlayableStreamFileNames = mediaSource.PlayableStreamFileNames.ToList();

    if (mediaSource.Timestamp.HasValue)
    {
        state.InputTimestamp = mediaSource.Timestamp.Value;
    }

    // Sync settings for native-framerate input and local wtv files
    if (state.ReadInputAtNativeFramerate ||
        mediaSource.Protocol == MediaProtocol.File && string.Equals(mediaSource.Container, "wtv", StringComparison.OrdinalIgnoreCase))
    {
        state.OutputAudioSync = "1000";
        state.InputVideoSync = "-1";
        state.InputAudioSync = "1";
    }

    if (string.Equals(mediaSource.Container, "wma", StringComparison.OrdinalIgnoreCase))
    {
        // Seeing some stuttering when transcoding wma to audio-only HLS
        state.InputAudioSync = "1";
    }

    var mediaStreams = mediaSource.MediaStreams;

    if (videoRequest != null)
    {
        if (string.IsNullOrEmpty(videoRequest.VideoCodec))
        {
            videoRequest.VideoCodec = InferVideoCodec(videoRequest.OutputContainer);
        }

        state.VideoStream = GetMediaStream(mediaStreams, videoRequest.VideoStreamIndex, MediaStreamType.Video);
        state.SubtitleStream = GetMediaStream(mediaStreams, videoRequest.SubtitleStreamIndex, MediaStreamType.Subtitle, false);
        state.AudioStream = GetMediaStream(mediaStreams, videoRequest.AudioStreamIndex, MediaStreamType.Audio);

        if (state.SubtitleStream != null && !state.SubtitleStream.IsExternal)
        {
            // Index of the chosen subtitle stream among embedded subtitle streams only
            state.InternalSubtitleStreamOffset = mediaStreams.Where(i => i.Type == MediaStreamType.Subtitle && !i.IsExternal).ToList().IndexOf(state.SubtitleStream);
        }

        if (state.VideoStream != null && state.VideoStream.IsInterlaced)
        {
            state.DeInterlace = true;
        }

        EnforceResolutionLimit(state, videoRequest);
    }
    else
    {
        // Audio-only: just pick the default audio stream
        state.AudioStream = GetMediaStream(mediaStreams, null, MediaStreamType.Audio, true);
    }

    state.MediaSource = mediaSource;
}
/// <summary>
/// Creates the encoding job, launches the ffmpeg process for it, and returns
/// once the output file exists (or the process has exited).
/// </summary>
/// <param name="options">The encoding options.</param>
/// <param name="progress">Progress reporter for the job.</param>
/// <param name="cancellationToken">Cancels the job and kills the process.</param>
/// <returns>The running encoding job.</returns>
public async Task <EncodingJob> Start(EncodingJobOptions options, IProgress <double> progress, CancellationToken cancellationToken)
{
    var encodingJob = await new EncodingJobFactory(Logger, LiveTvManager, LibraryManager, ChannelManager)
        .CreateJob(options, IsVideoEncoder, progress, cancellationToken).ConfigureAwait(false);

    encodingJob.OutputFilePath = GetOutputFilePath(encodingJob);
    Directory.CreateDirectory(Path.GetDirectoryName(encodingJob.OutputFilePath));

    if (options.Context == EncodingContext.Static && encodingJob.IsInputVideo)
    {
        encodingJob.ReadInputAtNativeFramerate = true;
    }

    await AcquireResources(encodingJob, cancellationToken).ConfigureAwait(false);

    var commandLineArgs = GetCommandLineArguments(encodingJob);

    if (GetEncodingOptions().EnableDebugLogging)
    {
        commandLineArgs = "-loglevel debug " + commandLineArgs;
    }

    var process = new Process
    {
        StartInfo = new ProcessStartInfo
        {
            CreateNoWindow = true,
            UseShellExecute = false,

            // Must consume both stdout and stderr or deadlocks may occur
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            RedirectStandardInput = true,

            FileName = MediaEncoder.EncoderPath,
            Arguments = commandLineArgs,

            WindowStyle = ProcessWindowStyle.Hidden,
            ErrorDialog = false
        },

        EnableRaisingEvents = true
    };

    var workingDirectory = GetWorkingDirectory(options);
    if (!string.IsNullOrWhiteSpace(workingDirectory))
    {
        process.StartInfo.WorkingDirectory = workingDirectory;
    }

    OnTranscodeBeginning(encodingJob);

    var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments;
    Logger.Info(commandLineLogMessage);

    var logFilePath = Path.Combine(ConfigurationManager.CommonApplicationPaths.LogDirectoryPath, "transcode-" + Guid.NewGuid() + ".txt");
    Directory.CreateDirectory(Path.GetDirectoryName(logFilePath));

    // FFMpeg writes debug/error info to stderr. This is useful when debugging so let's put it in the log directory.
    encodingJob.LogFileStream = FileSystem.GetFileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, true);

    // Record the full command line at the top of the log file
    var commandLineLogMessageBytes = Encoding.UTF8.GetBytes(commandLineLogMessage + Environment.NewLine + Environment.NewLine);
    await encodingJob.LogFileStream.WriteAsync(commandLineLogMessageBytes, 0, commandLineLogMessageBytes.Length, cancellationToken).ConfigureAwait(false);

    process.Exited += (sender, args) => OnFfMpegProcessExited(process, encodingJob);

    try
    {
        process.Start();
    }
    catch (Exception ex)
    {
        Logger.ErrorException("Error starting ffmpeg", ex);

        OnTranscodeFailedToStart(encodingJob.OutputFilePath, encodingJob);

        throw;
    }

    cancellationToken.Register(() => Cancel(process, encodingJob));

    // MUST read both stdout and stderr asynchronously or a deadlock may occurr
    process.BeginOutputReadLine();

    // Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback
    new JobLogger(Logger).StartStreamingLog(encodingJob, process.StandardError.BaseStream, encodingJob.LogFileStream);

    // Wait for the file to exist before proceeeding
    while (!File.Exists(encodingJob.OutputFilePath) && !encodingJob.HasExited)
    {
        await Task.Delay(100, cancellationToken).ConfigureAwait(false);
    }

    return(encodingJob);
}
/// <summary>
/// Creates an encoding job for the given options, resolving the media source,
/// computing output parameters, and setting the quality level from the context.
/// </summary>
/// <param name="options">The encoding options.</param>
/// <param name="isVideoRequest">Whether the job outputs video (vs. audio only).</param>
/// <param name="progress">Progress reporter for the job.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The populated encoding job.</returns>
public async Task <EncodingJob> CreateJob(EncodingJobOptions options, bool isVideoRequest, IProgress <double> progress, CancellationToken cancellationToken)
{
    var request = options;

    // Fall back to a codec inferred from the output container when none was requested
    if (string.IsNullOrEmpty(request.AudioCodec))
    {
        request.AudioCodec = InferAudioCodec(request.OutputContainer);
    }

    var state = new EncodingJob(_logger, _mediaSourceManager)
    {
        Options = options,
        IsVideoRequest = isVideoRequest,
        Progress = progress
    };

    // The audio codec option may be a comma-separated preference list; keep the
    // full list and promote the first entry to the primary codec
    if (!string.IsNullOrWhiteSpace(request.AudioCodec))
    {
        state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
        request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault();
    }

    var item = _libraryManager.GetItemById(request.ItemId);

    state.ItemType = item.GetType().Name;
    state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);

    var mediaSources = await _mediaSourceManager.GetPlayackMediaSources(request.ItemId, null, false, new[] { MediaType.Audio, MediaType.Video }, cancellationToken).ConfigureAwait(false);

    // Pick the explicitly-requested media source, or the first one
    var mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
        ? mediaSources.First()
        : mediaSources.First(i => string.Equals(i.Id, request.MediaSourceId));

    AttachMediaStreamInfo(state, mediaSource, options);

    state.OutputAudioBitrate = GetAudioBitrateParam(request, state.AudioStream);
    state.OutputAudioSampleRate = request.AudioSampleRate;

    state.OutputAudioCodec = GetAudioCodec(request);

    state.OutputAudioChannels = GetNumAudioChannelsParam(request, state.AudioStream, state.OutputAudioCodec);

    if (isVideoRequest)
    {
        state.OutputVideoCodec = GetVideoCodec(request);
        state.OutputVideoBitrate = GetVideoBitrateParamValue(request, state.VideoStream);

        if (state.OutputVideoBitrate.HasValue)
        {
            // Scale the requested max resolution to match the output bitrate
            var resolution = ResolutionNormalizer.Normalize(state.OutputVideoBitrate.Value,
                state.OutputVideoCodec,
                request.MaxWidth,
                request.MaxHeight);

            request.MaxWidth = resolution.MaxWidth;
            request.MaxHeight = resolution.MaxHeight;
        }
    }

    ApplyDeviceProfileSettings(state);

    // Switch to stream copy when the source already satisfies all constraints
    TryStreamCopy(state, request);

    // Static delivery always uses max quality; otherwise use the configured setting
    state.Quality = options.Context == EncodingContext.Static
        ? EncodingQuality.MaxQuality
        : GetQualitySetting();

    return(state);
}