protected string GetInputModifier(StreamState state, bool genPts = true)
{
    var inputModifier = string.Empty;

    var probeSize = GetProbeSizeArgument(state);
    inputModifier += " " + probeSize;
    inputModifier = inputModifier.Trim();

    var userAgentParam = GetUserAgentParam(state);

    if (!string.IsNullOrWhiteSpace(userAgentParam))
    {
        inputModifier += " " + userAgentParam;
    }

    inputModifier = inputModifier.Trim();

    inputModifier += " " + GetFastSeekCommandLineParameter(state.Request);
    inputModifier = inputModifier.Trim();

    if (state.VideoRequest != null && genPts)
    {
        inputModifier += " -fflags +genpts";
    }

    if (!string.IsNullOrEmpty(state.InputAudioSync))
    {
        inputModifier += " -async " + state.InputAudioSync;
    }

    if (!string.IsNullOrEmpty(state.InputVideoSync))
    {
        inputModifier += " -vsync " + state.InputVideoSync;
    }

    if (state.ReadInputAtNativeFramerate)
    {
        inputModifier += " -re";
    }

    return inputModifier;
}
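// Illustrative only (a sketch, not actual helper output): for a video request with genPts
// enabled, InputAudioSync = "1", and ReadInputAtNativeFramerate set, the assembled modifier is
//
//   <probe size args> <user agent args> <fast seek args> -fflags +genpts -async 1 -re
//
// where the bracketed fragments come from GetProbeSizeArgument, GetUserAgentParam and
// GetFastSeekCommandLineParameter respectively.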
private void ApplyDeviceProfileSettings(StreamState state)
{
    var headers = new Dictionary<string, string>();

    foreach (var key in Request.Headers.AllKeys)
    {
        headers[key] = Request.Headers[key];
    }

    state.DeviceProfile = string.IsNullOrWhiteSpace(state.Request.DeviceProfileId)
        ? DlnaManager.GetProfile(headers)
        : DlnaManager.GetProfile(state.Request.DeviceProfileId);

    var profile = state.DeviceProfile;

    if (profile == null)
    {
        // Don't use settings from the default profile.
        // Only use a specific profile if it was requested.
        return;
    }

    var audioCodec = state.OutputAudioCodec;

    if (string.Equals(audioCodec, "copy", StringComparison.OrdinalIgnoreCase) && state.AudioStream != null)
    {
        audioCodec = state.AudioStream.Codec;
    }

    var videoCodec = state.OutputVideoCodec;

    if (string.Equals(videoCodec, "copy", StringComparison.OrdinalIgnoreCase) && state.VideoStream != null)
    {
        videoCodec = state.VideoStream.Codec;
    }

    var mediaProfile = state.VideoRequest == null
        ? profile.GetAudioMediaProfile(state.OutputContainer, audioCodec, state.OutputAudioChannels, state.OutputAudioBitrate)
        : profile.GetVideoMediaProfile(state.OutputContainer,
            audioCodec,
            videoCodec,
            state.OutputAudioBitrate,
            state.OutputAudioChannels,
            state.OutputWidth,
            state.OutputHeight,
            state.TargetVideoBitDepth,
            state.OutputVideoBitrate,
            state.TargetVideoProfile,
            state.TargetVideoLevel,
            state.TargetFramerate,
            state.TargetPacketLength,
            state.TargetTimestamp,
            state.IsTargetAnamorphic);

    if (mediaProfile != null)
    {
        state.MimeType = mediaProfile.MimeType;
    }

    var transcodingProfile = state.VideoRequest == null
        ? profile.GetAudioTranscodingProfile(state.OutputContainer, audioCodec)
        : profile.GetVideoTranscodingProfile(state.OutputContainer, audioCodec, videoCodec);

    if (transcodingProfile != null)
    {
        state.EstimateContentLength = transcodingProfile.EstimateContentLength;
        state.EnableMpegtsM2TsMode = transcodingProfile.EnableMpegtsM2TsMode;
        state.TranscodeSeekInfo = transcodingProfile.TranscodeSeekInfo;

        if (state.VideoRequest != null && string.IsNullOrWhiteSpace(state.VideoRequest.Profile))
        {
            state.VideoRequest.Profile = transcodingProfile.VideoProfile;
        }
    }
}
private void AddTimeSeekResponseHeaders(StreamState state, IDictionary<string, string> responseHeaders)
{
    var runtimeSeconds = TimeSpan.FromTicks(state.RunTimeTicks.Value).TotalSeconds.ToString(UsCulture);
    var startSeconds = TimeSpan.FromTicks(state.Request.StartTimeTicks ?? 0).TotalSeconds.ToString(UsCulture);

    responseHeaders["TimeSeekRange.dlna.org"] = string.Format("npt={0}-{1}/{1}", startSeconds, runtimeSeconds);
    responseHeaders["X-AvailableSeekRange"] = string.Format("1 npt={0}-{1}", startSeconds, runtimeSeconds);
}
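// Illustrative only: for a 2-hour item with playback starting 90 seconds in, the two headers
// produced above come out as
//
//   TimeSeekRange.dlna.org: npt=90-7200/7200
//   X-AvailableSeekRange: 1 npt=90-7200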
/// <summary>
/// Gets the command line arguments.
/// </summary>
/// <param name="outputPath">The output path.</param>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected abstract string GetCommandLineArguments(string outputPath, StreamState state);
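// A minimal, hypothetical sketch of what a derived service's override might look like. The
// helpers referenced (GetInputModifier, GetInputArgument, GetMapArgs) exist in this class, but
// the composition below is illustrative rather than the actual implementation of any concrete
// streaming service:
//
// protected override string GetCommandLineArguments(string outputPath, StreamState state)
// {
//     // Input modifiers + input + stream selection + output file
//     return string.Format("{0} -i {1} {2} \"{3}\"",
//         GetInputModifier(state),
//         GetInputArgument(state),
//         GetMapArgs(state),
//         outputPath);
// }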
/// <summary>
/// Gets the map args.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected virtual string GetMapArgs(StreamState state)
{
    // If we don't have known media info, use -sn to drop subtitles when the input is video;
    // otherwise just return empty
    if (state.VideoStream == null && state.AudioStream == null)
    {
        return state.IsInputVideo ? "-sn" : string.Empty;
    }

    // We have media info, but we don't know the video stream index
    if (state.VideoStream != null && state.VideoStream.Index == -1)
    {
        return "-sn";
    }

    // We have media info, but we don't know the audio stream index
    if (state.AudioStream != null && state.AudioStream.Index == -1)
    {
        return state.IsInputVideo ? "-sn" : string.Empty;
    }

    var args = string.Empty;

    if (state.VideoStream != null)
    {
        args += string.Format("-map 0:{0}", state.VideoStream.Index);
    }
    else
    {
        // No known video stream: exclude all video
        args += "-map -0:v";
    }

    if (state.AudioStream != null)
    {
        args += string.Format(" -map 0:{0}", state.AudioStream.Index);
    }
    else
    {
        // No audio stream: exclude all audio
        args += " -map -0:a";
    }

    if (state.SubtitleStream == null)
    {
        args += " -map -0:s";
    }

    return args;
}
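// Illustrative only: with a video stream at index 0, an audio stream at index 1 and no
// subtitle stream selected, this produces
//
//   -map 0:0 -map 0:1 -map -0:s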
/// <summary>
/// Gets the internal graphical subtitle param.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputVideoCodec">The output video codec.</param>
/// <returns>System.String.</returns>
protected string GetGraphicalSubtitleParam(StreamState state, string outputVideoCodec)
{
    var outputSizeParam = string.Empty;

    var request = state.VideoRequest;

    // Add resolution params, if specified
    if (request.Width.HasValue || request.Height.HasValue || request.MaxHeight.HasValue || request.MaxWidth.HasValue)
    {
        outputSizeParam = GetOutputSizeParam(state, outputVideoCodec, CancellationToken.None).TrimEnd('"');
        outputSizeParam = "," + outputSizeParam.Substring(outputSizeParam.IndexOf("scale", StringComparison.OrdinalIgnoreCase));
    }

    var videoSizeParam = string.Empty;

    if (state.VideoStream != null && state.VideoStream.Width.HasValue && state.VideoStream.Height.HasValue)
    {
        videoSizeParam = string.Format(",scale={0}:{1}",
            state.VideoStream.Width.Value.ToString(UsCulture),
            state.VideoStream.Height.Value.ToString(UsCulture));
    }

    return string.Format(" -filter_complex \"[0:{0}]format=yuva444p{3},lut=u=128:v=128:y=gammaval(.3)[sub] ; [0:{1}] [sub] overlay{2}\"",
        state.SubtitleStream.Index,
        state.VideoStream.Index,
        outputSizeParam,
        videoSizeParam);
}
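// Illustrative only: for a subtitle stream at index 2 overlaid onto a 1920x1080 video stream
// at index 0, with no output resize requested, the filter graph becomes
//
//   -filter_complex "[0:2]format=yuva444p,scale=1920:1080,lut=u=128:v=128:y=gammaval(.3)[sub] ; [0:0] [sub] overlay"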
/// <summary>
/// Starts ffmpeg.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputPath">The output path.</param>
/// <param name="cancellationTokenSource">The cancellation token source.</param>
/// <returns>Task.</returns>
/// <exception cref="System.InvalidOperationException">Thrown when ffmpeg is not found at MediaEncoder.EncoderPath.</exception>
protected async Task StartFfMpeg(StreamState state, string outputPath, CancellationTokenSource cancellationTokenSource)
{
    if (!File.Exists(MediaEncoder.EncoderPath))
    {
        throw new InvalidOperationException("ffmpeg was not found at " + MediaEncoder.EncoderPath);
    }

    Directory.CreateDirectory(Path.GetDirectoryName(outputPath));

    await AcquireResources(state, cancellationTokenSource).ConfigureAwait(false);

    var commandLineArgs = GetCommandLineArguments(outputPath, state, true);

    if (ServerConfigurationManager.Configuration.EnableDebugEncodingLogging)
    {
        commandLineArgs = "-loglevel debug " + commandLineArgs;
    }

    var process = new Process
    {
        StartInfo = new ProcessStartInfo
        {
            CreateNoWindow = true,
            UseShellExecute = false,

            // Must consume both stdout and stderr or deadlocks may occur
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            RedirectStandardInput = true,

            FileName = MediaEncoder.EncoderPath,
            WorkingDirectory = Path.GetDirectoryName(MediaEncoder.EncoderPath),
            Arguments = commandLineArgs,

            WindowStyle = ProcessWindowStyle.Hidden,
            ErrorDialog = false
        },

        EnableRaisingEvents = true
    };

    ApiEntryPoint.Instance.OnTranscodeBeginning(outputPath, TranscodingJobType, process, state.Request.DeviceId, state, cancellationTokenSource);

    var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments;
    Logger.Info(commandLineLogMessage);

    var logFilePath = Path.Combine(ServerConfigurationManager.ApplicationPaths.LogDirectoryPath, "transcode-" + Guid.NewGuid() + ".txt");
    Directory.CreateDirectory(Path.GetDirectoryName(logFilePath));

    // FFMpeg writes debug/error info to stderr. This is useful when debugging, so let's put it in the log directory.
    state.LogFileStream = FileSystem.GetFileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, true);

    var commandLineLogMessageBytes = Encoding.UTF8.GetBytes(commandLineLogMessage + Environment.NewLine + Environment.NewLine);
    await state.LogFileStream.WriteAsync(commandLineLogMessageBytes, 0, commandLineLogMessageBytes.Length, cancellationTokenSource.Token).ConfigureAwait(false);

    process.Exited += (sender, args) => OnFfMpegProcessExited(process, state, outputPath);

    try
    {
        process.Start();
    }
    catch (Exception ex)
    {
        Logger.ErrorException("Error starting ffmpeg", ex);

        ApiEntryPoint.Instance.OnTranscodeFailedToStart(outputPath, TranscodingJobType, state);

        throw;
    }

    // MUST read both stdout and stderr asynchronously or a deadlock may occur
    process.BeginOutputReadLine();

    // Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback
    StartStreamingLog(state, process.StandardError.BaseStream, state.LogFileStream);

    // Wait for the file to exist before proceeding
    while (!File.Exists(outputPath))
    {
        await Task.Delay(100, cancellationTokenSource.Token).ConfigureAwait(false);
    }
}
/// <summary>
/// Starts ffmpeg.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputPath">The output path.</param>
/// <param name="cancellationTokenSource">The cancellation token source.</param>
/// <param name="workingDirectory">The working directory.</param>
/// <returns>Task.</returns>
protected async Task<TranscodingJob> StartFfMpeg(StreamState state, string outputPath, CancellationTokenSource cancellationTokenSource, string workingDirectory = null)
{
    FileSystem.CreateDirectory(FileSystem.GetDirectoryName(outputPath));

    await AcquireResources(state, cancellationTokenSource).ConfigureAwait(false);

    if (state.VideoRequest != null && !string.Equals(state.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase))
    {
        var auth = AuthorizationContext.GetAuthorizationInfo(Request);
        if (!string.IsNullOrWhiteSpace(auth.UserId))
        {
            var user = UserManager.GetUserById(auth.UserId);
            if (!user.Policy.EnableVideoPlaybackTranscoding)
            {
                ApiEntryPoint.Instance.OnTranscodeFailedToStart(outputPath, TranscodingJobType, state);

                throw new ArgumentException("User does not have access to video transcoding");
            }
        }
    }

    var transcodingId = Guid.NewGuid().ToString("N");
    var commandLineArgs = GetCommandLineArguments(outputPath, state, true);

    var process = ApiEntryPoint.Instance.ProcessFactory.Create(new ProcessOptions
    {
        CreateNoWindow = true,
        UseShellExecute = false,

        // Must consume both stdout and stderr or deadlocks may occur
        //RedirectStandardOutput = true,
        RedirectStandardError = true,
        RedirectStandardInput = true,

        FileName = MediaEncoder.EncoderPath,
        Arguments = commandLineArgs,

        IsHidden = true,
        ErrorDialog = false,
        EnableRaisingEvents = true,
        WorkingDirectory = !string.IsNullOrWhiteSpace(workingDirectory) ? workingDirectory : null
    });

    var transcodingJob = ApiEntryPoint.Instance.OnTranscodeBeginning(outputPath,
        state.Request.PlaySessionId,
        state.MediaSource.LiveStreamId,
        transcodingId,
        TranscodingJobType,
        process,
        state.Request.DeviceId,
        state,
        cancellationTokenSource);

    var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments;
    Logger.Info(commandLineLogMessage);

    var logFilePrefix = "ffmpeg-transcode";
    if (state.VideoRequest != null
        && string.Equals(state.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase)
        && string.Equals(state.OutputAudioCodec, "copy", StringComparison.OrdinalIgnoreCase))
    {
        logFilePrefix = "ffmpeg-directstream";
    }
    else if (state.VideoRequest != null && string.Equals(state.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase))
    {
        logFilePrefix = "ffmpeg-remux";
    }

    var logFilePath = Path.Combine(ServerConfigurationManager.ApplicationPaths.LogDirectoryPath, logFilePrefix + "-" + Guid.NewGuid() + ".txt");
    FileSystem.CreateDirectory(FileSystem.GetDirectoryName(logFilePath));

    // FFMpeg writes debug/error info to stderr. This is useful when debugging, so let's put it in the log directory.
    state.LogFileStream = FileSystem.GetFileStream(logFilePath, FileOpenMode.Create, FileAccessMode.Write, FileShareMode.Read, true);

    var commandLineLogMessageBytes = Encoding.UTF8.GetBytes(Request.AbsoluteUri + Environment.NewLine + Environment.NewLine
        + JsonSerializer.SerializeToString(state.MediaSource) + Environment.NewLine + Environment.NewLine
        + commandLineLogMessage + Environment.NewLine + Environment.NewLine);
    await state.LogFileStream.WriteAsync(commandLineLogMessageBytes, 0, commandLineLogMessageBytes.Length, cancellationTokenSource.Token).ConfigureAwait(false);

    process.Exited += (sender, args) => OnFfMpegProcessExited(process, transcodingJob, state);

    try
    {
        process.Start();
    }
    catch (Exception ex)
    {
        Logger.ErrorException("Error starting ffmpeg", ex);

        ApiEntryPoint.Instance.OnTranscodeFailedToStart(outputPath, TranscodingJobType, state);

        throw;
    }

    // MUST read both stdout and stderr asynchronously or a deadlock may occur
    //process.BeginOutputReadLine();

    state.TranscodingJob = transcodingJob;

    // Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback
    new JobLogger(Logger).StartStreamingLog(state, process.StandardError.BaseStream, state.LogFileStream);

    // Wait for the file to exist before proceeding
    while (!FileSystem.FileExists(state.WaitForPath ?? outputPath) && !transcodingJob.HasExited)
    {
        await Task.Delay(100, cancellationTokenSource.Token).ConfigureAwait(false);
    }

    if (state.IsInputVideo && transcodingJob.Type == TranscodingJobType.Progressive && !transcodingJob.HasExited)
    {
        await Task.Delay(1000, cancellationTokenSource.Token).ConfigureAwait(false);

        if (state.ReadInputAtNativeFramerate && !transcodingJob.HasExited)
        {
            await Task.Delay(1500, cancellationTokenSource.Token).ConfigureAwait(false);
        }
    }

    if (!transcodingJob.HasExited)
    {
        StartThrottler(state, transcodingJob);
    }

    return transcodingJob;
}
/// <summary>
/// Gets the command line arguments.
/// </summary>
/// <param name="outputPath">The output path.</param>
/// <param name="encodingOptions">The encoding options.</param>
/// <param name="state">The state.</param>
/// <param name="isEncoding">if set to <c>true</c> [is encoding].</param>
/// <returns>System.String.</returns>
protected abstract string GetCommandLineArguments(string outputPath, EncodingOptions encodingOptions, StreamState state, bool isEncoding);
/// <summary>
/// Gets the state.
/// </summary>
/// <param name="request">The request.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>StreamState.</returns>
protected async Task<StreamState> GetState(StreamRequest request, CancellationToken cancellationToken)
{
    ParseDlnaHeaders(request);

    if (!string.IsNullOrWhiteSpace(request.Params))
    {
        ParseParams(request);
    }

    var url = Request.PathInfo;

    if (string.IsNullOrEmpty(request.AudioCodec))
    {
        request.AudioCodec = EncodingHelper.InferAudioCodec(url);
    }

    var state = new StreamState(MediaSourceManager, Logger, TranscodingJobType)
    {
        Request = request,
        RequestedUrl = url,
        UserAgent = Request.UserAgent
    };

    var auth = AuthorizationContext.GetAuthorizationInfo(Request);
    if (!string.IsNullOrWhiteSpace(auth.UserId))
    {
        state.User = UserManager.GetUserById(auth.UserId);
    }

    //if ((Request.UserAgent ?? string.Empty).IndexOf("iphone", StringComparison.OrdinalIgnoreCase) != -1 ||
    //    (Request.UserAgent ?? string.Empty).IndexOf("ipad", StringComparison.OrdinalIgnoreCase) != -1 ||
    //    (Request.UserAgent ?? string.Empty).IndexOf("ipod", StringComparison.OrdinalIgnoreCase) != -1)
    //{
    //    state.SegmentLength = 6;
    //}

    if (state.VideoRequest != null)
    {
        if (!string.IsNullOrWhiteSpace(state.VideoRequest.VideoCodec))
        {
            state.SupportedVideoCodecs = state.VideoRequest.VideoCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
            state.VideoRequest.VideoCodec = state.SupportedVideoCodecs.FirstOrDefault();
        }
    }

    if (!string.IsNullOrWhiteSpace(request.AudioCodec))
    {
        state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
        state.Request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault(i => MediaEncoder.CanEncodeToAudioCodec(i))
            ?? state.SupportedAudioCodecs.FirstOrDefault();
    }

    if (!string.IsNullOrWhiteSpace(request.SubtitleCodec))
    {
        state.SupportedSubtitleCodecs = request.SubtitleCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
        state.Request.SubtitleCodec = state.SupportedSubtitleCodecs.FirstOrDefault(i => MediaEncoder.CanEncodeToSubtitleCodec(i))
            ?? state.SupportedSubtitleCodecs.FirstOrDefault();
    }

    var item = LibraryManager.GetItemById(request.Id);

    state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);

    MediaSourceInfo mediaSource = null;

    if (string.IsNullOrWhiteSpace(request.LiveStreamId))
    {
        TranscodingJob currentJob = !string.IsNullOrWhiteSpace(request.PlaySessionId)
            ? ApiEntryPoint.Instance.GetTranscodingJob(request.PlaySessionId)
            : null;

        if (currentJob != null)
        {
            mediaSource = currentJob.MediaSource;
        }

        if (mediaSource == null)
        {
            var mediaSources = (await MediaSourceManager.GetPlayackMediaSources(request.Id, null, false, new[] { MediaType.Audio, MediaType.Video }, cancellationToken).ConfigureAwait(false)).ToList();

            mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
                ? mediaSources.First()
                : mediaSources.FirstOrDefault(i => string.Equals(i.Id, request.MediaSourceId));

            if (mediaSource == null && string.Equals(request.Id, request.MediaSourceId, StringComparison.OrdinalIgnoreCase))
            {
                mediaSource = mediaSources.First();
            }
        }
    }
    else
    {
        var liveStreamInfo = await MediaSourceManager.GetLiveStreamWithDirectStreamProvider(request.LiveStreamId, cancellationToken).ConfigureAwait(false);
        mediaSource = liveStreamInfo.Item1;
        state.DirectStreamProvider = liveStreamInfo.Item2;
    }

    var videoRequest = request as VideoStreamRequest;

    EncodingHelper.AttachMediaSourceInfo(state, mediaSource, url);

    var container = Path.GetExtension(state.RequestedUrl);

    if (string.IsNullOrEmpty(container))
    {
        container = request.Container;
    }

    if (string.IsNullOrEmpty(container))
    {
        container = request.Static
            ? state.InputContainer
            : GetOutputFileExtension(state);
    }

    state.OutputContainer = (container ?? string.Empty).TrimStart('.');

    state.OutputAudioBitrate = EncodingHelper.GetAudioBitrateParam(state.Request, state.AudioStream);
    state.OutputAudioSampleRate = request.AudioSampleRate;

    state.OutputAudioCodec = state.Request.AudioCodec;

    state.OutputAudioChannels = EncodingHelper.GetNumAudioChannelsParam(state.Request, state.AudioStream, state.OutputAudioCodec);

    if (videoRequest != null)
    {
        state.OutputVideoCodec = state.VideoRequest.VideoCodec;
        state.OutputVideoBitrate = EncodingHelper.GetVideoBitrateParamValue(state.VideoRequest, state.VideoStream, state.OutputVideoCodec);

        EncodingHelper.TryStreamCopy(state);

        if (state.OutputVideoBitrate.HasValue && !string.Equals(state.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase))
        {
            var resolution = ResolutionNormalizer.Normalize(
                state.VideoStream == null ? (int?)null : state.VideoStream.BitRate,
                state.OutputVideoBitrate.Value,
                state.VideoStream == null ? null : state.VideoStream.Codec,
                state.OutputVideoCodec,
                videoRequest.MaxWidth,
                videoRequest.MaxHeight);

            videoRequest.MaxWidth = resolution.MaxWidth;
            videoRequest.MaxHeight = resolution.MaxHeight;
        }
    }

    ApplyDeviceProfileSettings(state);

    var ext = string.IsNullOrWhiteSpace(state.OutputContainer)
        ? GetOutputFileExtension(state)
        : ("." + state.OutputContainer);

    state.OutputFilePath = GetOutputFilePath(state, ext);

    return state;
}
private Task ReportUsageInternal(StreamState state)
{
    if (!ServerConfigurationManager.Configuration.EnableAnonymousUsageReporting)
    {
        return Task.FromResult(true);
    }

    if (!MediaEncoder.IsDefaultEncoderPath)
    {
        return Task.FromResult(true);
    }

    // Usage reporting is currently disabled; every path above returns before the code below,
    // which is kept commented out for reference.
    return Task.FromResult(true);

    //var dict = new Dictionary<string, string>();

    //var outputAudio = GetAudioEncoder(state);
    //if (!string.IsNullOrWhiteSpace(outputAudio))
    //{
    //    dict["outputAudio"] = outputAudio;
    //}

    //var outputVideo = GetVideoEncoder(state);
    //if (!string.IsNullOrWhiteSpace(outputVideo))
    //{
    //    dict["outputVideo"] = outputVideo;
    //}

    //if (ServerConfigurationManager.Configuration.CodecsUsed.Contains(outputAudio ?? string.Empty, StringComparer.OrdinalIgnoreCase) &&
    //    ServerConfigurationManager.Configuration.CodecsUsed.Contains(outputVideo ?? string.Empty, StringComparer.OrdinalIgnoreCase))
    //{
    //    return Task.FromResult(true);
    //}

    //dict["id"] = AppHost.SystemId;
    //dict["type"] = state.VideoRequest == null ? "Audio" : "Video";

    //var audioStream = state.AudioStream;
    //if (audioStream != null && !string.IsNullOrWhiteSpace(audioStream.Codec))
    //{
    //    dict["inputAudio"] = audioStream.Codec;
    //}

    //var videoStream = state.VideoStream;
    //if (videoStream != null && !string.IsNullOrWhiteSpace(videoStream.Codec))
    //{
    //    dict["inputVideo"] = videoStream.Codec;
    //}

    //var cert = GetType().Assembly.GetModules().First().GetSignerCertificate();
    //if (cert != null)
    //{
    //    dict["assemblySig"] = cert.GetCertHashString();
    //    dict["certSubject"] = cert.Subject ?? string.Empty;
    //    dict["certIssuer"] = cert.Issuer ?? string.Empty;
    //}
    //else
    //{
    //    return Task.FromResult(true);
    //}

    //if (state.SupportedAudioCodecs.Count > 0)
    //{
    //    dict["supportedAudioCodecs"] = string.Join(",", state.SupportedAudioCodecs.ToArray());
    //}

    //var auth = AuthorizationContext.GetAuthorizationInfo(Request);

    //dict["appName"] = auth.Client ?? string.Empty;
    //dict["appVersion"] = auth.Version ?? string.Empty;
    //dict["device"] = auth.Device ?? string.Empty;
    //dict["deviceId"] = auth.DeviceId ?? string.Empty;
    //dict["context"] = "streaming";

    ////Logger.Info(JsonSerializer.SerializeToString(dict));

    //if (!ServerConfigurationManager.Configuration.CodecsUsed.Contains(outputAudio ?? string.Empty, StringComparer.OrdinalIgnoreCase))
    //{
    //    var list = ServerConfigurationManager.Configuration.CodecsUsed.ToList();
    //    list.Add(outputAudio);
    //    ServerConfigurationManager.Configuration.CodecsUsed = list.ToArray();
    //}

    //if (!ServerConfigurationManager.Configuration.CodecsUsed.Contains(outputVideo ?? string.Empty, StringComparer.OrdinalIgnoreCase))
    //{
    //    var list = ServerConfigurationManager.Configuration.CodecsUsed.ToList();
    //    list.Add(outputVideo);
    //    ServerConfigurationManager.Configuration.CodecsUsed = list.ToArray();
    //}

    //ServerConfigurationManager.SaveConfiguration();

    ////Logger.Info(JsonSerializer.SerializeToString(dict));

    //var options = new HttpRequestOptions()
    //{
    //    Url = "https://mb3admin.com/admin/service/transcoding/report",
    //    CancellationToken = CancellationToken.None,
    //    LogRequest = false,
    //    LogErrors = false,
    //    BufferContent = false
    //};

    //options.RequestContent = JsonSerializer.SerializeToString(dict);
    //options.RequestContentType = "application/json";

    //return HttpClient.Post(options);
}
/// <summary>
/// Gets the command line arguments.
/// </summary>
/// <param name="outputPath">The output path.</param>
/// <param name="state">The state.</param>
/// <param name="performSubtitleConversions">if set to <c>true</c> [perform subtitle conversions].</param>
/// <returns>System.String.</returns>
protected abstract string GetCommandLineArguments(string outputPath, StreamState state, bool performSubtitleConversions);
/// <summary>
/// Starts ffmpeg.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputPath">The output path.</param>
/// <returns>Task.</returns>
protected async Task StartFfMpeg(StreamState state, string outputPath)
{
    var parentPath = Path.GetDirectoryName(outputPath);

    if (!Directory.Exists(parentPath))
    {
        Directory.CreateDirectory(parentPath);
    }

    var video = state.Item as Video;

    if (video != null && video.VideoType == VideoType.Iso && video.IsoType.HasValue && IsoManager.CanMount(video.Path))
    {
        state.IsoMount = await IsoManager.Mount(video.Path, CancellationToken.None).ConfigureAwait(false);
    }

    var process = new Process
    {
        StartInfo = new ProcessStartInfo
        {
            CreateNoWindow = true,
            UseShellExecute = false,

            // Must consume both stdout and stderr or deadlocks may occur
            RedirectStandardOutput = true,
            RedirectStandardError = true,

            FileName = MediaEncoder.EncoderPath,
            WorkingDirectory = Path.GetDirectoryName(MediaEncoder.EncoderPath),
            Arguments = GetCommandLineArguments(outputPath, state, true),

            WindowStyle = ProcessWindowStyle.Hidden,
            ErrorDialog = false
        },

        EnableRaisingEvents = true
    };

    ApiEntryPoint.Instance.OnTranscodeBeginning(outputPath, TranscodingJobType, process, video != null, state.Request.StartTimeTicks);

    Logger.Info(process.StartInfo.FileName + " " + process.StartInfo.Arguments);

    var logFilePath = Path.Combine(ApplicationPaths.LogDirectoryPath, "ffmpeg-" + Guid.NewGuid() + ".txt");

    // FFMpeg writes debug/error info to stderr. This is useful when debugging, so let's put it in the log directory.
    state.LogFileStream = new FileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, StreamDefaults.DefaultFileStreamBufferSize, FileOptions.Asynchronous);

    process.Exited += (sender, args) => OnFfMpegProcessExited(process, state);

    try
    {
        process.Start();
    }
    catch (Win32Exception ex)
    {
        Logger.ErrorException("Error starting ffmpeg", ex);

        ApiEntryPoint.Instance.OnTranscodeFailedToStart(outputPath, TranscodingJobType);

        state.LogFileStream.Dispose();

        throw;
    }

    // MUST read both stdout and stderr asynchronously or a deadlock may occur
    process.BeginOutputReadLine();

    // Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback
    process.StandardError.BaseStream.CopyToAsync(state.LogFileStream);

    // Wait for the file to exist before proceeding
    while (!File.Exists(outputPath))
    {
        await Task.Delay(100).ConfigureAwait(false);
    }

    // Allow a small amount of time to buffer a little
    if (state.Item is Video)
    {
        await Task.Delay(500).ConfigureAwait(false);
    }

    // This is arbitrary, but add a little buffer time when internet streaming
    if (state.Item.LocationType == LocationType.Remote)
    {
        await Task.Delay(2000).ConfigureAwait(false);
    }
}
/// <summary>
/// If we're going to put a fixed size on the command line, this will calculate it
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputVideoCodec">The output video codec.</param>
/// <param name="performTextSubtitleConversion">if set to <c>true</c> [perform text subtitle conversion].</param>
/// <returns>System.String.</returns>
protected string GetOutputSizeParam(StreamState state, string outputVideoCodec, bool performTextSubtitleConversion)
{
    // http://sonnati.wordpress.com/2012/10/19/ffmpeg-the-swiss-army-knife-of-internet-streaming-part-vi/

    var assSubtitleParam = string.Empty;

    var request = state.VideoRequest;

    if (state.SubtitleStream != null)
    {
        if (state.SubtitleStream.Codec.IndexOf("srt", StringComparison.OrdinalIgnoreCase) != -1 ||
            state.SubtitleStream.Codec.IndexOf("subrip", StringComparison.OrdinalIgnoreCase) != -1)
        {
            assSubtitleParam = GetTextSubtitleParam((Video)state.Item, state.SubtitleStream, request.StartTimeTicks, performTextSubtitleConversion);
        }
    }

    // If fixed dimensions were supplied
    if (request.Width.HasValue && request.Height.HasValue)
    {
        return string.Format(" -vf \"scale={0}:{1}{2}\"", request.Width.Value, request.Height.Value, assSubtitleParam);
    }

    var isH264Output = outputVideoCodec.Equals("libx264", StringComparison.OrdinalIgnoreCase);

    // If a fixed width was requested
    if (request.Width.HasValue)
    {
        return isH264Output
            ? string.Format(" -vf \"scale={0}:trunc(ow/a/2)*2{1}\"", request.Width.Value, assSubtitleParam)
            : string.Format(" -vf \"scale={0}:-1{1}\"", request.Width.Value, assSubtitleParam);
    }

    // If a max width was requested
    if (request.MaxWidth.HasValue && (!request.MaxHeight.HasValue || state.VideoStream == null))
    {
        return isH264Output
            ? string.Format(" -vf \"scale=min(iw\\,{0}):trunc(ow/a/2)*2{1}\"", request.MaxWidth.Value, assSubtitleParam)
            : string.Format(" -vf \"scale=min(iw\\,{0}):-1{1}\"", request.MaxWidth.Value, assSubtitleParam);
    }

    if (state.VideoStream == null)
    {
        // No way to figure this out
        return string.Empty;
    }

    // Need to perform calculations manually

    // Try to account for bad media info
    var currentHeight = state.VideoStream.Height ?? request.MaxHeight ?? request.Height ?? 0;
    var currentWidth = state.VideoStream.Width ?? request.MaxWidth ?? request.Width ?? 0;

    var outputSize = DrawingUtils.Resize(currentWidth, currentHeight, request.Width, request.Height, request.MaxWidth, request.MaxHeight);

    // If we're encoding with libx264, it can't handle odd numbered widths or heights, so we'll have to fix that
    if (isH264Output)
    {
        return string.Format(" -vf \"scale=trunc({0}/2)*2:trunc({1}/2)*2{2}\"", outputSize.Width, outputSize.Height, assSubtitleParam);
    }

    // Otherwise use -vf scale since ffmpeg will ensure internally that the aspect ratio is preserved
    return string.Format(" -vf \"scale={0}:-1{1}\"", Convert.ToInt32(outputSize.Width), assSubtitleParam);
}
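// Illustrative only: when only MaxWidth=1280 is requested, there is no subtitle burn-in, and
// the output codec is libx264, the method returns
//
//    -vf "scale=min(iw\,1280):trunc(ow/a/2)*2"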
/// <summary>
/// Gets the video bitrate to specify on the command line
/// </summary>
/// <param name="state">The state.</param>
/// <param name="videoCodec">The video codec.</param>
/// <param name="isHls">if set to <c>true</c> [is HLS].</param>
/// <returns>System.String.</returns>
protected string GetVideoQualityParam(StreamState state, string videoCodec, bool isHls)
{
    var param = string.Empty;

    var isVc1 = state.VideoStream != null && string.Equals(state.VideoStream.Codec, "vc1", StringComparison.OrdinalIgnoreCase);

    var qualitySetting = GetQualitySetting();

    if (string.Equals(videoCodec, "libx264", StringComparison.OrdinalIgnoreCase))
    {
        switch (qualitySetting)
        {
            case EncodingQuality.HighSpeed:
                param = "-preset superfast -crf 23";
                break;
            case EncodingQuality.HighQuality:
                param = "-preset superfast -crf 20";
                break;
            case EncodingQuality.MaxQuality:
                param = "-preset superfast -crf 18";
                break;
        }
    }

    // webm
    else if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
    {
        // Values 0-3, 0 being highest quality but slower
        var profileScore = 0;

        string crf;

        switch (qualitySetting)
        {
            case EncodingQuality.HighSpeed:
                crf = "12";
                profileScore = 2;
                break;
            case EncodingQuality.HighQuality:
                crf = "8";
                profileScore = 1;
                break;
            case EncodingQuality.MaxQuality:
                crf = "4";
                break;
            default:
                throw new ArgumentException("Unrecognized quality setting");
        }

        if (isVc1)
        {
            profileScore++;

            // Max of 2
            profileScore = Math.Min(profileScore, 2);
        }

        // http://www.webmproject.org/docs/encoder-parameters/
        param = string.Format("-speed 16 -quality good -profile:v {0} -slices 8 -crf {1}",
            profileScore.ToString(UsCulture),
            crf);
    }

    else if (string.Equals(videoCodec, "mpeg4", StringComparison.OrdinalIgnoreCase))
    {
        param = "-mbd rd -flags +mv4+aic -trellis 2 -cmp 2 -subcmp 2 -bf 2";
    }

    // asf/wmv
    else if (string.Equals(videoCodec, "wmv2", StringComparison.OrdinalIgnoreCase))
    {
        param = "-qmin 2";
    }

    else if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
    {
        param = "-mbd 2";
    }

    param += GetVideoBitrateParam(state, videoCodec, isHls);

    var framerate = GetFramerateParam(state);
    if (framerate.HasValue)
    {
        param += string.Format(" -r {0}", framerate.Value.ToString(UsCulture));
    }

    if (!string.IsNullOrEmpty(state.OutputVideoSync))
    {
        param += " -vsync " + state.OutputVideoSync;
    }

    if (!string.IsNullOrEmpty(state.VideoRequest.Profile))
    {
        param += " -profile:v " + state.VideoRequest.Profile;
    }

    if (!string.IsNullOrEmpty(state.VideoRequest.Level))
    {
        param += " -level " + state.VideoRequest.Level;
    }

    return param;
}
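// Illustrative only: with the HighQuality setting, libx264 output, a 30 fps framerate target
// and an H.264 "high" profile requested, the assembled parameters look like
//
//   -preset superfast -crf 20 <bitrate args> -r 30 -profile:v high
//
// where <bitrate args> stands in for whatever GetVideoBitrateParam contributes.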
/// <summary>
/// Gets the output file extension.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected virtual string GetOutputFileExtension(StreamState state)
{
    return Path.GetExtension(state.RequestedUrl);
}
/// <summary>
/// If we're going to put a fixed size on the command line, this will calculate it
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputVideoCodec">The output video codec.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <param name="allowTimeStampCopy">if set to <c>true</c> [allow time stamp copy].</param>
/// <returns>System.String.</returns>
protected string GetOutputSizeParam(StreamState state, string outputVideoCodec, CancellationToken cancellationToken, bool allowTimeStampCopy = true)
{
    // http://sonnati.wordpress.com/2012/10/19/ffmpeg-the-swiss-army-knife-of-internet-streaming-part-vi/

    var request = state.VideoRequest;

    var filters = new List<string>();

    if (state.DeInterlace)
    {
        filters.Add("yadif=0:-1:0");
    }

    // If fixed dimensions were supplied
    if (request.Width.HasValue && request.Height.HasValue)
    {
        var widthParam = request.Width.Value.ToString(UsCulture);
        var heightParam = request.Height.Value.ToString(UsCulture);

        filters.Add(string.Format("scale=trunc({0}/2)*2:trunc({1}/2)*2", widthParam, heightParam));
    }

    // If max dimensions were supplied: for the width, select the lowest even number between the
    // input width and the requested max width; for the height, do the same using the input
    // width divided by the display aspect ratio and the requested max height
    else if (request.MaxWidth.HasValue && request.MaxHeight.HasValue)
    {
        var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);
        var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);

        filters.Add(string.Format("scale=trunc(min(iw\\,{0})/2)*2:trunc(min((iw/dar)\\,{1})/2)*2", maxWidthParam, maxHeightParam));
    }

    // If a fixed width was requested
    else if (request.Width.HasValue)
    {
        var widthParam = request.Width.Value.ToString(UsCulture);

        filters.Add(string.Format("scale={0}:trunc(ow/a/2)*2", widthParam));
    }

    // If a fixed height was requested
    else if (request.Height.HasValue)
    {
        var heightParam = request.Height.Value.ToString(UsCulture);

        filters.Add(string.Format("scale=trunc(oh*a*2)/2:{0}", heightParam));
    }

    // If a max width was requested
    else if (request.MaxWidth.HasValue && (!request.MaxHeight.HasValue || state.VideoStream == null))
    {
        var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);

        filters.Add(string.Format("scale=min(iw\\,{0}):trunc(ow/dar/2)*2", maxWidthParam));
    }

    // If a max height was requested
    else if (request.MaxHeight.HasValue && (!request.MaxWidth.HasValue || state.VideoStream == null))
    {
        var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);

        filters.Add(string.Format("scale=trunc(oh*a*2)/2:min(ih\\,{0})", maxHeightParam));
    }

    else if (request.MaxWidth.HasValue || request.MaxHeight.HasValue || request.Width.HasValue || request.Height.HasValue)
    {
        if (state.VideoStream != null)
        {
            // Need to perform calculations manually

            // Try to account for bad media info
            var currentHeight = state.VideoStream.Height ?? request.MaxHeight ?? request.Height ?? 0;
            var currentWidth = state.VideoStream.Width ?? request.MaxWidth ?? request.Width ?? 0;

            var outputSize = DrawingUtils.Resize(currentWidth, currentHeight, request.Width, request.Height, request.MaxWidth, request.MaxHeight);

            var manualWidthParam = outputSize.Width.ToString(UsCulture);
            var manualHeightParam = outputSize.Height.ToString(UsCulture);

            filters.Add(string.Format("scale=trunc({0}/2)*2:trunc({1}/2)*2", manualWidthParam, manualHeightParam));
        }
    }

    var output = string.Empty;

    if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream)
    {
        var subParam = GetTextSubtitleParam(state, cancellationToken);

        filters.Add(subParam);

        if (allowTimeStampCopy)
        {
            output += " -copyts";
        }
    }

    if (filters.Count > 0)
    {
        output += string.Format(" -vf \"{0}\"", string.Join(",", filters.ToArray()));
    }

    return output;
}
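// Illustrative only: with MaxWidth=1920 and MaxHeight=1080, no deinterlacing and no text
// subtitles, the filter list produces
//
//    -vf "scale=trunc(min(iw\,1920)/2)*2:trunc(min((iw/dar)\,1080)/2)*2"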
/// <summary>
/// Starts ffmpeg.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputPath">The output path.</param>
/// <param name="cancellationTokenSource">The cancellation token source.</param>
/// <param name="workingDirectory">The working directory.</param>
/// <returns>Task.</returns>
protected async Task<TranscodingJob> StartFfMpeg(
    StreamState state,
    string outputPath,
    CancellationTokenSource cancellationTokenSource,
    string workingDirectory = null)
{
    Directory.CreateDirectory(Path.GetDirectoryName(outputPath));

    await AcquireResources(state, cancellationTokenSource).ConfigureAwait(false);

    if (state.VideoRequest != null && !string.Equals(state.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase))
    {
        var auth = AuthorizationContext.GetAuthorizationInfo(Request);
        if (auth.User != null && !auth.User.Policy.EnableVideoPlaybackTranscoding)
        {
            ApiEntryPoint.Instance.OnTranscodeFailedToStart(outputPath, TranscodingJobType, state);

            throw new ArgumentException("User does not have access to video transcoding");
        }
    }

    var encodingOptions = ServerConfigurationManager.GetEncodingOptions();

    var process = new Process()
    {
        StartInfo = new ProcessStartInfo()
        {
            WindowStyle = ProcessWindowStyle.Hidden,
            CreateNoWindow = true,
            UseShellExecute = false,

            // Must consume both stdout and stderr or deadlocks may occur
            //RedirectStandardOutput = true,
            RedirectStandardError = true,
            RedirectStandardInput = true,

            FileName = MediaEncoder.EncoderPath,
            Arguments = GetCommandLineArguments(outputPath, encodingOptions, state, true),
            WorkingDirectory = string.IsNullOrWhiteSpace(workingDirectory) ? null : workingDirectory,

            ErrorDialog = false
        },
        EnableRaisingEvents = true
    };

    var transcodingJob = ApiEntryPoint.Instance.OnTranscodeBeginning(outputPath,
        state.Request.PlaySessionId,
        state.MediaSource.LiveStreamId,
        Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture),
        TranscodingJobType,
        process,
        state.Request.DeviceId,
        state,
        cancellationTokenSource);

    var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments;
    Logger.LogInformation(commandLineLogMessage);

    var logFilePrefix = "ffmpeg-transcode";
    if (state.VideoRequest != null
        && string.Equals(state.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase))
    {
        if (string.Equals(state.OutputAudioCodec, "copy", StringComparison.OrdinalIgnoreCase))
        {
            logFilePrefix = "ffmpeg-directstream";
        }
        else
        {
            logFilePrefix = "ffmpeg-remux";
        }
    }

    var logFilePath = Path.Combine(ServerConfigurationManager.ApplicationPaths.LogDirectoryPath, logFilePrefix + "-" + Guid.NewGuid() + ".txt");

    // FFMpeg writes debug/error info to stderr. This is useful when debugging, so let's put it in the log directory.
    Stream logStream = new FileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, IODefaults.FileStreamBufferSize, true);

    var commandLineLogMessageBytes = Encoding.UTF8.GetBytes(Request.AbsoluteUri + Environment.NewLine + Environment.NewLine
        + JsonSerializer.SerializeToString(state.MediaSource) + Environment.NewLine + Environment.NewLine
        + commandLineLogMessage + Environment.NewLine + Environment.NewLine);
    await logStream.WriteAsync(commandLineLogMessageBytes, 0, commandLineLogMessageBytes.Length, cancellationTokenSource.Token).ConfigureAwait(false);

    process.Exited += (sender, args) => OnFfMpegProcessExited(process, transcodingJob, state);

    try
    {
        process.Start();
    }
    catch (Exception ex)
    {
        Logger.LogError(ex, "Error starting ffmpeg");

        ApiEntryPoint.Instance.OnTranscodeFailedToStart(outputPath, TranscodingJobType, state);

        throw;
    }

    Logger.LogDebug("Launched ffmpeg process");
    state.TranscodingJob = transcodingJob;

    // Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback
    _ = new JobLogger(Logger).StartStreamingLog(state, process.StandardError.BaseStream, logStream);

    // Wait for the file to exist before proceeding
    var ffmpegTargetFile = state.WaitForPath ?? outputPath;
    Logger.LogDebug("Waiting for the creation of {0}", ffmpegTargetFile);
    while (!File.Exists(ffmpegTargetFile) && !transcodingJob.HasExited)
    {
        await Task.Delay(100, cancellationTokenSource.Token).ConfigureAwait(false);
    }

    Logger.LogDebug("File {0} created or transcoding has finished", ffmpegTargetFile);

    if (state.IsInputVideo && transcodingJob.Type == TranscodingJobType.Progressive && !transcodingJob.HasExited)
    {
        await Task.Delay(1000, cancellationTokenSource.Token).ConfigureAwait(false);

        if (state.ReadInputAtNativeFramerate && !transcodingJob.HasExited)
        {
            await Task.Delay(1500, cancellationTokenSource.Token).ConfigureAwait(false);
        }
    }

    if (!transcodingJob.HasExited)
    {
        StartThrottler(state, transcodingJob);
    }

    Logger.LogDebug("StartFfMpeg() finished successfully");

    return transcodingJob;
}
/// <summary>
/// Gets the input argument.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected virtual string GetInputArgument(StreamState state)
{
    var protocol = state.InputProtocol;

    var inputPath = new[] { state.MediaPath };

    if (state.IsInputVideo)
    {
        if (!(state.VideoType == VideoType.Iso && state.IsoMount == null))
        {
            inputPath = MediaEncoderHelpers.GetInputArgument(state.MediaPath, state.InputProtocol, state.IsoMount, state.PlayableStreamFileNames);
        }
    }

    return MediaEncoder.GetInputArgument(inputPath, protocol);
}
/// <summary>
/// Gets the state.
/// </summary>
/// <param name="request">The request.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>StreamState.</returns>
protected async Task<StreamState> GetState(StreamRequest request, CancellationToken cancellationToken)
{
    ParseDlnaHeaders(request);

    if (!string.IsNullOrWhiteSpace(request.Params))
    {
        ParseParams(request);
    }

    ParseStreamOptions(request);

    var url = Request.PathInfo;

    if (string.IsNullOrEmpty(request.AudioCodec))
    {
        request.AudioCodec = EncodingHelper.InferAudioCodec(url);
    }

    var enableDlnaHeaders = !string.IsNullOrWhiteSpace(request.Params) ||
        string.Equals(GetHeader("GetContentFeatures.DLNA.ORG"), "1", StringComparison.OrdinalIgnoreCase);

    var state = new StreamState(MediaSourceManager, TranscodingJobType)
    {
        Request = request,
        RequestedUrl = url,
        UserAgent = Request.UserAgent,
        EnableDlnaHeaders = enableDlnaHeaders
    };

    var auth = AuthorizationContext.GetAuthorizationInfo(Request);
    if (!auth.UserId.Equals(Guid.Empty))
    {
        state.User = UserManager.GetUserById(auth.UserId);
    }

    //if ((Request.UserAgent ?? string.Empty).IndexOf("iphone", StringComparison.OrdinalIgnoreCase) != -1 ||
    //    (Request.UserAgent ?? string.Empty).IndexOf("ipad", StringComparison.OrdinalIgnoreCase) != -1 ||
    //    (Request.UserAgent ?? string.Empty).IndexOf("ipod", StringComparison.OrdinalIgnoreCase) != -1)
    //{
    //    state.SegmentLength = 6;
    //}

    if (state.VideoRequest != null && !string.IsNullOrWhiteSpace(state.VideoRequest.VideoCodec))
    {
        state.SupportedVideoCodecs = state.VideoRequest.VideoCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToArray();
        state.VideoRequest.VideoCodec = state.SupportedVideoCodecs.FirstOrDefault();
    }

    if (!string.IsNullOrWhiteSpace(request.AudioCodec))
    {
        state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToArray();
        state.Request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault(i => MediaEncoder.CanEncodeToAudioCodec(i))
            ?? state.SupportedAudioCodecs.FirstOrDefault();
    }

    if (!string.IsNullOrWhiteSpace(request.SubtitleCodec))
    {
        state.SupportedSubtitleCodecs = request.SubtitleCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToArray();
        state.Request.SubtitleCodec = state.SupportedSubtitleCodecs.FirstOrDefault(i => MediaEncoder.CanEncodeToSubtitleCodec(i))
            ?? state.SupportedSubtitleCodecs.FirstOrDefault();
    }

    var item = LibraryManager.GetItemById(request.Id);

    state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);

    //var primaryImage = item.GetImageInfo(ImageType.Primary, 0) ??
    //             item.Parents.Select(i => i.GetImageInfo(ImageType.Primary, 0)).FirstOrDefault(i => i != null);

    //if (primaryImage != null)
    //{
    //    state.AlbumCoverPath = primaryImage.Path;
    //}

    MediaSourceInfo mediaSource = null;

    if (string.IsNullOrWhiteSpace(request.LiveStreamId))
    {
        var currentJob = !string.IsNullOrWhiteSpace(request.PlaySessionId)
            ? ApiEntryPoint.Instance.GetTranscodingJob(request.PlaySessionId)
            : null;

        if (currentJob != null)
        {
            mediaSource = currentJob.MediaSource;
        }

        if (mediaSource == null)
        {
            var mediaSources = await MediaSourceManager.GetPlaybackMediaSources(LibraryManager.GetItemById(request.Id), null, false, false, cancellationToken).ConfigureAwait(false);

            mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
                ? mediaSources[0]
                : mediaSources.Find(i => string.Equals(i.Id, request.MediaSourceId));

            if (mediaSource == null && Guid.Parse(request.MediaSourceId) == request.Id)
            {
                mediaSource = mediaSources[0];
            }
        }
    }
    else
    {
        var liveStreamInfo = await MediaSourceManager.GetLiveStreamWithDirectStreamProvider(request.LiveStreamId, cancellationToken).ConfigureAwait(false);
        mediaSource = liveStreamInfo.Item1;
        state.DirectStreamProvider = liveStreamInfo.Item2;
    }

    var videoRequest = request as VideoStreamRequest;

    EncodingHelper.AttachMediaSourceInfo(state, mediaSource, url);

    var container = Path.GetExtension(state.RequestedUrl);

    if (string.IsNullOrEmpty(container))
    {
        container = request.Container;
    }

    if (string.IsNullOrEmpty(container))
    {
        container = request.Static
            ? StreamBuilder.NormalizeMediaSourceFormatIntoSingleContainer(state.InputContainer, state.MediaPath, null, DlnaProfileType.Audio)
            : GetOutputFileExtension(state);
    }

    state.OutputContainer = (container ?? string.Empty).TrimStart('.');

    state.OutputAudioBitrate = EncodingHelper.GetAudioBitrateParam(state.Request, state.AudioStream);

    state.OutputAudioCodec = state.Request.AudioCodec;

    state.OutputAudioChannels = EncodingHelper.GetNumAudioChannelsParam(state, state.AudioStream, state.OutputAudioCodec);

    if (videoRequest != null)
    {
        state.OutputVideoCodec = state.VideoRequest.VideoCodec;
        state.OutputVideoBitrate = EncodingHelper.GetVideoBitrateParamValue(state.VideoRequest, state.VideoStream, state.OutputVideoCodec);

        EncodingHelper.TryStreamCopy(state);

        if (state.OutputVideoBitrate.HasValue && !string.Equals(state.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase))
        {
            var resolution = ResolutionNormalizer.Normalize(
                state.VideoStream?.BitRate,
                state.VideoStream?.Width,
                state.VideoStream?.Height,
                state.OutputVideoBitrate.Value,
                state.VideoStream?.Codec,
                state.OutputVideoCodec,
                videoRequest.MaxWidth,
                videoRequest.MaxHeight);

            videoRequest.MaxWidth = resolution.MaxWidth;
            videoRequest.MaxHeight = resolution.MaxHeight;
        }
    }

    ApplyDeviceProfileSettings(state);

    var ext = string.IsNullOrWhiteSpace(state.OutputContainer)
        ? GetOutputFileExtension(state)
        : ('.' + state.OutputContainer);

    var encodingOptions = ServerConfigurationManager.GetEncodingOptions();

    state.OutputFilePath = GetOutputFilePath(state, encodingOptions, ext);

    return state;
}
/// <summary>
/// Gets the command line arguments.
/// </summary>
/// <param name="outputPath">The output path.</param>
/// <param name="state">The state.</param>
/// <param name="isEncoding">if set to <c>true</c> [is encoding].</param>
/// <returns>System.String.</returns>
protected abstract string GetCommandLineArguments(string outputPath, StreamState state, bool isEncoding);
private void ApplyDeviceProfileSettings(StreamState state)
{
    var headers = Request.Headers;

    if (!string.IsNullOrWhiteSpace(state.Request.DeviceProfileId))
    {
        state.DeviceProfile = DlnaManager.GetProfile(state.Request.DeviceProfileId);
    }
    else
    {
        if (!string.IsNullOrWhiteSpace(state.Request.DeviceId))
        {
            var caps = DeviceManager.GetCapabilities(state.Request.DeviceId);

            if (caps != null)
            {
                state.DeviceProfile = caps.DeviceProfile;
            }
            else
            {
                state.DeviceProfile = DlnaManager.GetProfile(headers);
            }
        }
    }

    var profile = state.DeviceProfile;

    if (profile == null)
    {
        // Don't use settings from the default profile.
        // Only use a specific profile if it was requested.
        return;
    }

    var audioCodec = state.ActualOutputAudioCodec;
    var videoCodec = state.ActualOutputVideoCodec;

    var mediaProfile = state.VideoRequest == null
        ? profile.GetAudioMediaProfile(state.OutputContainer, audioCodec, state.OutputAudioChannels, state.OutputAudioBitrate, state.OutputAudioSampleRate, state.OutputAudioBitDepth)
        : profile.GetVideoMediaProfile(state.OutputContainer,
            audioCodec,
            videoCodec,
            state.OutputWidth,
            state.OutputHeight,
            state.TargetVideoBitDepth,
            state.OutputVideoBitrate,
            state.TargetVideoProfile,
            state.TargetVideoLevel,
            state.TargetFramerate,
            state.TargetPacketLength,
            state.TargetTimestamp,
            state.IsTargetAnamorphic,
            state.IsTargetInterlaced,
            state.TargetRefFrames,
            state.TargetVideoStreamCount,
            state.TargetAudioStreamCount,
            state.TargetVideoCodecTag,
            state.IsTargetAVC);

    if (mediaProfile != null)
    {
        state.MimeType = mediaProfile.MimeType;
    }

    if (!state.Request.Static)
    {
        var transcodingProfile = state.VideoRequest == null
            ? profile.GetAudioTranscodingProfile(state.OutputContainer, audioCodec)
            : profile.GetVideoTranscodingProfile(state.OutputContainer, audioCodec, videoCodec);

        if (transcodingProfile != null)
        {
            state.EstimateContentLength = transcodingProfile.EstimateContentLength;
            //state.EnableMpegtsM2TsMode = transcodingProfile.EnableMpegtsM2TsMode;
            state.TranscodeSeekInfo = transcodingProfile.TranscodeSeekInfo;

            if (state.VideoRequest != null)
            {
                state.VideoRequest.CopyTimestamps = transcodingProfile.CopyTimestamps;
                state.VideoRequest.EnableSubtitlesInManifest = transcodingProfile.EnableSubtitlesInManifest;
            }
        }
    }
}
/// <summary>
/// Gets the state.
/// </summary>
/// <param name="request">The request.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>StreamState.</returns>
protected async Task<StreamState> GetState(StreamRequest request, CancellationToken cancellationToken)
{
    ParseDlnaHeaders(request);

    if (!string.IsNullOrWhiteSpace(request.Params))
    {
        ParseParams(request);
    }

    var url = Request.PathInfo;

    if (string.IsNullOrEmpty(request.AudioCodec))
    {
        request.AudioCodec = InferAudioCodec(url);
    }

    var state = new StreamState(LiveTvManager, Logger)
    {
        Request = request,
        RequestedUrl = url
    };

    if (!string.IsNullOrWhiteSpace(request.AudioCodec))
    {
        state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
        state.Request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault();
    }

    var item = LibraryManager.GetItemById(request.Id);

    List<MediaStream> mediaStreams = null;

    state.ItemType = item.GetType().Name;

    if (item is ILiveTvRecording)
    {
        var recording = await LiveTvManager.GetInternalRecording(request.Id, cancellationToken).ConfigureAwait(false);

        state.VideoType = VideoType.VideoFile;
        state.IsInputVideo = string.Equals(recording.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);

        var path = recording.RecordingInfo.Path;
        var mediaUrl = recording.RecordingInfo.Url;

        var source = string.IsNullOrEmpty(request.MediaSourceId)
            ? recording.GetMediaSources(false).First()
            : recording.GetMediaSources(false).First(i => string.Equals(i.Id, request.MediaSourceId));

        mediaStreams = source.MediaStreams;

        // Just to prevent this from being null and causing other methods to fail
        state.MediaPath = string.Empty;

        if (!string.IsNullOrEmpty(path))
        {
            state.MediaPath = path;
            state.InputProtocol = MediaProtocol.File;
        }
        else if (!string.IsNullOrEmpty(mediaUrl))
        {
            state.MediaPath = mediaUrl;
            state.InputProtocol = MediaProtocol.Http;
        }
        else
        {
            // No media info, so this is probably needed
            state.DeInterlace = true;
        }

        if (recording.RecordingInfo.Status == RecordingStatus.InProgress)
        {
            state.ReadInputAtNativeFramerate = true;
        }

        state.RunTimeTicks = recording.RunTimeTicks;
        state.OutputAudioSync = "1000";
        state.InputVideoSync = "-1";
        state.InputAudioSync = "1";
        state.InputContainer = recording.Container;
    }
    else if (item is LiveTvChannel)
    {
        var channel = LiveTvManager.GetInternalChannel(request.Id);

        state.VideoType = VideoType.VideoFile;
        state.IsInputVideo = string.Equals(channel.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
        mediaStreams = new List<MediaStream>();

        state.ReadInputAtNativeFramerate = true;
        state.OutputAudioSync = "1000";
        state.DeInterlace = true;
        state.InputVideoSync = "-1";
        state.InputAudioSync = "1";

        // Just to prevent this from being null and causing other methods to fail
        state.MediaPath = string.Empty;
    }
    else if (item is IChannelMediaItem)
    {
        var mediaSource = await GetChannelMediaInfo(request.Id, request.MediaSourceId, cancellationToken).ConfigureAwait(false);

        state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
        state.InputProtocol = mediaSource.Protocol;
        state.MediaPath = mediaSource.Path;
        state.RunTimeTicks = item.RunTimeTicks;
        state.RemoteHttpHeaders = mediaSource.RequiredHttpHeaders;
        mediaStreams = mediaSource.MediaStreams;
    }
    else
    {
        var hasMediaSources = (IHasMediaSources)item;

        var mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
            ? hasMediaSources.GetMediaSources(false).First()
            : hasMediaSources.GetMediaSources(false).First(i => string.Equals(i.Id, request.MediaSourceId));

        mediaStreams = mediaSource.MediaStreams;

        state.MediaPath = mediaSource.Path;
        state.InputProtocol = mediaSource.Protocol;
        state.InputContainer = mediaSource.Container;

        if (item is Video)
        {
            state.IsInputVideo = true;

            if (mediaSource.VideoType.HasValue)
            {
                state.VideoType = mediaSource.VideoType.Value;
            }

            state.IsoType = mediaSource.IsoType;

            state.PlayableStreamFileNames = mediaSource.PlayableStreamFileNames.ToList();

            if (mediaSource.Timestamp.HasValue)
            {
                state.InputTimestamp = mediaSource.Timestamp.Value;
            }
        }

        state.RunTimeTicks = mediaSource.RunTimeTicks;
    }

    // If it's a wtv and we don't have media info, we will probably need to deinterlace
    if (string.Equals(state.InputContainer, "wtv", StringComparison.OrdinalIgnoreCase) && mediaStreams.Count == 0)
    {
        state.DeInterlace = true;
    }

    if (state.InputProtocol == MediaProtocol.Rtmp)
    {
        state.ReadInputAtNativeFramerate = true;
    }

    var videoRequest = request as VideoStreamRequest;

    AttachMediaStreamInfo(state, mediaStreams, videoRequest, url);

    state.SegmentLength = state.ReadInputAtNativeFramerate ? 5 : 7;
    state.HlsListSize = state.ReadInputAtNativeFramerate ? 100 : 1440;

    var container = Path.GetExtension(state.RequestedUrl);

    if (string.IsNullOrEmpty(container))
    {
        container = request.Static
            ? state.InputContainer
            : Path.GetExtension(GetOutputFilePath(state));
    }

    state.OutputContainer = (container ?? string.Empty).TrimStart('.');

    state.OutputAudioBitrate = GetAudioBitrateParam(state.Request, state.AudioStream);
    state.OutputAudioSampleRate = request.AudioSampleRate;

    state.OutputAudioCodec = GetAudioCodec(state.Request);

    state.OutputAudioChannels = GetNumAudioChannelsParam(state.Request, state.AudioStream, state.OutputAudioCodec);

    if (videoRequest != null)
    {
        state.OutputVideoCodec = GetVideoCodec(videoRequest);
        state.OutputVideoBitrate = GetVideoBitrateParamValue(state.VideoRequest, state.VideoStream);
    }

    ApplyDeviceProfileSettings(state);

    if (videoRequest != null)
    {
        if (state.VideoStream != null && CanStreamCopyVideo(videoRequest, state.VideoStream))
        {
            state.OutputVideoCodec = "copy";
        }

        if (state.AudioStream != null && CanStreamCopyAudio(request, state.AudioStream, state.SupportedAudioCodecs))
        {
            state.OutputAudioCodec = "copy";
        }
    }

    state.OutputFilePath = GetOutputFilePath(state);

    return state;
}
/// <summary>
/// Adds the dlna headers.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="responseHeaders">The response headers.</param>
/// <param name="isStaticallyStreamed">if set to <c>true</c> [is statically streamed].</param>
protected void AddDlnaHeaders(StreamState state, IDictionary<string, string> responseHeaders, bool isStaticallyStreamed)
{
    if (!state.EnableDlnaHeaders)
    {
        return;
    }

    var profile = state.DeviceProfile;

    var transferMode = GetHeader("transferMode.dlna.org");
    responseHeaders["transferMode.dlna.org"] = string.IsNullOrEmpty(transferMode) ? "Streaming" : transferMode;
    responseHeaders["realTimeInfo.dlna.org"] = "DLNA.ORG_TLAG=*";

    if (state.RunTimeTicks.HasValue)
    {
        if (string.Equals(GetHeader("getMediaInfo.sec"), "1", StringComparison.OrdinalIgnoreCase))
        {
            var ms = TimeSpan.FromTicks(state.RunTimeTicks.Value).TotalMilliseconds;
            responseHeaders["MediaInfo.sec"] = string.Format(
                CultureInfo.InvariantCulture,
                "SEC_Duration={0};",
                Convert.ToInt32(ms));
        }

        if (!isStaticallyStreamed && profile != null)
        {
            AddTimeSeekResponseHeaders(state, responseHeaders);
        }
    }

    if (profile == null)
    {
        profile = DlnaManager.GetDefaultProfile();
    }

    var audioCodec = state.ActualOutputAudioCodec;

    if (state.VideoRequest == null)
    {
        responseHeaders["contentFeatures.dlna.org"] = new ContentFeatureBuilder(profile).BuildAudioHeader(
            state.OutputContainer,
            audioCodec,
            state.OutputAudioBitrate,
            state.OutputAudioSampleRate,
            state.OutputAudioChannels,
            state.OutputAudioBitDepth,
            isStaticallyStreamed,
            state.RunTimeTicks,
            state.TranscodeSeekInfo);
    }
    else
    {
        var videoCodec = state.ActualOutputVideoCodec;

        responseHeaders["contentFeatures.dlna.org"] = new ContentFeatureBuilder(profile).BuildVideoHeader(
            state.OutputContainer,
            videoCodec,
            audioCodec,
            state.OutputWidth,
            state.OutputHeight,
            state.TargetVideoBitDepth,
            state.OutputVideoBitrate,
            state.TargetTimestamp,
            isStaticallyStreamed,
            state.RunTimeTicks,
            state.TargetVideoProfile,
            state.TargetVideoLevel,
            state.TargetFramerate,
            state.TargetPacketLength,
            state.TranscodeSeekInfo,
            state.IsTargetAnamorphic,
            state.IsTargetInterlaced,
            state.TargetRefFrames,
            state.TargetVideoStreamCount,
            state.TargetAudioStreamCount,
            state.TargetVideoCodecTag,
            state.IsTargetAVC).FirstOrDefault() ?? string.Empty;
    }
}
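// Illustrative only: for a non-static video stream of a 2-hour item with a known device
// profile, the headers set above include
//
//   transferMode.dlna.org: Streaming
//   realTimeInfo.dlna.org: DLNA.ORG_TLAG=*
//   TimeSeekRange.dlna.org: npt=0-7200/7200
//   contentFeatures.dlna.org: <built by ContentFeatureBuilder.BuildVideoHeader>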
private void AttachMediaStreamInfo(StreamState state, List<MediaStream> mediaStreams, VideoStreamRequest videoRequest, string requestedUrl)
{
    if (videoRequest != null)
    {
        if (string.IsNullOrEmpty(videoRequest.VideoCodec))
        {
            videoRequest.VideoCodec = InferVideoCodec(requestedUrl);
        }

        state.VideoStream = GetMediaStream(mediaStreams, videoRequest.VideoStreamIndex, MediaStreamType.Video);
        state.SubtitleStream = GetMediaStream(mediaStreams, videoRequest.SubtitleStreamIndex, MediaStreamType.Subtitle, false);
        state.AudioStream = GetMediaStream(mediaStreams, videoRequest.AudioStreamIndex, MediaStreamType.Audio);

        if (state.SubtitleStream != null && !state.SubtitleStream.IsExternal)
        {
            state.InternalSubtitleStreamOffset = mediaStreams.Where(i => i.Type == MediaStreamType.Subtitle && !i.IsExternal).ToList().IndexOf(state.SubtitleStream);
        }

        if (state.VideoStream != null && state.VideoStream.IsInterlaced)
        {
            state.DeInterlace = true;
        }

        EnforceResolutionLimit(state, videoRequest);
    }
    else
    {
        state.AudioStream = GetMediaStream(mediaStreams, null, MediaStreamType.Audio, true);
    }

    state.AllMediaStreams = mediaStreams;
}
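// Sketch of InternalSubtitleStreamOffset (hypothetical stream list): with streams
// [0: video, 1: audio, 2: subtitle (external), 3: subtitle, 4: subtitle] and
// SubtitleStreamIndex = 4, the offset among internal subtitle streams is 1 --
// the zero-based value later handed to ffmpeg's "si=" selector in GetTextSubtitleParam.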
private string GetInputPathArgument(StreamState state)
{
    var protocol = state.InputProtocol;
    var mediaPath = state.MediaPath ?? string.Empty;

    var inputPath = new[] { mediaPath };

    if (state.IsInputVideo)
    {
        if (!(state.VideoType == VideoType.Iso && state.IsoMount == null))
        {
            inputPath = MediaEncoderHelpers.GetInputArgument(FileSystem, mediaPath, state.InputProtocol, state.IsoMount, state.PlayableStreamFileNames);
        }
    }

    return MediaEncoder.GetInputArgument(inputPath, protocol);
}
private async Task AcquireResources(StreamState state, CancellationTokenSource cancellationTokenSource)
{
    if (state.VideoType == VideoType.Iso && state.IsoType.HasValue && IsoManager.CanMount(state.MediaPath))
    {
        state.IsoMount = await IsoManager.Mount(state.MediaPath, cancellationTokenSource.Token).ConfigureAwait(false);
    }

    if (state.MediaSource.RequiresOpening && string.IsNullOrWhiteSpace(state.Request.LiveStreamId))
    {
        var liveStreamResponse = await MediaSourceManager.OpenLiveStream(new LiveStreamRequest
        {
            OpenToken = state.MediaSource.OpenToken
        }, false, cancellationTokenSource.Token).ConfigureAwait(false);

        AttachMediaSourceInfo(state, liveStreamResponse.MediaSource, state.VideoRequest, state.RequestedUrl);

        if (state.VideoRequest != null)
        {
            TryStreamCopy(state, state.VideoRequest);
        }
    }

    if (state.MediaSource.BufferMs.HasValue)
    {
        await Task.Delay(state.MediaSource.BufferMs.Value, cancellationTokenSource.Token).ConfigureAwait(false);
    }
}
/// <summary>
/// Enforces the resolution limit.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="videoRequest">The video request.</param>
private void EnforceResolutionLimit(StreamState state, VideoStreamRequest videoRequest)
{
    // Switch the incoming params to be ceilings rather than fixed values
    videoRequest.MaxWidth = videoRequest.MaxWidth ?? videoRequest.Width;
    videoRequest.MaxHeight = videoRequest.MaxHeight ?? videoRequest.Height;

    videoRequest.Width = null;
    videoRequest.Height = null;
}
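// Before/after sketch (hypothetical request): { Width = 1920, Height = 1080,
// MaxWidth = null, MaxHeight = null } becomes { Width = null, Height = null,
// MaxWidth = 1920, MaxHeight = 1080 }, so downstream sizing treats the request
// as an upper bound rather than a forced output resolution.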
/// <summary>
/// Starts the ffmpeg transcoding process.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputPath">The output path.</param>
/// <param name="cancellationTokenSource">The cancellation token source.</param>
/// <param name="workingDirectory">The working directory.</param>
/// <returns>Task.</returns>
protected async Task<TranscodingJob> StartFfMpeg(StreamState state, string outputPath, CancellationTokenSource cancellationTokenSource, string workingDirectory = null)
{
    FileSystem.CreateDirectory(Path.GetDirectoryName(outputPath));

    await AcquireResources(state, cancellationTokenSource).ConfigureAwait(false);

    var transcodingId = Guid.NewGuid().ToString("N");
    var commandLineArgs = GetCommandLineArguments(outputPath, state);

    if (ApiEntryPoint.Instance.GetEncodingOptions().EnableDebugLogging)
    {
        commandLineArgs = "-loglevel debug " + commandLineArgs;
    }

    var process = new Process
    {
        StartInfo = new ProcessStartInfo
        {
            CreateNoWindow = true,
            UseShellExecute = false,

            // Must consume both stdout and stderr or deadlocks may occur
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            RedirectStandardInput = true,

            FileName = MediaEncoder.EncoderPath,
            Arguments = commandLineArgs,

            WindowStyle = ProcessWindowStyle.Hidden,
            ErrorDialog = false
        },

        EnableRaisingEvents = true
    };

    if (!string.IsNullOrWhiteSpace(workingDirectory))
    {
        process.StartInfo.WorkingDirectory = workingDirectory;
    }

    var transcodingJob = ApiEntryPoint.Instance.OnTranscodeBeginning(outputPath,
        state.Request.PlaySessionId,
        state.MediaSource.LiveStreamId,
        transcodingId,
        TranscodingJobType,
        process,
        state.Request.DeviceId,
        state,
        cancellationTokenSource);

    var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments;
    Logger.Info(commandLineLogMessage);

    var logFilePath = Path.Combine(ServerConfigurationManager.ApplicationPaths.LogDirectoryPath, "transcode-" + Guid.NewGuid() + ".txt");
    FileSystem.CreateDirectory(Path.GetDirectoryName(logFilePath));

    // FFMpeg writes debug/error info to stderr. This is useful when debugging so let's put it in the log directory.
    state.LogFileStream = FileSystem.GetFileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, true);

    var commandLineLogMessageBytes = Encoding.UTF8.GetBytes(Request.AbsoluteUri + Environment.NewLine + Environment.NewLine + JsonSerializer.SerializeToString(state.MediaSource) + Environment.NewLine + Environment.NewLine + commandLineLogMessage + Environment.NewLine + Environment.NewLine);
    await state.LogFileStream.WriteAsync(commandLineLogMessageBytes, 0, commandLineLogMessageBytes.Length, cancellationTokenSource.Token).ConfigureAwait(false);

    process.Exited += (sender, args) => OnFfMpegProcessExited(process, state, outputPath);

    try
    {
        process.Start();
    }
    catch (Exception ex)
    {
        Logger.ErrorException("Error starting ffmpeg", ex);

        ApiEntryPoint.Instance.OnTranscodeFailedToStart(outputPath, TranscodingJobType, state);

        throw;
    }

    // MUST read both stdout and stderr asynchronously or a deadlock may occur
    process.BeginOutputReadLine();

    // Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback
    StartStreamingLog(state, process.StandardError.BaseStream, state.LogFileStream);

    // Wait for the file to exist before proceeding
    while (!FileSystem.FileExists(state.WaitForPath ?? outputPath) && !transcodingJob.HasExited)
    {
        await Task.Delay(100, cancellationTokenSource.Token).ConfigureAwait(false);
    }

    if (state.IsInputVideo && transcodingJob.Type == TranscodingJobType.Progressive)
    {
        await Task.Delay(1000, cancellationTokenSource.Token).ConfigureAwait(false);

        if (state.ReadInputAtNativeFramerate)
        {
            await Task.Delay(1500, cancellationTokenSource.Token).ConfigureAwait(false);
        }
    }

    StartThrottler(state, transcodingJob);

    return transcodingJob;
}
/// <summary>
/// Gets the number of threads.
/// </summary>
/// <returns>System.Int32.</returns>
/// <exception cref="System.Exception">Unrecognized MediaEncodingQuality value.</exception>
protected int GetNumberOfThreads(StreamState state, bool isWebm)
{
    // Use more when this is true. -re will keep cpu usage under control
    if (state.ReadInputAtNativeFramerate)
    {
        if (isWebm)
        {
            return Math.Max(Environment.ProcessorCount - 1, 2);
        }

        return 0;
    }

    // Webm: http://www.webmproject.org/docs/encoder-parameters/
    // The decoder will usually automatically use an appropriate number of threads according to how many cores
    // are available but it can only use multiple threads for the coefficient data if the encoder selected
    // --token-parts > 0 at encode time.
    switch (GetQualitySetting())
    {
        case EncodingQuality.HighSpeed:
            return 2;
        case EncodingQuality.HighQuality:
            return 2;
        case EncodingQuality.MaxQuality:
            return isWebm ? Math.Max(Environment.ProcessorCount - 1, 2) : 0;
        default:
            throw new Exception("Unrecognized MediaEncodingQuality value.");
    }
}
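// Resulting thread counts at a glance (0 lets ffmpeg choose automatically);
// the 8-core figures below are illustrative:
//
//   ReadInputAtNativeFramerate:  webm -> 7, otherwise -> 0
//   HighSpeed / HighQuality:     2
//   MaxQuality:                  webm -> 7, otherwise -> 0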
protected string GetVideoBitrateParam(StreamState state, string videoCodec, bool isHls)
{
    var bitrate = state.OutputVideoBitrate;

    if (bitrate.HasValue)
    {
        var hasFixedResolution = state.VideoRequest.HasFixedResolution;

        if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
        {
            if (hasFixedResolution)
            {
                return string.Format(" -minrate:v ({0}*.90) -maxrate:v ({0}*1.10) -bufsize:v {0} -b:v {0}", bitrate.Value.ToString(UsCulture));
            }

            // With vpx, when crf is used, b:v becomes a max rate
            // (https://trac.ffmpeg.org/wiki/vpxEncodingGuide). But with higher-bitrate source files -b:v causes
            // judder, so limit the bitrate without letting it "saturate" -- constrain it upward only, not downward.
            return string.Format(" -maxrate:v {0} -bufsize:v ({0}*2) -b:v {0}", bitrate.Value.ToString(UsCulture));
        }

        if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
        {
            return string.Format(" -b:v {0}", bitrate.Value.ToString(UsCulture));
        }

        // H264
        if (hasFixedResolution)
        {
            if (isHls)
            {
                return string.Format(" -b:v {0} -maxrate ({0}*.80) -bufsize {0}", bitrate.Value.ToString(UsCulture));
            }

            return string.Format(" -b:v {0}", bitrate.Value.ToString(UsCulture));
        }

        return string.Format(" -maxrate {0} -bufsize {1}", bitrate.Value.ToString(UsCulture), (bitrate.Value * 2).ToString(UsCulture));
    }

    return string.Empty;
}
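// Emitted arguments for a hypothetical 3 Mbps target (bitrate.Value = 3000000):
//
//   libvpx, non-fixed resolution: " -maxrate:v 3000000 -bufsize:v (3000000*2) -b:v 3000000"
//   msmpeg4:                      " -b:v 3000000"
//   h264, non-fixed resolution:   " -maxrate 3000000 -bufsize 6000000"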
protected string GetAudioFilterParam(StreamState state, bool isHls)
{
    var volParam = string.Empty;
    var audioSampleRate = string.Empty;

    var channels = state.OutputAudioChannels;

    // Boost volume when downmixing from surround (more than 5 channels) to stereo,
    // using the configured DownMixAudioBoost factor
    if (channels.HasValue && channels.Value <= 2)
    {
        if (state.AudioStream != null && state.AudioStream.Channels.HasValue && state.AudioStream.Channels.Value > 5)
        {
            volParam = ",volume=" + ServerConfigurationManager.Configuration.DownMixAudioBoost.ToString(UsCulture);
        }
    }

    if (state.OutputAudioSampleRate.HasValue)
    {
        audioSampleRate = state.OutputAudioSampleRate.Value + ":";
    }

    var adelay = isHls ? "adelay=1," : string.Empty;

    var pts = string.Empty;

    if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream)
    {
        var seconds = TimeSpan.FromTicks(state.Request.StartTimeTicks ?? 0).TotalSeconds;

        pts = string.Format(",asetpts=PTS-{0}/TB", Math.Round(seconds).ToString(UsCulture));
    }

    return string.Format("-af \"{0}aresample={1}async={4}{2}{3}\"", adelay, audioSampleRate, volParam, pts, state.OutputAudioSync);
}
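// Sample of the assembled filter (hypothetical values: HLS output, 44100 Hz target,
// 6ch -> 2ch downmix with DownMixAudioBoost = 2, text subtitles burned in from a
// 600-second start position, OutputAudioSync = "1"):
//
//   -af "adelay=1,aresample=44100:async=1,volume=2,asetpts=PTS-600/TB"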
/// <summary>
/// Gets the output file extension.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected virtual string GetOutputFileExtension(StreamState state)
{
    return Path.GetExtension(state.RequestedUrl);
}
/// <summary>
/// Gets the text subtitle param.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>System.String.</returns>
protected string GetTextSubtitleParam(StreamState state, CancellationToken cancellationToken)
{
    var seconds = Math.Round(TimeSpan.FromTicks(state.Request.StartTimeTicks ?? 0).TotalSeconds);

    if (state.SubtitleStream.IsExternal)
    {
        var subtitlePath = state.SubtitleStream.Path;
        var charsetParam = string.Empty;

        if (!string.IsNullOrEmpty(state.SubtitleStream.Language))
        {
            var charenc = SubtitleEncoder.GetSubtitleFileCharacterSet(subtitlePath, state.SubtitleStream.Language);

            if (!string.IsNullOrEmpty(charenc))
            {
                charsetParam = ":charenc=" + charenc;
            }
        }

        // TODO: Perhaps also use original_size=1920x800 ??
        return string.Format("subtitles=filename='{0}'{1},setpts=PTS-{2}/TB",
            subtitlePath.Replace('\\', '/').Replace(":/", "\\:/"),
            charsetParam,
            seconds.ToString(UsCulture));
    }

    return string.Format("subtitles='{0}:si={1}',setpts=PTS-{2}/TB",
        state.MediaPath.Replace('\\', '/').Replace(":/", "\\:/"),
        state.InternalSubtitleStreamOffset.ToString(UsCulture),
        seconds.ToString(UsCulture));
}
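// Example filter strings (hypothetical paths): an external file at "D:\subs\movie.srt"
// with detected charset windows-1252 and a 10-second start offset yields
//
//   subtitles=filename='D\:/subs/movie.srt':charenc=windows-1252,setpts=PTS-10/TB
//
// while an embedded subtitle uses the media path plus the "si=" internal stream
// offset computed in AttachMediaStreamInfo.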
/// <summary>
/// Gets the user agent param.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
private string GetUserAgentParam(StreamState state)
{
    string useragent = null;

    state.RemoteHttpHeaders.TryGetValue("User-Agent", out useragent);

    if (!string.IsNullOrWhiteSpace(useragent))
    {
        return "-user-agent \"" + useragent + "\"";
    }

    return string.Empty;
}
/// <summary>
/// Gets the probe size argument.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
private string GetProbeSizeArgument(StreamState state)
{
    if (state.PlayableStreamFileNames.Count > 0)
    {
        return MediaEncoder.GetProbeSizeArgument(state.PlayableStreamFileNames.ToArray(), state.InputProtocol);
    }

    return MediaEncoder.GetProbeSizeArgument(new[] { state.MediaPath }, state.InputProtocol);
}
/// <summary>
/// Called when the ffmpeg process exits.
/// </summary>
/// <param name="process">The process.</param>
/// <param name="state">The state.</param>
/// <param name="outputPath">The output path.</param>
private void OnFfMpegProcessExited(Process process, StreamState state, string outputPath)
{
    var job = ApiEntryPoint.Instance.GetTranscodingJob(outputPath, TranscodingJobType);

    if (job != null)
    {
        job.HasExited = true;
    }

    Logger.Debug("Disposing stream resources");
    state.Dispose();

    try
    {
        Logger.Info("FFMpeg exited with code {0}", process.ExitCode);
    }
    catch
    {
        Logger.Error("FFMpeg exited, but the exit code could not be read.");
    }

    // Note: do not dispose the process here; doing so causes the Exited handler to fire twice.
}
protected double? GetFramerateParam(StreamState state)
{
    if (state.VideoRequest != null)
    {
        if (state.VideoRequest.Framerate.HasValue)
        {
            return state.VideoRequest.Framerate.Value;
        }

        var maxrate = state.VideoRequest.MaxFramerate ?? 23.97602;

        if (state.VideoStream != null)
        {
            var contentRate = state.VideoStream.AverageFrameRate ?? state.VideoStream.RealFrameRate;

            if (contentRate.HasValue && contentRate.Value > maxrate)
            {
                return maxrate;
            }
        }
    }

    return null;
}
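// Numeric sketch: with no explicit Framerate and no MaxFramerate on the request,
// a 29.97 fps source exceeds the 23.97602 default ceiling and the method returns
// 23.97602; a 23.976 fps source returns null, leaving the source rate untouched.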
private async void StartStreamingLog(StreamState state, Stream source, Stream target)
{
    try
    {
        using (var reader = new StreamReader(source))
        {
            while (!reader.EndOfStream)
            {
                var line = await reader.ReadLineAsync().ConfigureAwait(false);

                ParseLogLine(line, state);

                var bytes = Encoding.UTF8.GetBytes(Environment.NewLine + line);

                await target.WriteAsync(bytes, 0, bytes.Length).ConfigureAwait(false);
            }
        }
    }
    catch (Exception ex)
    {
        Logger.ErrorException("Error reading ffmpeg log", ex);
    }
}
/// <summary>
/// Gets the output file path.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
private string GetOutputFilePath(StreamState state)
{
    var folder = ServerConfigurationManager.ApplicationPaths.TranscodingTempPath;

    var outputFileExtension = GetOutputFileExtension(state);

    var data = GetCommandLineArguments("dummy\\dummy", state);

    data += "-" + (state.Request.DeviceId ?? string.Empty);

    return Path.Combine(folder, data.GetMD5().ToString("N") + (outputFileExtension ?? string.Empty).ToLower());
}
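// Path shape (illustrative): the command line built for a dummy output, plus the
// device id, is hashed, so the result looks like
// "<TranscodingTempPath>\<md5-of-args-and-deviceid>.mkv" -- stable for identical
// requests and distinct per device, which keeps concurrent transcodes from colliding.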
private void ParseLogLine(string line, StreamState state)
{
    float? framerate = null;
    double? percent = null;

    var parts = line.Split(' ');

    var totalMs = state.RunTimeTicks.HasValue
        ? TimeSpan.FromTicks(state.RunTimeTicks.Value).TotalMilliseconds
        : 0;

    var startMs = state.Request.StartTimeTicks.HasValue
        ? TimeSpan.FromTicks(state.Request.StartTimeTicks.Value).TotalMilliseconds
        : 0;

    for (var i = 0; i < parts.Length; i++)
    {
        var part = parts[i];

        if (string.Equals(part, "fps=", StringComparison.OrdinalIgnoreCase) && (i + 1 < parts.Length))
        {
            var rate = parts[i + 1];
            float val;

            if (float.TryParse(rate, NumberStyles.Any, UsCulture, out val))
            {
                framerate = val;
            }
        }
        else if (state.RunTimeTicks.HasValue && part.StartsWith("time=", StringComparison.OrdinalIgnoreCase))
        {
            var time = part.Split(new[] { '=' }, 2).Last();
            TimeSpan val;

            if (TimeSpan.TryParse(time, UsCulture, out val))
            {
                var currentMs = startMs + val.TotalMilliseconds;

                var percentVal = currentMs / totalMs;
                percent = 100 * percentVal;
            }
        }
    }

    if (framerate.HasValue || percent.HasValue)
    {
        ApiEntryPoint.Instance.ReportTranscodingProgress(state, framerate, percent);
    }
}
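// Walkthrough on a representative ffmpeg status line (90-minute item, no start offset):
//
//   "frame= 1000 fps= 48 q=28.0 size=    4096kB time=00:09:00.00 bitrate= 622.3kbits/s"
//
// The "fps=" token is followed by "48", so framerate = 48; "time=00:09:00.00"
// parses to 540000 ms, giving percent = 100 * 540000 / 5400000 = 10.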