protected override async Task<TranscodeContext> TranscodeAudioAsync(string clientId, AudioTranscoding audio, double timeStart, double timeDuration, bool waitForBuffer)
{
  FFMpegTranscodeContext context = new FFMpegTranscodeContext(_cacheEnabled, _cachePath);
  context.TargetDuration = audio.SourceDuration.Value;
  if (timeStart == 0 && audio.TargetIsLive == false && _cacheEnabled)
  {
    timeDuration = 0;
    context.Partial = false;
  }
  else
  {
    audio.TargetIsLive = true;
    context.Partial = true;
  }
  if (audio.TargetAudioContainer == AudioContainer.Unknown)
  {
    audio.TargetAudioContainer = audio.SourceAudioContainer;
  }

  string transcodingFile = GetTranscodingAudioFileName(audio, timeStart);
  transcodingFile = Path.Combine(_cachePath, transcodingFile);

  if (File.Exists(transcodingFile))
  {
    //Use non-partial context if possible
    TranscodeContext existingContext = await GetExistingTranscodeContextAsync(clientId, audio.TranscodeId).ConfigureAwait(false);
    if (existingContext != null)
    {
      existingContext.TargetFile = transcodingFile;
      if (existingContext.Stream == null)
      {
        existingContext.AssignStream(await GetFileStreamAsync(transcodingFile).ConfigureAwait(false));
      }
      if (existingContext.CurrentDuration.TotalSeconds == 0)
      {
        double bitrate = 0;
        if (audio.TargetAudioBitrate.HasValue)
        {
          bitrate = audio.TargetAudioBitrate.Value;
        }
        else if (audio.SourceAudioBitrate.HasValue)
        {
          bitrate = audio.SourceAudioBitrate.Value;
        }
        bitrate *= 1024; //Bitrate in bits/s
        if (bitrate > 0)
        {
          long startByte = Convert.ToInt64((bitrate * timeStart) / 8.0);
          if (existingContext.Stream.Length > startByte)
          {
            return existingContext;
          }
        }
      }
      else
      {
        if (existingContext.CurrentDuration.TotalSeconds > timeStart)
        {
          return existingContext;
        }
      }
    }
    else
    {
      //Presume that it is a cached file
      TouchFile(transcodingFile);
      context.Partial = false;
      context.TargetFile = transcodingFile;
      context.AssignStream(await GetFileStreamAsync(transcodingFile).ConfigureAwait(false));
      return context;
    }
  }

  FFMpegTranscodeData data = new FFMpegTranscodeData(_cachePath)
  {
    TranscodeId = audio.TranscodeId,
    ClientId = clientId
  };
  if (string.IsNullOrEmpty(audio.TranscoderBinPath) == false)
  {
    data.TranscoderBinPath = audio.TranscoderBinPath;
  }
  if (string.IsNullOrEmpty(audio.TranscoderArguments) == false)
  {
    data.ConcatedFileInput = audio.ConcatSourceMediaPaths;
    data.TranscoderArguments = audio.TranscoderArguments;
    data.InputMediaFilePaths = audio.SourceMediaPaths;
    data.OutputFilePath = transcodingFile;
    context.TargetFile = transcodingFile;
  }
  else
  {
    _ffMpegCommandline.InitTranscodingParameters(audio.ConcatSourceMediaPaths, audio.SourceMediaPaths, ref data);
    _ffMpegCommandline.AddTranscodingThreadsParameters(true, ref data);
    _ffMpegCommandline.AddTimeParameters(audio, timeStart, timeDuration, data);
    _ffMpegCommandline.AddAudioParameters(audio, data);

    context.TargetFile = await _ffMpegCommandline.AddTargetAudioFormatAndOutputFileParametersAsync(audio, transcodingFile, data).ConfigureAwait(false);

    data.OutputArguments.Add("-vn");
  }

  _logger.Debug("FFMpegMediaConverter: Invoking transcoder to transcode audio file '{0}' for transcode '{1}'", audio.SourceMediaPaths, audio.TranscodeId);
  context.Start();
  context.AssignStream(await ExecuteTranscodingProcessAsync(data, context, waitForBuffer).ConfigureAwait(false));
  return context;
}
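// Note on the cached-resume check in TranscodeAudioAsync above: when the existing context has not yet
// reported a duration, the code estimates the byte offset that corresponds to the requested start time
// and only reuses the existing stream if it already reaches that offset. The bitrate is taken in kbit/s
// and scaled by 1024 to bits/s. As an illustrative example (the figures are not from the source), a
// 192 kbit/s target and a 30 s start offset give:
//   startByte = (192 * 1024 bits/s * 30 s) / 8 = 737,280 bytes
// so the cached stream is reused only if it is already longer than ~737 KB.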
protected override async Task<TranscodeContext> TranscodeImageAsync(string clientId, ImageTranscoding image, bool waitForBuffer)
{
  FFMpegTranscodeContext context = new FFMpegTranscodeContext(_cacheEnabled, _cachePath);
  context.Partial = false;

  string transcodingFile = Path.Combine(_cachePath, GetTranscodingImageFileName(image));
  if (File.Exists(transcodingFile) == true)
  {
    //Use existing context if possible
    TranscodeContext existingContext = await GetExistingTranscodeContextAsync(clientId, image.TranscodeId).ConfigureAwait(false);
    if (existingContext != null)
    {
      existingContext.TargetFile = transcodingFile;
      if (existingContext.Stream == null)
      {
        existingContext.AssignStream(await GetFileStreamAsync(transcodingFile).ConfigureAwait(false));
      }
      return existingContext;
    }
    else
    {
      //Presume that it is a cached file
      TouchFile(transcodingFile);
      context.Partial = false;
      context.TargetFile = transcodingFile;
      context.AssignStream(await GetFileStreamAsync(transcodingFile).ConfigureAwait(false));
      return context;
    }
  }

  FFMpegTranscodeData data = new FFMpegTranscodeData(_cachePath)
  {
    TranscodeId = image.TranscodeId,
    ClientId = clientId
  };
  if (string.IsNullOrEmpty(image.TranscoderBinPath) == false)
  {
    data.TranscoderBinPath = image.TranscoderBinPath;
  }
  if (string.IsNullOrEmpty(image.TranscoderArguments) == false)
  {
    data.ConcatedFileInput = image.ConcatSourceMediaPaths;
    data.TranscoderArguments = image.TranscoderArguments;
    data.InputMediaFilePaths = image.SourceMediaPaths;
  }
  else
  {
    _ffMpegCommandline.InitTranscodingParameters(image.ConcatSourceMediaPaths, image.SourceMediaPaths, ref data);
    _ffMpegCommandline.AddTranscodingThreadsParameters(true, ref data);
    _ffMpegCommandline.AddImageParameters(image, data);

    data.InputArguments[data.FirstResourceIndex].Add("-f image2pipe"); //pipe works with network drives
    data.OutputArguments.Add("-f image2");
  }
  data.OutputFilePath = transcodingFile;
  context.TargetFile = transcodingFile;

  _logger.Debug("FFMpegMediaConverter: Invoking transcoder to transcode image file '{0}' for transcode '{1}'", image.SourceMediaPaths, image.TranscodeId);
  context.Start();
  context.AssignStream(await ExecuteTranscodingProcessAsync(data, context, waitForBuffer).ConfigureAwait(false));
  return context;
}
protected override async Task<TranscodeContext> TranscodeVideoAsync(string clientId, VideoTranscoding video, double timeStart, double timeDuration, bool waitForBuffer)
{
  FFMpegTranscodeContext context = new FFMpegTranscodeContext(_cacheEnabled, _cachePath);
  context.TargetDuration = video.SourceMediaDuration;
  if (timeStart == 0 && video.TargetIsLive == false && _cacheEnabled)
  {
    timeDuration = 0;
    context.Partial = false;
  }
  else if (video.TargetVideoContainer == VideoContainer.Hls)
  {
    context.Partial = true;
  }
  else
  {
    video.TargetIsLive = true;
    context.Partial = true;
  }
  if (video.TargetVideoContainer == VideoContainer.Unknown)
  {
    video.TargetVideoContainer = video.SourceVideoContainer;
  }

  bool embeddedSupported = false;
  SubtitleCodec embeddedSubCodec = SubtitleCodec.Unknown;
  if (video.TargetSubtitleSupport == SubtitleSupport.Embedded)
  {
    if (video.TargetVideoContainer == VideoContainer.Matroska)
    {
      embeddedSupported = true;
      embeddedSubCodec = SubtitleCodec.Ass;
      video.TargetSubtitleCodec = SubtitleCodec.Ass;
    }
    else if (video.TargetVideoContainer == VideoContainer.Mp4)
    {
      embeddedSupported = true;
      embeddedSubCodec = SubtitleCodec.MovTxt;
      video.TargetSubtitleCodec = SubtitleCodec.MovTxt;
    }
    else if (video.TargetVideoContainer == VideoContainer.Hls)
    {
      embeddedSupported = true;
      embeddedSubCodec = SubtitleCodec.WebVtt;
      video.TargetSubtitleCodec = SubtitleCodec.WebVtt;
    }
    else if (video.TargetVideoContainer == VideoContainer.Avi)
    {
      embeddedSupported = true;
      embeddedSubCodec = SubtitleCodec.Srt;
      video.TargetSubtitleCodec = SubtitleCodec.Srt;
    }
    //else if (video.TargetVideoContainer == VideoContainer.Mpeg2Ts)
    //{
    //  embeddedSupported = true;
    //  embeddedSubCodec = SubtitleCodec.DvbSub;
    //  video.TargetSubtitleCodec = SubtitleCodec.VobSub;
    //}
    else
    {
      _logger.Debug("FFMpegMediaConverter: Container {0} does not support embedded subtitles", video.TargetVideoContainer);
    }
  }
  video.TargetSubtitleMime = SubtitleHelper.GetSubtitleMime(video.TargetSubtitleCodec);
  video.PreferredSourceSubtitles = await GetSubtitlesAsync(clientId, video, timeStart).ConfigureAwait(false);

  string transcodingFile = GetTranscodingVideoFileName(video, timeStart, embeddedSupported);
  transcodingFile = Path.Combine(_cachePath, transcodingFile);

  if (File.Exists(transcodingFile))
  {
    //Use non-partial transcode if possible
    TranscodeContext existingContext = await GetExistingTranscodeContextAsync(clientId, video.TranscodeId).ConfigureAwait(false);
    if (existingContext != null)
    {
      existingContext.TargetFile = transcodingFile;
      if (existingContext.Stream == null)
      {
        existingContext.AssignStream(await GetFileStreamAsync(transcodingFile).ConfigureAwait(false));
      }
      if (existingContext.CurrentDuration.TotalSeconds == 0)
      {
        double bitrate = 0;
        if (video.TargetVideoBitrate.HasValue && video.TargetAudioBitrate.HasValue)
        {
          bitrate = video.TargetVideoBitrate.Value + video.TargetAudioBitrate.Value;
        }
        else if (video.SourceVideoStream.Bitrate.HasValue && video.SourceAudioStreams.Any(a => a.Bitrate > 0))
        {
          bitrate = video.SourceVideoStream.Bitrate.Value + video.SourceAudioStreams.Max(a => a.Bitrate ?? 0);
        }
        bitrate *= 1024; //Bitrate in bits/s
        if (bitrate > 0)
        {
          long startByte = Convert.ToInt64((bitrate * timeStart) / 8.0);
          if (existingContext.Stream.Length > startByte)
          {
            return existingContext;
          }
        }
      }
      else
      {
        if (existingContext.CurrentDuration.TotalSeconds > timeStart)
        {
          return existingContext;
        }
      }
    }
    else
    {
      //Presume that it is a cached file
      TouchFile(transcodingFile);
      context.Partial = false;
      context.TargetFile = transcodingFile;
      context.AssignStream(await GetFileStreamAsync(transcodingFile).ConfigureAwait(false));
      return context;
    }
  }

  if (video.TargetVideoContainer == VideoContainer.Hls)
  {
    long requestedSegmentSequence = Convert.ToInt64(timeStart / HLSSegmentTimeInSeconds);
    if (requestedSegmentSequence > 0)
    {
      requestedSegmentSequence--; //1 segment file margin
    }
    string pathName = FFMpegPlaylistManifest.GetPlaylistFolderFromTranscodeFile(_cachePath, transcodingFile);
    string playlist = Path.Combine(pathName, PlaylistManifest.PLAYLIST_MANIFEST_FILE_NAME);
    string segmentFile = Path.Combine(pathName, requestedSegmentSequence.ToString("00000") + ".ts");
    if (File.Exists(playlist) == true && File.Exists(segmentFile) == true)
    {
      //Use existing context if possible
      TranscodeContext existingContext = await GetExistingTranscodeContextAsync(clientId, video.TranscodeId).ConfigureAwait(false);
      if (existingContext != null)
      {
        if (existingContext.LastSegment > requestedSegmentSequence)
        {
          existingContext.TargetFile = playlist;
          existingContext.SegmentDir = pathName;
          if (existingContext.Stream == null)
          {
            existingContext.AssignStream(await GetFileStreamAsync(playlist).ConfigureAwait(false));
          }
          existingContext.HlsBaseUrl = video.HlsBaseUrl;
          return existingContext;
        }
      }
      else
      {
        //Presume that it is a cached file
        TouchDirectory(pathName);
        context.Partial = false;
        context.TargetFile = playlist;
        context.SegmentDir = pathName;
        context.HlsBaseUrl = video.HlsBaseUrl;
        context.AssignStream(await GetFileStreamAsync(playlist).ConfigureAwait(false));
        return context;
      }
    }
  }

  FFMpegTranscodeData data = new FFMpegTranscodeData(_cachePath)
  {
    TranscodeId = video.TranscodeId,
    ClientId = clientId
  };
  if (string.IsNullOrEmpty(video.TranscoderBinPath) == false)
  {
    data.TranscoderBinPath = video.TranscoderBinPath;
  }
  if (string.IsNullOrEmpty(video.TranscoderArguments) == false)
  {
    data.ConcatedFileInput = video.ConcatSourceMediaPaths;
    data.TranscoderArguments = video.TranscoderArguments;
    data.InputMediaFilePaths = video.SourceMediaPaths;
    if (video.PreferredSourceSubtitles != null)
    {
      foreach (var mediaSourceIndex in video.PreferredSourceSubtitles.Keys)
      {
        foreach (var sub in video.PreferredSourceSubtitles[mediaSourceIndex])
        {
          if (string.IsNullOrEmpty(sub.SourcePath) == false)
          {
            data.AddSubtitle(mediaSourceIndex, sub.SourcePath);
            context.TargetSubtitles.Add(sub.SourcePath);
          }
        }
      }
    }
    data.OutputFilePath = transcodingFile;
    context.TargetFile = transcodingFile;
  }
  else
  {
    data.Encoder = _ffMpegEncoderHandler.StartEncoding(video.TranscodeId, video.TargetVideoCodec);
    _ffMpegCommandline.InitTranscodingParameters(video.ConcatSourceMediaPaths, video.SourceMediaPaths, ref data);
    bool useX26XLib = video.TargetVideoCodec == VideoCodec.H264 || video.TargetVideoCodec == VideoCodec.H265;
    _ffMpegCommandline.AddTranscodingThreadsParameters(!useX26XLib, ref data);

    int subCopyStream = -1;
    if (video.PreferredSourceSubtitles.Any())
    {
      if (video.FirstPreferredSourceSubtitle.IsEmbedded)
      {
        subCopyStream = video.FirstPreferredSourceSubtitle.StreamIndex;
        _ffMpegCommandline.AddSubtitleCopyParameters(video.FirstPreferredSourceSubtitle, data);
      }
      else if (embeddedSupported)
      {
        foreach (int mediaSourceIndex in video.PreferredSourceSubtitles.Keys)
        {
          _ffMpegCommandline.AddSubtitleEmbeddingParameters(mediaSourceIndex, video.PreferredSourceSubtitles[mediaSourceIndex], embeddedSubCodec, timeStart, data);
        }
      }
      else if (video.TargetSubtitleSupport != SubtitleSupport.SoftCoded)
      {
        video.TargetSubtitleSupport = SubtitleSupport.HardCoded; //Fallback to hardcoded subtitles
        _logger.Debug("FFMpegMediaConverter: Soft subs not supported. Fallback to hardcoded subtitles");
      }
    }
    else
    {
      embeddedSupported = false;
      data.OutputArguments.Add("-sn");
    }

    _ffMpegCommandline.AddTimeParameters(video, timeStart, timeDuration, data);
    _ffMpegCommandline.AddStreamMapParameters(video, data);

    FFMpegEncoderConfig encoderConfig = _ffMpegEncoderHandler.GetEncoderConfig(data.Encoder);
    _ffMpegCommandline.AddVideoParameters(video, data.TranscodeId, encoderConfig, data);
    _ffMpegCommandline.AddVideoAudioParameters(video, data);

    var result = await _ffMpegCommandline.AddTargetVideoFormatAndOutputFileParametersAsync(video, transcodingFile, timeStart, data).ConfigureAwait(false);
    context.TargetFile = result.TranscodingFile;
    context.CurrentSegment = result.StartSegment;

    if (video.PreferredSourceSubtitles.Any())
    {
      foreach (var sub in video.PreferredSourceSubtitles.SelectMany(s => s.Value))
      {
        if (string.IsNullOrEmpty(sub.SourcePath) == false)
        {
          context.TargetSubtitles.Add(sub.SourcePath);
        }
      }
    }
  }

  _logger.Info("FFMpegMediaConverter: Invoking transcoder to transcode video file '{0}' for transcode '{1}' with arguments '{2}'", video.SourceMediaPaths.First().Value, video.TranscodeId, String.Join(", ", data.OutputArguments.ToArray()));
  context.Start();
  context.AssignStream(await ExecuteTranscodingProcessAsync(data, context, waitForBuffer).ConfigureAwait(false));
  return context;
}
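// Note on the HLS cache lookup in TranscodeVideoAsync above: the requested segment index is derived from
// the start time divided by the segment length, then stepped back by one segment as a safety margin, and
// the cached output is only reused when both the playlist manifest and that segment file already exist.
// As an illustrative example (assuming HLSSegmentTimeInSeconds is 6, a value not stated in this section),
// a 60 s start offset gives 60 / 6 = segment 10, decremented to 9 for the margin, so the file checked in
// the playlist folder would be "00009.ts".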