/// <summary>
/// Ensures the intermediate file carries an AAC audio track: when the source audio is
/// already AAC the source is simply copied to the temp path, otherwise the audio is
/// re-encoded to AAC on the CPU with the video stream copied untouched.
/// On success the result is registered via SetVideoAacTempFilePath.
/// </summary>
/// <param name="fileItem">Item whose SourceFilePath is read and whose TempFilePath receives the result.</param>
/// <returns>true on success; false on any failure (the temp file is deleted and the error recorded).</returns>
public static bool AudioCpuEncoding(FileItem fileItem)
{
    try
    {
        LogManager.AddEncodingMessage(LogLevel.Information, "SourceFilePath " + Path.GetFileName(fileItem.SourceFilePath), "Start AudioCpuEncoding");
        fileItem.AudioCpuEncodeProcess.StartProcessDateTime();

        if (fileItem.FileContainer.SourceFileItem.AudioCodec == "aac")
        {
            // Audio is already AAC: no transcode needed, duplicate the source as-is.
            // (Fixed: StartProcessDateTime() was previously called a second time here.)
            // NOTE(review): File.Copy throws if TempFilePath already exists — presumably
            // the temp path is always fresh; confirm against TempFileManager.
            File.Copy(fileItem.SourceFilePath, fileItem.TempFilePath);
        }
        else
        {
            // Re-encode only the audio track to AAC; "-strict -2" enables the native aac
            // encoder on older ffmpeg builds (e.g. Ubuntu packages).
            string arguments = $"-y -i {Path.GetFileName(fileItem.SourceFilePath)} -vcodec copy -acodec aac -strict -2 {Path.GetFileName(fileItem.TempFilePath)}";
            var ffmpegProcessManager = new FfmpegProcessManager(fileItem, fileItem.AudioCpuEncodeProcess);
            ffmpegProcessManager.StartProcess(arguments, VideoSettings.Instance.EncodeTimeout);
        }

        fileItem.SetVideoAacTempFilePath(fileItem.TempFilePath);
        LogManager.AddEncodingMessage(LogLevel.Information, "OutputFileName " + Path.GetFileName(fileItem.VideoAacTempFilePath) + " / FileSize " + fileItem.FileSize + " / Format " + fileItem.VideoSize, "End AudioCpuEncoding");
        fileItem.AudioCpuEncodeProcess.EndProcessDateTime();
        return true;
    }
    catch (Exception ex)
    {
        string message = "Exception AudioCpuEncoding : Video Duration " + fileItem.VideoDuration + " / FileSize " + fileItem.FileSize + " / Progress " + fileItem.AudioCpuEncodeProcess.Progress;
        fileItem.AudioCpuEncodeProcess.SetErrorMessage("Exception non gérée", message, ex);
        // Never leave a partial temp file behind on failure.
        TempFileManager.SafeDeleteTempFile(fileItem.TempFilePath);
        return false;
    }
}
/// <summary>
/// Encodes the AAC-audio intermediate file into every target format (1 input -> N outputs)
/// in a single ffmpeg run. On the QuadroP5000 host it uses NVDEC decode (h264_cuvid) and
/// GPU scaling (scale_npp); on other hosts it falls back to CPU decode and software
/// scaling. Video is encoded with h264_nvenc in both cases; audio is stream-copied.
/// </summary>
/// <param name="fileItem">Item providing the AAC intermediate and the list of encoded outputs.</param>
/// <returns>true on success; false on failure (error recorded, partial outputs deleted).</returns>
public static bool VideoGpuEncoding(FileItem fileItem)
{
    try
    {
        LogManager.AddEncodingMessage(LogLevel.Information, "SourceFilePath " + Path.GetFileName(fileItem.VideoAacTempFilePath) + " -> 1:N formats", "Start VideoGpuEncoding");
        fileItem.VideoGpuEncodeProcess.StartProcessDateTime();

        // GPU decode/scale is only available on the QuadroP5000 host.
        // (Previously this was expressed via string Replace() run redundantly on every
        // loop iteration; the final command line is identical.)
        bool gpuFilters = VideoSettings.Instance.NVidiaCard == "QuadroP5000";

        string arguments = gpuFilters
            ? $"-y -hwaccel cuvid -vcodec h264_cuvid -vsync 0 -i {Path.GetFileName(fileItem.VideoAacTempFilePath)}"
            : $"-y -i {Path.GetFileName(fileItem.VideoAacTempFilePath)}";

        FileItem sourceFile = fileItem.FileContainer.SourceFileItem;
        foreach (FileItem item in fileItem.FileContainer.EncodedFileItems)
        {
            string size = GetSize(item.VideoSize, fileItem.VideoWidth.Value, fileItem.VideoHeight.Value);
            string maxRate = item.VideoSize.MaxRate;

            // In ffmpeg, options placed before an output file name apply to that output
            // only, so each loop turn emits one output's options followed by its file name.
            if (sourceFile.VideoPixelFormat != "yuv420p")
            {
                arguments += " -pixel_format yuv420p";
            }

            // Sources rotated 90 or 270 degrees store width/height swapped: swap the target size.
            if (sourceFile.VideoRotate.HasValue && (sourceFile.VideoRotate.Value == 90 || sourceFile.VideoRotate.Value == 270))
            {
                string[] sizes = size.Split(':');
                size = $"{sizes[1]}:{sizes[0]}";
            }

            // scale_npp runs on the GPU; plain scale is the CPU fallback.
            string scaleFilter = gpuFilters ? "scale_npp" : "scale";
            arguments += $" -vf {scaleFilter}={size} -b:v {maxRate} -maxrate {maxRate} -bufsize {maxRate} -vcodec h264_nvenc -acodec copy {Path.GetFileName(item.TempFilePath)}";
        }

        var ffmpegProcessManager = new FfmpegProcessManager(fileItem, fileItem.VideoGpuEncodeProcess);
        ffmpegProcessManager.StartProcess(arguments, VideoSettings.Instance.EncodeTimeout);

        foreach (var item in fileItem.FileContainer.EncodedFileItems)
        {
            item.ReplaceOutputPathWithTempPath();
            LogManager.AddEncodingMessage(LogLevel.Information, "OutputFileName " + Path.GetFileName(item.OutputFilePath) + " / FileSize " + item.FileSize + " / Format " + item.VideoSize, "End VideoGpuEncoding");
        }

        fileItem.VideoGpuEncodeProcess.EndProcessDateTime();
        return true;
    }
    catch (Exception ex)
    {
        string message = "Exception VideoGpuEncoding : Video Duration " + fileItem.VideoDuration + " / FileSize " + fileItem.FileSize + " / Progress " + fileItem.VideoGpuEncodeProcess.Progress;
        fileItem.VideoGpuEncodeProcess.SetErrorMessage("Exception non gérée", message, ex);

        // Delete any partially written outputs so a retry starts from a clean state.
        foreach (FileItem item in fileItem.FileContainer.EncodedFileItems)
        {
            TempFileManager.SafeDeleteTempFile(item.TempFilePath);
        }
        return false;
    }
}
/// <summary>
/// CPU-encodes the source file to this item's target format in one ffmpeg run: scales the
/// video (adding libx264 when the source codec is not h264) and converts audio to AAC,
/// stream-copying it when it already is AAC.
/// </summary>
/// <param name="fileItem">Target item; its TempFilePath receives the encoded output.</param>
/// <returns>true on success; false on failure (error recorded, temp file deleted).</returns>
public static bool AudioVideoCpuEncoding(FileItem fileItem)
{
    try
    {
        FileItem sourceFile = fileItem.FileContainer.SourceFileItem;
        LogManager.AddEncodingMessage(LogLevel.Information, "SourceFilePath " + Path.GetFileName(sourceFile.SourceFilePath) + " -> " + fileItem.VideoSize, "Start AudioVideoCpuEncoding");
        fileItem.AudioVideoCpuEncodeProcess.StartProcessDateTime();

        string size = GetSize(fileItem.VideoSize, sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value);

        string arguments = $"-y -i {Path.GetFileName(sourceFile.SourceFilePath)}";
        if (sourceFile.VideoPixelFormat != "yuv420p")
        {
            arguments += " -pixel_format yuv420p";
        }

        // Sources rotated 90 or 270 degrees store width/height swapped: swap the target size.
        if (sourceFile.VideoRotate.HasValue && (sourceFile.VideoRotate.Value == 90 || sourceFile.VideoRotate.Value == 270))
        {
            string[] sizes = size.Split(':');
            size = $"{sizes[1]}:{sizes[0]}";
        }
        arguments += $" -vf scale={size}";

        if (sourceFile.VideoCodec != "h264")
        {
            arguments += " -vcodec libx264";
        }
        if (sourceFile.AudioCodec != "aac")
        {
            arguments += " -acodec aac -strict -2"; // "-strict -2" forces the native aac encoder on Ubuntu builds
        }
        else
        {
            arguments += " -acodec copy";
        }
        arguments += $" {Path.GetFileName(fileItem.TempFilePath)}";

        var ffmpegProcessManager = new FfmpegProcessManager(fileItem, fileItem.AudioVideoCpuEncodeProcess);
        ffmpegProcessManager.StartProcess(arguments, VideoSettings.Instance.EncodeTimeout);

        fileItem.ReplaceOutputPathWithTempPath();
        LogManager.AddEncodingMessage(LogLevel.Information, "OutputFileName " + Path.GetFileName(fileItem.OutputFilePath) + " / FileSize " + fileItem.FileSize + " / Format " + fileItem.VideoSize, "End AudioVideoCpuEncoding");
        fileItem.AudioVideoCpuEncodeProcess.EndProcessDateTime();
        return true;
    }
    catch (Exception ex)
    {
        string message = "Exception AudioVideoCpuEncoding : Video Duration " + fileItem.VideoDuration + " / FileSize " + fileItem.FileSize + " / Progress " + fileItem.AudioVideoCpuEncodeProcess.Progress;
        fileItem.AudioVideoCpuEncodeProcess.SetErrorMessage("Exception non gérée", message, ex);
        // Fix: previously the partial temp file was leaked on failure; delete it like the
        // other encoders do.
        TempFileManager.SafeDeleteTempFile(fileItem.TempFilePath);
        return false;
    }
}
/// <summary>
/// GPU-encodes the AAC intermediate file into every target format (1 input -> N outputs)
/// in a single ffmpeg run using NVDEC decode (h264_cuvid), GPU scaling (scale_npp) and
/// NVENC encode (h264_nvenc); audio is stream-copied. The AAC intermediate is deleted on
/// both success and failure.
/// </summary>
/// <param name="fileItem">Item providing the AAC intermediate and the list of encoded outputs.</param>
/// <returns>true on success; false on failure (error recorded, temp files deleted).</returns>
public static bool VideoGpuEncoding(FileItem fileItem)
{
    try
    {
        LogManager.AddEncodingMessage(LogLevel.Information, "SourceFilePath " + Path.GetFileName(fileItem.VideoAacTempFilePath) + " -> 1:N formats", "Start VideoGpuEncoding");
        fileItem.VideoGpuEncodeProcess.StartProcessDateTime();

        // 1 input, N outputs: in ffmpeg, options placed before an output file name apply
        // to that output only, so each loop turn emits one output's options and file name.
        string arguments = $"-y -hwaccel cuvid -vcodec h264_cuvid -vsync 0 -i {Path.GetFileName(fileItem.VideoAacTempFilePath)}";

        FileItem sourceFile = fileItem.FileContainer.SourceFileItem;
        foreach (var item in fileItem.FileContainer.EncodedFileItems)
        {
            string size = GetSize(item.VideoSize, fileItem.VideoWidth.Value, fileItem.VideoHeight.Value);
            string maxRate = GetMaxRate(item.VideoSize);

            if (sourceFile.VideoPixelFormat != "yuv420p")
            {
                arguments += " -pixel_format yuv420p";
            }

            // Fix: sources rotated 90 or 270 degrees store width/height swapped; swap the
            // target size, as AudioVideoCpuEncoding already does.
            if (sourceFile.VideoRotate.HasValue && (sourceFile.VideoRotate.Value == 90 || sourceFile.VideoRotate.Value == 270))
            {
                string[] sizes = size.Split(':');
                size = $"{sizes[1]}:{sizes[0]}";
            }

            arguments += $" -vf scale_npp={size} -b:v {maxRate} -maxrate {maxRate} -bufsize {maxRate} -vcodec h264_nvenc -acodec copy {Path.GetFileName(item.TempFilePath)}";
        }

        var ffmpegProcessManager = new FfmpegProcessManager(fileItem, fileItem.VideoGpuEncodeProcess);
        ffmpegProcessManager.StartProcess(arguments, VideoSettings.Instance.EncodeTimeout);

        foreach (var item in fileItem.FileContainer.EncodedFileItems)
        {
            item.SetOutputFilePath(item.TempFilePath);
            LogManager.AddEncodingMessage(LogLevel.Information, "OutputFileName " + Path.GetFileName(item.OutputFilePath) + " / FileSize " + item.FileSize + " / Format " + item.VideoSize, "End VideoGpuEncoding");
        }

        fileItem.VideoGpuEncodeProcess.EndProcessDateTime();
        // The AAC intermediate has served its purpose.
        TempFileManager.SafeDeleteTempFile(fileItem.VideoAacTempFilePath);
        return true;
    }
    catch (Exception ex)
    {
        string message = "Exception VideoGpuEncoding : Video Duration " + fileItem.VideoDuration + " / FileSize " + fileItem.FileSize + " / Progress " + fileItem.VideoGpuEncodeProcess.Progress;
        fileItem.VideoGpuEncodeProcess.SetErrorMessage("Exception non gérée", message, ex);

        // Delete partial outputs and the AAC intermediate so a retry starts clean.
        foreach (FileItem item in fileItem.FileContainer.EncodedFileItems)
        {
            TempFileManager.SafeDeleteTempFile(item.TempFilePath);
        }
        TempFileManager.SafeDeleteTempFile(fileItem.VideoAacTempFilePath);
        return false;
    }
}
/// <summary>
/// CPU-encodes the source file to this item's target format in one ffmpeg run: scales the
/// video (adding libx264 when the source codec is not h264) and converts audio to AAC,
/// stream-copying it when it already is AAC.
/// </summary>
/// <param name="fileItem">Target item; its TempFilePath receives the encoded output.</param>
/// <returns>true on success; false on failure (error recorded, temp file deleted).</returns>
public static bool AudioVideoCpuEncoding(FileItem fileItem)
{
    try
    {
        FileItem sourceFile = fileItem.FileContainer.SourceFileItem;
        LogManager.AddEncodingMessage(LogLevel.Information, "SourceFilePath " + Path.GetFileName(sourceFile.SourceFilePath) + " -> " + fileItem.VideoSize, "Start AudioVideoCpuEncoding");
        fileItem.AudioVideoCpuEncodeProcess.StartProcessDateTime();

        string size = GetSize(fileItem.VideoSize, sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value);

        string arguments = $"-y -i {Path.GetFileName(sourceFile.SourceFilePath)}";
        if (sourceFile.VideoPixelFormat != "yuv420p")
        {
            arguments += " -pixel_format yuv420p";
        }

        // Fix: sources rotated 90 or 270 degrees store width/height swapped; swap the
        // target size so rotated videos are not scaled to the wrong aspect.
        if (sourceFile.VideoRotate.HasValue && (sourceFile.VideoRotate.Value == 90 || sourceFile.VideoRotate.Value == 270))
        {
            string[] sizes = size.Split(':');
            size = $"{sizes[1]}:{sizes[0]}";
        }
        arguments += $" -vf scale={size}";

        if (sourceFile.VideoCodec != "h264")
        {
            arguments += " -vcodec libx264";
        }
        if (sourceFile.AudioCodec != "aac")
        {
            arguments += " -acodec aac -strict -2"; // "-strict -2" forces the native aac encoder on Ubuntu builds
        }
        else
        {
            arguments += " -acodec copy";
        }
        arguments += $" {Path.GetFileName(fileItem.TempFilePath)}";

        var ffmpegProcessManager = new FfmpegProcessManager(fileItem, fileItem.AudioVideoCpuEncodeProcess);
        ffmpegProcessManager.StartProcess(arguments, VideoSettings.Instance.EncodeTimeout);

        fileItem.SetOutputFilePath(fileItem.TempFilePath);
        LogManager.AddEncodingMessage(LogLevel.Information, "OutputFileName " + Path.GetFileName(fileItem.OutputFilePath) + " / FileSize " + fileItem.FileSize + " / Format " + fileItem.VideoSize, "End AudioVideoCpuEncoding");
        fileItem.AudioVideoCpuEncodeProcess.EndProcessDateTime();
        return true;
    }
    catch (Exception ex)
    {
        string message = "Exception AudioVideoCpuEncoding : Video Duration " + fileItem.VideoDuration + " / FileSize " + fileItem.FileSize + " / Progress " + fileItem.AudioVideoCpuEncodeProcess.Progress;
        fileItem.AudioVideoCpuEncodeProcess.SetErrorMessage("Exception non gérée", message, ex);
        // Never leave a partial temp file behind on failure.
        TempFileManager.SafeDeleteTempFile(fileItem.TempFilePath);
        return false;
    }
}
/// <summary>
/// Builds a sprite image for the video: extracts up to NbSpriteImages frames with ffmpeg,
/// spread evenly over the duration, then stacks them into a single vertical strip with
/// ImageMagick montage. The intermediate frame files are always deleted.
/// </summary>
/// <param name="fileItem">Item whose SourceFilePath is read; TempFilePath receives the sprite.</param>
/// <returns>true on success; false on failure (error recorded, frames and partial sprite deleted).</returns>
public static bool Encode(FileItem fileItem)
{
    FileItem sourceFile = fileItem.FileContainer.SourceFileItem;
    try
    {
        fileItem.SpriteEncodeProcess.StartProcessDateTime();
        LogManager.AddSpriteMessage(LogLevel.Information, "SourceFilePath " + Path.GetFileName(fileItem.SourceFilePath), "Start Sprite");

        int nbImages = VideoSettings.Instance.NbSpriteImages;
        int heightSprite = VideoSettings.Instance.HeightSpriteImages;

        // Extraction frame rate: 1 image/s for videos shorter than nbImages seconds,
        // otherwise nbImages frames spread evenly over the whole duration
        // (ffmpeg -r accepts a fraction: frames per second).
        string frameRate = "1";
        int duration = sourceFile.VideoDuration.Value;
        if (duration > nbImages)
        {
            frameRate = $"{nbImages}/{duration}";
        }

        int spriteWidth = SizeHelper.GetWidth(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, heightSprite);
        string sizeImageMax = $"scale={spriteWidth}:{heightSprite}";

        // Extract the frames as numbered image files derived from the temp path.
        string arguments = $"-y -i {Path.GetFileName(fileItem.SourceFilePath)} -r {frameRate} -vf {sizeImageMax} -f image2 {GetPattern(fileItem.TempFilePath)}";
        var ffmpegProcessManager = new FfmpegProcessManager(fileItem, fileItem.SpriteEncodeProcess);
        ffmpegProcessManager.StartProcess(arguments, VideoSettings.Instance.EncodeGetImagesTimeout);

        // Collect the extracted frames.
        // NOTE(review): the log reports Count - 1 — presumably one listed file is not a
        // frame; confirm against GetListImageFrom.
        IList<string> files = GetListImageFrom(fileItem.TempFilePath);
        LogManager.AddSpriteMessage(LogLevel.Information, (files.Count - 1) + " images", "Start Combine images");

        // Keep only the last NbSpriteImages frames, dropping the one or two spurious
        // leading frames ffmpeg tends to emit.
        int skip = Math.Max(0, files.Count - VideoSettings.Instance.NbSpriteImages);
        string imageList = string.Join(" ", files.Skip(skip).Select(Path.GetFileName));

        // Stack the kept frames vertically (single column) into the final sprite.
        arguments = $"montage -mode concatenate -tile 1x {imageList} {Path.GetFileName(fileItem.TempFilePath)}";
        var process = new ProcessManager(Path.Combine(GeneralSettings.Instance.ImageMagickPath, "magick"), arguments, LogManager.SpriteLogger);
        bool successSprite = process.Launch(5); // short timeout: montage on ~100 small images is quick

        TempFileManager.SafeDeleteTempFiles(files); // individual frames are no longer needed

        if (!successSprite)
        {
            fileItem.SpriteEncodeProcess.SetErrorMessage("Error while combine images", "Error creation sprite while combine images");
            // Fix: also drop any partially written sprite (was previously leaked).
            TempFileManager.SafeDeleteTempFile(fileItem.TempFilePath);
            return false;
        }

        fileItem.ReplaceOutputPathWithTempPath();
        LogManager.AddSpriteMessage(LogLevel.Information, "OutputFileName " + Path.GetFileName(fileItem.OutputFilePath) + " / FileSize " + fileItem.FileSize, "End Sprite");
        fileItem.SpriteEncodeProcess.EndProcessDateTime();
        return true;
    }
    catch (Exception ex)
    {
        string message = "Video Duration " + sourceFile.VideoDuration + " / FileSize " + fileItem.FileSize + " / Progress " + fileItem.SpriteEncodeProcess.Progress;
        fileItem.SpriteEncodeProcess.SetErrorMessage("Exception non gérée", message, ex);
        // Delete the extracted frames and any partial sprite.
        IList<string> files = GetListImageFrom(fileItem.TempFilePath);
        TempFileManager.SafeDeleteTempFiles(files);
        TempFileManager.SafeDeleteTempFile(fileItem.TempFilePath);
        return false;
    }
}