private static void Start()
{
    daemon = Task.Run(() =>
    {
        while (true)
        {
            try
            {
                Thread.Sleep(1000);

                FileItem fileItem;
                if (!queueFileItems.TryDequeue(out fileItem))
                {
                    continue;
                }
                CurrentPositionInQueue++;

                // if the client has not requested progress for longer than the allowed delay, cancel the operation
                if ((DateTime.UtcNow - fileItem.FileContainer.LastTimeProgressRequested).TotalSeconds > FrontSettings.MaxGetProgressCanceled)
                {
                    fileItem.EncodeErrorMessage = "Canceled";
                    fileItem.EncodeProgress = null;
                    fileItem.IpfsErrorMessage = "Canceled";
                    fileItem.IpfsProgress = null;
                    continue;
                }

                // encode the video
                if (!EncodeManager.Encode(fileItem))
                {
                    continue;
                }

                switch (fileItem.TypeFile)
                {
                    case TypeFile.SpriteVideo:
                    {
                        // collect the extracted images
                        string[] files = EncodeManager.GetListImageFrom(fileItem.FilePath);
                        // sprite file name
                        string outputPath = Path.ChangeExtension(TempFileManager.GetNewTempFilePath(), ".jpeg");
                        // build the sprite
                        bool successSprite = SpriteManager.CombineBitmap(files.Skip(files.Length - VideoSettings.NbSpriteImages).ToArray(), outputPath);
                        // delete the extracted images
                        TempFileManager.SafeDeleteTempFiles(files);
                        if (successSprite)
                        {
                            // point the file item at the sprite
                            fileItem.FilePath = outputPath;
                            LogManager.AddEncodingMessage("FileSize " + fileItem.FileSize, "End Sprite");
                            IpfsDaemon.Queue(fileItem);
                        }
                        else
                        {
                            TempFileManager.SafeDeleteTempFile(outputPath);
                        }
                        break;
                    }

                    case TypeFile.EncodedVideo:
                        IpfsDaemon.Queue(fileItem);
                        break;

                    default:
                        throw new InvalidOperationException("Unexpected file type.");
                }
            }
            catch (Exception ex)
            {
                LogManager.AddEncodingMessage(ex.ToString(), "Unhandled exception");
            }
        }
    });
}
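The Start() loop above relies on a few members that are not part of this snippet: the queueFileItems queue it drains, the CurrentPositionInQueue counter, and whatever entry point fills the queue. Below is a minimal sketch of that surrounding class, assuming queueFileItems is a ConcurrentQueue<FileItem>; the class name, the Queue() method and the TotalAddToQueue counter are hypothetical and only illustrate how work could reach the loop.

// Sketch only: the real daemon class is not shown in this snippet. Only queueFileItems,
// CurrentPositionInQueue, daemon and Start() appear in the code above; everything else
// here (class name, Queue(), TotalAddToQueue) is an assumption.
using System.Collections.Concurrent;
using System.Threading.Tasks;

public static class EncodeDaemonSketch
{
    private static readonly ConcurrentQueue<FileItem> queueFileItems = new ConcurrentQueue<FileItem>();
    private static Task daemon;

    public static int CurrentPositionInQueue { get; private set; }
    public static int TotalAddToQueue { get; private set; } // hypothetical counter of enqueued items

    // Hypothetical producer entry point: the upload pipeline would hand a FileItem to the daemon here.
    public static void Queue(FileItem fileItem)
    {
        queueFileItems.Enqueue(fileItem);
        TotalAddToQueue++;
        fileItem.EncodeProgress = "Waiting in queue...";
    }
}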
public static bool Encode(FileItem fileItem)
{
    string newEncodedFilePath = null;

    try
    {
        currentFileItem = fileItem;
        currentFileItem.EncodeProgress = "0.00%";

        FileItem sourceFile = currentFileItem.FileContainer.SourceFileItem;
        string sourceFilePath = sourceFile.FilePath;
        newEncodedFilePath = Path.ChangeExtension(TempFileManager.GetNewTempFilePath(), ".mp4");
        LogManager.AddEncodingMessage("FileName " + Path.GetFileName(newEncodedFilePath), "Start");
        VideoSize videoSize = currentFileItem.VideoSize;
        Debug.WriteLine(Path.GetFileName(sourceFilePath) + " / " + videoSize);

        var processStartInfo = new ProcessStartInfo();
        processStartInfo.FileName = "ffmpeg";
        processStartInfo.RedirectStandardError = true;
        processStartInfo.WorkingDirectory = TempFileManager.GetTempDirectory();
        processStartInfo.UseShellExecute = false;
        processStartInfo.ErrorDialog = false;
        processStartInfo.CreateNoWindow = true;
        processStartInfo.WindowStyle = ProcessWindowStyle.Hidden;

        // If the video metadata has not been probed yet, extract a single frame to get the source resolution
        if (!sourceFile.VideoDuration.HasValue)
        {
            string imageOutput = Path.ChangeExtension(sourceFilePath, ".jpeg");
            processStartInfo.Arguments = $"-y -i {Path.GetFileName(sourceFilePath)} -vf fps=1 -vframes 1 {Path.GetFileName(imageOutput)}";
            StartProcess(processStartInfo, VideoSettings.EncodeGetOneImageTimeout);
            using (Image image = Image.FromFile(imageOutput))
            {
                sourceFile.VideoWidth = image.Width;
                sourceFile.VideoHeight = image.Height;
            }
            TempFileManager.SafeDeleteTempFile(imageOutput);
        }

        // If the total duration, width or height could not be retrieved, we cannot continue
        if ((sourceFile.VideoDuration ?? 0) <= 0)
        {
            return false;
        }
        if ((sourceFile.VideoWidth ?? 0) <= 0)
        {
            return false;
        }
        if ((sourceFile.VideoHeight ?? 0) <= 0)
        {
            return false;
        }

        int duration = sourceFile.VideoDuration.Value;

        // Disable encoding and sprite generation if the video exceeds the maximum allowed duration
        if (duration > VideoSettings.MaxVideoDurationForEncoding)
        {
            currentFileItem.EncodeErrorMessage = "Disabled because the duration exceeds the maximum limit.";
            currentFileItem.FileContainer.EncodedFileItems.Clear();
            currentFileItem.FileContainer.DeleteSpriteVideo();
            return false;
        }

        switch (currentFileItem.TypeFile)
        {
            case TypeFile.SpriteVideo:
            {
                int nbImages = VideoSettings.NbSpriteImages;
                int heightSprite = VideoSettings.HeightSpriteImages;

                // Compute the extraction frame rate:
                // if the video is shorter than nbImages seconds -> 1 image/s
                // otherwise -> nbImages/duration images per second, so roughly nbImages frames in total
                string frameRate = "1";
                if (duration > nbImages)
                {
                    frameRate = $"{nbImages}/{duration}";
                }

                int spriteWidth = GetWidth(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, heightSprite);
                string sizeImageMax = $"scale={spriteWidth}:{heightSprite}";

                // Extract frameRate images per second from the video
                string pattern = GetPattern(newEncodedFilePath);
                processStartInfo.Arguments = $"-y -i {Path.GetFileName(sourceFilePath)} -r {frameRate} -vf \"{sizeImageMax}\" -f image2 {pattern}";
                StartProcess(processStartInfo, VideoSettings.EncodeGetImagesTimeout);
                break;
            }

            case TypeFile.EncodedVideo:
            {
                string size;
                switch (videoSize)
                {
                    case VideoSize.F360p:
                    {
                        Tuple<int, int> finalSize = GetSize(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, 640, 360);
                        size = $"{finalSize.Item1}:{finalSize.Item2}";
                        break;
                    }

                    case VideoSize.F480p:
                    {
                        Tuple<int, int> finalSize = GetSize(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, 854, 480);
                        size = $"{finalSize.Item1}:{finalSize.Item2}";
                        break;
                    }

                    case VideoSize.F720p:
                    {
                        Tuple<int, int> finalSize = GetSize(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, 1280, 720);
                        size = $"{finalSize.Item1}:{finalSize.Item2}";
                        break;
                    }

                    case VideoSize.F1080p:
                    {
                        Tuple<int, int> finalSize = GetSize(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, 1920, 1080);
                        size = $"{finalSize.Item1}:{finalSize.Item2}";
                        break;
                    }

                    default:
                        throw new InvalidOperationException("Unrecognized format.");
                }

                string formatEncode = "libx264";
                if (VideoSettings.GpuEncodeMode)
                {
                    string maxRate = string.Empty;
                    formatEncode = "h264_nvenc";

                    switch (videoSize)
                    {
                        case VideoSize.F360p:
                            maxRate = "200k";
                            break;

                        case VideoSize.F480p:
                            maxRate = "500k";
                            break;

                        case VideoSize.F720p:
                            maxRate = "1000k";
                            break;

                        case VideoSize.F1080p:
                            maxRate = "1600k";
                            break;

                        default:
                            throw new InvalidOperationException("Unrecognized format.");
                    }

                    processStartInfo.Arguments = $"-y -i {Path.GetFileName(sourceFilePath)} -vcodec {formatEncode} -vf \"scale={size},format=yuv420p\" -b:v {maxRate} -maxrate {maxRate} -bufsize {maxRate} -acodec aac {Path.GetFileName(newEncodedFilePath)}";
                }
                else
                {
                    processStartInfo.Arguments = $"-y -i {Path.GetFileName(sourceFilePath)} -vcodec {formatEncode} -vf \"scale={size},format=yuv420p\" -acodec aac {Path.GetFileName(newEncodedFilePath)}";
                }

                StartProcess(processStartInfo, VideoSettings.EncodeTimeout);
                break;
            }

            default:
                throw new InvalidOperationException("Unexpected file type.");
        }

        currentFileItem.FilePath = newEncodedFilePath;
        currentFileItem.EncodeProgress = "100.00%";

        switch (currentFileItem.TypeFile)
        {
            case TypeFile.SpriteVideo:
                LogManager.AddEncodingMessage("Video Duration " + duration + " / SourceVideoFileSize " + currentFileItem.FileContainer.SourceFileItem.FileSize, "End Extract Images");
                break;

            case TypeFile.EncodedVideo:
                LogManager.AddEncodingMessage("Video Duration " + duration + " / FileSize " + currentFileItem.FileSize + " / Format " + videoSize, "End Encoding");
                break;

            default:
                throw new InvalidOperationException("Unexpected file type.");
        }

        return true;
    }
    catch (Exception ex)
    {
        LogManager.AddEncodingMessage("Video Duration " + currentFileItem.VideoDuration + " / FileSize " + currentFileItem.FileSize + " / Progress " + currentFileItem.EncodeProgress + " / Exception : " + ex, "Exception");
        currentFileItem.EncodeErrorMessage = ex.Message;
        TempFileManager.SafeDeleteTempFile(newEncodedFilePath);
        if (currentFileItem.VideoSize != VideoSize.Source)
        {
            TempFileManager.SafeDeleteTempFile(currentFileItem.FilePath);
        }
        if (currentFileItem.TypeFile == TypeFile.SpriteVideo)
        {
            // collect and delete any images extracted before the failure
            string[] files = EncodeManager.GetListImageFrom(newEncodedFilePath);
            TempFileManager.SafeDeleteTempFiles(files);
        }
        return false;
    }
}
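Encode() calls three sizing helpers that are not shown in this snippet: GetWidth(), GetSize() and GetPattern(). Their real bodies are unknown; the sketch below only illustrates one plausible way to compute an aspect-ratio-preserving target size (rounded to even values, since yuv420p encoding requires even dimensions) and an image2 output pattern derived from the temp file name. Treat the implementations as assumptions; only the signatures match the calls above.

// Sketch only: plausible implementations of the helpers used by Encode(); not the project's actual code.
using System;
using System.IO;

internal static class EncodeHelpersSketch
{
    // Width matching a target height while preserving the source aspect ratio, rounded down to an even value.
    public static int GetWidth(int sourceWidth, int sourceHeight, int targetHeight)
    {
        int width = (int)Math.Round((double)sourceWidth * targetHeight / sourceHeight);
        return width - (width % 2);
    }

    // Largest size that fits inside maxWidth x maxHeight while preserving the source aspect ratio.
    public static Tuple<int, int> GetSize(int sourceWidth, int sourceHeight, int maxWidth, int maxHeight)
    {
        double scale = Math.Min((double)maxWidth / sourceWidth, (double)maxHeight / sourceHeight);
        int width = (int)Math.Round(sourceWidth * scale);
        int height = (int)Math.Round(sourceHeight * scale);
        return Tuple.Create(width - (width % 2), height - (height % 2));
    }

    // image2 output pattern derived from the temp file name, e.g. "xxxx-%03d.jpeg";
    // GetListImageFrom() would then list the files matching this pattern.
    public static string GetPattern(string filePath)
    {
        return Path.GetFileNameWithoutExtension(filePath) + "-%03d.jpeg";
    }
}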
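StartProcess() is also external to this snippet. Given that Encode() redirects standard error and reports EncodeProgress as a percentage, a reasonable guess is that it launches ffmpeg, reads the progress lines ffmpeg prints on stderr, and kills the process if it exceeds the timeout. The sketch below shows that shape only, assuming the timeout is in seconds; the real signature is StartProcess(ProcessStartInfo, int) as called above, everything else is an assumption.

// Sketch only: one plausible shape for StartProcess(processStartInfo, timeout); not the project's actual code.
using System;
using System.Diagnostics;

internal static class ProcessRunnerSketch
{
    public static void StartProcess(ProcessStartInfo processStartInfo, int timeoutSeconds)
    {
        using (var process = new Process { StartInfo = processStartInfo })
        {
            // ffmpeg writes its progress lines ("frame=... time=00:00:12.34 ...") to stderr;
            // a real implementation could parse "time=" against the known duration and update EncodeProgress.
            process.ErrorDataReceived += (sender, e) =>
            {
                if (e.Data != null)
                {
                    Debug.WriteLine(e.Data);
                }
            };

            process.Start();
            process.BeginErrorReadLine();

            if (!process.WaitForExit(timeoutSeconds * 1000))
            {
                process.Kill();
                throw new InvalidOperationException("Process timeout.");
            }

            if (process.ExitCode != 0)
            {
                throw new InvalidOperationException("Process exited with code " + process.ExitCode + ".");
            }
        }
    }
}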