Example #1
        protected override void ProcessItem(FileItem fileItem)
        {
            // If the client has not requested progress for more than 20 seconds, cancel the operation
            if (!fileItem.AudioCpuEncodeProcess.CanProcess())
            {
                string message = "FileName " + Path.GetFileName(fileItem.OutputFilePath) + " car le client est déconnecté";
                LogManager.AddEncodingMessage(message, "Annulation");
                fileItem.AudioCpuEncodeProcess.CancelCascade("Le client est déconnecté.");
                return;
            }

            if (EncodeManager.AudioCpuEncoding(fileItem))
            {
                VideoGpuEncodeDaemon.Instance.Queue(fileItem, "waiting video encoding...");
            }
        }
Example #2
 public async Task<IActionResult> UploadVideo(string videoEncodingFormats = null, bool? sprite = null)
 {
     try
     {
         return(Ok(new
         {
             success = true, token = VideoManager.ComputeVideo(await GetFileToTemp(), videoEncodingFormats, sprite)
         }));
     }
     catch (Exception ex)
     {
         LogManager.AddEncodingMessage(LogLevel.Critical, "Exception non gérée", "Exception", ex);
         return(BadRequest(new
         {
             errorMessage = ex.Message
         }));
     }
 }
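Across these examples, LogManager.AddEncodingMessage is called in two shapes: a short form taking a message and a title (Example #1), and a long form taking a LogLevel, a message, a title and an exception (Example #2). The sketch below only mirrors those call sites with hypothetical signatures; it is not the project's actual implementation.

public enum LogLevel { Information, Critical }   // stand-in for the LogLevel values used above

public static class LogManagerSketch
{
    // Short form, as in Example #1: AddEncodingMessage(message, title)
    public static void AddEncodingMessage(string message, string title)
        => System.Console.WriteLine($"[encoding] {title}: {message}");

    // Long form, as in Example #2: AddEncodingMessage(level, message, title, exception)
    public static void AddEncodingMessage(LogLevel level, string message, string title, System.Exception ex)
        => System.Console.WriteLine($"[encoding] {level} {title}: {message} ({ex})");
}
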
Example #3
        public void StartProcess(string arguments, int timeout)
        {
            var processStartInfo = new ProcessStartInfo();

            processStartInfo.FileName = "ffmpeg";

            processStartInfo.RedirectStandardError = true;
            processStartInfo.WorkingDirectory      = TempFileManager.GetTempDirectory();

            processStartInfo.UseShellExecute = false;
            processStartInfo.ErrorDialog     = false;
            processStartInfo.CreateNoWindow  = true;
            processStartInfo.WindowStyle     = ProcessWindowStyle.Hidden;

            processStartInfo.Arguments = arguments;

            if (_fileItem.TypeFile == TypeFile.SpriteVideo)
            {
                LogManager.AddSpriteMessage(processStartInfo.FileName + " " + processStartInfo.Arguments, "Launch command");
            }
            else
            {
                LogManager.AddEncodingMessage(processStartInfo.FileName + " " + processStartInfo.Arguments, "Launch command");
            }

            using (Process process = Process.Start(processStartInfo))
            {
                process.ErrorDataReceived += new DataReceivedEventHandler(ErrorDataReceived);

                process.BeginErrorReadLine();

                bool success = process.WaitForExit(timeout);
                if (!success)
                {
                    throw new InvalidOperationException("Timeout : Le fichier n'a pas pu être encodé dans le temps imparti.");
                }

                if (process.ExitCode != 0)
                {
                    throw new InvalidOperationException($"Le fichier n'a pas pu être encodé, erreur {process.ExitCode}.");
                }
            }
        }
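Example #5 shows how this method is typically driven: the caller builds an ffmpeg argument string and passes it along with a timeout from VideoSettings. Below is a reduced, stand-alone sketch of the same run-with-timeout pattern; it assumes ffmpeg is on the PATH, kills the process on timeout (which the method above does not do), and is not the project's FfmpegProcessManager.

using System;
using System.Diagnostics;

static class FfmpegRunnerSketch
{
    // Run ffmpeg with the given arguments; fail on timeout or on a non-zero exit code.
    public static void Run(string arguments, int timeoutMs)
    {
        var psi = new ProcessStartInfo
        {
            FileName              = "ffmpeg",   // assumes ffmpeg is on the PATH
            Arguments             = arguments,
            RedirectStandardError = true,       // ffmpeg reports its progress on stderr
            UseShellExecute       = false,
            CreateNoWindow        = true
        };

        using (var process = Process.Start(psi))
        {
            process.ErrorDataReceived += (_, e) => { if (e.Data != null) Console.WriteLine(e.Data); };
            process.BeginErrorReadLine();

            if (!process.WaitForExit(timeoutMs))
            {
                process.Kill();                  // avoid leaking a hung ffmpeg process
                throw new InvalidOperationException("Timeout: the file could not be encoded in the allotted time.");
            }

            if (process.ExitCode != 0)
            {
                throw new InvalidOperationException($"The file could not be encoded, error {process.ExitCode}.");
            }
        }
    }
}
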
Example #4
        public static bool SuccessAnalyseSource(FileItem sourceFile, ProcessItem processItem)
        {
            if (sourceFile == null)
            {
                throw new ArgumentNullException(nameof(sourceFile));
            }
            if (processItem == null)
            {
                throw new ArgumentNullException(nameof(processItem));
            }
            if (!sourceFile.IsSource)
            {
                throw new ArgumentException("Doit être le fichier source", nameof(sourceFile));
            }

            // Retrieve the video's total duration and resolution
            try
            {
                var ffProbeProcessManager = new FfProbeProcessManager(sourceFile);
                ffProbeProcessManager.FillInfo(VideoSettings.Instance.FfProbeTimeout);
            }
            catch (Exception ex)
            {
                LogManager.AddEncodingMessage(LogLevel.Critical, "Exception non gérée", "Exception source info", ex);
            }

            // If the video's total duration, width or height could not be retrieved, we cannot continue
            if (!sourceFile.SuccessGetSourceInfo())
            {
                string message     = "Error while reading source video information.";
                string longMessage = message + " FileName : " + Path.GetFileName(sourceFile.SourceFilePath);
                processItem.SetErrorMessage(message, longMessage);
                return(false);
            }

            LogManager.AddEncodingMessage(LogLevel.Information, "SourceVideoDuration " + sourceFile.VideoDuration.Value + " / SourceVideoFileSize " + sourceFile.FileSize, "Info source");

            return(true);
        }
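FfProbeProcessManager.FillInfo above is the project's own wrapper and its implementation is not shown in these examples. For reference, the duration and resolution it fills in can be read with a plain ffprobe call; the stand-alone sketch below uses standard ffprobe flags and illustrative names, and assumes ffprobe is on the PATH.

using System.Diagnostics;
using System.Globalization;

static class FfProbeSketch
{
    // Query width, height and duration of the first video stream with ffprobe.
    public static (int Width, int Height, double DurationSeconds) Probe(string filePath)
    {
        var psi = new ProcessStartInfo
        {
            FileName  = "ffprobe",
            Arguments = "-v error -select_streams v:0 " +
                        "-show_entries stream=width,height:format=duration " +
                        $"-of default=noprint_wrappers=1 \"{filePath}\"",
            RedirectStandardOutput = true,
            UseShellExecute        = false,
            CreateNoWindow         = true
        };

        int width = 0, height = 0;
        double duration = 0;

        using (var process = Process.Start(psi))
        {
            string line;
            while ((line = process.StandardOutput.ReadLine()) != null)
            {
                // Output lines look like "width=1920", "height=1080", "duration=12.345000".
                var parts = line.Split('=');
                if (parts.Length != 2) continue;

                if (parts[0] == "width")         int.TryParse(parts[1], out width);
                else if (parts[0] == "height")   int.TryParse(parts[1], out height);
                else if (parts[0] == "duration") double.TryParse(parts[1], NumberStyles.Float, CultureInfo.InvariantCulture, out duration);
            }
            process.WaitForExit();
        }

        return (width, height, duration);
    }
}
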
Example #5
        public static bool AudioCpuEncoding(FileItem fileItem)
        {
            try
            {
                LogManager.AddEncodingMessage(LogLevel.Information, "SourceFilePath " + Path.GetFileName(fileItem.SourceFilePath), "Start AudioCpuEncoding");
                fileItem.AudioCpuEncodeProcess.StartProcessDateTime();

                if (fileItem.FileContainer.SourceFileItem.AudioCodec == "aac")
                {
                    fileItem.AudioCpuEncodeProcess.StartProcessDateTime();
                    fileItem.SetTempFilePath(fileItem.SourceFilePath);
                }
                else
                {
                    if (fileItem.VideoCodec.ToLower() == "vp8" || fileItem.VideoCodec.ToLower() == "vp9")
                    {
                        fileItem.SetTempFilePath(fileItem.TempFilePath.Replace(".mp4", ".mkv"));
                    }

                    // encode the audio from the source file
                    string arguments            = $"-y -i {Path.GetFileName(fileItem.SourceFilePath)} -vcodec copy -acodec aac -strict -2 {Path.GetFileName(fileItem.TempFilePath)}";
                    var    ffmpegProcessManager = new FfmpegProcessManager(fileItem, fileItem.AudioCpuEncodeProcess);
                    ffmpegProcessManager.StartProcess(arguments, VideoSettings.Instance.EncodeTimeout);
                }

                fileItem.SetVideoAacTempFilePath(fileItem.TempFilePath);
                LogManager.AddEncodingMessage(LogLevel.Information, "OutputFileName " + Path.GetFileName(fileItem.VideoAacTempFilePath) + " / FileSize " + fileItem.FileSize + " / Format " + fileItem.VideoSize, "End AudioCpuEncoding");
                fileItem.AudioCpuEncodeProcess.EndProcessDateTime();

                return(true);
            }
            catch (Exception ex)
            {
                string message = "Exception AudioCpuEncoding : Video Duration " + fileItem.VideoDuration + " / FileSize " + fileItem.FileSize + " / Progress " + fileItem.AudioCpuEncodeProcess.Progress;
                fileItem.AudioCpuEncodeProcess.SetErrorMessage("Exception non gérée", message, ex);
                return(false);
            }
        }
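The argument string built above re-encodes only the audio track. In standard ffmpeg terms: -y overwrites the output, -i names the input, -vcodec copy passes the video stream through untouched, -acodec aac re-encodes the audio to AAC, and -strict -2 enables the native AAC encoder on older ffmpeg builds. A small helper with a hypothetical name that produces the same command line:

static class AudioEncodeArgumentsSketch
{
    // Build the same ffmpeg command line as AudioCpuEncoding above: copy video, re-encode audio to AAC.
    public static string Build(string inputFileName, string outputFileName)
    {
        // -y           : overwrite the output file if it already exists
        // -i <input>   : input file (relative to the ffmpeg working directory)
        // -vcodec copy : copy the video stream without re-encoding
        // -acodec aac  : re-encode the audio stream to AAC
        // -strict -2   : allow the native/experimental AAC encoder on older ffmpeg releases
        return $"-y -i {inputFileName} -vcodec copy -acodec aac -strict -2 {outputFileName}";
    }
}
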
Example #6
        protected override void ProcessItem(FileItem fileItem)
        {
            // If the client has not requested progress for more than 20 seconds, cancel the operation
            if (!fileItem.AudioVideoCpuEncodeProcess.CanProcess())
            {
                string message = "FileName " + Path.GetFileName(fileItem.OutputFilePath) + " car le client est déconnecté";
                LogManager.AddEncodingMessage(message, "Annulation");
                fileItem.AudioVideoCpuEncodeProcess.CancelCascade("Le client est déconnecté.");
                return;
            }

            if (EncodeManager.AudioVideoCpuEncoding(fileItem))
            {
                // look for the 480p output to build the sprite from
                if (fileItem.VideoSize == VideoSize.F480p && fileItem.FileContainer.SpriteVideoFileItem != null)
                {
                    fileItem.FileContainer.SpriteVideoFileItem.SetSourceFilePath(fileItem.OutputFilePath);
                    SpriteDaemon.Instance.Queue(fileItem.FileContainer.SpriteVideoFileItem, "Waiting sprite creation...");
                }

                IpfsDaemon.Instance.Queue(fileItem);
            }
        }
Example #7
 protected override void LogException(FileItem fileItem, Exception ex)
 {
     LogManager.AddEncodingMessage(ex.ToString(), "Exception non gérée");                        
     fileItem.AudioVideoCpuEncodeProcess.SetErrorMessage("Exception non gérée");
 }
Example #8
        private static void Start()
        {
            daemon = Task.Run(() =>
            {
                while (true)
                {
                    try
                    {
                        Thread.Sleep(1000);

                        FileItem fileItem;

                        if (!queueFileItems.TryDequeue(out fileItem))
                        {
                            continue;
                        }

                        CurrentPositionInQueue++;

                        // If the client has not requested progress for more than 20 seconds, cancel the operation
                        if ((DateTime.UtcNow - fileItem.FileContainer.LastTimeProgressRequested).TotalSeconds > FrontSettings.MaxGetProgressCanceled)
                        {
                            fileItem.EncodeErrorMessage = "Canceled";
                            fileItem.EncodeProgress     = null;

                            fileItem.IpfsErrorMessage = "Canceled";
                            fileItem.IpfsProgress     = null;

                            continue;
                        }

                        // encode video
                        if (!EncodeManager.Encode(fileItem))
                        {
                            continue;
                        }

                        switch (fileItem.TypeFile)
                        {
                        case TypeFile.SpriteVideo:
                            {
                                string[] files     = EncodeManager.GetListImageFrom(fileItem.FilePath);                                                          // retrieve the extracted images
                                string outputPath  = Path.ChangeExtension(TempFileManager.GetNewTempFilePath(), ".jpeg");                                        // sprite file name
                                bool successSprite = SpriteManager.CombineBitmap(files.Skip(files.Length - VideoSettings.NbSpriteImages).ToArray(), outputPath); // build the sprite
                                TempFileManager.SafeDeleteTempFiles(files);                                                                                      // delete the extracted images
                                if (successSprite)
                                {
                                    fileItem.FilePath = outputPath;     // reassign the path to the sprite file
                                    LogManager.AddEncodingMessage("FileSize " + fileItem.FileSize, "End Sprite");
                                    IpfsDaemon.Queue(fileItem);
                                }
                                else
                                {
                                    TempFileManager.SafeDeleteTempFile(outputPath);
                                }

                                break;
                            }

                        case TypeFile.EncodedVideo:
                            IpfsDaemon.Queue(fileItem);
                            break;

                        default:
                            throw new InvalidOperationException("type non prévu");
                        }
                    }
                    catch (Exception ex)
                    {
                        LogManager.AddEncodingMessage(ex.ToString(), "Exception non gérée");
                    }
                }
            });
        }
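The loop above drains queueFileItems with TryDequeue, which matches the usual ConcurrentQueue&lt;T&gt; producer/consumer pattern (IpfsDaemon.Queue is called the same way further down). A minimal sketch of the producer side, with a stubbed FileItem and invented names; the real daemon's fields may differ.

using System.Collections.Concurrent;

public class FileItemStub { }   // stand-in for the project's FileItem type

public static class EncodeDaemonSketch
{
    // The background task dequeues from this queue in its polling loop (TryDequeue, as above).
    private static readonly ConcurrentQueue<FileItemStub> queueFileItems = new ConcurrentQueue<FileItemStub>();

    // Producer side: thread-safe enqueue called by whoever hands work to the daemon.
    public static void Queue(FileItemStub fileItem)
    {
        queueFileItems.Enqueue(fileItem);
    }
}
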
Example #9
        public static bool Encode(FileItem fileItem)
        {
            string newEncodedFilePath = null;

            try
            {
                currentFileItem = fileItem;
                currentFileItem.EncodeProgress = "0.00%";

                FileItem sourceFile     = currentFileItem.FileContainer.SourceFileItem;
                string   sourceFilePath = sourceFile.FilePath;
                newEncodedFilePath = Path.ChangeExtension(TempFileManager.GetNewTempFilePath(), ".mp4");
                LogManager.AddEncodingMessage("FileName " + Path.GetFileName(newEncodedFilePath), "Start");
                VideoSize videoSize = currentFileItem.VideoSize;

                Debug.WriteLine(Path.GetFileName(sourceFilePath) + " / " + videoSize);

                var processStartInfo = new ProcessStartInfo();
                processStartInfo.FileName = "ffmpeg";

                processStartInfo.RedirectStandardError = true;
                processStartInfo.WorkingDirectory      = TempFileManager.GetTempDirectory();

                processStartInfo.UseShellExecute = false;
                processStartInfo.ErrorDialog     = false;
                processStartInfo.CreateNoWindow  = true;
                processStartInfo.WindowStyle     = ProcessWindowStyle.Hidden;

                // Retrieve the video's total duration and resolution
                if (!sourceFile.VideoDuration.HasValue)
                {
                    string imageOutput = Path.ChangeExtension(sourceFilePath, ".jpeg");
                    processStartInfo.Arguments = $"-y -i {Path.GetFileName(sourceFilePath)} -vf fps=1 -vframes 1 {Path.GetFileName(imageOutput)}";

                    StartProcess(processStartInfo, VideoSettings.EncodeGetOneImageTimeout);

                    using (Image image = Image.FromFile(imageOutput))
                    {
                        sourceFile.VideoWidth  = image.Width;
                        sourceFile.VideoHeight = image.Height;
                    }
                    TempFileManager.SafeDeleteTempFile(imageOutput);
                }

                // If the video's total duration, width or height could not be retrieved, we cannot continue
                if ((sourceFile.VideoDuration ?? 0) <= 0)
                {
                    return(false);
                }
                if ((sourceFile.VideoHeight ?? 0) <= 0)
                {
                    return(false);
                }
                if ((sourceFile.VideoWidth ?? 0) <= 0)
                {
                    return(false);
                }

                int duration = sourceFile.VideoDuration.Value;

                // Disable encoding and sprite generation if the maximum duration is exceeded
                if (duration > VideoSettings.MaxVideoDurationForEncoding)
                {
                    currentFileItem.EncodeErrorMessage = "Disabled because the duration exceeds the maximum limit.";
                    currentFileItem.FileContainer.EncodedFileItems.Clear();
                    currentFileItem.FileContainer.DeleteSpriteVideo();
                    return(false);
                }

                switch (currentFileItem.TypeFile)
                {
                case TypeFile.SpriteVideo:
                {
                    int nbImages     = VideoSettings.NbSpriteImages;
                    int heightSprite = VideoSettings.HeightSpriteImages;

                    // Compute the image extraction rate:
                    //  if the video is shorter than nbImages seconds -> 1 image/s
                    //  otherwise spread nbImages images over the whole duration
                    string frameRate = "1";
                    if (duration > nbImages)
                    {
                        frameRate = $"{nbImages}/{duration}";         // frameRate as images per second = nbImages / duration
                    }

                    int    spriteWidth  = GetWidth(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, heightSprite);
                    string sizeImageMax = $"scale={spriteWidth}:{heightSprite}";

                    // Extract frameRate images per second from the video
                    string pattern = GetPattern(newEncodedFilePath);
                    processStartInfo.Arguments = $"-y -i {Path.GetFileName(sourceFilePath)} -r {frameRate} -vf \"{sizeImageMax}\" -f image2 {pattern}";

                    StartProcess(processStartInfo, VideoSettings.EncodeGetImagesTimeout);
                    break;
                }

                case TypeFile.EncodedVideo:
                {
                    string size;
                    switch (videoSize)
                    {
                    case VideoSize.F360p:
                    {
                        Tuple <int, int> finalSize = GetSize(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, 640, 360);
                        size = $"{finalSize.Item1}:{finalSize.Item2}";
                        break;
                    }

                    case VideoSize.F480p:
                    {
                        Tuple <int, int> finalSize = GetSize(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, 854, 480);
                        size = $"{finalSize.Item1}:{finalSize.Item2}";
                        break;
                    }

                    case VideoSize.F720p:
                    {
                        Tuple <int, int> finalSize = GetSize(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, 1280, 720);
                        size = $"{finalSize.Item1}:{finalSize.Item2}";
                        break;
                    }

                    case VideoSize.F1080p:
                    {
                        Tuple <int, int> finalSize = GetSize(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, 1920, 1080);
                        size = $"{finalSize.Item1}:{finalSize.Item2}";
                        break;
                    }

                    default:
                        throw new InvalidOperationException("Format non reconnu.");
                    }

                    string formatEncode = "libx264";
                    if (VideoSettings.GpuEncodeMode)
                    {
                        string maxRate = string.Empty;
                        formatEncode = "h264_nvenc";
                        switch (videoSize)
                        {
                        case VideoSize.F360p:
                            maxRate = "200k";
                            break;

                        case VideoSize.F480p:
                            maxRate = "500k";
                            break;

                        case VideoSize.F720p:
                            maxRate = "1000k";
                            break;

                        case VideoSize.F1080p:
                            maxRate = "1600k";
                            break;

                        default:
                            throw new InvalidOperationException("Format non reconnu.");
                        }

                        processStartInfo.Arguments = $"-y -i {Path.GetFileName(sourceFilePath)} -vcodec {formatEncode} -vf \"scale={size},format=yuv420p\" -b:v {maxRate} -maxrate {maxRate} -bufsize {maxRate} -acodec aac {Path.GetFileName(newEncodedFilePath)}";
                    }
                    else
                    {
                        processStartInfo.Arguments = $"-y -i {Path.GetFileName(sourceFilePath)} -vcodec {formatEncode} -vf \"scale={size},format=yuv420p\" -acodec aac {Path.GetFileName(newEncodedFilePath)}";
                    }

                    StartProcess(processStartInfo, VideoSettings.EncodeTimeout);
                    break;
                }

                default:
                    throw new InvalidOperationException("type non prévu");
                }

                currentFileItem.FilePath       = newEncodedFilePath;
                currentFileItem.EncodeProgress = "100.00%";
                switch (currentFileItem.TypeFile)
                {
                case TypeFile.SpriteVideo:
                    LogManager.AddEncodingMessage("Video Duration " + duration + " / SourceVideoFileSize " + currentFileItem.FileContainer.SourceFileItem.FileSize, "End Extract Images");
                    break;

                case TypeFile.EncodedVideo:
                    LogManager.AddEncodingMessage("Video Duration " + duration + " / FileSize " + currentFileItem.FileSize + " / Format " + videoSize, "End Encoding");
                    break;

                default:
                    throw new InvalidOperationException("type non prévu");
                }

                return(true);
            }
            catch (Exception ex)
            {
                LogManager.AddEncodingMessage("Video Duration " + currentFileItem.VideoDuration + " / FileSize " + currentFileItem.FileSize + " / Progress " + currentFileItem.EncodeProgress + " / Exception : " + ex, "Exception");
                currentFileItem.EncodeErrorMessage = ex.Message;

                TempFileManager.SafeDeleteTempFile(newEncodedFilePath);

                if (currentFileItem.VideoSize != VideoSize.Source)
                {
                    TempFileManager.SafeDeleteTempFile(currentFileItem.FilePath);
                }

                if (currentFileItem.TypeFile == TypeFile.SpriteVideo)
                {
                    string[] files = EncodeManager.GetListImageFrom(newEncodedFilePath); // retrieve the extracted images
                    TempFileManager.SafeDeleteTempFiles(files);                          // delete the extracted images
                }

                return(false);
            }
        }
Example #10
        public static bool Encode(FileItem fileItem)
        {
            string newEncodedFilePath = null;

            try
            {
                fileItem.EncodeProgress = "0.00%";

                FileItem  sourceFile     = fileItem.FileContainer.SourceFileItem;
                string    sourceFilePath = sourceFile.FilePath;
                VideoSize videoSize      = fileItem.VideoSize;
                LogManager.AddEncodingMessage("SourceFilePath " + Path.GetFileName(sourceFilePath) + " -> " + videoSize, "Start");

                // Retrieve the video's total duration and resolution, and check that encoding is allowed
                if (!VideoSourceManager.CheckAndAnalyseSource(fileItem, false))
                {
                    return(false);
                }

                string size;
                string maxRate = string.Empty;
                switch (videoSize)
                {
                case VideoSize.F360p:
                {
                    maxRate = "200k";
                    Tuple <int, int> finalSize = SizeHelper.GetSize(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, 640, 360);
                    size = $"{finalSize.Item1}:{finalSize.Item2}";
                    break;
                }

                case VideoSize.F480p:
                {
                    maxRate = "500k";
                    Tuple <int, int> finalSize = SizeHelper.GetSize(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, 854, 480);
                    size = $"{finalSize.Item1}:{finalSize.Item2}";
                    break;
                }

                case VideoSize.F720p:
                {
                    maxRate = "1000k";
                    Tuple <int, int> finalSize = SizeHelper.GetSize(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, 1280, 720);
                    size = $"{finalSize.Item1}:{finalSize.Item2}";
                    break;
                }

                case VideoSize.F1080p:
                {
                    maxRate = "1600k";
                    Tuple <int, int> finalSize = SizeHelper.GetSize(sourceFile.VideoWidth.Value, sourceFile.VideoHeight.Value, 1920, 1080);
                    size = $"{finalSize.Item1}:{finalSize.Item2}";
                    break;
                }

                default:
                    throw new InvalidOperationException("Format non reconnu.");
                }

                newEncodedFilePath = Path.ChangeExtension(TempFileManager.GetNewTempFilePath(), ".mp4");
                string arguments;
                if (VideoSettings.GpuEncodeMode)
                {
                    arguments = $"-y -hwaccel cuvid -vcodec h264_cuvid -vsync 0 -i {Path.GetFileName(sourceFilePath)} -vf \"scale_npp={size},format=yuv420p\" -b:v {maxRate} -maxrate {maxRate} -bufsize {maxRate} -vcodec h264_nvenc -acodec copy {Path.GetFileName(newEncodedFilePath)}";
                }
                else
                {
                    arguments = $"-y -i {Path.GetFileName(sourceFilePath)} -vf \"scale={size},format=yuv420p\" -vcodec libx264 -acodec aac {Path.GetFileName(newEncodedFilePath)}";
                }

                var ffmpegProcessManager = new FfmpegProcessManager(fileItem);
                ffmpegProcessManager.StartProcess(arguments, VideoSettings.EncodeTimeout);

                fileItem.FilePath = newEncodedFilePath;
                LogManager.AddEncodingMessage("OutputFileName " + Path.GetFileName(newEncodedFilePath) + " / FileSize " + fileItem.FileSize + " / Format " + videoSize, "End Encoding");

                fileItem.EncodeProgress = "100.00%";
                return(true);
            }
            catch (Exception ex)
            {
                LogManager.AddEncodingMessage("Video Duration " + fileItem.VideoDuration + " / FileSize " + fileItem.FileSize + " / Progress " + fileItem.EncodeProgress + " / Exception : " + ex, "Exception");
                fileItem.EncodeErrorMessage = "Exception";
                TempFileManager.SafeDeleteTempFile(newEncodedFilePath);
                fileItem.CleanFiles();
                return(false);
            }
        }
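
Both Encode variants above pick one of two argument templates from the single VideoSettings.GpuEncodeMode flag: a CPU path (software scale plus libx264) and a GPU path (h264_cuvid decode, scale_npp scaling, h264_nvenc encode). A reduced helper with illustrative names that mirrors the two templates of the last example:

static class EncodeArgumentsSketch
{
    // Mirror of the two command lines used in the last Encode example above.
    public static string Build(bool gpuEncodeMode, string inputFileName, string size, string maxRate, string outputFileName)
    {
        if (gpuEncodeMode)
        {
            // GPU path: decode with h264_cuvid, scale with scale_npp, encode with NVENC, copy the audio stream.
            return $"-y -hwaccel cuvid -vcodec h264_cuvid -vsync 0 -i {inputFileName} " +
                   $"-vf \"scale_npp={size},format=yuv420p\" " +
                   $"-b:v {maxRate} -maxrate {maxRate} -bufsize {maxRate} " +
                   $"-vcodec h264_nvenc -acodec copy {outputFileName}";
        }

        // CPU path: software scale, libx264 video, AAC audio.
        return $"-y -i {inputFileName} -vf \"scale={size},format=yuv420p\" -vcodec libx264 -acodec aac {outputFileName}";
    }
}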