Example no. 1
        static async Task AiFinished(string aiName)
        {
            if (Interpolate.canceled)
            {
                return;
            }
            Program.mainForm.SetProgress(100);
            AiProcessSuspend.SetRunning(false);
            int interpFramesFiles = IoUtils.GetAmountOfFiles(Interpolate.current.interpFolder, false, "*" + Interpolate.current.interpExt);
            int interpFramesCount = interpFramesFiles + InterpolationProgress.deletedFramesCount;

            InterpolationProgress.UpdateInterpProgress(interpFramesCount, InterpolationProgress.targetFrames);
            string logStr = $"Done running {aiName} - Interpolation took {FormatUtils.Time(processTime.Elapsed)}. Peak Output FPS: {InterpolationProgress.peakFpsOut.ToString("0.00")}";

            if (Interpolate.currentlyUsingAutoEnc && AutoEncode.HasWorkToDo())
            {
                logStr += " - Waiting for encoding to finish...";
                Program.mainForm.SetStatus("Creating output video from frames...");
            }

            Logger.Log(logStr);
            processTime.Stop();

            if (interpFramesCount < 3)
            {
                string[] logLines = File.ReadAllLines(Path.Combine(Paths.GetLogPath(), lastLogName + ".txt"));
                string   log      = string.Join("\n", logLines.Reverse().Take(10).Reverse().Select(x => x.Split("]: ").Last()).ToList());
                string   amount   = interpFramesCount > 0 ? $"Only {interpFramesCount}" : "No";
                Interpolate.Cancel($"Interpolation failed - {amount} interpolated frames were created.\n\n\nLast 10 log lines:\n{log}\n\nCheck the log '{lastLogName}' for more details.");
                return;
            }

            try
            {
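                // Keep mirroring ffmpeg's progress output into the log until the encoder has exited and auto-encode has no work left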
                while (Interpolate.currentlyUsingAutoEnc && Program.busy)
                {
                    if (AvProcess.lastAvProcess != null && !AvProcess.lastAvProcess.HasExited)
                    {
                        if (Logger.LastLogLine.ToLower().Contains("frame: "))
                        {
                            Logger.Log(FormatUtils.BeautifyFfmpegStats(Logger.LastLogLine), false, Logger.LastUiLine.ToLower().Contains("frame"));
                        }
                    }

                    if ((AvProcess.lastAvProcess == null || AvProcess.lastAvProcess.HasExited) && !AutoEncode.HasWorkToDo())     // Stop logging if ffmpeg is not running & AE is done
                    {
                        break;
                    }

                    await Task.Delay(500);
                }
            }
            catch (Exception e)
            {
                Logger.Log($"AiFinished encoder logging error: {e.Message}\n{e.StackTrace}", true);
            }
        }
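
The waiting block above is an async polling loop: while auto-encode is still busy it mirrors the encoder's latest stats line, yields with Task.Delay(500) so the UI thread is never blocked, and breaks once the ffmpeg process has exited and no work remains. Below is a minimal, self-contained sketch of that pattern; the Process parameter and the hasPendingWork callback are illustrative stand-ins for AvProcess.lastAvProcess and AutoEncode.HasWorkToDo(), not names from the code above.

using System;
using System.Diagnostics;
using System.Threading.Tasks;

static class PollingSketch
{
    // Polls an external process until it has exited and no queued work remains.
    // "hasPendingWork" is a stand-in for a check like AutoEncode.HasWorkToDo().
    public static async Task WaitForProcessAsync(Process proc, Func<bool> hasPendingWork, int intervalMs = 500)
    {
        while (true)
        {
            if (proc != null && !proc.HasExited)
                Console.WriteLine($"Encoder still running at {DateTime.Now:T}"); // stand-in for mirroring ffmpeg stats into the log

            // Stop once the process is gone and the remaining work has been drained
            if ((proc == null || proc.HasExited) && !hasPendingWork())
                break;

            await Task.Delay(intervalMs); // yield instead of blocking so the UI thread stays responsive
        }
    }
}

Tolerating a null process handle in the break condition mirrors the guard in the loop above and avoids a race where the handle is cleared while the loop is still awaiting.
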
Example no. 2
        public static void LogOutput(string line, ref string appendStr, string logFilename, LogMode logMode, bool showProgressBar)
        {
            if (Interpolate.canceled || string.IsNullOrWhiteSpace(line))
            {
                return;
            }

            bool hidden = logMode == LogMode.Hidden;

            if (HideMessage(line)) // Don't print certain warnings
            {
                hidden = true;
            }

            bool replaceLastLine = logMode == LogMode.OnlyLastLine;

            if (line.Contains("time=") && (line.StartsWith("frame=") || line.StartsWith("size=")))
            {
                line = FormatUtils.BeautifyFfmpegStats(line);
            }

            appendStr += Environment.NewLine + line;
            Logger.Log($"{prefix} {line}", hidden, replaceLastLine, logFilename);

            if (!hidden && showProgressBar && line.Contains("Time:"))
            {
                Regex timeRegex = new Regex("(?<=Time:).*(?= )");
                UpdateFfmpegProgress(timeRegex.Match(line).Value);
            }

            if (line.Contains("Unable to"))
            {
                Interpolate.Cancel($"Error: {line}");
                return;
            }

            if (line.Contains("Could not open file"))
            {
                Interpolate.Cancel($"Error: {line}");
                return;
            }

            if (line.Contains("No NVENC capable devices found") || line.MatchesWildcard("*nvcuda.dll*"))
            {
                Interpolate.Cancel($"Error: {line}\n\nMake sure you have an NVENC-capable Nvidia GPU.");
                return;
            }

            if (line.Contains("not currently supported in container") || line.Contains("Unsupported codec id"))
            {
                Interpolate.Cancel($"Error: {line}\n\nIt looks like you are trying to copy a stream into a container that doesn't support this codec.");
                return;
            }

            if (line.Contains("Subtitle encoding currently only possible from text to text or bitmap to bitmap"))
            {
                Interpolate.Cancel($"Error: {line}\n\nYou cannot encode image-based subtitles into text-based subtitles. Please use the Copy Subtitles option instead, with a compatible container.");
                return;
            }

            if (line.Contains("Only VP8 or VP9 or AV1 video and Vorbis or Opus audio and WebVTT subtitles are supported for WebM"))
            {
                Interpolate.Cancel($"Error: {line}\n\nIt looks like you are trying to copy an unsupported stream into WEBM!");
                return;
            }

            if (line.MatchesWildcard("*codec*not supported*"))
            {
                Interpolate.Cancel($"Error: {line}\n\nTry using a different codec.");
                return;
            }

            if (line.Contains("GIF muxer supports only a single video GIF stream"))
            {
                Interpolate.Cancel($"Error: {line}\n\nYou tried to mux a non-GIF stream into a GIF file.");
                return;
            }

            if (line.Contains("Width and height of input videos must be same"))
            {
                Interpolate.Cancel($"Error: {line}");
                return;
            }
        }
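
LogOutput relies on two string-matching helpers whose implementations are not shown above: the lookbehind regex that extracts the value after "Time:" for the progress bar, and the MatchesWildcard extension used for patterns like "*nvcuda.dll*" and "*codec*not supported*". The sketch below reuses the regex pattern verbatim and adds a plausible stand-in for the wildcard matcher; the real extension may be implemented differently.

using System.Text.RegularExpressions;

static class MatchSketch
{
    // Extracts the value following "Time:" from a progress line, using the same
    // lookbehind pattern that feeds UpdateFfmpegProgress above.
    public static string ExtractTime(string line)
    {
        return new Regex("(?<=Time:).*(?= )").Match(line).Value.Trim();
    }

    // Hypothetical stand-in for the MatchesWildcard extension (its source is not
    // shown above): translate '*' and '?' into regex and match case-insensitively.
    public static bool MatchesWildcard(this string str, string pattern)
    {
        string regex = "^" + Regex.Escape(pattern).Replace("\\*", ".*").Replace("\\?", ".") + "$";
        return Regex.IsMatch(str, regex, RegexOptions.IgnoreCase);
    }
}

Under that stand-in, a line such as "Could not load nvcuda.dll" matches "*nvcuda.dll*", which is what triggers the NVENC-related cancel above.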