Example #1
        public static string GetCurrentExportFilename(bool fpsLimit, bool withExt)
        {
            InterpSettings curr = Interpolate.current;
            float          fps  = fpsLimit ? Config.GetFloat("maxFps") : curr.outFps;

            string pattern   = Config.Get("exportNamePattern");
            string inName    = curr.inputIsFrames ? Path.GetFileName(curr.inPath) : Path.GetFileNameWithoutExtension(curr.inPath);
            bool   addSuffix = fpsLimit && (!pattern.Contains("[FPS]") && !pattern.Contains("[ROUNDFPS]"));
            string filename  = pattern;

            filename = filename.Replace("[NAME]", inName);
            filename = filename.Replace("[NAMEWITHEXT]", Path.GetFileName(curr.inPath));
            filename = filename.Replace("[FACTOR]", curr.interpFactor.ToStringDot());
            filename = filename.Replace("[AI]", curr.ai.aiNameShort.ToUpper());
            filename = filename.Replace("[MODEL]", curr.model);
            filename = filename.Replace("[FPS]", fps.ToStringDot());
            filename = filename.Replace("[ROUNDFPS]", fps.RoundToInt().ToString());

            if (addSuffix)
            {
                filename += Paths.fpsLimitSuffix;
            }

            if (withExt)
            {
                filename += FFmpegUtils.GetExt(curr.outMode);
            }

            return filename;
        }
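
The method above is plain token substitution over the configured pattern. A minimal standalone sketch of the same idea (the pattern string and values here are invented for illustration, not Flowframes defaults):

        // Standalone sketch of the token expansion used above; the pattern and
        // values are hypothetical, not taken from the application's config.
        using System;

        class ExportNameDemo
        {
            static void Main()
            {
                string pattern = "[NAME]-[FACTOR]x-[AI]-[ROUNDFPS]fps";     // assumed example pattern
                string name = pattern
                    .Replace("[NAME]", "clip")
                    .Replace("[FACTOR]", "2")
                    .Replace("[AI]", "RIFE")
                    .Replace("[ROUNDFPS]", "60");
                Console.WriteLine(name);     // clip-2x-RIFE-60fps
            }
        }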
Example #2
        private MediaMetaData GetVideoMetaData(string mediaItemFilePath, string ffmpegFolder)
        {
            Unosquare.FFME.MediaEngine.FFmpegDirectory = ffmpegFolder;
            Unosquare.FFME.MediaEngine.LoadFFmpeg();

            try
            {
                var info = Unosquare.FFME.MediaEngine.RetrieveMediaInfo(FFmpegUtils.FixUnicodeString(mediaItemFilePath));

                string title = null;
                info.Metadata?.TryGetValue("title", out title);

                if (string.IsNullOrEmpty(title))
                {
                    title = Path.GetFileNameWithoutExtension(mediaItemFilePath);
                }

                return new MediaMetaData
                {
                    Title = StripNewLines(title),
                    Duration = info.Duration
                });
            }
            catch (MediaContainerException)
            {
                // file is in use...
                throw new VideoFileInUseException();
            }
        }
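
A call site would typically catch the VideoFileInUseException that the method converts MediaContainerException into. A hedged usage sketch (the method is private here, so assume an accessible equivalent in the host project):

        // Hypothetical call site; assumes access to GetVideoMetaData and that
        // VideoFileInUseException is defined in the host project.
        try
        {
            MediaMetaData meta = GetVideoMetaData(@"C:\videos\clip.mp4", @"C:\tools\ffmpeg");
            Console.WriteLine($"{meta.Title} ({meta.Duration})");
        }
        catch (VideoFileInUseException)
        {
            Console.WriteLine("File is locked by another process; retry later.");
        }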
Example #3
        public static async Task ChunksToVideos(string tempFolder, string chunksFolder, string baseOutPath)
        {
            if (IOUtils.GetAmountOfFiles(chunksFolder, true, $"*{FFmpegUtils.GetExt(I.current.outMode)}") < 1)
            {
                I.Cancel("No video chunks found - An error must have occured during chunk encoding!", AiProcess.hasShownError);
                return;
            }

            await Task.Delay(10);

            Program.mainForm.SetStatus("Merging video chunks...");
            try
            {
                DirectoryInfo chunksDir = new DirectoryInfo(chunksFolder);
                foreach (DirectoryInfo dir in chunksDir.GetDirectories())
                {
                    string suffix            = dir.Name.Replace("chunks", "");
                    string tempConcatFile    = Path.Combine(tempFolder, $"chunks-concat{suffix}.ini");
                    string concatFileContent = "";
                    foreach (string vid in IOUtils.GetFilesSorted(dir.FullName))
                    {
                        concatFileContent += $"file '{Paths.chunksDir}/{dir.Name}/{Path.GetFileName(vid)}'\n";
                    }
                    File.WriteAllText(tempConcatFile, concatFileContent);

                    Logger.Log($"CreateVideo: Running MergeChunks() for vfrFile '{Path.GetFileName(tempConcatFile)}'", true);
                    await MergeChunks(tempConcatFile, baseOutPath.FilenameSuffix(suffix));
                }
            }
            catch (Exception e)
            {
                Logger.Log("ChunksToVideo Error: " + e.Message, false);
                MessageBox.Show("An error occured while trying to merge the video chunks.\nCheck the log for details.");
            }
        }
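
The file written per chunk folder uses ffmpeg's concat-demuxer syntax, one `file '...'` line per clip, exactly as the loop above builds it. Assuming MergeChunks hands it to ffmpeg with stream copy (the actual flags are not shown in this example), the file and invocation would look roughly like:

        # chunks-concat.ini - built by the loop above (directory names assumed)
        file 'chunks/chunks/0001.mp4'
        file 'chunks/chunks/0002.mp4'

        # assumed invocation inside MergeChunks; flags are illustrative:
        ffmpeg -f concat -safe 0 -i chunks-concat.ini -c copy merged.mp4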
Example #4
        public override void Initialize()
        {
            base.Initialize();

            // Initialize ffmpeg
            FFmpegUtils.PreloadLibraries();
            FFmpegUtils.Initialize();
        }
Example #5
        public static async Task<bool> CheckEncoderValid()
        {
            string enc = FFmpegUtils.GetEnc(FFmpegUtils.GetCodec(I.current.outMode));

            if (!enc.ToLower().Contains("nvenc"))
            {
                return true;
            }

            if (!(await FfmpegCommands.IsEncoderCompatible(enc)))
            {
                ShowMessage("NVENC encoding is not available on your hardware!\nPlease use a different encoder.", "Error");
                I.Cancel();
                return false;
            }

            return true;
        }
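
The helper FfmpegCommands.IsEncoderCompatible is not shown here. One common way such a check is implemented (an assumption, not necessarily what this codebase does) is a throwaway one-frame encode against ffmpeg's null muxer:

        // Hypothetical compatibility probe: run a 1-frame encode to the null
        // muxer and treat exit code 0 as "encoder available on this hardware".
        // Assumes "ffmpeg" is on PATH; encoder would be e.g. "h264_nvenc".
        using System.Diagnostics;

        static bool ProbeEncoder(string encoder)
        {
            var psi = new ProcessStartInfo
            {
                FileName        = "ffmpeg",
                Arguments       = $"-hide_banner -f lavfi -i color=black:s=64x64 -frames:v 1 -c:v {encoder} -f null -",
                UseShellExecute = false,
                CreateNoWindow  = true
            };
            using (var proc = Process.Start(psi))
            {
                proc.WaitForExit();
                return proc.ExitCode == 0;
            }
        }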
Example #6
 private VideoFromFileTemplateGenerator()
 {
     // Initialize ffmpeg
     FFmpegUtils.PreloadLibraries();
     FFmpegUtils.Initialize();
 }
Example #7
            /// <inheritdoc />
            protected override async Task<ResultStatus> DoCommandOverride(ICommandContext commandContext)
            {
                VideoAsset videoAsset = Parameters.Video;

                try
                {
                    // Get path to ffmpeg
                    var ffmpeg = ToolLocator.LocateTool("ffmpeg.exe")?.ToWindowsPath() ?? throw new AssetException("Failed to compile a video asset, ffmpeg was not found.");

                    // Get absolute path of asset source on disk
                    var assetDirectory = videoAsset.Source.GetParent();
                    var assetSource    = UPath.Combine(assetDirectory, videoAsset.Source);

                    //=====================================================================================
                    // Get the info from the video codec

                    // Check if we need to re-encode the video
                    var mustReEncodeVideo    = false;
                    var sidedataStripCommand = "";

                    // check that the video file format is supported
                    if (Parameters.Platform == PlatformType.Windows && videoAsset.Source.GetFileExtension() != ".mp4")
                    {
                        mustReEncodeVideo = true;
                    }

                    // Use FFmpegMedia object (need to check more details first before I can use it)
                    VideoStream videoStream = null;
                    AudioStream audioStream = null;
                    FFmpegUtils.PreloadLibraries();
                    FFmpegUtils.Initialize();
                    using (var media = new FFmpegMedia())
                    {
                        media.Open(assetSource.ToWindowsPath());

                        // Get the first video stream
                        videoStream = media.Streams.OfType<VideoStream>().FirstOrDefault();
                        if (videoStream == null)
                        {
                            throw new AssetException("Failed to compile a video asset. Did not find the VideoStream from the media.");
                        }

                        // On windows MediaEngineEx player only decode the first video if the video is detected as a stereoscopic video,
                        // so we remove the tags inside the video in order to ensure the same behavior as on other platforms (side by side decoded texture)
                        // Unfortunately it does not seem possible to disable this behavior from the MediaEngineEx API.
                        if (Parameters.Platform == PlatformType.Windows && media.IsStereoscopicVideo(videoStream))
                        {
                            mustReEncodeVideo    = true;
                            sidedataStripCommand = "-vf sidedata=delete";
                        }

                        // Get the first audio stream
                        audioStream = media.Streams.OfType<AudioStream>().FirstOrDefault();
                    }
                    Size2 videoSize = new Size2(videoStream.Width, videoStream.Height);

                    // check the format
                    if (ListSupportedCodecNames != null)
                    {
                        if (Array.IndexOf(ListSupportedCodecNames, videoStream.Codec) < 0)
                        {
                            mustReEncodeVideo = true;
                        }
                    }

                    // check if video need to be trimmed
                    var videoDuration = videoAsset.VideoDuration;
                    if (videoDuration.Enabled && (videoDuration.StartTime != TimeSpan.Zero ||
                                                  videoDuration.EndTime.TotalSeconds < videoStream.Duration.TotalSeconds - MathUtil.ZeroToleranceDouble))
                    {
                        mustReEncodeVideo = true;
                    }

                    // check the video target and source resolution
                    Size2 targetSize;
                    if (videoAsset.IsSizeInPercentage)
                    {
                        targetSize = new Size2((int)(videoSize.Width * videoAsset.Width / 100.0f), (int)(videoSize.Height * videoAsset.Height / 100.0f));
                    }
                    else
                    {
                        targetSize = new Size2((int)(videoAsset.Width), (int)(videoAsset.Height));
                    }

                    // ensure that the size is a multiple of 2 (ffmpeg cannot output video not multiple of 2, at least with this codec)
                    if (targetSize.Width % 2 == 1)
                    {
                        targetSize.Width += 1;
                    }
                    if (targetSize.Height % 2 == 1)
                    {
                        targetSize.Height += 1;
                    }

                    if (targetSize.Width != videoSize.Width || targetSize.Height != videoSize.Height)
                    {
                        mustReEncodeVideo = true;
                    }

                    // check the audio settings
                    int  audioChannelsTarget       = audioStream == null ? 0 : audioStream.ChannelCount;
                    bool mustReEncodeAudioChannels = false;
                    if (videoAsset.IsAudioChannelMono)
                    {
                        audioChannelsTarget = 1;
                        if (audioStream != null && audioStream.ChannelCount != audioChannelsTarget)
                        {
                            mustReEncodeAudioChannels = true;
                            mustReEncodeVideo         = true;
                        }
                    }

                    // Execute ffmpeg to convert source to H.264
                    string tempFile = null;
                    try
                    {
                        if (mustReEncodeVideo)
                        {
                            string targetCodecFormat = "h264";  // hardcoded for now
                            commandContext.Logger.Info(string.Format("Video Asset Compiler: \"{0}\". Re-encode the Video. Format:{1}, Size:{2}x{3}. Audio Channels:{4}",
                                                                     videoAsset.Source.GetFileName(), targetCodecFormat, targetSize.Width, targetSize.Height, audioChannelsTarget));

                            tempFile = Path.GetTempFileName();
                            string channelFlag = "";
                            if (mustReEncodeAudioChannels)
                            {
                                channelFlag = string.Format(" -ac {0}", audioChannelsTarget);
                            }

                            var startTime       = videoDuration.StartTime;
                            var duration        = videoDuration.EndTime - videoDuration.StartTime;
                            var trimmingOptions = videoDuration.Enabled
                                                  ? $" -ss {startTime.Hours:D2}:{startTime.Minutes:D2}:{startTime.Seconds:D2}.{startTime.Milliseconds:D3}" +
                                                    $" -t {duration.Hours:D2}:{duration.Minutes:D2}:{duration.Seconds:D2}.{duration.Milliseconds:D3}"
                                                  : "";

                            var commandLine = "  -hide_banner -loglevel error" +                       // hide most log output
                                              "  -nostdin" +                                           // no interaction (background process)
                                              $" -i \"{assetSource.ToWindowsPath()}\"" +               // input file
                                              $"{trimmingOptions}" +
                                              "  -f mp4 -vcodec " + targetCodecFormat +                // codec
                                              channelFlag +                                            // audio channels
                                              $"  -vf scale={targetSize.Width}:{targetSize.Height} " + // adjust the resolution
                                              sidedataStripCommand +                                   // strip the stereoscopic sidedata tag
                                                                                                       //" -an" + // no audio
                                                                                                       //" -pix_fmt yuv422p" + // pixel format (planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples))
                                              $" -y \"{tempFile}\"";                                   // output file (always overwrite)
                            var ret = await ShellHelper.RunProcessAndGetOutputAsync(ffmpeg, commandLine, commandContext.Logger);

                            if (ret != 0 || commandContext.Logger.HasErrors)
                            {
                                throw new AssetException($"Failed to compile a video asset. ffmpeg failed to convert {assetSource}.");
                            }
                        }
                        else
                        {
                            commandContext.Logger.Info(string.Format("Video Asset Compiler: \"{0}\". No Re-encoding necessary",
                                                                     videoAsset.Source.GetFileName()));

                            // Use temporary file
                            tempFile = assetSource.ToWindowsPath();
                        }

                        var dataUrl = Url + "_Data";
                        var video   = new Video.Video
                        {
                            CompressedDataUrl = dataUrl,
                        };

                        // Make sure we don't compress h264 data
                        commandContext.AddTag(new ObjectUrl(UrlType.Content, dataUrl), Builder.DoNotCompressTag);

                        // Write the data
                        using (var reader = new BinaryReader(new FileStream(tempFile, FileMode.Open, FileAccess.Read)))
                            using (var outputStream = MicrothreadLocalDatabases.DatabaseFileProvider.OpenStream(dataUrl, VirtualFileMode.Create, VirtualFileAccess.Write, VirtualFileShare.Read, StreamFlags.Seekable))
                            {
                                // For now write everything at once, 2 MB at a time
                                var length = reader.BaseStream.Length;
                                for (var position = 0L; position < length; position += 2 << 20)
                                {
                                    var buffer = reader.ReadBytes(2 << 20);
                                    outputStream.Write(buffer, 0, buffer.Length);
                                }
                            }

                        var assetManager = new ContentManager(MicrothreadLocalDatabases.ProviderService);
                        assetManager.Save(Url, video);

                        return ResultStatus.Successful;
                    }
                    finally
                    {
                        if (mustReEncodeVideo && tempFile != null)
                        {
                            File.Delete(tempFile);
                        }
                    }
                }
                catch (AssetException)
                {
                    throw;
                }
                catch (Exception ex)
                {
                    throw new AssetException("Failed to compile a video asset. Unexpected exception.", ex);
                }
            }
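
For a re-encode with trimming, mono audio, and stereoscopic tag stripping, the command line assembled above comes out roughly as follows (paths and numbers invented). Note that `scale` and `sidedata=delete` are passed as two separate `-vf` flags; ffmpeg applies only the last `-vf` given, so when both conditions trigger, a combined chain like `-vf scale=1280:720,sidedata=delete` would be the safer construction:

        ffmpeg -hide_banner -loglevel error -nostdin -i "C:\assets\intro.avi"
               -ss 00:00:05.000 -t 00:00:10.000 -f mp4 -vcodec h264 -ac 1
               -vf scale=1280:720 -vf sidedata=delete -y "C:\temp\tmp1A2B.tmp"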
Example #8
        public static async Task CreateOutputVid()
        {
            if (!Directory.Exists(current.interpFolder) || IOUtils.GetAmountOfFiles(current.interpFolder, false) < 2)
            {
                Cancel($"There are no interpolated frames to encode!\n\nDid you delete the folder?");
                return;
            }

            if (!(await InterpolateUtils.CheckEncoderValid()))
            {
                return;
            }

            string[] outFrames = IOUtils.GetFilesSorted(current.interpFolder, $"*.{InterpolateUtils.GetOutExt()}");

            if (outFrames.Length > 0 && !IOUtils.CheckImageValid(outFrames[0]))
            {
                InterpolateUtils.ShowMessage("Invalid frame files detected!\n\nIf you used Auto-Encode, this is normal, and you don't need to run " +
                                             "this step as the video was already created in the \"Interpolate\" step.", "Error");
                return;
            }

            string outPath = Path.Combine(current.outPath, Path.GetFileNameWithoutExtension(current.inPath) + IOUtils.GetCurrentExportSuffix() + FFmpegUtils.GetExt(current.outMode));
            await CreateVideo.Export(current.interpFolder, outPath, current.outMode, true);
        }
Example #9
        public InterpSettings(string serializedData)
        {
            inPath           = "";
            outPath          = "";
            ai               = Networks.networks[0];
            inFps            = 0;
            interpFactor     = 0;
            outFps           = 0;
            outMode          = Interpolate.OutMode.VidMp4;
            model            = "";
            alpha            = false;
            stepByStep       = false;
            inputResolution  = new Size(0, 0);
            scaledResolution = new Size(0, 0);

            Dictionary<string, string> entries = new Dictionary<string, string>();

            foreach (string line in serializedData.SplitIntoLines())
            {
                if (line.Length < 3)
                {
                    continue;
                }
                string[] keyValuePair = line.Split('|');
                if (keyValuePair.Length < 2)
                {
                    continue;       // skip malformed lines instead of throwing
                }
                entries.Add(keyValuePair[0], keyValuePair[1]);
            }

            foreach (KeyValuePair<string, string> entry in entries)
            {
                switch (entry.Key)
                {
                case "INPATH": inPath = entry.Value; break;

                case "OUTPATH": outPath = entry.Value; break;

                case "AI": ai = Networks.GetAi(entry.Value); break;

                case "INFPS": inFps = float.Parse(entry.Value); break;

                case "OUTFPS": outFps = float.Parse(entry.Value); break;

                case "INTERPFACTOR": interpFactor = float.Parse(entry.Value); break;

                case "OUTMODE": outMode = (Interpolate.OutMode)Enum.Parse(typeof(Interpolate.OutMode), entry.Value); break;

                case "MODEL": model = entry.Value; break;

                case "INPUTRES": inputResolution = FormatUtils.ParseSize(entry.Value); break;

                case "OUTPUTRES": scaledResolution = FormatUtils.ParseSize(entry.Value); break;

                case "ALPHA": alpha = bool.Parse(entry.Value); break;

                case "STEPBYSTEP": stepByStep = bool.Parse(entry.Value); break;
                }
            }

            try
            {
                tempFolder    = InterpolateUtils.GetTempFolderLoc(inPath, outPath);
                framesFolder  = Path.Combine(tempFolder, Paths.framesDir);
                interpFolder  = Path.Combine(tempFolder, Paths.interpDir);
                inputIsFrames = IOUtils.IsPathDirectory(inPath);
                outFilename   = Path.Combine(outPath, Path.GetFileNameWithoutExtension(inPath) + IOUtils.GetExportSuffix(interpFactor, ai, model) + FFmpegUtils.GetExt(outMode));
            }
            catch
            {
                Logger.Log("Tried to create InterpSettings struct without an inpath. Can't set tempFolder, framesFolder and interpFolder.", true);
                tempFolder    = "";
                framesFolder  = "";
                interpFolder  = "";
                inputIsFrames = false;
                outFilename   = "";
            }
        }
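
The constructor expects one KEY|VALUE pair per line. A hedged example of the serialized form it parses (keys taken from the switch above, values invented):

        // Example input for InterpSettings(string); keys match the switch cases
        // above, values are illustrative.
        string serialized =
            "INPATH|C:\\videos\\clip.mp4\n" +
            "OUTPATH|C:\\videos\\out\n" +
            "AI|RIFE\n" +
            "INFPS|30\n" +
            "INTERPFACTOR|2\n" +
            "OUTFPS|60\n" +
            "OUTMODE|VidMp4\n" +
            "MODEL|rife-v2.4";

        InterpSettings settings = new InterpSettings(serialized);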
Example #10
        public InterpSettings(string inPathArg, string outPathArg, AI aiArg, float inFpsArg, int interpFactorArg, Interpolate.OutMode outModeArg, string modelArg)
        {
            inPath       = inPathArg;
            outPath      = outPathArg;
            ai           = aiArg;
            inFps        = inFpsArg;
            interpFactor = interpFactorArg;
            outFps       = inFpsArg * interpFactorArg;
            outMode      = outModeArg;
            model        = modelArg;

            alpha      = false;
            stepByStep = false;

            try
            {
                tempFolder    = InterpolateUtils.GetTempFolderLoc(inPath, outPath);
                framesFolder  = Path.Combine(tempFolder, Paths.framesDir);
                interpFolder  = Path.Combine(tempFolder, Paths.interpDir);
                inputIsFrames = IOUtils.IsPathDirectory(inPath);
                outFilename   = Path.Combine(outPath, Path.GetFileNameWithoutExtension(inPath) + IOUtils.GetExportSuffix(interpFactor, ai, model) + FFmpegUtils.GetExt(outMode));
            }
            catch
            {
                Logger.Log("Tried to create InterpSettings struct without an inpath. Can't set tempFolder, framesFolder and interpFolder.", true);
                tempFolder    = "";
                framesFolder  = "";
                interpFolder  = "";
                inputIsFrames = false;
                outFilename   = "";
            }

            inputResolution  = new Size(0, 0);
            scaledResolution = new Size(0, 0);
        }
Example #11
 public void UpdatePaths(string inPathArg, string outPathArg)
 {
     inPath        = inPathArg;
     outPath       = outPathArg;
     tempFolder    = InterpolateUtils.GetTempFolderLoc(inPath, outPath);
     framesFolder  = Path.Combine(tempFolder, Paths.framesDir);
     interpFolder  = Path.Combine(tempFolder, Paths.interpDir);
     inputIsFrames = IOUtils.IsPathDirectory(inPath);
     outFilename   = Path.Combine(outPath, Path.GetFileNameWithoutExtension(inPath) + IOUtils.GetExportSuffix(interpFactor, ai, model) + FFmpegUtils.GetExt(outMode));
 }
Example #12
        public static async Task MainLoop(string interpFramesPath)
        {
            try
            {
                UpdateChunkAndBufferSizes();

                interpFramesFolder = interpFramesPath;
                videoChunksFolder  = Path.Combine(interpFramesPath.GetParentDir(), Paths.chunksDir);
                if (Interpolate.currentlyUsingAutoEnc)
                {
                    Directory.CreateDirectory(videoChunksFolder);
                }

                encodedFrameLines.Clear();
                unencodedFrameLines.Clear();

                Logger.Log($"[AutoEnc] Starting AutoEncode MainLoop - Chunk Size: {chunkSize} Frames - Safety Buffer: {safetyBufferFrames} Frames", true);
                int    videoIndex = 1;
                string encFile    = Path.Combine(interpFramesPath.GetParentDir(), Paths.GetFrameOrderFilename(Interpolate.current.interpFactor));
                interpFramesLines = IOUtils.ReadLines(encFile).Select(x => x.Split('/').Last().Remove("'").Split('#').First()).ToArray();     // Array with frame filenames

                while (!Interpolate.canceled && GetInterpFramesAmount() < 2)
                {
                    await Task.Delay(2000);
                }

                int lastEncodedFrameNum = 0;

                while (HasWorkToDo())    // Loop while proc is running and not all frames have been encoded
                {
                    if (Interpolate.canceled)
                    {
                        return;
                    }

                    if (paused)
                    {
                        await Task.Delay(200);

                        continue;
                    }

                    unencodedFrameLines.Clear();

                    for (int vfrLine = lastEncodedFrameNum; vfrLine < interpFramesLines.Length; vfrLine++)
                    {
                        unencodedFrameLines.Add(vfrLine);
                    }

                    bool aiRunning = !AiProcess.currentAiProcess.HasExited;

                    if (unencodedFrameLines.Count > 0 && (unencodedFrameLines.Count >= (chunkSize + safetyBufferFrames) || !aiRunning))     // Encode every n frames, or after process has exited
                    {
                        List <int> frameLinesToEncode = aiRunning ? unencodedFrameLines.Take(chunkSize).ToList() : unencodedFrameLines;     // Take all remaining frames if process is done
                        string     lastOfChunk        = Path.Combine(interpFramesPath, interpFramesLines[frameLinesToEncode.Last()]);

                        if (!File.Exists(lastOfChunk))
                        {
                            await Task.Delay(500);

                            continue;
                        }

                        busy = true;
                        string outpath      = Path.Combine(videoChunksFolder, "chunks", $"{videoIndex.ToString().PadLeft(4, '0')}{FFmpegUtils.GetExt(Interpolate.current.outMode)}");
                        int    firstLineNum = frameLinesToEncode.First();
                        int    lastLineNum  = frameLinesToEncode.Last();
                        Logger.Log($"[AutoEnc] Encoding Chunk #{videoIndex} to '{outpath}' using line {firstLineNum} ({Path.GetFileName(interpFramesLines[firstLineNum])}) through {lastLineNum} ({Path.GetFileName(Path.GetFileName(interpFramesLines[frameLinesToEncode.Last()]))})", true, false, "ffmpeg");

                        await CreateVideo.EncodeChunk(outpath, Interpolate.current.outMode, firstLineNum, frameLinesToEncode.Count);

                        if (Interpolate.canceled)
                        {
                            return;
                        }

                        if (aiRunning && Config.GetInt("autoEncMode") == 2)
                        {
                            Task.Run(() => DeleteOldFramesAsync(interpFramesPath, frameLinesToEncode));     // Fire-and-forget: delete already-encoded frames in the background
                        }

                        if (Interpolate.canceled)
                        {
                            return;
                        }

                        encodedFrameLines.AddRange(frameLinesToEncode);

                        Logger.Log("Done Encoding Chunk #" + videoIndex, true, false, "ffmpeg");
                        lastEncodedFrameNum = lastLineNum + 1;

                        videoIndex++;
                        busy = false;
                    }
                    await Task.Delay(50);
                }

                if (Interpolate.canceled)
                {
                    return;
                }
                await CreateVideo.ChunksToVideos(Interpolate.current.tempFolder, videoChunksFolder, Interpolate.current.outFilename);
            }
            catch (Exception e)
            {
                Logger.Log($"AutoEnc Error: {e.Message}. Stack Trace:\n{e.StackTrace}");
                Interpolate.Cancel("Auto-Encode encountered an error.");
            }
        }
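
The Select chain that builds interpFramesLines compacts three steps: keep the segment after the last '/', strip single quotes, and drop anything after '#'. A standalone sketch on an assumed input line (the concat-style line format is inferred from the parsing; Remove("'") is a codebase extension method, approximated here with Replace):

        // Inferred frame-order line format; Remove("'") from the codebase is
        // approximated with Replace. Requires System and System.Linq.
        using System;
        using System.Linq;

        string line = "file 'interp/00000001.png'";     // hypothetical input line
        string frameName = line.Split('/').Last()       // "00000001.png'"
                               .Replace("'", "")        // "00000001.png"
                               .Split('#').First();     // unchanged (no '#' comment)
        Console.WriteLine(frameName);                   // 00000001.png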