Example #1
        public void source_directory_is_mirrored_in_output()
        {
            var sourceDir = ExampleScripts.Valid.Hierarchy;

            new Compiler().Compile(new CompilerOptions
            {
                Compile   = true,
                OutputDir = OutputDir.ToString(),
                Path      = sourceDir.ToString()
            });

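            // Each *.coffee source should have a corresponding .js file at the mirrored location under OutputDir.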
            var sourceFiles         = sourceDir.Glob("*.coffee");
            var expectedTargetFiles = sourceFiles
                                      .Select(src => Path.ChangeExtension(src.FullName, "js"))
                                      .Select(src => src.Replace(sourceDir.ToString(), OutputDir.ToString()))
                                      .OrderBy(f => f)
                                      .ToArray();

            var actualTargetFiles = OutputDir.Glob("*.*").Select(f => f.FullName).OrderBy(f => f).ToArray();

            Console.WriteLine("Expecting:");
            expectedTargetFiles.ForEach(Console.WriteLine);

            Console.WriteLine("Got:");
            actualTargetFiles.ForEach(Console.WriteLine);

            Assert.That(actualTargetFiles, Is.EqualTo(expectedTargetFiles));
        }
Example #2
        private void ValidateOutputDir()
        {
            if (string.IsNullOrWhiteSpace(OutputDir))
            {
                throw new InvalidOperationException("The output directory name cannot be null, empty or white space.");
            }

            if (OutputDir.Any(c => Path.GetInvalidPathChars().Contains(c)))
            {
                throw new InvalidOperationException($"Specified output directory {OutputDir} contains invalid path characters.");
            }

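            // Resolve relative paths to an absolute path before enforcing the length limit.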
            OutputDir = Path.IsPathRooted(OutputDir) ? OutputDir : Path.GetFullPath(OutputDir);
            if (OutputDir.Length > 80)
            {
                throw new InvalidOperationException($"The output directory path {OutputDir} is too long (>80 characters). Tests writing here may trigger errors because of path length limits");
            }
            try
            {
                Directory.CreateDirectory(OutputDir);
            }
            catch (IOException e)
            {
                throw new Exception($"Unable to create output directory {OutputDir}: {e.Message}", e);
            }
        }
Example #3
 public CodeExporter(ILogger<CodeExporter> logger,
                     OutputDir outputDir,
                     CodeStripper stripper)
 {
     _logger    = logger;
     _outputDir = outputDir;
     _stripper  = stripper;
 }
Example #4
        public void Build()
        {
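            // Create the "data" subdirectory in the output, then build every *.emdl file found in the root directory.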
            OutputDir.CreateSubdirectory("data");

            foreach (var file in RootDir.EnumerateFiles("*.emdl"))
            {
                new EmdlFile(file).Build();
            }
        }
Example #5
        public void BuildOneFile_ShouldReturnOneFile()
        {
            var arguments = (BuildArguments?)ArgumentParser.TryParse(new[] { "build", "file1" });

            // using classic assert so R# understands the value is not null
            Assert.IsNotNull(arguments);
            arguments!.InputFile.Should().Be("file1");
            arguments!.OutputToStdOut.Should().BeFalse();
            arguments!.OutputDir.Should().BeNull();
            arguments!.OutputFile.Should().BeNull();
        }
Example #6
        public void Build_with_outputfile_parameter_should_parse_correctly()
        {
            var arguments = (BuildArguments?)ArgumentParser.TryParse(new[] { "build", "--outfile", "jsonFile", "file1" });

            // using classic assert so R# understands the value is not null
            Assert.IsNotNull(arguments);
            arguments!.InputFile.Should().Be("file1");
            arguments!.OutputToStdOut.Should().BeFalse();
            arguments!.OutputDir.Should().BeNull();
            arguments!.OutputFile.Should().Be("jsonFile");
        }
Example #7
        public void Build_with_outputdir_parameter_should_parse_correctly()
        {
            var arguments = ArgumentParser.TryParse(new[] { "build", "--outdir", "outdir", "file1" });
            var buildOrDecompileArguments = (BuildOrDecompileArguments?)arguments;

            // using classic assert so R# understands the value is not null
            Assert.IsNotNull(arguments);
            buildOrDecompileArguments!.InputFile.Should().Be("file1");
            buildOrDecompileArguments!.OutputToStdOut.Should().BeFalse();
            buildOrDecompileArguments!.OutputDir.Should().Be("outdir");
            buildOrDecompileArguments!.OutputFile.Should().BeNull();
        }
Example #8
 public ExampleIndexExporter(BuilderSettings settings,
                             ILogger<ExampleIndexExporter> logger,
                             MarkdownParser markdownParser,
                             Layout layout,
                             OutputDir outputDir)
 {
     _baseUrl        = settings.BaseUrl;
     _logger         = logger;
     _markdownParser = markdownParser;
     _layout         = layout;
     _outputDir      = outputDir;
 }
Example #9
        public void DecompileOneFile_ShouldReturnOneFile()
        {
            var arguments = ArgumentParser.TryParse(new[] { "decompile", "file1" });
            var buildOrDecompileArguments = (DecompileArguments?)arguments;

            // using classic assert so R# understands the value is not null
            Assert.IsNotNull(arguments);
            buildOrDecompileArguments!.InputFile.Should().Be("file1");
            buildOrDecompileArguments!.OutputToStdOut.Should().BeFalse();
            buildOrDecompileArguments!.OutputDir.Should().BeNull();
            buildOrDecompileArguments!.OutputFile.Should().BeNull();
        }
Example #10
        public void BuildOneFileStdOutAllCaps_ShouldReturnOneFileAndStdout()
        {
            var arguments = ArgumentParser.TryParse(new[] { "build", "--STDOUT", "file1" });
            var buildOrDecompileArguments = (BuildArguments?)arguments;

            // using classic assert so R# understands the value is not null
            Assert.IsNotNull(arguments);
            buildOrDecompileArguments!.InputFile.Should().Be("file1");
            buildOrDecompileArguments!.OutputToStdOut.Should().BeTrue();
            buildOrDecompileArguments!.OutputDir.Should().BeNull();
            buildOrDecompileArguments!.OutputFile.Should().BeNull();
        }
Example #11
        public void BuildOneFileStdOut_and_no_restore_ShouldReturnOneFileAndStdout()
        {
            var arguments      = ArgumentParser.TryParse(new[] { "build", "--stdout", "--no-restore", "file1" });
            var buildArguments = (BuildArguments?)arguments;

            // using classic assert so R# understands the value is not null
            Assert.IsNotNull(arguments);
            buildArguments!.InputFile.Should().Be("file1");
            buildArguments!.OutputToStdOut.Should().BeTrue();
            buildArguments!.OutputDir.Should().BeNull();
            buildArguments!.OutputFile.Should().BeNull();
            buildArguments!.NoRestore.Should().BeTrue();
        }
Example #12
        public void Decompile_with_outputdir_parameter_should_parse_correctly()
        {
            // Use relative . to ensure directory exists else the parser will throw.
            var arguments = ArgumentParser.TryParse(new[] { "decompile", "--outdir", ".", "file1" });
            var buildOrDecompileArguments = (DecompileArguments?)arguments;

            // using classic assert so R# understands the value is not null
            Assert.IsNotNull(arguments);
            buildOrDecompileArguments!.InputFile.Should().Be("file1");
            buildOrDecompileArguments!.OutputToStdOut.Should().BeFalse();
            buildOrDecompileArguments!.OutputDir.Should().Be(".");
            buildOrDecompileArguments!.OutputFile.Should().BeNull();
        }
Example #13
 public ExampleExporter(ILogger<ExampleExporter> logger,
                        MarkdownParser markdownParser,
                        JsConcepts jsConcepts,
                        UxConcepts uxConcepts,
                        OutputDir outputDir,
                        Layout layout)
 {
     _logger         = logger;
     _markdownParser = markdownParser;
     _jsConcepts     = jsConcepts;
     _uxConcepts     = uxConcepts;
     _outputDir      = outputDir;
     _layout         = layout;
 }
Example #14
        protected virtual Dictionary<string, string> GetOptions()
        {
            var options = new Dictionary<string, string>();

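            // Record the tool paths; OutputDir is stored without a trailing backslash, and optional lists are joined with ';'.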
            options["IceHome"]      = IceHome;
            options["IceToolsPath"] = IceToolsPath;
            options["OutputDir"]    = OutputDir.TrimEnd('\\');
            if (IncludeDirectories != null && IncludeDirectories.Length > 0)
            {
                options["IncludeDirectories"] = string.Join(";", IncludeDirectories);
            }
            if (AdditionalOptions != null)
            {
                options["AdditionalOptions"] = string.Join(";", AdditionalOptions);
            }
            return options;
        }
Example #15
 /// <summary>
 /// Attempts to migrate all sub-directories when the working directory is changed.
 /// Currently this only occurs in the GUI when a working directory has been changed.
 /// </summary>
 public void AttemptSubDirMigration(string newWDirAbs)
 {
     try
     {
         if (!newWDirAbs.EndsWith(DSStr))
         {
             newWDirAbs += DS;
         }
         string tempPath;
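          // Re-root each relative path setting under the new working directory, keeping the new value only when the target exists.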
         if (!InputDir.StartsWith(".." + DS))
         {
             tempPath = newWDirAbs + InputDir;
             if (Directory.Exists(tempPath))
             {
                 iDir = tempPath;
                 if (!DefaultSeedFileName.StartsWith(".." + DS))
                 {
                     tempPath = iDir + DefaultSeedFileName;
                     if (File.Exists(tempPath))
                     {
                         defaultSeedFileName = tempPath;
                     }
                 }
             }
         }
         if (!OutputDir.StartsWith(".." + DS))
         {
             tempPath = newWDirAbs + OutputDir;
             if (Directory.Exists(tempPath))
             {
                 oDir = tempPath;
             }
         }
         if (!RulesDir.StartsWith(".." + DS))
         {
             tempPath = newWDirAbs + RulesDir;
             if (Directory.Exists(tempPath))
             {
                 rDir = tempPath;
                 if (!CompiledRuleFunctions.StartsWith(".." + DS))
                 {
                      tempPath = rDir + CompiledRuleFunctions;
                     if (File.Exists(tempPath))
                     {
                         compiledRuleFunctions = tempPath;
                     }
                 }
                 var relRuleSets = DefaultRuleSets.Split(',');
                 for (var i = 0; i < relRuleSets.GetLength(0); i++)
                 {
                     if (!string.IsNullOrWhiteSpace(relRuleSets[i]) && !relRuleSets[i].StartsWith(".." + DS))
                     {
                         tempPath = rDir + relRuleSets[i];
                         if (File.Exists(tempPath))
                         {
                             defaultRSFileNames[i] = tempPath;
                         }
                     }
                 }
             }
         }
         if (!SearchDir.StartsWith(".." + DS))
         {
             tempPath = newWDirAbs + SearchDir;
             if (Directory.Exists(tempPath))
             {
                 sDir = tempPath;
             }
         }
         if (!GraphLayoutDir.StartsWith(".." + DS))
         {
             tempPath = newWDirAbs + GraphLayoutDir;
             if (Directory.Exists(tempPath))
             {
                 glDir = tempPath;
             }
         }
         if (!CustomShapesFile.StartsWith(".." + DS))
         {
             tempPath = newWDirAbs + CustomShapesFile;
             if (File.Exists(tempPath))
             {
                 customShapesFile = tempPath;
             }
         }
     }
     catch (Exception exc)
     {
         ErrorLogger.Catch(exc);
     }
 }
Example #16
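 // Write the tags and description to a .txt file (named after FileName with invalid characters removed) in the output directory.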
 File.WriteAllText(Path.Combine(OutputDir, FileName.RemoveInvalidChars() + ".txt"), $"{Tags} \n{Description}");
Example #17
        public override bool ProcessFeatures(Dictionary<string, Help.Feature> features)
        {
            // If the base fails, just fail out
            if (!base.ProcessFeatures(features))
            {
                return false;
            }

            // Get the splitting mode
            SplittingMode splittingMode = GetSplittingMode(features);

            if (splittingMode == SplittingMode.None)
            {
                logger.Error("No valid splitting mode found!");
                return false;
            }

            // Get only files from the inputs
            List<ParentablePath> files = PathTool.GetFilesOnly(Inputs, appendparent: true);

            // Loop over the input files
            foreach (ParentablePath file in files)
            {
                // Create and fill the new DAT
                DatFile internalDat = DatFile.Create(Header);
                Parser.ParseInto(internalDat, file);

                // Get the output directory
                OutputDir = OutputDir.Ensure();
                OutputDir = file.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                // Extension splitting
                if (splittingMode.HasFlag(SplittingMode.Extension))
                {
                    (DatFile extADat, DatFile extBDat) = DatTools.Splitter.SplitByExtension(internalDat, GetList(features, ExtAListValue), GetList(features, ExtBListValue));

                    InternalStopwatch watch = new InternalStopwatch("Outputting extension-split DATs");

                    // Output both possible DatFiles
                    Writer.Write(extADat, OutputDir);
                    Writer.Write(extBDat, OutputDir);

                    watch.Stop();
                }

                // Hash splitting
                if (splittingMode.HasFlag(SplittingMode.Hash))
                {
                    Dictionary<DatItemField, DatFile> typeDats = DatTools.Splitter.SplitByHash(internalDat);

                    InternalStopwatch watch = new InternalStopwatch("Outputting hash-split DATs");

                    // Loop through each type DatFile
                    Parallel.ForEach(typeDats.Keys, Globals.ParallelOptions, itemType =>
                    {
                        Writer.Write(typeDats[itemType], OutputDir);
                    });

                    watch.Stop();
                }

                // Level splitting
                if (splittingMode.HasFlag(SplittingMode.Level))
                {
                    logger.Warning("This feature is not implemented: level-split");
                    DatTools.Splitter.SplitByLevel(
                        internalDat,
                        OutputDir,
                        GetBoolean(features, ShortValue),
                        GetBoolean(features, BaseValue));
                }

                // Size splitting
                if (splittingMode.HasFlag(SplittingMode.Size))
                {
                    (DatFile lessThan, DatFile greaterThan) = DatTools.Splitter.SplitBySize(internalDat, GetInt64(features, RadixInt64Value));

                    InternalStopwatch watch = new InternalStopwatch("Outputting size-split DATs");

                    // Output both possible DatFiles
                    Writer.Write(lessThan, OutputDir);
                    Writer.Write(greaterThan, OutputDir);

                    watch.Stop();
                }

                // Total Size splitting
                if (splittingMode.HasFlag(SplittingMode.TotalSize))
                {
                    logger.Warning("This feature is not implemented: total-size-split");
                    List<DatFile> sizedDats = DatTools.Splitter.SplitByTotalSize(internalDat, GetInt64(features, ChunkSizeInt64Value));

                    InternalStopwatch watch = new InternalStopwatch("Outputting total-size-split DATs");

                    // Loop through each type DatFile
                    Parallel.ForEach(sizedDats, Globals.ParallelOptions, sizedDat =>
                    {
                        Writer.Write(sizedDat, OutputDir);
                    });

                    watch.Stop();
                }

                // Type splitting
                if (splittingMode.HasFlag(SplittingMode.Type))
                {
                    Dictionary<ItemType, DatFile> typeDats = DatTools.Splitter.SplitByType(internalDat);

                    InternalStopwatch watch = new InternalStopwatch("Outputting ItemType DATs");

                    // Loop through each type DatFile
                    Parallel.ForEach(typeDats.Keys, Globals.ParallelOptions, itemType =>
                    {
                        Writer.Write(typeDats[itemType], OutputDir);
                    });

                    watch.Stop();
                }
            }

            return true;
        }
Example #18
        public override bool ProcessFeatures(Dictionary<string, Help.Feature> features)
        {
            // If the base fails, just fail out
            if (!base.ProcessFeatures(features))
            {
                return false;
            }

            // Get feature flags
            var updateDatItemFields = GetUpdateDatItemFields(features);
            var updateMachineFields = GetUpdateMachineFields(features);
            var updateMode          = GetUpdateMode(features);

            // Normalize the extensions
            Header.AddExtension = (string.IsNullOrWhiteSpace(Header.AddExtension) || Header.AddExtension.StartsWith(".")
                ? Header.AddExtension
                : $".{Header.AddExtension}");
            Header.ReplaceExtension = (string.IsNullOrWhiteSpace(Header.ReplaceExtension) || Header.ReplaceExtension.StartsWith(".")
                ? Header.ReplaceExtension
                : $".{Header.ReplaceExtension}");

            // If we're in a non-replacement special update mode and the names aren't set, set defaults
            if (updateMode != 0 &&
                !(updateMode.HasFlag(UpdateMode.DiffAgainst) || updateMode.HasFlag(UpdateMode.BaseReplace)))
            {
                // Get the values that will be used
                if (string.IsNullOrWhiteSpace(Header.Date))
                {
                    Header.Date = DateTime.Now.ToString("yyyy-MM-dd");
                }

                if (string.IsNullOrWhiteSpace(Header.Name))
                {
                    Header.Name = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
                                  + (Header.Type == "SuperDAT" ? "-SuperDAT" : string.Empty)
                                  + (Cleaner.DedupeRoms != DedupeType.None ? "-deduped" : string.Empty);
                }

                if (string.IsNullOrWhiteSpace(Header.Description))
                {
                    Header.Description = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
                                         + (Header.Type == "SuperDAT" ? "-SuperDAT" : string.Empty)
                                         + (Cleaner.DedupeRoms != DedupeType.None ? " - deduped" : string.Empty);

                    if (!GetBoolean(features, NoAutomaticDateValue))
                    {
                        Header.Description += $" ({Header.Date})";
                    }
                }

                if (string.IsNullOrWhiteSpace(Header.Category) && updateMode != 0)
                {
                    Header.Category = "DiffDAT";
                }

                if (string.IsNullOrWhiteSpace(Header.Author))
                {
                    Header.Author = $"SabreTools {Prepare.Version}";
                }

                if (string.IsNullOrWhiteSpace(Header.Comment))
                {
                    Header.Comment = $"Generated by SabreTools {Prepare.Version}";
                }
            }

            // If no update fields are set, default to Names
            if (updateDatItemFields == null || updateDatItemFields.Count == 0)
            {
                updateDatItemFields = new List<DatItemField>()
                {
                    DatItemField.Name
                };
            }

            // Ensure we only have files in the inputs
            List<ParentablePath> inputPaths = PathTool.GetFilesOnly(Inputs, appendparent: true);
            List<ParentablePath> basePaths  = PathTool.GetFilesOnly(GetList(features, BaseDatListValue));

            // Ensure the output directory
            OutputDir = OutputDir.Ensure();

            // If we're in standard update mode, run through all of the inputs
            if (updateMode == UpdateMode.None)
            {
                // Loop through each input and update
                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                {
                    // Create a new base DatFile
                    DatFile datFile = DatFile.Create(Header);
                    logger.User($"Processing '{Path.GetFileName(inputPath.CurrentPath)}'");
                    Parser.ParseInto(datFile, inputPath, keep: true,
                                     keepext: datFile.Header.DatFormat.HasFlag(DatFormat.TSV) ||
                                     datFile.Header.DatFormat.HasFlag(DatFormat.CSV) ||
                                     datFile.Header.DatFormat.HasFlag(DatFormat.SSV));

                    // Perform additional processing steps
                    Extras.ApplyExtras(datFile);
                    Splitter.ApplySplitting(datFile, false);
                    Filter.ApplyFilters(datFile);
                    Cleaner.ApplyCleaning(datFile);
                    Remover.ApplyRemovals(datFile);

                    // Get the correct output path
                    string realOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                    // Try to output the file, overwriting only if it's not in the current directory
                    Writer.Write(datFile, realOutDir, overwrite: GetBoolean(features, InplaceValue));
                });

                return true;
            }

            // Reverse inputs if we're in a required mode
            if (updateMode.HasFlag(UpdateMode.DiffReverseCascade))
            {
                updateMode |= UpdateMode.DiffCascade;
                inputPaths.Reverse();
            }
            if (updateMode.HasFlag(UpdateMode.ReverseBaseReplace))
            {
                updateMode |= UpdateMode.BaseReplace;
                basePaths.Reverse();
            }

            // Create a DAT to capture inputs
            DatFile userInputDat = DatFile.Create(Header);

            // Populate using the correct set
            List<DatHeader> datHeaders;

            if (updateMode.HasFlag(UpdateMode.DiffAgainst) || updateMode.HasFlag(UpdateMode.BaseReplace))
            {
                datHeaders = DatFileTool.PopulateUserData(userInputDat, basePaths);
            }
            else
            {
                datHeaders = DatFileTool.PopulateUserData(userInputDat, inputPaths);
            }

            // Perform additional processing steps
            Extras.ApplyExtras(userInputDat);
            Splitter.ApplySplitting(userInputDat, false);
            Filter.ApplyFilters(userInputDat);
            Cleaner.ApplyCleaning(userInputDat);
            Remover.ApplyRemovals(userInputDat);

            // Output only DatItems that are duplicated across inputs
            if (updateMode.HasFlag(UpdateMode.DiffDupesOnly))
            {
                DatFile dupeData = DatFileTool.DiffDuplicates(userInputDat, inputPaths);

                InternalStopwatch watch = new InternalStopwatch("Outputting duplicate DAT");
                Writer.Write(dupeData, OutputDir, overwrite: false);
                watch.Stop();
            }

            // Output only DatItems that are not duplicated across inputs
            if (updateMode.HasFlag(UpdateMode.DiffNoDupesOnly))
            {
                DatFile outerDiffData = DatFileTool.DiffNoDuplicates(userInputDat, inputPaths);

                InternalStopwatch watch = new InternalStopwatch("Outputting no duplicate DAT");
                Writer.Write(outerDiffData, OutputDir, overwrite: false);
                watch.Stop();
            }

            // Output only DatItems that are unique to each input
            if (updateMode.HasFlag(UpdateMode.DiffIndividualsOnly))
            {
                // Get all of the output DatFiles
                List<DatFile> datFiles = DatFileTool.DiffIndividuals(userInputDat, inputPaths);

                // Loop through and output the new DatFiles
                InternalStopwatch watch = new InternalStopwatch("Outputting all individual DATs");

                Parallel.For(0, inputPaths.Count, Globals.ParallelOptions, j =>
                {
                    string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                    // Try to output the file
                    Writer.Write(datFiles[j], path, overwrite: GetBoolean(features, InplaceValue));
                });

                watch.Stop();
            }

            // Output cascaded diffs
            if (updateMode.HasFlag(UpdateMode.DiffCascade))
            {
                // Preprocess the DatHeaders
                Parallel.For(0, datHeaders.Count, Globals.ParallelOptions, j =>
                {
                    // If we're outputting to the runtime folder, rename
                    if (!GetBoolean(features, InplaceValue) && OutputDir == Environment.CurrentDirectory)
                    {
                        string innerpost = $" ({j} - {inputPaths[j].GetNormalizedFileName(true)} Only)";

                        datHeaders[j]              = userInputDat.Header;
                        datHeaders[j].FileName    += innerpost;
                        datHeaders[j].Name        += innerpost;
                        datHeaders[j].Description += innerpost;
                    }
                });

                // Get all of the output DatFiles
                List<DatFile> datFiles = DatFileTool.DiffCascade(userInputDat, datHeaders);

                // Loop through and output the new DatFiles
                InternalStopwatch watch = new InternalStopwatch("Outputting all created DATs");

                int startIndex = GetBoolean(features, SkipFirstOutputValue) ? 1 : 0;
                Parallel.For(startIndex, inputPaths.Count, Globals.ParallelOptions, j =>
                {
                    string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                    // Try to output the file
                    Writer.Write(datFiles[j], path, overwrite: GetBoolean(features, InplaceValue));
                });

                watch.Stop();
            }

            // Output differences against a base DAT
            if (updateMode.HasFlag(UpdateMode.DiffAgainst))
            {
                // Loop through each input and diff against the base
                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                {
                    // Parse the path to a new DatFile
                    DatFile repDat = DatFile.Create(Header);
                    Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true);

                    // Perform additional processing steps
                    Extras.ApplyExtras(repDat);
                    Splitter.ApplySplitting(repDat, false);
                    Filter.ApplyFilters(repDat);
                    Cleaner.ApplyCleaning(repDat);
                    Remover.ApplyRemovals(repDat);

                    // Now replace the fields from the base DatFile
                    DatFileTool.DiffAgainst(userInputDat, repDat, GetBoolean(features, ByGameValue));

                    // Finally output the diffed DatFile
                    string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
                    Writer.Write(repDat, interOutDir, overwrite: GetBoolean(features, InplaceValue));
                });
            }

            // Output DATs after replacing fields from a base DatFile
            if (updateMode.HasFlag(UpdateMode.BaseReplace))
            {
                // Loop through each input and apply the base DatFile
                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                {
                    // Parse the path to a new DatFile
                    DatFile repDat = DatFile.Create(Header);
                    Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true);

                    // Perform additional processing steps
                    Extras.ApplyExtras(repDat);
                    Splitter.ApplySplitting(repDat, false);
                    Filter.ApplyFilters(repDat);
                    Cleaner.ApplyCleaning(repDat);
                    Remover.ApplyRemovals(repDat);

                    // Now replace the fields from the base DatFile
                    DatFileTool.BaseReplace(
                        userInputDat,
                        repDat,
                        updateMachineFields,
                        updateDatItemFields,
                        GetBoolean(features, OnlySameValue));

                    // Finally output the replaced DatFile
                    string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
                    Writer.Write(repDat, interOutDir, overwrite: GetBoolean(features, InplaceValue));
                });
            }

            // Merge all input files and write
            // This has to be last due to the SuperDAT handling
            if (updateMode.HasFlag(UpdateMode.Merge))
            {
                // If we're in SuperDAT mode, prefix all games with their respective DATs
                if (string.Equals(userInputDat.Header.Type, "SuperDAT", StringComparison.OrdinalIgnoreCase))
                {
                    DatFileTool.ApplySuperDAT(userInputDat, inputPaths);
                }

                Writer.Write(userInputDat, OutputDir);
            }

            return true;
        }