/// <summary>
/// Run the update/diff/merge feature set: normalize header values, then either
/// convert each input in place (standard mode) or run the requested diff/merge
/// operations across all inputs and base DATs.
/// </summary>
/// <param name="features">Map of enabled feature flags and their values</param>
public override void ProcessFeatures(Dictionary<string, Library.Help.Feature> features)
{
    base.ProcessFeatures(features);

    // Get feature flags
    var updateFields = GetUpdateFields(features);
    var updateMode = GetUpdateMode(features);

    // Normalize the extensions so a non-empty value always carries a leading dot
    Header.AddExtension = (string.IsNullOrWhiteSpace(Header.AddExtension) || Header.AddExtension.StartsWith(".")
        ? Header.AddExtension
        : $".{Header.AddExtension}");
    Header.ReplaceExtension = (string.IsNullOrWhiteSpace(Header.ReplaceExtension) || Header.ReplaceExtension.StartsWith(".")
        ? Header.ReplaceExtension
        : $".{Header.ReplaceExtension}");

    // If we're in a special update mode and the names aren't set, set defaults
    if (updateMode != 0)
    {
        // Default the date to today in ISO format
        if (string.IsNullOrWhiteSpace(Header.Date))
            Header.Date = DateTime.Now.ToString("yyyy-MM-dd");

        if (string.IsNullOrWhiteSpace(Header.Name))
        {
            // NOTE(review): within this branch updateMode != 0 always holds, so the
            // "MergeDAT" arm of the ternary below is unreachable — confirm intent
            Header.Name = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
                + (Header.Type == "SuperDAT" ? "-SuperDAT" : string.Empty)
                + (Cleaner.DedupeRoms != DedupeType.None ? "-deduped" : string.Empty);
        }

        if (string.IsNullOrWhiteSpace(Header.Description))
        {
            // NOTE(review): same unreachable "MergeDAT" arm as above
            Header.Description = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
                + (Header.Type == "SuperDAT" ? "-SuperDAT" : string.Empty)
                + (Cleaner.DedupeRoms != DedupeType.None ? " - deduped" : string.Empty);

            // Append the date unless automatic dating is suppressed
            if (!GetBoolean(features, NoAutomaticDateValue))
                Header.Description += $" ({Header.Date})";
        }

        // NOTE(review): "updateMode != 0" is always true inside this block
        if (string.IsNullOrWhiteSpace(Header.Category) && updateMode != 0)
            Header.Category = "DiffDAT";

        if (string.IsNullOrWhiteSpace(Header.Author))
            Header.Author = "SabreTools";
    }

    // If no update fields are set, default to Names
    if (updateFields == null || updateFields.Count == 0)
        updateFields = new List<Field>() { Field.DatItem_Name };

    // Ensure we only have files in the inputs
    List<ParentablePath> inputPaths = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);
    List<ParentablePath> basePaths = DirectoryExtensions.GetFilesOnly(GetList(features, BaseDatListValue));

    // If we're in standard update mode, run through all of the inputs
    if (updateMode == UpdateMode.None)
    {
        // Loop through each input and update
        Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
        {
            // Create a new base DatFile
            DatFile datFile = DatFile.Create(Header);
            logger.User($"Processing '{Path.GetFileName(inputPath.CurrentPath)}'");

            // Keep original extensions only for the delimited formats
            datFile.Parse(inputPath, keep: true,
                keepext: datFile.Header.DatFormat.HasFlag(DatFormat.TSV)
                    || datFile.Header.DatFormat.HasFlag(DatFormat.CSV)
                    || datFile.Header.DatFormat.HasFlag(DatFormat.SSV));

            // Perform additional processing steps
            datFile.ApplyExtras(Extras);
            datFile.ApplySplitting(GetSplitType(features), false);
            datFile.ApplyFilter(Filter);
            datFile.ApplyCleaning(Cleaner);

            // Get the correct output path
            string realOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

            // Try to output the file, overwriting only if it's not in the current directory
            datFile.Write(realOutDir, overwrite: GetBoolean(features, InplaceValue));
        });

        return;
    }

    // Reverse inputs if we're in a required mode
    if (updateMode.HasFlag(UpdateMode.DiffReverseCascade))
    {
        updateMode |= UpdateMode.DiffCascade;
        inputPaths.Reverse();
    }
    if (updateMode.HasFlag(UpdateMode.ReverseBaseReplace))
    {
        updateMode |= UpdateMode.BaseReplace;
        basePaths.Reverse();
    }

    // Create a DAT to capture inputs
    DatFile userInputDat = DatFile.Create(Header);

    // Populate using the correct set: base DATs for against/replace modes, inputs otherwise
    List<DatHeader> datHeaders;
    if (updateMode.HasFlag(UpdateMode.DiffAgainst) || updateMode.HasFlag(UpdateMode.BaseReplace))
        datHeaders = userInputDat.PopulateUserData(basePaths);
    else
        datHeaders = userInputDat.PopulateUserData(inputPaths);

    // Perform additional processing steps
    userInputDat.ApplyExtras(Extras);
    userInputDat.ApplySplitting(GetSplitType(features), false);
    userInputDat.ApplyFilter(Filter);
    userInputDat.ApplyCleaning(Cleaner);

    // Output only DatItems that are duplicated across inputs
    if (updateMode.HasFlag(UpdateMode.DiffDupesOnly))
    {
        DatFile dupeData = userInputDat.DiffDuplicates(inputPaths);

        InternalStopwatch watch = new InternalStopwatch("Outputting duplicate DAT");
        dupeData.Write(OutputDir, overwrite: false);
        watch.Stop();
    }

    // Output only DatItems that are not duplicated across inputs
    if (updateMode.HasFlag(UpdateMode.DiffNoDupesOnly))
    {
        DatFile outerDiffData = userInputDat.DiffNoDuplicates(inputPaths);

        InternalStopwatch watch = new InternalStopwatch("Outputting no duplicate DAT");
        outerDiffData.Write(OutputDir, overwrite: false);
        watch.Stop();
    }

    // Output only DatItems that are unique to each input
    if (updateMode.HasFlag(UpdateMode.DiffIndividualsOnly))
    {
        // Get all of the output DatFiles
        List<DatFile> datFiles = userInputDat.DiffIndividuals(inputPaths);

        // Loop through and output the new DatFiles
        InternalStopwatch watch = new InternalStopwatch("Outputting all individual DATs");

        Parallel.For(0, inputPaths.Count, Globals.ParallelOptions, j =>
        {
            string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

            // Try to output the file
            datFiles[j].Write(path, overwrite: GetBoolean(features, InplaceValue));
        });

        watch.Stop();
    }

    // Output cascaded diffs
    if (updateMode.HasFlag(UpdateMode.DiffCascade))
    {
        // Preprocess the DatHeaders
        Parallel.For(0, datHeaders.Count, Globals.ParallelOptions, j =>
        {
            // If we're outputting to the runtime folder, rename
            if (!GetBoolean(features, InplaceValue) && OutputDir == Environment.CurrentDirectory)
            {
                string innerpost = $" ({j} - {inputPaths[j].GetNormalizedFileName(true)} Only)";

                datHeaders[j] = userInputDat.Header;
                datHeaders[j].FileName += innerpost;
                datHeaders[j].Name += innerpost;
                datHeaders[j].Description += innerpost;
            }
        });

        // Get all of the output DatFiles
        List<DatFile> datFiles = userInputDat.DiffCascade(datHeaders);

        // Loop through and output the new DatFiles, optionally skipping the first
        InternalStopwatch watch = new InternalStopwatch("Outputting all created DATs");

        int startIndex = GetBoolean(features, SkipFirstOutputValue) ? 1 : 0;
        Parallel.For(startIndex, inputPaths.Count, Globals.ParallelOptions, j =>
        {
            string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

            // Try to output the file
            datFiles[j].Write(path, overwrite: GetBoolean(features, InplaceValue));
        });

        watch.Stop();
    }

    // Output differences against a base DAT
    if (updateMode.HasFlag(UpdateMode.DiffAgainst))
    {
        // Loop through each input and diff against the base
        Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
        {
            // Parse the path to a new DatFile
            DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
            repDat.Parse(inputPath, indexId: 1, keep: true);

            // Perform additional processing steps
            repDat.ApplyExtras(Extras);
            repDat.ApplySplitting(GetSplitType(features), false);
            repDat.ApplyFilter(Filter);
            repDat.ApplyCleaning(Cleaner);

            // Now diff against the base DatFile
            // (fixed: was `GetBoolean(Features, ...)`; the parameter is `features`)
            userInputDat.DiffAgainst(repDat, GetBoolean(features, ByGameValue));

            // Finally output the diffed DatFile
            string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
            repDat.Write(interOutDir, overwrite: GetBoolean(features, InplaceValue));
        });
    }

    // Output DATs after replacing fields from a base DatFile
    if (updateMode.HasFlag(UpdateMode.BaseReplace))
    {
        // Loop through each input and apply the base DatFile
        Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
        {
            // Parse the path to a new DatFile
            DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
            repDat.Parse(inputPath, indexId: 1, keep: true);

            // Perform additional processing steps
            repDat.ApplyExtras(Extras);
            repDat.ApplySplitting(GetSplitType(features), false);
            repDat.ApplyFilter(Filter);
            repDat.ApplyCleaning(Cleaner);

            // Now replace the fields from the base DatFile
            userInputDat.BaseReplace(repDat, updateFields, GetBoolean(features, OnlySameValue));

            // Finally output the replaced DatFile
            string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
            repDat.Write(interOutDir, overwrite: GetBoolean(features, InplaceValue));
        });
    }

    // Merge all input files and write
    // This has to be last due to the SuperDAT handling
    if (updateMode.HasFlag(UpdateMode.Merge))
    {
        // If we're in SuperDAT mode, prefix all games with their respective DATs
        if (string.Equals(userInputDat.Header.Type, "SuperDAT", StringComparison.OrdinalIgnoreCase))
            userInputDat.ApplySuperDAT(inputPaths);

        userInputDat.Write(OutputDir);
    }
}
/// <summary>
/// Run the batch feature: each input is a script file whose lines are commands
/// (set/input/d2d/filter/extra/merge/descname/1g1r/orpg/remove/sds/format/output/write/reset)
/// applied in order to a per-file DatFile state.
/// </summary>
/// <param name="features">Map of enabled feature flags and their values</param>
public override void ProcessFeatures(Dictionary<string, Library.Help.Feature> features)
{
    base.ProcessFeatures(features);

    // Try to read each input as a batch run file
    foreach (string path in Inputs)
    {
        // If the file doesn't exist, warn but continue
        if (!File.Exists(path))
        {
            logger.User($"{path} does not exist. Skipping...");
            continue;
        }

        // Try to process the file now
        try
        {
            // Every line is its own command
            string[] lines = File.ReadAllLines(path);

            // Each batch file has its own state
            int index = 0;
            DatFile datFile = DatFile.Create();
            string outputDirectory = null;

            // Process each command line
            foreach (string line in lines)
            {
                // Skip empty lines
                if (string.IsNullOrWhiteSpace(line))
                    continue;

                // Skip lines that start with REM or #
                if (line.StartsWith("REM") || line.StartsWith("#"))
                    continue;

                // Read the command in, if possible; abort this file on a bad line
                var command = BatchCommand.Create(line);
                if (command == null)
                {
                    logger.User($"Could not process {path} due to the following line: {line}");
                    break;
                }

                // Now switch on the command
                logger.User($"Attempting to invoke {command.Name} with {(command.Arguments.Count == 0 ? "no arguments" : "the following argument(s): " + string.Join(", ", command.Arguments))}");
                switch (command.Name.ToLowerInvariant())
                {
                    // Set a header field
                    case "set":
                        if (command.Arguments.Count != 2)
                        {
                            // Fixed: previous message claimed "no arguments were provided"
                            // even when the wrong (non-zero) count was supplied
                            logger.User($"Invoked {command.Name} and expected 2 arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: set(header.field, value);");
                            continue;
                        }

                        // Read in the individual arguments
                        Field field = command.Arguments[0].AsField();
                        string value = command.Arguments[1];

                        // If we had an invalid input, log and continue
                        if (field == Field.NULL)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid field name");
                            continue;
                        }

                        // Set the header field
                        datFile.Header.SetFields(new Dictionary<Field, string> { [field] = value });
                        break;

                    // Parse in new input file(s)
                    case "input":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: input(datpath, ...);");
                            continue;
                        }

                        // Get only files from inputs
                        List<ParentablePath> datFilePaths = DirectoryExtensions.GetFilesOnly(command.Arguments);

                        // Assume there could be multiple
                        foreach (ParentablePath datFilePath in datFilePaths)
                        {
                            datFile.Parse(datFilePath, index++);
                        }
                        break;

                    // Run DFD/D2D on path(s)
                    case "d2d":
                    case "dfd":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: d2d(path, ...);");
                            continue;
                        }

                        // TODO: Should any of the other options be added for D2D?

                        // Assume there could be multiple
                        foreach (string input in command.Arguments)
                        {
                            datFile.PopulateFromDir(input);
                        }

                        // TODO: We might not want to remove higher order hashes in the future
                        // TODO: We might not want to remove dates in the future
                        Cleaner dfdCleaner = new Cleaner() { ExcludeFields = Hash.DeepHashes.AsFields() };
                        dfdCleaner.ExcludeFields.Add(Field.DatItem_Date);
                        datFile.ApplyCleaning(dfdCleaner);
                        break;

                    // Apply a filter
                    case "filter":
                        if (command.Arguments.Count < 2 || command.Arguments.Count > 4)
                        {
                            logger.User($"Invoked {command.Name} and expected between 2-4 arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: filter(field, value, [remove = false, [perMachine = false]]);");
                            continue;
                        }

                        // Read in the individual arguments; optional args default to false
                        Field filterField = command.Arguments[0].AsField();
                        string filterValue = command.Arguments[1];
                        bool? filterRemove = false;
                        if (command.Arguments.Count >= 3)
                            filterRemove = command.Arguments[2].AsYesNo();
                        bool? filterPerMachine = false;
                        if (command.Arguments.Count >= 4)
                            filterPerMachine = command.Arguments[3].AsYesNo();

                        // If we had an invalid input, log and continue
                        if (filterField == Field.NULL)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid field name");
                            continue;
                        }
                        if (filterRemove == null)
                        {
                            logger.User($"{command.Arguments[2]} was an invalid true/false value");
                            continue;
                        }
                        if (filterPerMachine == null)
                        {
                            logger.User($"{command.Arguments[3]} was an invalid true/false value");
                            continue;
                        }

                        // Create a filter with this new set of fields
                        Filter filter = new Filter();
                        filter.SetFilter(filterField, filterValue, filterRemove.Value);

                        // Apply the filter blindly
                        datFile.ApplyFilter(filter, filterPerMachine.Value);

                        // Cleanup after the filter
                        // TODO: We might not want to remove immediately
                        datFile.Items.ClearMarked();
                        datFile.Items.ClearEmpty();
                        break;

                    // Apply an extra INI
                    case "extra":
                        if (command.Arguments.Count != 2)
                        {
                            logger.User($"Invoked {command.Name} and expected 2 arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: extra(field, inipath);");
                            continue;
                        }

                        // Read in the individual arguments
                        Field extraField = command.Arguments[0].AsField();
                        string extraFile = command.Arguments[1];

                        // If we had an invalid input, log and continue
                        if (extraField == Field.NULL)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid field name");
                            continue;
                        }
                        if (!File.Exists(command.Arguments[1]))
                        {
                            logger.User($"{command.Arguments[1]} was an invalid file name");
                            continue;
                        }

                        // Create the extra INI
                        ExtraIni extraIni = new ExtraIni();
                        ExtraIniItem extraIniItem = new ExtraIniItem();
                        extraIniItem.PopulateFromFile(extraFile);
                        extraIniItem.Field = extraField;
                        extraIni.Items.Add(extraIniItem);

                        // Apply the extra INI blindly
                        datFile.ApplyExtras(extraIni);
                        break;

                    // Apply internal split/merge
                    case "merge":
                        if (command.Arguments.Count != 1)
                        {
                            logger.User($"Invoked {command.Name} and expected 1 argument, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: merge(split|merged|nonmerged|full|device);");
                            continue;
                        }

                        // Read in the individual arguments
                        MergingFlag mergingFlag = command.Arguments[0].AsMergingFlag();

                        // If we had an invalid input, log and continue
                        if (mergingFlag == MergingFlag.None)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid merging flag");
                            continue;
                        }

                        // Apply the merging flag
                        datFile.ApplySplitting(mergingFlag, false);
                        break;

                    // Apply description-as-name logic
                    case "descname":
                        if (command.Arguments.Count != 0)
                        {
                            logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: descname();");
                            continue;
                        }

                        // Apply the logic
                        datFile.MachineDescriptionToName();
                        break;

                    // Apply 1G1R
                    case "1g1r":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: 1g1r(region, ...);");
                            continue;
                        }

                        // Run the 1G1R functionality
                        datFile.OneGamePerRegion(command.Arguments);
                        break;

                    // Apply one rom per game (ORPG)
                    case "orpg":
                        if (command.Arguments.Count != 0)
                        {
                            logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: orpg();");
                            continue;
                        }

                        // Apply the logic
                        datFile.OneRomPerGame();
                        break;

                    // Remove a field
                    case "remove":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: remove(field, ...);");
                            continue;
                        }

                        // Run the removal functionality
                        datFile.RemoveFieldsFromItems(command.Arguments.Select(s => s.AsField()).ToList());
                        break;

                    // Apply scene date stripping
                    case "sds":
                        if (command.Arguments.Count != 0)
                        {
                            logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: sds();");
                            continue;
                        }

                        // Apply the logic
                        datFile.StripSceneDatesFromItems();
                        break;

                    // Set new output format(s)
                    case "format":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: format(datformat, ...);");
                            continue;
                        }

                        // Assume there could be multiple; OR the flags together
                        datFile.Header.DatFormat = 0x00;
                        foreach (string format in command.Arguments)
                        {
                            datFile.Header.DatFormat |= format.AsDatFormat();
                        }

                        // If we had an invalid input, log and continue
                        if (datFile.Header.DatFormat == 0x00)
                        {
                            logger.User("No valid output format found");
                            continue;
                        }
                        break;

                    // Set output directory
                    case "output":
                        if (command.Arguments.Count != 1)
                        {
                            logger.User($"Invoked {command.Name} and expected exactly 1 argument, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: output(outdir);");
                            continue;
                        }

                        // Only set the first as the output directory
                        outputDirectory = command.Arguments[0];
                        break;

                    // Write out the current DatFile
                    case "write":
                        if (command.Arguments.Count > 1)
                        {
                            logger.User($"Invoked {command.Name} and expected 0-1 arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: write([overwrite = true]);");
                            continue;
                        }

                        // Get overwrite value, if possible; defaults to true
                        bool? overwrite = true;
                        if (command.Arguments.Count == 1)
                            overwrite = command.Arguments[0].AsYesNo();

                        // If we had an invalid input, log and continue
                        if (overwrite == null)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid true/false value");
                            continue;
                        }

                        // Write out the dat with the current state
                        datFile.Write(outputDirectory, overwrite: overwrite.Value);
                        break;

                    // Reset the internal state
                    case "reset":
                        if (command.Arguments.Count != 0)
                        {
                            logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: reset();");
                            continue;
                        }

                        // Reset all state variables
                        index = 0;
                        datFile = DatFile.Create();
                        outputDirectory = null;
                        break;

                    default:
                        logger.User($"Could not find a match for '{command.Name}'. Please see the help text for more details.");
                        break;
                }
            }
        }
        catch (Exception ex)
        {
            // A failure in one batch file should not stop the others
            logger.Error(ex, $"There was an exception processing {path}");
            continue;
        }
    }
}
/// <summary>
/// Run the verify feature: check the inputs against the given DAT file(s),
/// either one DAT at a time (individual mode) or combined into a single DAT.
/// </summary>
/// <param name="features">Map of enabled feature flags and their values</param>
public override void ProcessFeatures(Dictionary<string, Feature> features)
{
    base.ProcessFeatures(features);

    // Get a list of files from the input datfiles
    var datfiles = GetList(features, DatListValue);
    var datfilePaths = DirectoryExtensions.GetFilesOnly(datfiles);

    // Get feature flags
    TreatAsFile asFiles = GetTreatAsFiles(features);
    bool hashOnly = GetBoolean(features, HashOnlyValue);
    bool quickScan = GetBoolean(features, QuickValue);
    var splitType = GetSplitType(features);

    // If we are in individual mode, process each DAT on their own
    if (GetBoolean(features, IndividualValue))
    {
        foreach (ParentablePath datfile in datfilePaths)
        {
            // Parse in from the file
            DatFile datdata = DatFile.Create();
            datdata.Parse(datfile, int.MaxValue, keep: true);

            // Perform additional processing steps
            datdata.ApplyExtras(Extras);
            datdata.ApplySplitting(splitType, true);
            datdata.ApplyFilter(Filter);
            datdata.ApplyCleaning(Cleaner);

            // Set depot information
            // (fixed: null-conditional Clone — no depot may be configured, as the
            // `Header.InputDepot?.IsActive` check below already anticipates)
            datdata.Header.InputDepot = Header.InputDepot?.Clone() as DepotInformation;

            // If we have overridden the header skipper, set it now
            if (!string.IsNullOrEmpty(Header.HeaderSkipper))
                datdata.Header.HeaderSkipper = Header.HeaderSkipper;

            // If we have the depot flag, respect it
            if (Header.InputDepot?.IsActive ?? false)
            {
                datdata.VerifyDepot(Inputs);
            }
            else
            {
                // Loop through and add the inputs to check against
                logger.User("Processing files:\n");
                foreach (string input in Inputs)
                {
                    // Quick scan only hashes CRCs; full scan uses the standard hash set
                    datdata.PopulateFromDir(input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
                }

                datdata.VerifyGeneric(hashOnly);
            }

            // Now write out if there are any items left
            datdata.WriteStatsToConsole();
            datdata.Write(OutputDir);
        }
    }

    // Otherwise, process all DATs into the same output
    else
    {
        InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

        // Add all of the input DATs into one huge internal DAT
        DatFile datdata = DatFile.Create();
        foreach (ParentablePath datfile in datfilePaths)
        {
            datdata.Parse(datfile, int.MaxValue, keep: true);
        }

        // Perform additional processing steps
        datdata.ApplyExtras(Extras);
        datdata.ApplySplitting(splitType, true);
        datdata.ApplyFilter(Filter);
        datdata.ApplyCleaning(Cleaner);

        // Set depot information (null-safe, same as individual mode above)
        datdata.Header.InputDepot = Header.InputDepot?.Clone() as DepotInformation;

        // If we have overridden the header skipper, set it now
        if (!string.IsNullOrEmpty(Header.HeaderSkipper))
            datdata.Header.HeaderSkipper = Header.HeaderSkipper;

        watch.Stop();

        // If we have the depot flag, respect it
        if (Header.InputDepot?.IsActive ?? false)
        {
            datdata.VerifyDepot(Inputs);
        }
        else
        {
            // Loop through and add the inputs to check against
            logger.User("Processing files:\n");
            foreach (string input in Inputs)
            {
                // Quick scan only hashes CRCs; full scan uses the standard hash set
                datdata.PopulateFromDir(input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
            }

            datdata.VerifyGeneric(hashOnly);
        }

        // Now write out if there are any items left
        datdata.WriteStatsToConsole();
        datdata.Write(OutputDir);
    }
}
/// <summary>
/// Run the DAT-from-directory feature: scan each input path and write out a
/// DAT describing its contents, sharing a common dated base header.
/// </summary>
/// <param name="features">Map of enabled feature flags and their values</param>
public override void ProcessFeatures(Dictionary<string, Library.Help.Feature> features)
{
    base.ProcessFeatures(features);

    // Pull out all relevant feature flags up front
    bool addBlankFiles = GetBoolean(features, AddBlankFilesValue);
    bool addFileDates = GetBoolean(features, AddDateValue);
    TreatAsFile asFiles = GetTreatAsFiles(features);
    bool noAutomaticDate = GetBoolean(features, NoAutomaticDateValue);
    var includeInScan = GetIncludeInScan(features);
    var skipFileType = GetSkipFileType(features);
    var splitType = GetSplitType(features);

    // Apply the specialized field removals to the cleaner;
    // dates are excluded unless explicitly requested
    if (Cleaner.ExcludeFields == null)
        Cleaner.ExcludeFields = new List<Field>();
    if (!addFileDates)
        Cleaner.ExcludeFields.Add(Field.DatItem_Date);

    // Seed a base DAT whose dated header is cloned for every output
    DatFile baseDat = DatFile.Create(Header);
    baseDat.Header.Date = DateTime.Now.ToString("yyyy-MM-dd");

    // Build one DAT per input path
    foreach (string path in Inputs)
    {
        // Skip anything that is neither an existing directory nor an existing file
        if (!Directory.Exists(path) && !File.Exists(path))
            continue;

        // Clone the base DAT for its header information
        DatFile datFile = DatFile.Create(baseDat.Header);

        // Resolve the full path and fill the header from it, if needed
        string basePath = Path.GetFullPath(path);
        datFile.FillHeaderFromPath(basePath, noAutomaticDate);

        // Scan the path into the DAT
        bool populated = datFile.PopulateFromDir(
            basePath,
            asFiles,
            skipFileType,
            addBlankFiles,
            hashes: includeInScan);

        // On a failed scan, show help output instead of writing
        if (!populated)
        {
            Console.WriteLine();
            OutputRecursive(0);
            continue;
        }

        // Post-process and write out the result
        datFile.ApplyExtras(Extras);
        datFile.ApplySplitting(splitType, false);
        datFile.ApplyFilter(Filter);
        datFile.ApplyCleaning(Cleaner);
        datFile.Write(OutputDir);
    }
}