public override void ProcessFeatures(Dictionary<string, Feature> features)
{
    base.ProcessFeatures(features);

    // Get feature flags
    string name = GetString(features, NameStringValue);
    string description = GetString(features, DescriptionStringValue);
    string source = GetString(features, SourceStringValue);
    string outdat = GetString(features, OutStringValue);

    // Ensure the output directory
    DirectoryExtensions.Ensure(outdat, create: true);

    // Check that all required directories exist
    if (!Directory.Exists(source))
    {
        logger.Error($"Directory '{source}' does not exist!");
        return;
    }

    // Create and write the encapsulating datfile
    DatFile datfile = DatFile.Create();
    datfile.Header.Name = string.IsNullOrWhiteSpace(name) ? "untitled" : name;
    datfile.Header.Description = description;
    datfile.PopulateFromDir(source, asFiles: TreatAsFile.NonArchive);
    datfile.ApplyCleaning(new Cleaner() { ExcludeFields = Hash.DeepHashes.AsFields() });
    datfile.Write(outdat);
}
/// <summary>
/// Wrap creating a dir2dat from a given source
/// </summary>
/// <param name="outdat">Output file</param>
/// <param name="source">Source directory</param>
/// <param name="name">Name value in DAT header</param>
/// <param name="description">Description value in DAT header</param>
private static void InitDir2Dat(
    string outdat,
    string source,
    string name,
    string description)
{
    // Ensure the output directory
    Utilities.EnsureOutputDirectory(outdat, create: true);

    // Check that all required directories exist
    if (!Directory.Exists(source))
    {
        Globals.Logger.Error("Directory '{0}' does not exist!", source);
        return;
    }

    // Create the encapsulating datfile
    DatFile datfile = new DatFile()
    {
        Name = (String.IsNullOrWhiteSpace(name) ? "untitled" : name),
        Description = description,
    };

    // Now run the D2D on the input and write out
    // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
    datfile.PopulateFromDir(source, Hash.DeepHashes, true /* bare */, false /* archivesAsFiles */,
        SkipFileType.None, false /* addBlanks */, false /* addDate */, _tmpdir, false /* copyFiles */,
        null /* headerToCheckAgainst */, true /* chdsAsFiles */, null /* filter */);
    datfile.Write(outDir: outdat);
}
/// <summary>
/// Wrap creating a DAT file from files or a directory in parallel
/// </summary>
/// <param name="inputs">List of input filenames</param>
/// /* Normal DAT header info */
/// <param name="datHeader">All DatHeader info to be used</param>
/// /* Standard DFD info */
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="removeDateFromAutomaticName">True if the date should be omitted from the DAT, false otherwise</param>
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
/// <param name="skipFileType">Type of files that should be skipped on scan</param>
/// <param name="addBlankFilesForEmptyFolder">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addFileDates">True if dates should be archived for all files, false otherwise</param>
/// /* Output DAT info */
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is default temp directory)</param>
/// <param name="outDir">Name of the directory to output the DAT to (blank is the current directory)</param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// /* Filtering info */
/// <param name="filter">Filter object to be passed to the DatItem level</param>
private static void InitDatFromDir(
    List<string> inputs,
    /* Normal DAT header info */
    DatHeader datHeader,
    /* Standard DFD info */
    Hash omitFromScan,
    bool removeDateFromAutomaticName,
    bool archivesAsFiles,
    bool chdsAsFiles,
    SkipFileType skipFileType,
    bool addBlankFilesForEmptyFolder,
    bool addFileDates,
    /* Output DAT info */
    string tempDir,
    string outDir,
    bool copyFiles,
    /* Filtering info */
    Filter filter)
{
    // Create a new DATFromDir object and process the inputs
    DatFile basedat = new DatFile(datHeader)
    {
        Date = DateTime.Now.ToString("yyyy-MM-dd"),
    };

    // For each input directory, create a DAT
    foreach (string path in inputs)
    {
        if (Directory.Exists(path) || File.Exists(path))
        {
            // Clone the base Dat for information
            DatFile datdata = new DatFile(basedat);

            string basePath = Path.GetFullPath(path);
            bool success = datdata.PopulateFromDir(basePath, omitFromScan, removeDateFromAutomaticName,
                archivesAsFiles, skipFileType, addBlankFilesForEmptyFolder, addFileDates, tempDir,
                copyFiles, datHeader.Header, chdsAsFiles, filter);

            // If it was a success, write the DAT out
            if (success)
            {
                datdata.Write(outDir);
            }
            // Otherwise, show the help
            else
            {
                Console.WriteLine();
                _help.OutputIndividualFeature("DATFromDir");
            }
        }
    }
}
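// Illustrative only: a minimal call of InitDatFromDir with hypothetical argument values,
// matching the parameter order above. The DatHeader initializer is an assumption about
// that type's settable properties, not confirmed API.
//
//     InitDatFromDir(
//         new List<string> { @"C:\roms" },
//         new DatHeader { Name = "Example", Description = "Example DAT" },
//         Hash.DeepHashes,    // omitFromScan
//         false,              // removeDateFromAutomaticName
//         false,              // archivesAsFiles
//         true,               // chdsAsFiles
//         SkipFileType.None,
//         false,              // addBlankFilesForEmptyFolder
//         false,              // addFileDates
//         string.Empty,       // tempDir (default temp directory)
//         string.Empty,       // outDir (current directory)
//         false,              // copyFiles
//         null);              // filter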
public override void ProcessFeatures(Dictionary<string, Feature> features)
{
    base.ProcessFeatures(features);

    // Get a list of files from the input datfiles
    var datfiles = GetList(features, DatListValue);
    var datfilePaths = DirectoryExtensions.GetFilesOnly(datfiles);

    // Get feature flags
    TreatAsFile asFiles = GetTreatAsFiles(features);
    bool hashOnly = GetBoolean(features, HashOnlyValue);
    bool quickScan = GetBoolean(features, QuickValue);
    var splitType = GetSplitType(features);

    // If we are in individual mode, process each DAT on their own
    if (GetBoolean(features, IndividualValue))
    {
        foreach (ParentablePath datfile in datfilePaths)
        {
            // Parse in from the file
            DatFile datdata = DatFile.Create();
            datdata.Parse(datfile, int.MaxValue, keep: true);

            // Perform additional processing steps
            datdata.ApplyExtras(Extras);
            datdata.ApplySplitting(splitType, true);
            datdata.ApplyFilter(Filter);
            datdata.ApplyCleaning(Cleaner);

            // Set depot information
            datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation;

            // If we have overridden the header skipper, set it now
            if (!string.IsNullOrEmpty(Header.HeaderSkipper))
                datdata.Header.HeaderSkipper = Header.HeaderSkipper;

            // If we have the depot flag, respect it
            if (Header.InputDepot?.IsActive ?? false)
            {
                datdata.VerifyDepot(Inputs);
            }
            else
            {
                // Loop through and add the inputs to check against
                logger.User("Processing files:\n");
                foreach (string input in Inputs)
                {
                    datdata.PopulateFromDir(input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
                }

                datdata.VerifyGeneric(hashOnly);
            }

            // Now write out if there are any items left
            datdata.WriteStatsToConsole();
            datdata.Write(OutputDir);
        }
    }
    // Otherwise, process all DATs into the same output
    else
    {
        InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

        // Add all of the input DATs into one huge internal DAT
        DatFile datdata = DatFile.Create();
        foreach (ParentablePath datfile in datfilePaths)
        {
            datdata.Parse(datfile, int.MaxValue, keep: true);
        }

        // Perform additional processing steps
        datdata.ApplyExtras(Extras);
        datdata.ApplySplitting(splitType, true);
        datdata.ApplyFilter(Filter);
        datdata.ApplyCleaning(Cleaner);

        // Set depot information
        datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation;

        // If we have overridden the header skipper, set it now
        if (!string.IsNullOrEmpty(Header.HeaderSkipper))
            datdata.Header.HeaderSkipper = Header.HeaderSkipper;

        watch.Stop();

        // If we have the depot flag, respect it
        if (Header.InputDepot?.IsActive ?? false)
        {
            datdata.VerifyDepot(Inputs);
        }
        else
        {
            // Loop through and add the inputs to check against
            logger.User("Processing files:\n");
            foreach (string input in Inputs)
            {
                datdata.PopulateFromDir(input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
            }

            datdata.VerifyGeneric(hashOnly);
        }

        // Now write out if there are any items left
        datdata.WriteStatsToConsole();
        datdata.Write(OutputDir);
    }
}
public override void ProcessFeatures(Dictionary<string, SabreTools.Library.Help.Feature> features)
{
    base.ProcessFeatures(features);

    // Get the archive scanning level
    // TODO: Remove usage
    int sevenzip = GetInt32(features, Include7ZipsInt32Value);
    int gz = GetInt32(features, IncludeGZipsInt32Value);
    int zip = GetInt32(features, IncludeZipsInt32Value);

    // Get feature flags
    bool noDb = GetBoolean(features, NoDbValue);
    bool onlyNeeded = GetBoolean(features, OnlyNeededValue);

    // First we want to get just all directories from the inputs
    List<string> onlyDirs = new List<string>();
    foreach (string input in Inputs)
    {
        if (Directory.Exists(input))
            onlyDirs.Add(Path.GetFullPath(input));
    }

    // Then process all of the input directories into an internal DAT
    DatFile df = DatFile.Create();
    foreach (string dir in onlyDirs)
    {
        df.PopulateFromDir(dir, asFiles: TreatAsFile.NonArchive);
        df.PopulateFromDir(dir, asFiles: TreatAsFile.All);
    }

    // Create an empty Dat for files that need to be rebuilt
    DatFile need = DatFile.Create();

    // Open the database connection
    SqliteConnection dbc = new SqliteConnection(_connectionString);
    dbc.Open();

    // Now that we have the Dats, add the files to the database
    string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES";
    string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES";
    string sha1query = "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES";
    string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
    string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";

    foreach (string key in df.Items.Keys)
    {
        List<DatItem> datItems = df.Items[key];
        foreach (Rom rom in datItems)
        {
            // If we care about if the file exists, check the database first
            if (onlyNeeded && !noDb)
            {
                string query = "SELECT * FROM crcsha1 JOIN md5sha1 ON crcsha1.sha1=md5sha1.sha1"
                    + $" WHERE crcsha1.crc=\"{rom.CRC}\""
                    + $" OR md5sha1.md5=\"{rom.MD5}\""
                    + $" OR md5sha1.sha1=\"{rom.SHA1}\"";
                SqliteCommand slc = new SqliteCommand(query, dbc);
                SqliteDataReader sldr = slc.ExecuteReader();

                if (sldr.HasRows)
                {
                    // Add to the queries
                    if (!string.IsNullOrWhiteSpace(rom.CRC))
                        crcquery += $" (\"{rom.CRC}\"),";

                    if (!string.IsNullOrWhiteSpace(rom.MD5))
                        md5query += $" (\"{rom.MD5}\"),";

                    if (!string.IsNullOrWhiteSpace(rom.SHA1))
                    {
                        sha1query += $" (\"{rom.SHA1}\", \"{_depots.Keys.ToList()[0]}\"),";

                        if (!string.IsNullOrWhiteSpace(rom.CRC))
                            crcsha1query += $" (\"{rom.CRC}\", \"{rom.SHA1}\"),";

                        if (!string.IsNullOrWhiteSpace(rom.MD5))
                            md5sha1query += $" (\"{rom.MD5}\", \"{rom.SHA1}\"),";
                    }

                    // Add to the Dat
                    need.Items.Add(key, rom);
                }
            }
            // Otherwise, just add the file to the list
            else
            {
                // Add to the queries
                if (!noDb)
                {
                    if (!string.IsNullOrWhiteSpace(rom.CRC))
                        crcquery += $" (\"{rom.CRC}\"),";

                    if (!string.IsNullOrWhiteSpace(rom.MD5))
                        md5query += $" (\"{rom.MD5}\"),";

                    if (!string.IsNullOrWhiteSpace(rom.SHA1))
                    {
                        sha1query += $" (\"{rom.SHA1}\", \"{_depots.Keys.ToList()[0]}\"),";

                        if (!string.IsNullOrWhiteSpace(rom.CRC))
                            crcsha1query += $" (\"{rom.CRC}\", \"{rom.SHA1}\"),";

                        if (!string.IsNullOrWhiteSpace(rom.MD5))
                            md5sha1query += $" (\"{rom.MD5}\", \"{rom.SHA1}\"),";
                    }
                }

                // Add to the Dat
                need.Items.Add(key, rom);
            }
        }
    }

    // Now run the queries, if they're populated
    if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
    {
        SqliteCommand slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();
    }

    if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
    {
        SqliteCommand slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();
    }

    if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES")
    {
        SqliteCommand slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();
    }

    if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
    {
        SqliteCommand slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();
    }

    if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES")
    {
        SqliteCommand slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();
    }

    // Create the sorting object to use and rebuild the needed files
    need.RebuildGeneric(
        onlyDirs,
        outDir: _depots.Keys.ToList()[0],
        outputFormat: OutputFormat.TorrentGzipRomba,
        asFiles: TreatAsFile.NonArchive);
}
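// Sketch, not part of the original flow: the bulk INSERT strings above are assembled by
// string concatenation, which grows large in memory and splices hash values directly into
// SQL text. A parameterized, transacted alternative could look like the hypothetical
// helper below; the table/column names mirror the crc table used above, but the helper
// itself and its name are illustrative, not existing API. AddWithValue is assumed to be
// available on the SQLite ADO.NET provider in use.
private static void InsertCrcsParameterized(SqliteConnection dbc, IEnumerable<string> crcs)
{
    // One transaction around many small inserts keeps SQLite fast and atomic
    using (SqliteTransaction tx = dbc.BeginTransaction())
    using (SqliteCommand slc = new SqliteCommand("INSERT OR IGNORE INTO crc (crc) VALUES (@crc)", dbc))
    {
        slc.Transaction = tx;
        foreach (string crc in crcs)
        {
            // Rebind the parameter per row so hash values never appear in the SQL text
            slc.Parameters.Clear();
            slc.Parameters.AddWithValue("@crc", crc);
            slc.ExecuteNonQuery();
        }

        tx.Commit();
    }
}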
public override void ProcessFeatures(Dictionary<string, SabreTools.Library.Help.Feature> features)
{
    base.ProcessFeatures(features);

    logger.Error("This feature is not yet implemented: rescan-depots");

    foreach (string depotname in Inputs)
    {
        // Check that it's a valid depot first
        if (!_depots.ContainsKey(depotname))
        {
            logger.User($"'{depotname}' is not a recognized depot. Please add it to your configuration file and try again");
            return;
        }

        // Then check that the depot is online
        if (!Directory.Exists(depotname))
        {
            logger.User($"'{depotname}' does not appear to be online. Please check its status and try again");
            return;
        }

        // Open the database connection
        SqliteConnection dbc = new SqliteConnection(_connectionString);
        dbc.Open();

        // If we have it, then check for all hashes that are in that depot
        List<string> hashes = new List<string>();
        string query = $"SELECT sha1 FROM sha1 WHERE depot=\"{depotname}\"";
        SqliteCommand slc = new SqliteCommand(query, dbc);
        SqliteDataReader sldr = slc.ExecuteReader();
        if (sldr.HasRows)
        {
            while (sldr.Read())
            {
                hashes.Add(sldr.GetString(0));
            }
        }

        // Now rescan the depot itself
        DatFile depot = DatFile.Create();
        depot.PopulateFromDir(depotname, asFiles: TreatAsFile.NonArchive);
        depot.Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);

        // Set the base queries to use
        string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES";
        string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES";
        string sha1query = "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES";
        string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
        string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";

        // Once we have both, check for any new files
        List<string> dupehashes = new List<string>();
        IEnumerable<string> keys = depot.Items.Keys;
        foreach (string key in keys)
        {
            List<DatItem> roms = depot.Items[key];
            foreach (Rom rom in roms)
            {
                if (hashes.Contains(rom.SHA1))
                {
                    dupehashes.Add(rom.SHA1);
                    hashes.Remove(rom.SHA1);
                }
                else if (!dupehashes.Contains(rom.SHA1))
                {
                    if (!string.IsNullOrWhiteSpace(rom.CRC))
                        crcquery += $" (\"{rom.CRC}\"),";

                    if (!string.IsNullOrWhiteSpace(rom.MD5))
                        md5query += $" (\"{rom.MD5}\"),";

                    if (!string.IsNullOrWhiteSpace(rom.SHA1))
                    {
                        sha1query += $" (\"{rom.SHA1}\", \"{depotname}\"),";

                        if (!string.IsNullOrWhiteSpace(rom.CRC))
                            crcsha1query += $" (\"{rom.CRC}\", \"{rom.SHA1}\"),";

                        if (!string.IsNullOrWhiteSpace(rom.MD5))
                            md5sha1query += $" (\"{rom.MD5}\", \"{rom.SHA1}\"),";
                    }
                }
            }
        }

        // Now run the queries after fixing them
        if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
        {
            slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
            slc.ExecuteNonQuery();
        }

        if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
        {
            slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
            slc.ExecuteNonQuery();
        }

        if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES")
        {
            slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
            slc.ExecuteNonQuery();
        }

        if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
        {
            slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
            slc.ExecuteNonQuery();
        }

        if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES")
        {
            slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc);
            slc.ExecuteNonQuery();
        }

        // Now that we've added the information, we get to remove all of the hashes that we want to
        query = @"DELETE FROM sha1
            JOIN crcsha1 ON sha1.sha1=crcsha1.sha1
            JOIN md5sha1 ON sha1.sha1=md5sha1.sha1
            JOIN crc ON crcsha1.crc=crc.crc
            JOIN md5 ON md5sha1.md5=md5.md5
            WHERE sha1.sha1 IN ";
        query += $"(\"{string.Join("\",\"", hashes)}\")";
        slc = new SqliteCommand(query, dbc);
        slc.ExecuteNonQuery();

        // Dispose of the database connection
        slc.Dispose();
        dbc.Dispose();
    }
}
public override void ProcessFeatures(Dictionary<string, Library.Help.Feature> features)
{
    base.ProcessFeatures(features);

    // Try to read each input as a batch run file
    foreach (string path in Inputs)
    {
        // If the file doesn't exist, warn but continue
        if (!File.Exists(path))
        {
            logger.User($"{path} does not exist. Skipping...");
            continue;
        }

        // Try to process the file now
        try
        {
            // Every line is its own command
            string[] lines = File.ReadAllLines(path);

            // Each batch file has its own state
            int index = 0;
            DatFile datFile = DatFile.Create();
            string outputDirectory = null;

            // Process each command line
            foreach (string line in lines)
            {
                // Skip empty lines
                if (string.IsNullOrWhiteSpace(line))
                    continue;

                // Skip lines that start with REM or #
                if (line.StartsWith("REM") || line.StartsWith("#"))
                    continue;

                // Read the command in, if possible
                var command = BatchCommand.Create(line);
                if (command == null)
                {
                    logger.User($"Could not process {path} due to the following line: {line}");
                    break;
                }

                // Now switch on the command
                logger.User($"Attempting to invoke {command.Name} with {(command.Arguments.Count == 0 ? "no arguments" : "the following argument(s): " + string.Join(", ", command.Arguments))}");
                switch (command.Name.ToLowerInvariant())
                {
                    // Set a header field
                    case "set":
                        if (command.Arguments.Count != 2)
                        {
                            logger.User($"Invoked {command.Name} and expected 2 arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: set(header.field, value);");
                            continue;
                        }

                        // Read in the individual arguments
                        Field field = command.Arguments[0].AsField();
                        string value = command.Arguments[1];

                        // If we had an invalid input, log and continue
                        if (field == Field.NULL)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid field name");
                            continue;
                        }

                        // Set the header field
                        datFile.Header.SetFields(new Dictionary<Field, string> { [field] = value });
                        break;

                    // Parse in new input file(s)
                    case "input":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: input(datpath, ...);");
                            continue;
                        }

                        // Get only files from inputs
                        List<ParentablePath> datFilePaths = DirectoryExtensions.GetFilesOnly(command.Arguments);

                        // Assume there could be multiple
                        foreach (ParentablePath datFilePath in datFilePaths)
                        {
                            datFile.Parse(datFilePath, index++);
                        }

                        break;

                    // Run DFD/D2D on path(s)
                    case "d2d":
                    case "dfd":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: d2d(path, ...);");
                            continue;
                        }

                        // TODO: Should any of the other options be added for D2D?

                        // Assume there could be multiple
                        foreach (string input in command.Arguments)
                        {
                            datFile.PopulateFromDir(input);
                        }

                        // TODO: We might not want to remove higher order hashes in the future
                        // TODO: We might not want to remove dates in the future
                        Cleaner dfdCleaner = new Cleaner() { ExcludeFields = Hash.DeepHashes.AsFields() };
                        dfdCleaner.ExcludeFields.Add(Field.DatItem_Date);
                        datFile.ApplyCleaning(dfdCleaner);
                        break;

                    // Apply a filter
                    case "filter":
                        if (command.Arguments.Count < 2 || command.Arguments.Count > 4)
                        {
                            logger.User($"Invoked {command.Name} and expected between 2-4 arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: filter(field, value, [remove = false, [perMachine = false]]);");
                            continue;
                        }

                        // Read in the individual arguments
                        Field filterField = command.Arguments[0].AsField();
                        string filterValue = command.Arguments[1];
                        bool? filterRemove = false;
                        if (command.Arguments.Count >= 3)
                            filterRemove = command.Arguments[2].AsYesNo();
                        bool? filterPerMachine = false;
                        if (command.Arguments.Count >= 4)
                            filterPerMachine = command.Arguments[3].AsYesNo();

                        // If we had an invalid input, log and continue
                        if (filterField == Field.NULL)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid field name");
                            continue;
                        }

                        if (filterRemove == null)
                        {
                            logger.User($"{command.Arguments[2]} was an invalid true/false value");
                            continue;
                        }

                        if (filterPerMachine == null)
                        {
                            logger.User($"{command.Arguments[3]} was an invalid true/false value");
                            continue;
                        }

                        // Create a filter with this new set of fields
                        Filter filter = new Filter();
                        filter.SetFilter(filterField, filterValue, filterRemove.Value);

                        // Apply the filter blindly
                        datFile.ApplyFilter(filter, filterPerMachine.Value);

                        // Cleanup after the filter
                        // TODO: We might not want to remove immediately
                        datFile.Items.ClearMarked();
                        datFile.Items.ClearEmpty();
                        break;

                    // Apply an extra INI
                    case "extra":
                        if (command.Arguments.Count != 2)
                        {
                            logger.User($"Invoked {command.Name} and expected 2 arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: extra(field, inipath);");
                            continue;
                        }

                        // Read in the individual arguments
                        Field extraField = command.Arguments[0].AsField();
                        string extraFile = command.Arguments[1];

                        // If we had an invalid input, log and continue
                        if (extraField == Field.NULL)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid field name");
                            continue;
                        }

                        if (!File.Exists(command.Arguments[1]))
                        {
                            logger.User($"{command.Arguments[1]} was an invalid file name");
                            continue;
                        }

                        // Create the extra INI
                        ExtraIni extraIni = new ExtraIni();
                        ExtraIniItem extraIniItem = new ExtraIniItem();
                        extraIniItem.PopulateFromFile(extraFile);
                        extraIniItem.Field = extraField;
                        extraIni.Items.Add(extraIniItem);

                        // Apply the extra INI blindly
                        datFile.ApplyExtras(extraIni);
                        break;

                    // Apply internal split/merge
                    case "merge":
                        if (command.Arguments.Count != 1)
                        {
                            logger.User($"Invoked {command.Name} and expected 1 argument, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: merge(split|merged|nonmerged|full|device);");
                            continue;
                        }

                        // Read in the individual arguments
                        MergingFlag mergingFlag = command.Arguments[0].AsMergingFlag();

                        // If we had an invalid input, log and continue
                        if (mergingFlag == MergingFlag.None)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid merging flag");
                            continue;
                        }

                        // Apply the merging flag
                        datFile.ApplySplitting(mergingFlag, false);
                        break;

                    // Apply description-as-name logic
                    case "descname":
                        if (command.Arguments.Count != 0)
                        {
                            logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: descname();");
                            continue;
                        }

                        // Apply the logic
                        datFile.MachineDescriptionToName();
                        break;

                    // Apply 1G1R
                    case "1g1r":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: 1g1r(region, ...);");
                            continue;
                        }

                        // Run the 1G1R functionality
                        datFile.OneGamePerRegion(command.Arguments);
                        break;

                    // Apply one rom per game (ORPG)
                    case "orpg":
                        if (command.Arguments.Count != 0)
                        {
                            logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: orpg();");
                            continue;
                        }

                        // Apply the logic
                        datFile.OneRomPerGame();
                        break;

                    // Remove a field
                    case "remove":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: remove(field, ...);");
                            continue;
                        }

                        // Run the removal functionality
                        datFile.RemoveFieldsFromItems(command.Arguments.Select(s => s.AsField()).ToList());
                        break;

                    // Apply scene date stripping
                    case "sds":
                        if (command.Arguments.Count != 0)
                        {
                            logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: sds();");
                            continue;
                        }

                        // Apply the logic
                        datFile.StripSceneDatesFromItems();
                        break;

                    // Set new output format(s)
                    case "format":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: format(datformat, ...);");
                            continue;
                        }

                        // Assume there could be multiple
                        datFile.Header.DatFormat = 0x00;
                        foreach (string format in command.Arguments)
                        {
                            datFile.Header.DatFormat |= format.AsDatFormat();
                        }

                        // If we had an invalid input, log and continue
                        if (datFile.Header.DatFormat == 0x00)
                        {
                            logger.User("No valid output format found");
                            continue;
                        }

                        break;

                    // Set output directory
                    case "output":
                        if (command.Arguments.Count != 1)
                        {
                            logger.User($"Invoked {command.Name} and expected exactly 1 argument, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: output(outdir);");
                            continue;
                        }

                        // Only set the first as the output directory
                        outputDirectory = command.Arguments[0];
                        break;

                    // Write out the current DatFile
                    case "write":
                        if (command.Arguments.Count > 1)
                        {
                            logger.User($"Invoked {command.Name} and expected 0-1 arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: write([overwrite = true]);");
                            continue;
                        }

                        // Get overwrite value, if possible
                        bool? overwrite = true;
                        if (command.Arguments.Count == 1)
                            overwrite = command.Arguments[0].AsYesNo();

                        // If we had an invalid input, log and continue
                        if (overwrite == null)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid true/false value");
                            continue;
                        }

                        // Write out the dat with the current state
                        datFile.Write(outputDirectory, overwrite: overwrite.Value);
                        break;

                    // Reset the internal state
                    case "reset":
                        if (command.Arguments.Count != 0)
                        {
                            logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: reset();");
                            continue;
                        }

                        // Reset all state variables
                        index = 0;
                        datFile = DatFile.Create();
                        outputDirectory = null;
                        break;

                    default:
                        logger.User($"Could not find a match for '{command.Name}'. Please see the help text for more details.");
                        break;
                }
            }
        }
        catch (Exception ex)
        {
            logger.Error(ex, $"There was an exception processing {path}");
            continue;
        }
    }
}
/// <summary>
/// Wrap rescanning depots
/// </summary>
/// <param name="inputs">List of depots to rescan, empty means all</param>
/// TODO: Verify implementation
private static void InitRescanDepots(List<string> inputs)
{
    Globals.Logger.Error("This feature is not yet implemented: rescan-depots");

    foreach (string depotname in inputs)
    {
        // Check that it's a valid depot first
        if (!_depots.ContainsKey(depotname))
        {
            Globals.Logger.User("'{0}' is not a recognized depot. Please add it to your configuration file and try again", depotname);
            return;
        }

        // Then check that the depot is online
        if (!Directory.Exists(depotname))
        {
            Globals.Logger.User("'{0}' does not appear to be online. Please check its status and try again", depotname);
            return;
        }

        // Open the database connection
        SqliteConnection dbc = new SqliteConnection(_connectionString);
        dbc.Open();

        // If we have it, then check for all hashes that are in that depot
        List<string> hashes = new List<string>();
        string query = "SELECT sha1 FROM sha1 WHERE depot=\"" + depotname + "\"";
        SqliteCommand slc = new SqliteCommand(query, dbc);
        SqliteDataReader sldr = slc.ExecuteReader();
        if (sldr.HasRows)
        {
            while (sldr.Read())
            {
                hashes.Add(sldr.GetString(0));
            }
        }

        // Now rescan the depot itself
        DatFile depot = new DatFile();
        // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
        depot.PopulateFromDir(depotname, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
        depot.BucketBy(SortedBy.SHA1, DedupeType.None);

        // Set the base queries to use
        string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES";
        string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES";
        string sha1query = "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES";
        string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
        string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";

        // Once we have both, check for any new files
        List<string> dupehashes = new List<string>();
        List<string> keys = depot.Keys;
        foreach (string key in keys)
        {
            List<DatItem> roms = depot[key];
            foreach (Rom rom in roms)
            {
                if (hashes.Contains(rom.SHA1))
                {
                    dupehashes.Add(rom.SHA1);
                    hashes.Remove(rom.SHA1);
                }
                else if (!dupehashes.Contains(rom.SHA1))
                {
                    if (!String.IsNullOrWhiteSpace(rom.CRC))
                        crcquery += " (\"" + rom.CRC + "\"),";

                    if (!String.IsNullOrWhiteSpace(rom.MD5))
                        md5query += " (\"" + rom.MD5 + "\"),";

                    if (!String.IsNullOrWhiteSpace(rom.SHA1))
                    {
                        sha1query += " (\"" + rom.SHA1 + "\", \"" + depotname + "\"),";

                        if (!String.IsNullOrWhiteSpace(rom.CRC))
                            crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),";

                        if (!String.IsNullOrWhiteSpace(rom.MD5))
                            md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),";
                    }
                }
            }
        }

        // Now run the queries after fixing them
        if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
        {
            slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
            slc.ExecuteNonQuery();
        }

        if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
        {
            slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
            slc.ExecuteNonQuery();
        }

        if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES")
        {
            slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
            slc.ExecuteNonQuery();
        }

        if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
        {
            slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
            slc.ExecuteNonQuery();
        }

        if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES")
        {
            slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc);
            slc.ExecuteNonQuery();
        }

        // Now that we've added the information, we get to remove all of the hashes that we want to
        query = @"DELETE FROM sha1
            JOIN crcsha1 ON sha1.sha1=crcsha1.sha1
            JOIN md5sha1 ON sha1.sha1=md5sha1.sha1
            JOIN crc ON crcsha1.crc=crc.crc
            JOIN md5 ON md5sha1.md5=md5.md5
            WHERE sha1.sha1 IN (""" + String.Join("\",\"", hashes) + "\")";
        slc = new SqliteCommand(query, dbc);
        slc.ExecuteNonQuery();

        // Dispose of the database connection
        slc.Dispose();
        dbc.Dispose();
    }
}
/// <summary>
/// Wrap refreshing the database with potentially new dats
/// </summary>
/// <param name="workers">How many workers to launch for the job, default from config</param>
/// <param name="missingSha1s">Write paths of dats with missing sha1s into this file</param>
private static void InitRefreshDats(
    int workers,
    string missingSha1s)
{
    // Make sure the db is set
    if (String.IsNullOrWhiteSpace(_db))
    {
        _db = "db.sqlite";
        _connectionString = "Data Source=" + _db + ";Version = 3;";
    }

    // Make sure the file exists
    if (!File.Exists(_db))
        DatabaseTools.EnsureDatabase(_dbSchema, _db, _connectionString);

    // Make sure the dats dir is set
    if (String.IsNullOrWhiteSpace(_dats))
        _dats = "dats";

    _dats = Path.Combine(Globals.ExeDir, _dats);

    // Make sure the folder exists
    if (!Directory.Exists(_dats))
        Directory.CreateDirectory(_dats);

    // First get a list of SHA-1's from the input DATs
    DatFile datroot = new DatFile
    {
        Type = "SuperDAT",
    };

    // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
    datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
    datroot.BucketBy(SortedBy.SHA1, DedupeType.None);

    // Create a List of dat hashes in the database (SHA-1)
    List<string> databaseDats = new List<string>();
    List<string> unneeded = new List<string>();

    SqliteConnection dbc = new SqliteConnection(_connectionString);
    dbc.Open();

    // Populate the List from the database
    InternalStopwatch watch = new InternalStopwatch("Populating the list of existing DATs");

    string query = "SELECT DISTINCT hash FROM dat";
    SqliteCommand slc = new SqliteCommand(query, dbc);
    SqliteDataReader sldr = slc.ExecuteReader();
    if (sldr.HasRows)
    {
        // Loop over every returned hash row
        while (sldr.Read())
        {
            string hash = sldr.GetString(0);
            if (datroot.Contains(hash))
            {
                datroot.Remove(hash);
                databaseDats.Add(hash);
            }
            else if (!databaseDats.Contains(hash))
            {
                unneeded.Add(hash);
            }
        }
    }

    datroot.BucketBy(SortedBy.Game, DedupeType.None, norename: true);

    watch.Stop();

    slc.Dispose();
    sldr.Dispose();

    // Loop through the Dictionary and add all data
    watch.Start("Adding new DAT information");
    foreach (string key in datroot.Keys)
    {
        foreach (Rom value in datroot[key])
        {
            AddDatToDatabase(value, dbc);
        }
    }

    watch.Stop();

    // Now loop through and remove all references to old Dats
    if (unneeded.Count > 0)
    {
        watch.Start("Removing unmatched DAT information");

        query = "DELETE FROM dat WHERE";
        foreach (string dathash in unneeded)
        {
            query += " OR hash=\"" + dathash + "\"";
        }

        query = query.Replace("WHERE OR", "WHERE");
        slc = new SqliteCommand(query, dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();

        watch.Stop();
    }

    dbc.Dispose();
}
/// <summary>
/// Wrap adding files to the depots
/// </summary>
/// <param name="inputs">List of input folders to use</param>
/// <param name="onlyNeeded">True if only files in the database and don't exist are added, false otherwise</param>
/// <param name="resume">Resume a previously interrupted operation from the specified path</param>
/// <param name="includeZips">flag value == 0 means: add Zip files themselves into the depot in addition to their contents, flag value == 2 means add Zip files themselves but don't add content</param>
/// <param name="workers">How many workers to launch for the job, default from config</param>
/// <param name="includeGZips">flag value == 0 means: add GZip files themselves into the depot in addition to their contents, flag value == 2 means add GZip files themselves but don't add content</param>
/// <param name="include7Zips">flag value == 0 means: add 7Zip files themselves into the depot in addition to their contents, flag value == 2 means add 7Zip files themselves but don't add content</param>
/// <param name="skipInitialScan">True to skip the initial scan of the files to determine amount of work, false otherwise</param>
/// <param name="useGolangZip">True to use go zip implementation instead of zlib, false otherwise</param>
/// <param name="noDb">True to archive into depot but do not touch DB index and ignore only-needed flag, false otherwise</param>
/// TODO: Add ability to update .romba files with proper size AND use the correct depot if it fills up
/// TODO: Add ability to correctly mark which depot the files are being rebuilt to in the DB
private static void InitArchive(
    List<string> inputs,
    bool onlyNeeded,
    string resume,
    int includeZips,
    int workers,
    int includeGZips,
    int include7Zips,
    bool skipInitialScan,
    bool useGolangZip, // Obsolete
    bool noDb)
{
    // First we want to get just all directories from the inputs
    List<string> onlyDirs = new List<string>();
    foreach (string input in inputs)
    {
        if (Directory.Exists(input))
            onlyDirs.Add(Path.GetFullPath(input));
    }

    // Then process all of the input directories into an internal DAT
    DatFile df = new DatFile();
    foreach (string dir in onlyDirs)
    {
        // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
        df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
        df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
    }

    // Create an empty Dat for files that need to be rebuilt
    DatFile need = new DatFile();

    // Open the database connection
    SqliteConnection dbc = new SqliteConnection(_connectionString);
    dbc.Open();

    // Now that we have the Dats, add the files to the database
    string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES";
    string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES";
    string sha1query = "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES";
    string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
    string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";

    foreach (string key in df.Keys)
    {
        List<DatItem> datItems = df[key];
        foreach (Rom rom in datItems)
        {
            // If we care about if the file exists, check the database first
            if (onlyNeeded && !noDb)
            {
                string query = "SELECT * FROM crcsha1 JOIN md5sha1 ON crcsha1.sha1=md5sha1.sha1"
                    + " WHERE crcsha1.crc=\"" + rom.CRC + "\""
                    + " OR md5sha1.md5=\"" + rom.MD5 + "\""
                    + " OR md5sha1.sha1=\"" + rom.SHA1 + "\"";
                SqliteCommand slc = new SqliteCommand(query, dbc);
                SqliteDataReader sldr = slc.ExecuteReader();

                if (sldr.HasRows)
                {
                    // Add to the queries
                    if (!String.IsNullOrWhiteSpace(rom.CRC))
                        crcquery += " (\"" + rom.CRC + "\"),";

                    if (!String.IsNullOrWhiteSpace(rom.MD5))
                        md5query += " (\"" + rom.MD5 + "\"),";

                    if (!String.IsNullOrWhiteSpace(rom.SHA1))
                    {
                        sha1query += " (\"" + rom.SHA1 + "\", \"" + _depots.Keys.ToList()[0] + "\"),";

                        if (!String.IsNullOrWhiteSpace(rom.CRC))
                            crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),";

                        if (!String.IsNullOrWhiteSpace(rom.MD5))
                            md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),";
                    }

                    // Add to the Dat
                    need.Add(key, rom);
                }
            }
            // Otherwise, just add the file to the list
            else
            {
                // Add to the queries
                if (!noDb)
                {
                    if (!String.IsNullOrWhiteSpace(rom.CRC))
                        crcquery += " (\"" + rom.CRC + "\"),";

                    if (!String.IsNullOrWhiteSpace(rom.MD5))
                        md5query += " (\"" + rom.MD5 + "\"),";

                    if (!String.IsNullOrWhiteSpace(rom.SHA1))
                    {
                        sha1query += " (\"" + rom.SHA1 + "\", \"" + _depots.Keys.ToList()[0] + "\"),";

                        if (!String.IsNullOrWhiteSpace(rom.CRC))
                            crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),";

                        if (!String.IsNullOrWhiteSpace(rom.MD5))
                            md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),";
                    }
                }

                // Add to the Dat
                need.Add(key, rom);
            }
        }
    }

    // Now run the queries, if they're populated
    if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
    {
        SqliteCommand slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();
    }

    if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
    {
        SqliteCommand slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();
    }

    if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES")
    {
        SqliteCommand slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();
    }

    if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
    {
        SqliteCommand slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();
    }

    if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES")
    {
        SqliteCommand slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();
    }

    // Create the sorting object to use and rebuild the needed files
    ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(include7Zips, includeGZips, 2, includeZips);
    need.RebuildGeneric(onlyDirs, _depots.Keys.ToList()[0], false /*quickScan*/, false /*date*/,
        false /*delete*/, false /*inverse*/, OutputFormat.TorrentGzip, true /*romba*/, asl,
        false /*updateDat*/, null /*headerToCheckAgainst*/, true /* chdsAsFiles */);
}
public override void ProcessFeatures(Dictionary<string, Library.Help.Feature> features)
{
    base.ProcessFeatures(features);

    // Get feature flags
    bool addBlankFiles = GetBoolean(features, AddBlankFilesValue);
    bool addFileDates = GetBoolean(features, AddDateValue);
    TreatAsFile asFiles = GetTreatAsFiles(features);
    bool noAutomaticDate = GetBoolean(features, NoAutomaticDateValue);
    var includeInScan = GetIncludeInScan(features);
    var skipFileType = GetSkipFileType(features);
    var splitType = GetSplitType(features);

    // Apply the specialized field removals to the cleaner
    if (Cleaner.ExcludeFields == null)
        Cleaner.ExcludeFields = new List<Field>();

    if (!addFileDates)
        Cleaner.ExcludeFields.Add(Field.DatItem_Date);

    // Create a new DATFromDir object and process the inputs
    DatFile basedat = DatFile.Create(Header);
    basedat.Header.Date = DateTime.Now.ToString("yyyy-MM-dd");

    // For each input directory, create a DAT
    foreach (string path in Inputs)
    {
        if (Directory.Exists(path) || File.Exists(path))
        {
            // Clone the base Dat for information
            DatFile datdata = DatFile.Create(basedat.Header);

            // Get the base path and fill the header, if needed
            string basePath = Path.GetFullPath(path);
            datdata.FillHeaderFromPath(basePath, noAutomaticDate);

            // Now populate from the path
            bool success = datdata.PopulateFromDir(
                basePath,
                asFiles,
                skipFileType,
                addBlankFiles,
                hashes: includeInScan);

            if (success)
            {
                // Perform additional processing steps
                datdata.ApplyExtras(Extras);
                datdata.ApplySplitting(splitType, false);
                datdata.ApplyFilter(Filter);
                datdata.ApplyCleaning(Cleaner);

                // Write out the file
                datdata.Write(OutputDir);
            }
            else
            {
                Console.WriteLine();
                OutputRecursive(0);
            }
        }
    }
}
public override void ProcessFeatures(Dictionary<string, SabreTools.Library.Help.Feature> features)
{
    base.ProcessFeatures(features);

    // Get feature flags
    int workers = GetInt32(features, WorkersInt32Value);
    string missingSha1s = GetString(features, MissingSha1sStringValue);

    // Make sure the db is set
    if (string.IsNullOrWhiteSpace(_db))
    {
        _db = "db.sqlite";
        _connectionString = $"Data Source={_db};Version = 3;";
    }

    // Make sure the file exists
    if (!File.Exists(_db))
        EnsureDatabase(_db, _connectionString);

    // Make sure the dats dir is set
    if (string.IsNullOrWhiteSpace(_dats))
        _dats = "dats";

    _dats = Path.Combine(Globals.ExeDir, _dats);

    // Make sure the folder exists
    if (!Directory.Exists(_dats))
        Directory.CreateDirectory(_dats);

    // First get a list of SHA-1's from the input DATs
    DatFile datroot = DatFile.Create();
    datroot.Header.Type = "SuperDAT";
    datroot.PopulateFromDir(_dats, asFiles: TreatAsFile.NonArchive);
    datroot.Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);

    // Create a List of dat hashes in the database (SHA-1)
    List<string> databaseDats = new List<string>();
    List<string> unneeded = new List<string>();

    SqliteConnection dbc = new SqliteConnection(_connectionString);
    dbc.Open();

    // Populate the List from the database
    InternalStopwatch watch = new InternalStopwatch("Populating the list of existing DATs");

    string query = "SELECT DISTINCT hash FROM dat";
    SqliteCommand slc = new SqliteCommand(query, dbc);
    SqliteDataReader sldr = slc.ExecuteReader();
    if (sldr.HasRows)
    {
        // Loop over every returned hash row
        while (sldr.Read())
        {
            string hash = sldr.GetString(0);
            if (datroot.Items.ContainsKey(hash))
            {
                datroot.Items.Remove(hash);
                databaseDats.Add(hash);
            }
            else if (!databaseDats.Contains(hash))
            {
                unneeded.Add(hash);
            }
        }
    }

    datroot.Items.BucketBy(Field.Machine_Name, DedupeType.None, norename: true);

    watch.Stop();

    slc.Dispose();
    sldr.Dispose();

    // Loop through the Dictionary and add all data
    watch.Start("Adding new DAT information");
    foreach (string key in datroot.Items.Keys)
    {
        foreach (Rom value in datroot.Items[key])
        {
            AddDatToDatabase(value, dbc);
        }
    }

    watch.Stop();

    // Now loop through and remove all references to old Dats
    if (unneeded.Count > 0)
    {
        watch.Start("Removing unmatched DAT information");

        query = "DELETE FROM dat WHERE";
        foreach (string dathash in unneeded)
        {
            query += $" OR hash=\"{dathash}\"";
        }

        query = query.Replace("WHERE OR", "WHERE");
        slc = new SqliteCommand(query, dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();

        watch.Stop();
    }

    dbc.Dispose();
}
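// Sketch, not existing API: the unneeded-hash cleanup above builds a "WHERE OR ..."
// string and then patches it with Replace. A hypothetical equivalent that builds an IN
// clause directly in one pass could look like this:
private static void DeleteUnneededDats(SqliteConnection dbc, List<string> unneeded)
{
    if (unneeded.Count == 0)
        return;

    // Join the hashes into a quoted, comma-separated IN list
    string query = $"DELETE FROM dat WHERE hash IN (\"{string.Join("\", \"", unneeded)}\")";
    using (SqliteCommand slc = new SqliteCommand(query, dbc))
    {
        slc.ExecuteNonQuery();
    }
}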