/// <summary>
/// Populate from multiple paths while returning the individual headers
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="inputs">Paths to DATs to parse</param>
/// <returns>List of DatHeader objects representing headers</returns>
public static List<DatHeader> PopulateUserData(DatFile datFile, List<ParentablePath> inputs)
{
    var parsedDats = new DatFile[inputs.Count];
    var watch = new InternalStopwatch("Processing individual DATs");

    // Parse every input into its own DatFile slot, in parallel
    Parallel.For(0, inputs.Count, Globals.ParallelOptions, index =>
    {
        ParentablePath path = inputs[index];
        logger.User($"Adding DAT: {path.CurrentPath}");
        parsedDats[index] = DatFile.Create(datFile.Header.CloneFiltering());
        Parser.ParseInto(parsedDats[index], path, index, keep: true);
    });

    watch.Stop();

    // Fold each parsed DAT into the target sequentially, preserving input order
    watch.Start("Populating internal DAT");

    foreach (DatFile parsed in parsedDats)
    {
        AddFromExisting(datFile, parsed, true);
    }

    watch.Stop();

    return parsedDats.Select(d => d.Header).ToList();
}
/// <summary>
/// Output duplicate item diff
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="inputs">List of inputs to write out from</param>
/// <returns>New DatFile containing only the items flagged as external duplicates</returns>
public static DatFile DiffDuplicates(DatFile datFile, List<ParentablePath> inputs)
{
    InternalStopwatch watch = new InternalStopwatch("Initializing duplicate DAT");

    // Fill in any information not in the base DAT
    // BUGFIX: Name/Description fallbacks previously read "datFile.All DATs" --
    // a stray "datFile." prefix pasted into the literal by a refactor.
    // DiffIndividuals uses "All DATs" for the same fallbacks; made consistent.
    if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
        datFile.Header.FileName = "All DATs";

    if (string.IsNullOrWhiteSpace(datFile.Header.Name))
        datFile.Header.Name = "All DATs";

    if (string.IsNullOrWhiteSpace(datFile.Header.Description))
        datFile.Header.Description = "All DATs";

    // Clone the header into a fresh DAT tagged as the duplicates output
    string post = " (Duplicates)";
    DatFile dupeData = DatFile.Create(datFile.Header);
    dupeData.Header.FileName += post;
    dupeData.Header.Name += post;
    dupeData.Header.Description += post;
    dupeData.Items = new ItemDictionary();

    watch.Stop();

    // Now, loop through the dictionary and populate the correct DATs
    watch.Start("Populating duplicate DAT");

    Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
    {
        ConcurrentList<DatItem> items = DatItem.Merge(datFile.Items[key]);

        // If the rom list is empty or null, just skip it
        if (items == null || items.Count == 0)
            return;

        // Keep only external duplicates, renaming the machine to show which
        // input the duplicate came from
        foreach (DatItem item in items)
        {
            if (item.DupeType.HasFlag(DupeType.External))
            {
                DatItem newrom = item.Clone() as DatItem;
                newrom.Machine.Name += $" ({Path.GetFileNameWithoutExtension(inputs[item.Source.Index].CurrentPath)})";
                dupeData.Items.Add(key, newrom);
            }
        }
    });

    watch.Stop();
    return dupeData;
}
/// <summary>
/// Refresh the database with potentially new DATs found in the dats directory
/// </summary>
/// <param name="features">Parsed feature flags from the command line</param>
/// <returns>True if processing succeeded, false if the base processing failed</returns>
public override bool ProcessFeatures(Dictionary<string, SabreTools.Help.Feature> features)
{
    // If the base fails, just fail out
    if (!base.ProcessFeatures(features))
        return false;

    // Get feature flags
    // NOTE(review): workers and missingSha1s are read but never used in this
    // method as visible here -- confirm whether they were meant to be wired through
    int workers = GetInt32(features, WorkersInt32Value);
    string missingSha1s = GetString(features, MissingSha1sStringValue);

    // Make sure the db is set
    if (string.IsNullOrWhiteSpace(_db))
    {
        _db = "db.sqlite";
        _connectionString = $"Data Source={_db};Version = 3;";
    }

    // Make sure the file exists
    if (!File.Exists(_db))
        EnsureDatabase(_db, _connectionString);

    // Make sure the dats dir is set
    if (string.IsNullOrWhiteSpace(_dats))
        _dats = "dats";

    _dats = Path.Combine(PathTool.GetRuntimeDirectory(), _dats);

    // Make sure the folder exists
    if (!Directory.Exists(_dats))
        Directory.CreateDirectory(_dats);

    // First get a list of SHA-1's from the input DATs
    DatFile datroot = DatFile.Create();
    datroot.Header.Type = "SuperDAT";
    DatFromDir.PopulateFromDir(datroot, _dats, asFiles: TreatAsFile.NonArchive, hashes: Hash.Standard);
    datroot.Items.BucketBy(ItemKey.SHA1, DedupeType.None);

    // Create a List of dat hashes in the database (SHA-1)
    List<string> databaseDats = new List<string>();
    List<string> unneeded = new List<string>();

    SqliteConnection dbc = new SqliteConnection(_connectionString);
    dbc.Open();

    // Populate the List from the database
    InternalStopwatch watch = new InternalStopwatch("Populating the list of existing DATs");

    string query = "SELECT DISTINCT hash FROM dat";
    SqliteCommand slc = new SqliteCommand(query, dbc);
    SqliteDataReader sldr = slc.ExecuteReader();

    // BUGFIX: previously `if (sldr.HasRows) { sldr.Read(); ... }` consumed only
    // the FIRST row; iterate every row so all known hashes are accounted for
    while (sldr.Read())
    {
        string hash = sldr.GetString(0);
        if (datroot.Items.ContainsKey(hash))
        {
            // Already in the database; drop it from the set of new DATs
            datroot.Items.Remove(hash);
            databaseDats.Add(hash);
        }
        else if (!databaseDats.Contains(hash))
        {
            // In the database but not on disk anymore
            unneeded.Add(hash);
        }
    }

    datroot.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
    watch.Stop();

    slc.Dispose();
    sldr.Dispose();

    // Loop through the Dictionary and add all data
    watch.Start("Adding new DAT information");

    foreach (string key in datroot.Items.Keys)
    {
        foreach (Rom value in datroot.Items[key])
        {
            AddDatToDatabase(value, dbc);
        }
    }

    watch.Stop();

    // Now loop through and remove all references to old Dats
    if (unneeded.Count > 0)
    {
        watch.Start("Removing unmatched DAT information");

        // Build the delete in one pass instead of string-patching "WHERE OR";
        // hashes come from our own database, not user input
        query = "DELETE FROM dat WHERE "
            + string.Join(" OR ", unneeded.Select(dathash => $"hash=\"{dathash}\""));
        slc = new SqliteCommand(query, dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();

        watch.Stop();
    }

    dbc.Dispose();
    return true;
}
/// <summary>
/// Wrap refreshing the database with potentially new dats
/// </summary>
/// <param name="workers">How many workers to launch for the job, default from config</param>
/// <param name="missingSha1s">Write paths of dats with missing sha1s into this file</param>
private static void InitRefreshDats(
    int workers,
    string missingSha1s)
{
    // NOTE(review): workers and missingSha1s are not used in this method as
    // visible here -- confirm whether they were meant to be wired through

    // Make sure the db is set
    if (string.IsNullOrWhiteSpace(_db))
    {
        _db = "db.sqlite";
        _connectionString = $"Data Source={_db};Version = 3;";
    }

    // Make sure the file exists
    if (!File.Exists(_db))
        DatabaseTools.EnsureDatabase(_dbSchema, _db, _connectionString);

    // Make sure the dats dir is set
    if (string.IsNullOrWhiteSpace(_dats))
        _dats = "dats";

    _dats = Path.Combine(Globals.ExeDir, _dats);

    // Make sure the folder exists
    if (!Directory.Exists(_dats))
        Directory.CreateDirectory(_dats);

    // First get a list of SHA-1's from the input DATs
    DatFile datroot = new DatFile
    {
        Type = "SuperDAT",
    };

    // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
    datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
    datroot.BucketBy(SortedBy.SHA1, DedupeType.None);

    // Create a List of dat hashes in the database (SHA-1)
    List<string> databaseDats = new List<string>();
    List<string> unneeded = new List<string>();

    SqliteConnection dbc = new SqliteConnection(_connectionString);
    dbc.Open();

    // Populate the List from the database
    InternalStopwatch watch = new InternalStopwatch("Populating the list of existing DATs");

    string query = "SELECT DISTINCT hash FROM dat";
    SqliteCommand slc = new SqliteCommand(query, dbc);
    SqliteDataReader sldr = slc.ExecuteReader();

    // BUGFIX: previously `if (sldr.HasRows) { sldr.Read(); ... }` consumed only
    // the FIRST row; iterate every row so all known hashes are accounted for
    while (sldr.Read())
    {
        string hash = sldr.GetString(0);
        if (datroot.Contains(hash))
        {
            // Already in the database; drop it from the set of new DATs
            datroot.Remove(hash);
            databaseDats.Add(hash);
        }
        else if (!databaseDats.Contains(hash))
        {
            // In the database but not on disk anymore
            unneeded.Add(hash);
        }
    }

    datroot.BucketBy(SortedBy.Game, DedupeType.None, norename: true);
    watch.Stop();

    slc.Dispose();
    sldr.Dispose();

    // Loop through the Dictionary and add all data
    watch.Start("Adding new DAT information");

    foreach (string key in datroot.Keys)
    {
        foreach (Rom value in datroot[key])
        {
            AddDatToDatabase(value, dbc);
        }
    }

    watch.Stop();

    // Now loop through and remove all references to old Dats
    if (unneeded.Count > 0)
    {
        watch.Start("Removing unmatched DAT information");

        // Build the delete in one pass instead of string-patching "WHERE OR";
        // hashes come from our own database, not user input
        query = "DELETE FROM dat WHERE "
            + string.Join(" OR ", unneeded.Select(dathash => $"hash=\"{dathash}\""));
        slc = new SqliteCommand(query, dbc);
        slc.ExecuteNonQuery();
        slc.Dispose();

        watch.Stop();
    }

    dbc.Dispose();
}
/// <summary>
/// Output non-cascading diffs
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="inputs">List of inputs to write out from</param>
/// <returns>One DatFile per input, each holding the items unique to that input</returns>
public static List<DatFile> DiffIndividuals(DatFile datFile, List<ParentablePath> inputs)
{
    var watch = new InternalStopwatch("Initializing all individual DATs");

    // Backfill header fields that the base DAT left blank
    if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
        datFile.Header.FileName = "All DATs";

    if (string.IsNullOrWhiteSpace(datFile.Header.Name))
        datFile.Header.Name = "All DATs";

    if (string.IsNullOrWhiteSpace(datFile.Header.Description))
        datFile.Header.Description = "All DATs";

    // Build one empty output DAT per input, tagged with the input's index and name
    var outDatsArray = new DatFile[inputs.Count];

    Parallel.For(0, inputs.Count, Globals.ParallelOptions, index =>
    {
        string suffix = $" ({index} - {inputs[index].GetNormalizedFileName(true)} Only)";
        DatFile perInput = DatFile.Create(datFile.Header);
        perInput.Header.FileName += suffix;
        perInput.Header.Name += suffix;
        perInput.Header.Description += suffix;
        perInput.Items = new ItemDictionary();
        outDatsArray[index] = perInput;
    });

    // Snapshot the array as a list of output DATs
    List<DatFile> outDats = outDatsArray.ToList();
    watch.Stop();

    // Route each internal-duplicate (or non-duplicate) item back to the DAT
    // matching the input it came from
    watch.Start("Populating all individual DATs");

    Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
    {
        ConcurrentList<DatItem> merged = DatItem.Merge(datFile.Items[key]);

        // Nothing merged for this key; skip
        if (merged == null || merged.Count == 0)
            return;

        foreach (DatItem item in merged)
        {
            if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00)
                outDats[item.Source.Index].Items.Add(key, item);
        }
    });

    watch.Stop();
    return outDats.ToList();
}