/// <summary>
        /// Wrap creating a have file and a miss file for each DAT
        /// </summary>
        /// <param name="inputs">List of DAT files to generate have and miss lists for; an empty list means all</param>
        /// TODO: Implement
        private static void InitMiss(List <string> inputs)
        {
            // Verify the filenames
            Dictionary <string, string> foundDats = GetValidDats(inputs);

            // Create the new output directory if it doesn't exist
            Utilities.EnsureOutputDirectory(Path.Combine(Globals.ExeDir, "out"), create: true);

            // Now that we have the dictionary, we can loop through and output to a new folder for each
            foreach (string key in foundDats.Keys)
            {
                // Get the DAT file associated with the key
                DatFile datFile = new DatFile();
                datFile.Parse(Path.Combine(_dats, foundDats[key]), 0, 0);

                // Now loop through and see if all of the hash combinations exist in the database
                // TODO: check each hash combination against the database and emit have/miss entries
            }

            Globals.Logger.Error("This feature is not yet implemented: miss");
        }
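
A minimal sketch of how the unfinished loop body might classify entries, assuming a hypothetical HashExistsInDatabase helper (not a real SabreTools API); the Keys/indexer access pattern mirrors Example #7 below:

                // Hypothetical: split the parsed items into have/miss lists via database lookups
                List<string> haves = new List<string>();
                List<string> misses = new List<string>();
                foreach (string romkey in datFile.Keys)
                {
                    foreach (DatItem datItem in datFile[romkey])
                    {
                        // HashExistsInDatabase is an assumed helper for this sketch
                        if (HashExistsInDatabase(datItem))
                            haves.Add(datItem.Name);
                        else
                            misses.Add(datItem.Name);
                    }
                }
                // Writing the have and miss files from these lists is the remaining TODO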
Example #2
        public override void ProcessFeatures(Dictionary <string, Feature> features)
        {
            base.ProcessFeatures(features);

            // Get feature flags
            string name        = GetString(features, NameStringValue);
            string description = GetString(features, DescriptionStringValue);
            string newdat      = GetString(features, NewStringValue);
            string olddat      = GetString(features, OldStringValue);
            string outdat      = GetString(features, OutStringValue);

            // Ensure the output directory
            DirectoryExtensions.Ensure(outdat, create: true);

            // Check that all required files exist
            if (!File.Exists(olddat))
            {
                logger.Error($"File '{olddat}' does not exist!");
                return;
            }

            if (!File.Exists(newdat))
            {
                logger.Error($"File '{newdat}' does not exist!");
                return;
            }

            // Create the encapsulating datfile
            DatFile datfile = DatFile.Create();

            datfile.Header.Name        = name;
            datfile.Header.Description = description;
            datfile.Parse(olddat);

            // Diff against the new datfile
            DatFile intDat = DatFile.CreateAndParse(newdat);

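            // The second argument is presumably the by-game diff toggle (cf. GetBoolean(features, ByGameValue) in Example #8); false diffs at the item level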
            datfile.DiffAgainst(intDat, false);
            intDat.Write(outdat);
        }
        /// <summary>
        /// Wrap building all files from a set of DATs
        /// </summary>
        /// <param name="inputs">List of input DATs to rebuild from</param>
        /// <param name="outdat">Output directory</param>
        /// <param name="fixdatOnly">True to only generate fixdats and skip building torrentzips, false otherwise</param>
        /// <param name="copy">True if files should be copied to output, false for rebuild</param>
        /// <param name="workers">How many workers to launch for the job, default from config</param>
        /// <param name="subworkers">How many subworkers to launch for each worker, default from config</param>
        private static void InitBuild(
            List <string> inputs,
            string outdat,
            bool fixdatOnly,
            bool copy,
            int workers,
            int subworkers)
        {
            // Verify the filenames
            Dictionary <string, string> foundDats = GetValidDats(inputs);

            // Ensure the output directory is set
            if (String.IsNullOrWhiteSpace(outdat))
            {
                outdat = "out";
            }

            // Now that we have the dictionary, we can loop through and output to a new folder for each
            foreach (string key in foundDats.Keys)
            {
                // Get the DAT file associated with the key
                DatFile datFile = new DatFile();
                datFile.Parse(Path.Combine(_dats, foundDats[key]), 0, 0);

                // Create the new output directory if it doesn't exist
                string outputFolder = Path.Combine(outdat, Path.GetFileNameWithoutExtension(foundDats[key]));
                Utilities.EnsureOutputDirectory(outputFolder, create: true);

                // Get all online depots
                List <string> onlineDepots = _depots.Where(d => d.Value.Item2).Select(d => d.Key).ToList();

                // Now scan all of those depots and rebuild
                ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(1, 1, 1, 1);
                datFile.RebuildDepot(onlineDepots, outputFolder, false /*date*/,
                                     false /*delete*/, false /*inverse*/, (copy ? OutputFormat.TorrentGzip : OutputFormat.TorrentZip), copy,
                                     false /*updateDat*/, null /*headerToCheckAgainst*/);
            }
        }
Example #4
        public override void ProcessFeatures(Dictionary <string, Library.Help.Feature> features)
        {
            base.ProcessFeatures(features);
            SplittingMode splittingMode = GetSplittingMode(features);
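            // SplittingMode is evidently a [Flags] enum (see the HasFlag checks below), so several split types can run in one pass over each input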

            // If we somehow have the "none" split type, return
            if (splittingMode == SplittingMode.None)
            {
                return;
            }

            // Get only files from the inputs
            List <ParentablePath> files = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);

            // Loop over the input files
            foreach (ParentablePath file in files)
            {
                // Create and fill the new DAT
                DatFile internalDat = DatFile.Create(Header);
                internalDat.Parse(file);

                // Get the output directory
                OutputDir = file.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                // Extension splitting
                if (splittingMode.HasFlag(SplittingMode.Extension))
                {
                    (DatFile extADat, DatFile extBDat) = internalDat.SplitByExtension(GetList(features, ExtAListValue), GetList(features, ExtBListValue));

                    InternalStopwatch watch = new InternalStopwatch("Outputting extension-split DATs");

                    // Output both possible DatFiles
                    extADat.Write(OutputDir);
                    extBDat.Write(OutputDir);

                    watch.Stop();
                }

                // Hash splitting
                if (splittingMode.HasFlag(SplittingMode.Hash))
                {
                    Dictionary <Field, DatFile> typeDats = internalDat.SplitByHash();

                    InternalStopwatch watch = new InternalStopwatch("Outputting hash-split DATs");

                    // Loop through each type DatFile
                    Parallel.ForEach(typeDats.Keys, Globals.ParallelOptions, itemType =>
                    {
                        typeDats[itemType].Write(OutputDir);
                    });

                    watch.Stop();
                }

                // Level splitting
                if (splittingMode.HasFlag(SplittingMode.Level))
                {
                    logger.Warning("This feature is not implemented: level-split");
                    internalDat.SplitByLevel(
                        OutputDir,
                        GetBoolean(features, ShortValue),
                        GetBoolean(features, BaseValue));
                }

                // Size splitting
                if (splittingMode.HasFlag(SplittingMode.Size))
                {
                    (DatFile lessThan, DatFile greaterThan) = internalDat.SplitBySize(GetInt64(features, RadixInt64Value));

                    InternalStopwatch watch = new InternalStopwatch("Outputting size-split DATs");

                    // Output both possible DatFiles
                    lessThan.Write(OutputDir);
                    greaterThan.Write(OutputDir);

                    watch.Stop();
                }

                // Type splitting
                if (splittingMode.HasFlag(SplittingMode.Type))
                {
                    Dictionary <ItemType, DatFile> typeDats = internalDat.SplitByType();

                    InternalStopwatch watch = new InternalStopwatch("Outputting ItemType DATs");

                    // Loop through each type DatFile
                    Parallel.ForEach(typeDats.Keys, Globals.ParallelOptions, itemType =>
                    {
                        typeDats[itemType].Write(OutputDir);
                    });

                    watch.Stop();
                }
            }
        }
Example #5
        /// <summary>
        /// Wrap verifying files using an input DAT
        /// </summary>
        /// <param name="datfiles">Names of the DATs to compare against</param>
        /// <param name="inputs">Input directories to compare against</param>
        /// <param name="depot">True if the input directories are treated as romba depots, false otherwise</param>
        /// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
        /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
        /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
        /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
        /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
        /// <param name="individual">True if DATs should be verified individually, false if they should be done in bulk</param>
        private static void InitVerify(
            List <string> datfiles,
            List <string> inputs,
            bool depot,
            bool hashOnly,
            bool quickScan,
            string headerToCheckAgainst,
            SplitType splitType,
            bool chdsAsFiles,
            bool individual)
        {
            // Get the archive scanning level
            ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(1, 1, 1, 1);

            // Get a list of files from the input datfiles
            datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);

            // If we are in individual mode, process each DAT on its own
            if (individual)
            {
                foreach (string datfile in datfiles)
                {
                    DatFile datdata = new DatFile();
                    datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);

                    // If we have the depot flag, respect it
                    if (depot)
                    {
                        datdata.VerifyDepot(inputs, headerToCheckAgainst);
                    }
                    else
                    {
                        datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles);
                    }
                }
            }
            // Otherwise, process all DATs into the same output
            else
            {
                InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

                // Add all of the input DATs into one huge internal DAT
                DatFile datdata = new DatFile();
                foreach (string datfile in datfiles)
                {
                    datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
                }

                watch.Stop();

                // If we have the depot flag, respect it
                if (depot)
                {
                    datdata.VerifyDepot(inputs, headerToCheckAgainst);
                }
                else
                {
                    datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles);
                }
            }
        }
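
A hypothetical call to InitVerify, checking one input directory against a single DAT with a quick scan; the paths and the SplitType.None value are illustrative assumptions:

            InitVerify(
                new List<string> { "dats/example.dat" },
                new List<string> { "roms" },
                depot: false,
                hashOnly: false,
                quickScan: true,
                headerToCheckAgainst: null,
                splitType: SplitType.None,
                chdsAsFiles: false,
                individual: true);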
Example #6
        /// <summary>
        /// Wrap sorting files using an input DAT
        /// </summary>
        /// <param name="datfiles">Names of the DATs to compare against</param>
        /// <param name="inputs">List of input files/folders to check</param>
        /// <param name="outDir">Output directory to use to build to</param>
        /// <param name="depot">True if the input directories are treated as romba depots, false otherwise</param>
        /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
        /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
        /// <param name="delete">True if input files should be deleted, false otherwise</param>
        /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
        /// <param name="outputFormat">Output format that files should be written to</param>
        /// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
        /// <param name="sevenzip">Integer representing the archive handling level for 7z</param>
        /// <param name="gz">Integer representing the archive handling level for GZip</param>
        /// <param name="rar">Integer representing the archive handling level for RAR</param>
        /// <param name="zip">Integer representing the archive handling level for Zip</param>
        /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
        /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
        /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
        /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
        /// <param name="individual">True if DATs should be sorted individually, false if they should be done in bulk</param>
        private static void InitSort(
            List <string> datfiles,
            List <string> inputs,
            string outDir,
            bool depot,
            bool quickScan,
            bool date,
            bool delete,
            bool inverse,
            OutputFormat outputFormat,
            bool romba,
            int sevenzip,
            int gz,
            int rar,
            int zip,
            bool updateDat,
            string headerToCheckAgainst,
            SplitType splitType,
            bool chdsAsFiles,
            bool individual)
        {
            // Get the archive scanning level
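            // The four integers select per-format handling (7z, GZip, RAR, Zip); the exact level
            // semantics (e.g. 0 = scan in and out, 1 = internal only, 2 = external only) are assumed,
            // since GetArchiveScanLevelFromNumbers is not documented on this page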
            ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(sevenzip, gz, rar, zip);

            // Get a list of files from the input datfiles
            datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);

            // If we are in individual mode, process each DAT on its own, appending the DAT name to the output dir
            if (individual)
            {
                foreach (string datfile in datfiles)
                {
                    DatFile datdata = new DatFile();
                    datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);

                    // If we have the depot flag, respect it
                    if (depot)
                    {
                        datdata.RebuildDepot(inputs, Path.Combine(outDir, datdata.FileName), date, delete, inverse, outputFormat, romba,
                                             updateDat, headerToCheckAgainst);
                    }
                    else
                    {
                        datdata.RebuildGeneric(inputs, Path.Combine(outDir, datdata.FileName), quickScan, date, delete, inverse, outputFormat, romba, asl,
                                               updateDat, headerToCheckAgainst, chdsAsFiles);
                    }
                }
            }
            // Otherwise, process all DATs into the same output
            else
            {
                InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

                // Add all of the input DATs into one huge internal DAT
                DatFile datdata = new DatFile();
                foreach (string datfile in datfiles)
                {
                    datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
                }

                watch.Stop();

                // If we have the depot flag, respect it
                if (depot)
                {
                    datdata.RebuildDepot(inputs, outDir, date, delete, inverse, outputFormat, romba,
                                         updateDat, headerToCheckAgainst);
                }
                else
                {
                    datdata.RebuildGeneric(inputs, outDir, quickScan, date, delete, inverse, outputFormat, romba, asl,
                                           updateDat, headerToCheckAgainst, chdsAsFiles);
                }
            }
        }
Example #7
        /// <summary>
        /// Add a new DAT to the database
        /// </summary>
        /// <param name="dat">DatFile hash information to add</param>
        /// <param name="dbc">Database connection to use</param>
        private static void AddDatToDatabase(Rom dat, SqliteConnection dbc)
        {
            // Get the dat full path
            string fullpath = Path.Combine(_dats, (dat.MachineName == "dats" ? "" : dat.MachineName), dat.Name);

            // Parse the Dat if possible
            Globals.Logger.User("Adding from '" + dat.Name + "'");
            DatFile tempdat = new DatFile();

            tempdat.Parse(fullpath, 0, 0);

            // If the Dat wasn't empty, add the information
            SqliteCommand slc = new SqliteCommand();

            if (tempdat.Count != 0)
            {
                string crcquery     = "INSERT OR IGNORE INTO crc (crc) VALUES";
                string md5query     = "INSERT OR IGNORE INTO md5 (md5) VALUES";
                string sha1query    = "INSERT OR IGNORE INTO sha1 (sha1) VALUES";
                string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
                string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";

                // Loop through the parsed entries
                foreach (string romkey in tempdat.Keys)
                {
                    foreach (DatItem datItem in tempdat[romkey])
                    {
                        Globals.Logger.Verbose("Checking and adding file '{0}'", datItem.Name);

                        if (datItem.ItemType == ItemType.Rom)
                        {
                            Rom rom = (Rom)datItem;

                            if (!String.IsNullOrWhiteSpace(rom.CRC))
                            {
                                crcquery += " (\"" + rom.CRC + "\"),";
                            }
                            if (!String.IsNullOrWhiteSpace(rom.MD5))
                            {
                                md5query += " (\"" + rom.MD5 + "\"),";
                            }
                            if (!String.IsNullOrWhiteSpace(rom.SHA1))
                            {
                                sha1query += " (\"" + rom.SHA1 + "\"),";

                                if (!String.IsNullOrWhiteSpace(rom.CRC))
                                {
                                    crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),";
                                }
                                if (!String.IsNullOrWhiteSpace(rom.MD5))
                                {
                                    md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),";
                                }
                            }
                        }
                        else if (datItem.ItemType == ItemType.Disk)
                        {
                            Disk disk = (Disk)datItem;

                            if (!String.IsNullOrWhiteSpace(disk.MD5))
                            {
                                md5query += " (\"" + disk.MD5 + "\"),";
                            }
                            if (!String.IsNullOrWhiteSpace(disk.SHA1))
                            {
                                sha1query += " (\"" + disk.SHA1 + "\"),";

                                if (!String.IsNullOrWhiteSpace(disk.MD5))
                                {
                                    md5sha1query += " (\"" + disk.MD5 + "\", \"" + disk.SHA1 + "\"),";
                                }
                            }
                        }
                    }
                }

                // Now run the queries after trimming the trailing commas
                if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
                {
                    slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }
                if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
                {
                    slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }
                if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1) VALUES")
                {
                    slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }
                if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
                {
                    slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }
                if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES")
                {
                    slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }
            }

            string datquery = "INSERT OR IGNORE INTO dat (hash) VALUES (\"" + dat.SHA1 + "\")";

            slc = new SqliteCommand(datquery, dbc);
            slc.ExecuteNonQuery();
            slc.Dispose();
        }
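
The INSERT statements above are assembled by concatenating quoted hash strings. A parameterized per-row variant is sketched below; it assumes the SqliteCommand API used in this example supports bound parameters via Parameters.AddWithValue, and it trades the batched multi-row VALUES list for one command per value:

            // Sketch: parameterized single-row insert (assumes Parameters.AddWithValue is available)
            using (SqliteCommand cmd = new SqliteCommand("INSERT OR IGNORE INTO crc (crc) VALUES ($crc)", dbc))
            {
                cmd.Parameters.AddWithValue("$crc", rom.CRC);
                cmd.ExecuteNonQuery();
            }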
Example #8
        public override void ProcessFeatures(Dictionary <string, Library.Help.Feature> features)
        {
            base.ProcessFeatures(features);

            // Get feature flags
            var updateFields = GetUpdateFields(features);
            var updateMode   = GetUpdateMode(features);

            // Normalize the extensions
            Header.AddExtension = (string.IsNullOrWhiteSpace(Header.AddExtension) || Header.AddExtension.StartsWith(".")
                ? Header.AddExtension
                : $".{Header.AddExtension}");
            Header.ReplaceExtension = (string.IsNullOrWhiteSpace(Header.ReplaceExtension) || Header.ReplaceExtension.StartsWith(".")
                ? Header.ReplaceExtension
                : $".{Header.ReplaceExtension}");

            // If we're in a special update mode and the names aren't set, set defaults
            if (updateMode != 0)
            {
                // Get the values that will be used
                if (string.IsNullOrWhiteSpace(Header.Date))
                {
                    Header.Date = DateTime.Now.ToString("yyyy-MM-dd");
                }

                if (string.IsNullOrWhiteSpace(Header.Name))
                {
                    Header.Name = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
                                  + (Header.Type == "SuperDAT" ? "-SuperDAT" : string.Empty)
                                  + (Cleaner.DedupeRoms != DedupeType.None ? "-deduped" : string.Empty);
                }

                if (string.IsNullOrWhiteSpace(Header.Description))
                {
                    Header.Description = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
                                         + (Header.Type == "SuperDAT" ? "-SuperDAT" : string.Empty)
                                         + (Cleaner.DedupeRoms != DedupeType.None ? " - deduped" : string.Empty);

                    if (!GetBoolean(features, NoAutomaticDateValue))
                    {
                        Header.Description += $" ({Header.Date})";
                    }
                }

                if (string.IsNullOrWhiteSpace(Header.Category) && updateMode != 0)
                {
                    Header.Category = "DiffDAT";
                }

                if (string.IsNullOrWhiteSpace(Header.Author))
                {
                    Header.Author = "SabreTools";
                }
            }

            // If no update fields are set, default to Names
            if (updateFields == null || updateFields.Count == 0)
            {
                updateFields = new List <Field>()
                {
                    Field.DatItem_Name
                };
            }

            // Ensure we only have files in the inputs
            List <ParentablePath> inputPaths = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);
            List <ParentablePath> basePaths  = DirectoryExtensions.GetFilesOnly(GetList(features, BaseDatListValue));

            // If we're in standard update mode, run through all of the inputs
            if (updateMode == UpdateMode.None)
            {
                // Loop through each input and update
                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                {
                    // Create a new base DatFile
                    DatFile datFile = DatFile.Create(Header);
                    logger.User($"Processing '{Path.GetFileName(inputPath.CurrentPath)}'");
                    datFile.Parse(inputPath, keep: true,
                                  keepext: datFile.Header.DatFormat.HasFlag(DatFormat.TSV) ||
                                  datFile.Header.DatFormat.HasFlag(DatFormat.CSV) ||
                                  datFile.Header.DatFormat.HasFlag(DatFormat.SSV));

                    // Perform additional processing steps
                    datFile.ApplyExtras(Extras);
                    datFile.ApplySplitting(GetSplitType(features), false);
                    datFile.ApplyFilter(Filter);
                    datFile.ApplyCleaning(Cleaner);

                    // Get the correct output path
                    string realOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                    // Try to output the file, overwriting only if it's not in the current directory
                    datFile.Write(realOutDir, overwrite: GetBoolean(features, InplaceValue));
                });

                return;
            }

            // Reverse the inputs if we're in one of the reverse modes
            if (updateMode.HasFlag(UpdateMode.DiffReverseCascade))
            {
                updateMode |= UpdateMode.DiffCascade;
                inputPaths.Reverse();
            }
            if (updateMode.HasFlag(UpdateMode.ReverseBaseReplace))
            {
                updateMode |= UpdateMode.BaseReplace;
                basePaths.Reverse();
            }

            // Create a DAT to capture inputs
            DatFile userInputDat = DatFile.Create(Header);

            // Populate using the correct set
            List <DatHeader> datHeaders;

            if (updateMode.HasFlag(UpdateMode.DiffAgainst) || updateMode.HasFlag(UpdateMode.BaseReplace))
            {
                datHeaders = userInputDat.PopulateUserData(basePaths);
            }
            else
            {
                datHeaders = userInputDat.PopulateUserData(inputPaths);
            }

            // Perform additional processing steps
            userInputDat.ApplyExtras(Extras);
            userInputDat.ApplySplitting(GetSplitType(features), false);
            userInputDat.ApplyFilter(Filter);
            userInputDat.ApplyCleaning(Cleaner);

            // Output only DatItems that are duplicated across inputs
            if (updateMode.HasFlag(UpdateMode.DiffDupesOnly))
            {
                DatFile dupeData = userInputDat.DiffDuplicates(inputPaths);

                InternalStopwatch watch = new InternalStopwatch("Outputting duplicate DAT");
                dupeData.Write(OutputDir, overwrite: false);
                watch.Stop();
            }

            // Output only DatItems that are not duplicated across inputs
            if (updateMode.HasFlag(UpdateMode.DiffNoDupesOnly))
            {
                DatFile outerDiffData = userInputDat.DiffNoDuplicates(inputPaths);

                InternalStopwatch watch = new InternalStopwatch("Outputting no duplicate DAT");
                outerDiffData.Write(OutputDir, overwrite: false);
                watch.Stop();
            }

            // Output only DatItems that are unique to each input
            if (updateMode.HasFlag(UpdateMode.DiffIndividualsOnly))
            {
                // Get all of the output DatFiles
                List <DatFile> datFiles = userInputDat.DiffIndividuals(inputPaths);

                // Loop through and output the new DatFiles
                InternalStopwatch watch = new InternalStopwatch("Outputting all individual DATs");

                Parallel.For(0, inputPaths.Count, Globals.ParallelOptions, j =>
                {
                    string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                    // Try to output the file
                    datFiles[j].Write(path, overwrite: GetBoolean(features, InplaceValue));
                });

                watch.Stop();
            }

            // Output cascaded diffs
            if (updateMode.HasFlag(UpdateMode.DiffCascade))
            {
                // Preprocess the DatHeaders
                Parallel.For(0, datHeaders.Count, Globals.ParallelOptions, j =>
                {
                    // If we're outputting to the runtime folder, rename
                    if (!GetBoolean(features, InplaceValue) && OutputDir == Environment.CurrentDirectory)
                    {
                        string innerpost = $" ({j} - {inputPaths[j].GetNormalizedFileName(true)} Only)";

                        datHeaders[j]              = userInputDat.Header;
                        datHeaders[j].FileName    += innerpost;
                        datHeaders[j].Name        += innerpost;
                        datHeaders[j].Description += innerpost;
                    }
                });

                // Get all of the output DatFiles
                List <DatFile> datFiles = userInputDat.DiffCascade(datHeaders);

                // Loop through and output the new DatFiles
                InternalStopwatch watch = new InternalStopwatch("Outputting all created DATs");

                int startIndex = GetBoolean(features, SkipFirstOutputValue) ? 1 : 0;
                Parallel.For(startIndex, inputPaths.Count, Globals.ParallelOptions, j =>
                {
                    string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                    // Try to output the file
                    datFiles[j].Write(path, overwrite: GetBoolean(features, InplaceValue));
                });

                watch.Stop();
            }

            // Output differences against a base DAT
            if (updateMode.HasFlag(UpdateMode.DiffAgainst))
            {
                // Loop through each input and diff against the base
                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                {
                    // Parse the path to a new DatFile
                    DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
                    repDat.Parse(inputPath, indexId: 1, keep: true);

                    // Perform additional processing steps
                    repDat.ApplyExtras(Extras);
                    repDat.ApplySplitting(GetSplitType(features), false);
                    repDat.ApplyFilter(Filter);
                    repDat.ApplyCleaning(Cleaner);

                    // Now replace the fields from the base DatFile
                    userInputDat.DiffAgainst(repDat, GetBoolean(features, ByGameValue));

                    // Finally output the diffed DatFile
                    string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
                    repDat.Write(interOutDir, overwrite: GetBoolean(features, InplaceValue));
                });
            }

            // Output DATs after replacing fields from a base DatFile
            if (updateMode.HasFlag(UpdateMode.BaseReplace))
            {
                // Loop through each input and apply the base DatFile
                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                {
                    // Parse the path to a new DatFile
                    DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
                    repDat.Parse(inputPath, indexId: 1, keep: true);

                    // Perform additional processing steps
                    repDat.ApplyExtras(Extras);
                    repDat.ApplySplitting(GetSplitType(features), false);
                    repDat.ApplyFilter(Filter);
                    repDat.ApplyCleaning(Cleaner);

                    // Now replace the fields from the base DatFile
                    userInputDat.BaseReplace(repDat, updateFields, GetBoolean(features, OnlySameValue));

                    // Finally output the replaced DatFile
                    string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
                    repDat.Write(interOutDir, overwrite: GetBoolean(features, InplaceValue));
                });
            }

            // Merge all input files and write
            // This has to be last due to the SuperDAT handling
            if (updateMode.HasFlag(UpdateMode.Merge))
            {
                // If we're in SuperDAT mode, prefix all games with their respective DATs
                if (string.Equals(userInputDat.Header.Type, "SuperDAT", StringComparison.OrdinalIgnoreCase))
                {
                    userInputDat.ApplySuperDAT(inputPaths);
                }

                userInputDat.Write(OutputDir);
            }
        }
Example #9
        public override void ProcessFeatures(Dictionary <string, Feature> features)
        {
            base.ProcessFeatures(features);

            // Get a list of files from the input datfiles
            var datfiles     = GetList(features, DatListValue);
            var datfilePaths = DirectoryExtensions.GetFilesOnly(datfiles);

            // Get feature flags
            TreatAsFile asFiles   = GetTreatAsFiles(features);
            bool        hashOnly  = GetBoolean(features, HashOnlyValue);
            bool        quickScan = GetBoolean(features, QuickValue);
            var         splitType = GetSplitType(features);

            // If we are in individual mode, process each DAT on its own
            if (GetBoolean(features, IndividualValue))
            {
                foreach (ParentablePath datfile in datfilePaths)
                {
                    // Parse in from the file
                    DatFile datdata = DatFile.Create();
                    datdata.Parse(datfile, int.MaxValue, keep: true);

                    // Perform additional processing steps
                    datdata.ApplyExtras(Extras);
                    datdata.ApplySplitting(splitType, true);
                    datdata.ApplyFilter(Filter);
                    datdata.ApplyCleaning(Cleaner);

                    // Set depot information
                    datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation;

                    // If we have overridden the header skipper, set it now
                    if (!string.IsNullOrEmpty(Header.HeaderSkipper))
                    {
                        datdata.Header.HeaderSkipper = Header.HeaderSkipper;
                    }

                    // If we have the depot flag, respect it
                    if (Header.InputDepot?.IsActive ?? false)
                    {
                        datdata.VerifyDepot(Inputs);
                    }
                    else
                    {
                        // Loop through and add the inputs to check against
                        logger.User("Processing files:\n");
                        foreach (string input in Inputs)
                        {
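                            // Quick scan limits hashing to CRCs; Hash.Standard presumably covers CRC/MD5/SHA-1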
                            datdata.PopulateFromDir(input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
                        }

                        datdata.VerifyGeneric(hashOnly);
                    }

                    // Now write out if there are any items left
                    datdata.WriteStatsToConsole();
                    datdata.Write(OutputDir);
                }
            }
            // Otherwise, process all DATs into the same output
            else
            {
                InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

                // Add all of the input DATs into one huge internal DAT
                DatFile datdata = DatFile.Create();
                foreach (ParentablePath datfile in datfilePaths)
                {
                    datdata.Parse(datfile, int.MaxValue, keep: true);
                }

                // Perform additional processing steps
                datdata.ApplyExtras(Extras);
                datdata.ApplySplitting(splitType, true);
                datdata.ApplyFilter(Filter);
                datdata.ApplyCleaning(Cleaner);

                // Set depot information
                datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation;

                // If we have overridden the header skipper, set it now
                if (!string.IsNullOrEmpty(Header.HeaderSkipper))
                {
                    datdata.Header.HeaderSkipper = Header.HeaderSkipper;
                }

                watch.Stop();

                // If we have the depot flag, respect it
                if (Header.InputDepot?.IsActive ?? false)
                {
                    datdata.VerifyDepot(Inputs);
                }
                else
                {
                    // Loop through and add the inputs to check against
                    logger.User("Processing files:\n");
                    foreach (string input in Inputs)
                    {
                        datdata.PopulateFromDir(input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
                    }

                    datdata.VerifyGeneric(hashOnly);
                }

                // Now write out if there are any items left
                datdata.WriteStatsToConsole();
                datdata.Write(OutputDir);
            }
        }
Example #10
        public override void ProcessFeatures(Dictionary <string, Library.Help.Feature> features)
        {
            base.ProcessFeatures(features);

            // Try to read each input as a batch run file
            foreach (string path in Inputs)
            {
                // If the file doesn't exist, warn but continue
                if (!File.Exists(path))
                {
                    logger.User($"{path} does not exist. Skipping...");
                    continue;
                }

                // Try to process the file now
                try
                {
                    // Every line is its own command
                    string[] lines = File.ReadAllLines(path);

                    // Each batch file has its own state
                    int     index           = 0;
                    DatFile datFile         = DatFile.Create();
                    string  outputDirectory = null;

                    // Process each command line
                    foreach (string line in lines)
                    {
                        // Skip empty lines
                        if (string.IsNullOrWhiteSpace(line))
                        {
                            continue;
                        }

                        // Skip lines that start with REM or #
                        if (line.StartsWith("REM") || line.StartsWith("#"))
                        {
                            continue;
                        }

                        // Read the command in, if possible
                        var command = BatchCommand.Create(line);
                        if (command == null)
                        {
                            logger.User($"Could not process {path} due to the following line: {line}");
                            break;
                        }

                        // Now switch on the command
                        logger.User($"Attempting to invoke {command.Name} with {(command.Arguments.Count == 0 ? "no arguments" : "the following argument(s): " + string.Join(", ", command.Arguments))}");
                        switch (command.Name.ToLowerInvariant())
                        {
                        // Set a header field
                        case "set":
                            if (command.Arguments.Count != 2)
                            {
                                logger.User($"Invoked {command.Name} and expected 2 arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: set(header.field, value);");
                                continue;
                            }

                            // Read in the individual arguments
                            Field  field = command.Arguments[0].AsField();
                            string value = command.Arguments[1];

                            // If we had an invalid input, log and continue
                            if (field == Field.NULL)
                            {
                                logger.User($"{command.Arguments[0]} was an invalid field name");
                                continue;
                            }

                            // Set the header field
                            datFile.Header.SetFields(new Dictionary <Field, string> {
                                [field] = value
                            });

                            break;

                        // Parse in new input file(s)
                        case "input":
                            if (command.Arguments.Count == 0)
                            {
                                logger.User($"Invoked {command.Name} but no arguments were provided");
                                logger.User("Usage: input(datpath, ...);");
                                continue;
                            }

                            // Get only files from inputs
                            List <ParentablePath> datFilePaths = DirectoryExtensions.GetFilesOnly(command.Arguments);

                            // Assume there could be multiple
                            foreach (ParentablePath datFilePath in datFilePaths)
                            {
                                datFile.Parse(datFilePath, index++);
                            }

                            break;

                        // Run DFD/D2D on path(s)
                        case "d2d":
                        case "dfd":
                            if (command.Arguments.Count == 0)
                            {
                                logger.User($"Invoked {command.Name} but no arguments were provided");
                                logger.User("Usage: d2d(path, ...);");
                                continue;
                            }

                            // TODO: Should any of the other options be added for D2D?

                            // Assume there could be multiple
                            foreach (string input in command.Arguments)
                            {
                                datFile.PopulateFromDir(input);
                            }

                            // TODO: We might not want to remove higher order hashes in the future
                            // TODO: We might not want to remove dates in the future
                            Cleaner dfdCleaner = new Cleaner()
                            {
                                ExcludeFields = Hash.DeepHashes.AsFields()
                            };
                            dfdCleaner.ExcludeFields.Add(Field.DatItem_Date);
                            datFile.ApplyCleaning(dfdCleaner);

                            break;

                        // Apply a filter
                        case "filter":
                            if (command.Arguments.Count < 2 || command.Arguments.Count > 4)
                            {
                                logger.User($"Invoked {command.Name} and expected between 2-4 arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: filter(field, value, [remove = false, [perMachine = false]]);");
                                continue;
                            }

                            // Read in the individual arguments
                            Field  filterField  = command.Arguments[0].AsField();
                            string filterValue  = command.Arguments[1];
                            bool?  filterRemove = false;
                            if (command.Arguments.Count >= 3)
                            {
                                filterRemove = command.Arguments[2].AsYesNo();
                            }
                            bool? filterPerMachine = false;
                            if (command.Arguments.Count >= 4)
                            {
                                filterPerMachine = command.Arguments[3].AsYesNo();
                            }

                            // If we had an invalid input, log and continue
                            if (filterField == Field.NULL)
                            {
                                logger.User($"{command.Arguments[0]} was an invalid field name");
                                continue;
                            }
                            if (filterRemove == null)
                            {
                                logger.User($"{command.Arguments[2]} was an invalid true/false value");
                                continue;
                            }
                            if (filterPerMachine == null)
                            {
                                logger.User($"{command.Arguments[3]} was an invalid true/false value");
                                continue;
                            }

                            // Create a filter with this new set of fields
                            Filter filter = new Filter();
                            filter.SetFilter(filterField, filterValue, filterRemove.Value);

                            // Apply the filter blindly
                            datFile.ApplyFilter(filter, filterPerMachine.Value);

                            // Cleanup after the filter
                            // TODO: We might not want to remove immediately
                            datFile.Items.ClearMarked();
                            datFile.Items.ClearEmpty();

                            break;

                        // Apply an extra INI
                        case "extra":
                            if (command.Arguments.Count != 2)
                            {
                                logger.User($"Invoked {command.Name} and expected 2 arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: extra(field, inipath);");
                                continue;
                            }

                            // Read in the individual arguments
                            Field  extraField = command.Arguments[0].AsField();
                            string extraFile  = command.Arguments[1];

                            // If we had an invalid input, log and continue
                            if (extraField == Field.NULL)
                            {
                                logger.User($"{command.Arguments[0]} was an invalid field name");
                                continue;
                            }
                            if (!File.Exists(command.Arguments[1]))
                            {
                                logger.User($"{command.Arguments[1]} was an invalid file name");
                                continue;
                            }

                            // Create the extra INI
                            ExtraIni     extraIni     = new ExtraIni();
                            ExtraIniItem extraIniItem = new ExtraIniItem();
                            extraIniItem.PopulateFromFile(extraFile);
                            extraIniItem.Field = extraField;
                            extraIni.Items.Add(extraIniItem);

                            // Apply the extra INI blindly
                            datFile.ApplyExtras(extraIni);

                            break;

                        // Apply internal split/merge
                        case "merge":
                            if (command.Arguments.Count != 1)
                            {
                                logger.User($"Invoked {command.Name} and expected 1 argument, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: merge(split|merged|nonmerged|full|device);");
                                continue;
                            }

                            // Read in the individual arguments
                            MergingFlag mergingFlag = command.Arguments[0].AsMergingFlag();

                            // If we had an invalid input, log and continue
                            if (mergingFlag == MergingFlag.None)
                            {
                                logger.User($"{command.Arguments[0]} was an invalid merging flag");
                                continue;
                            }

                            // Apply the merging flag
                            datFile.ApplySplitting(mergingFlag, false);

                            break;

                        // Apply description-as-name logic
                        case "descname":
                            if (command.Arguments.Count != 0)
                            {
                                logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: descname();");
                                continue;
                            }

                            // Apply the logic
                            datFile.MachineDescriptionToName();

                            break;

                        // Apply 1G1R
                        case "1g1r":
                            if (command.Arguments.Count == 0)
                            {
                                logger.User($"Invoked {command.Name} but no arguments were provided");
                                logger.User("Usage: 1g1r(region, ...);");
                                continue;
                            }

                            // Run the 1G1R functionality
                            datFile.OneGamePerRegion(command.Arguments);

                            break;

                        // Apply one rom per game (ORPG)
                        case "orpg":
                            if (command.Arguments.Count != 0)
                            {
                                logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: orpg();");
                                continue;
                            }

                            // Apply the logic
                            datFile.OneRomPerGame();

                            break;

                        // Remove a field
                        case "remove":
                            if (command.Arguments.Count == 0)
                            {
                                logger.User($"Invoked {command.Name} but no arguments were provided");
                                logger.User("Usage: remove(field, ...);");
                                continue;
                            }

                            // Run the removal functionality
                            datFile.RemoveFieldsFromItems(command.Arguments.Select(s => s.AsField()).ToList());

                            break;

                        // Apply scene date stripping
                        case "sds":
                            if (command.Arguments.Count != 0)
                            {
                                logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: sds();");
                                continue;
                            }

                            // Apply the logic
                            datFile.StripSceneDatesFromItems();

                            break;

                        // Set new output format(s)
                        case "format":
                            if (command.Arguments.Count == 0)
                            {
                                logger.User($"Invoked {command.Name} but no arguments were provided");
                                logger.User("Usage: format(datformat, ...);");
                                continue;
                            }

                            // Assume there could be multiple
                            datFile.Header.DatFormat = 0x00;
                            foreach (string format in command.Arguments)
                            {
                                datFile.Header.DatFormat |= format.AsDatFormat();
                            }

                            // If we had an invalid input, log and continue
                            if (datFile.Header.DatFormat == 0x00)
                            {
                                logger.User($"No valid output format found");
                                continue;
                            }

                            break;

                        // Set output directory
                        case "output":
                            if (command.Arguments.Count != 1)
                            {
                                logger.User($"Invoked {command.Name} and expected exactly 1 argument, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: output(outdir);");
                                continue;
                            }

                            // Only set the first as the output directory
                            outputDirectory = command.Arguments[0];
                            break;

                        // Write out the current DatFile
                        case "write":
                            if (command.Arguments.Count > 1)
                            {
                                logger.User($"Invoked {command.Name} and expected 0-1 arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: write([overwrite = true]);");
                                continue;
                            }

                            // Get overwrite value, if possible
                            bool? overwrite = true;
                            if (command.Arguments.Count == 1)
                            {
                                overwrite = command.Arguments[0].AsYesNo();
                            }

                            // If we had an invalid input, log and continue
                            if (overwrite == null)
                            {
                                logger.User($"{command.Arguments[0]} was an invalid true/false value");
                                continue;
                            }

                            // Write out the dat with the current state
                            datFile.Write(outputDirectory, overwrite: overwrite.Value);
                            break;

                        // Reset the internal state
                        case "reset":
                            if (command.Arguments.Count != 0)
                            {
                                logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: reset();");
                                continue;
                            }

                            // Reset all state variables
                            index           = 0;
                            datFile         = DatFile.Create();
                            outputDirectory = null;
                            break;

                        default:
                            logger.User($"Could not find a match for '{command.Name}'. Please see the help text for more details.");
                            break;
                        }
                    }
                }
                catch (Exception ex)
                {
                    logger.Error(ex, $"There was an exception processing {path}");
                    continue;
                }
            }
        }
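
Taken together, a batch file for this processor might look like the sketch below. Each command mirrors a usage string from the switch above; the concrete paths, field names, and format value are illustrative assumptions:

    # Lines starting with REM or # are skipped
    set(header.name, Example DAT);
    input(dats/base.dat);
    d2d(roms);
    filter(machine.name, ^bios, true);
    merge(nonmerged);
    descname();
    format(logiqx);
    output(out);
    write();
    reset();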
Example #11
        public override void ProcessFeatures(Dictionary <string, Feature> features)
        {
            base.ProcessFeatures(features);

            // Get feature flags
            TreatAsFile asFiles      = GetTreatAsFiles(features);
            bool        date         = GetBoolean(features, AddDateValue);
            bool        delete       = GetBoolean(features, DeleteValue);
            bool        inverse      = GetBoolean(features, InverseValue);
            bool        quickScan    = GetBoolean(features, QuickValue);
            bool        updateDat    = GetBoolean(features, UpdateDatValue);
            var         outputFormat = GetOutputFormat(features);

            // If we have the romba flag
            if (Header.OutputDepot?.IsActive == true)
            {
                // Update TorrentGzip output
                if (outputFormat == OutputFormat.TorrentGzip)
                {
                    outputFormat = OutputFormat.TorrentGzipRomba;
                }

                // Update TorrentXz output
                else if (outputFormat == OutputFormat.TorrentXZ)
                {
                    outputFormat = OutputFormat.TorrentXZRomba;
                }
            }

            // Get a list of files from the input datfiles
            var datfiles     = GetList(features, DatListValue);
            var datfilePaths = DirectoryExtensions.GetFilesOnly(datfiles);

            // If we are in individual mode, process each DAT on its own, appending the DAT name to the output dir
            if (GetBoolean(features, IndividualValue))
            {
                foreach (ParentablePath datfile in datfilePaths)
                {
                    DatFile datdata = DatFile.Create();
                    datdata.Parse(datfile, int.MaxValue, keep: true);

                    // Set depot information
                    datdata.Header.InputDepot  = Header.InputDepot.Clone() as DepotInformation;
                    datdata.Header.OutputDepot = Header.OutputDepot.Clone() as DepotInformation;

                    // If we have overridden the header skipper, set it now
                    if (!string.IsNullOrEmpty(Header.HeaderSkipper))
                    {
                        datdata.Header.HeaderSkipper = Header.HeaderSkipper;
                    }

                    // If we have the depot flag, respect it
                    bool success;
                    if (Header.InputDepot?.IsActive ?? false)
                    {
                        success = datdata.RebuildDepot(Inputs, Path.Combine(OutputDir, datdata.Header.FileName), date, delete, inverse, outputFormat);
                    }
                    else
                    {
                        success = datdata.RebuildGeneric(Inputs, Path.Combine(OutputDir, datdata.Header.FileName), quickScan, date, delete, inverse, outputFormat, asFiles);
                    }

                    // If we have a success and we're updating the DAT, write it out
                    if (success && updateDat)
                    {
                        datdata.Header.FileName    = $"fixDAT_{Header.FileName}";
                        datdata.Header.Name        = $"fixDAT_{Header.Name}";
                        datdata.Header.Description = $"fixDAT_{Header.Description}";
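                        // ClearMarked presumably drops items already matched during the rebuild, leaving only what is still missing in the fixDAT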
                        datdata.Items.ClearMarked();
                        datdata.Write(OutputDir);
                    }
                }
            }

            // Otherwise, process all DATs into the same output
            else
            {
                InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

                // Add all of the input DATs into one huge internal DAT
                DatFile datdata = DatFile.Create();
                foreach (ParentablePath datfile in datfilePaths)
                {
                    datdata.Parse(datfile, int.MaxValue, keep: true);
                }

                // Set depot information
                datdata.Header.InputDepot  = Header.InputDepot.Clone() as DepotInformation;
                datdata.Header.OutputDepot = Header.OutputDepot.Clone() as DepotInformation;

                // If we have overridden the header skipper, set it now
                if (!string.IsNullOrEmpty(Header.HeaderSkipper))
                {
                    datdata.Header.HeaderSkipper = Header.HeaderSkipper;
                }

                watch.Stop();

                // If we have the depot flag, respect it
                bool success;
                if (Header.InputDepot?.IsActive ?? false)
                {
                    success = datdata.RebuildDepot(Inputs, OutputDir, date, delete, inverse, outputFormat);
                }
                else
                {
                    success = datdata.RebuildGeneric(Inputs, OutputDir, quickScan, date, delete, inverse, outputFormat, asFiles);
                }

                // If we have a success and we're updating the DAT, write it out
                if (success && updateDat)
                {
                    datdata.Header.FileName    = $"fixDAT_{Header.FileName}";
                    datdata.Header.Name        = $"fixDAT_{Header.Name}";
                    datdata.Header.Description = $"fixDAT_{Header.Description}";
                    datdata.Items.ClearMarked();
                    datdata.Write(OutputDir);
                }
            }
        }