/// <summary>
        /// Wrap building all files from a set of DATs
        /// </summary>
        /// <param name="inputs">List of input DATs to rebuild from</param>
        /// <param name="outdat">Output directory root; defaults to "out" when blank</param>
        /// <param name="fixdatOnly">True to only fix dats and don't generate torrentzips, false otherwise</param>
        /// <param name="copy">True if files should be copied to output, false for rebuild</param>
        /// <param name="workers">How many workers to launch for the job, default from config</param>
        /// <param name="subworkers">How many subworkers to launch for each worker, default from config</param>
        private static void InitBuild(
            List<string> inputs,
            string outdat,
            bool fixdatOnly,
            bool copy,
            int workers,
            int subworkers)
        {
            // NOTE(review): fixdatOnly, workers, and subworkers are currently unused
            // in this body — confirm whether they were meant to be wired through

            // Verify the filenames against the known DAT folder
            Dictionary<string, string> foundDats = GetValidDats(inputs);

            // Ensure the output directory is set
            if (String.IsNullOrWhiteSpace(outdat))
            {
                outdat = "out";
            }

            // Now that we have the dictionary, we can loop through and output to a new folder for each
            foreach (string key in foundDats.Keys)
            {
                // Get the DAT file associated with the key
                DatFile datFile = new DatFile();
                datFile.Parse(Path.Combine(_dats, foundDats[key]), 0, 0);

                // Create the new output directory if it doesn't exist
                string outputFolder = Path.Combine(outdat, Path.GetFileNameWithoutExtension(foundDats[key]));
                Utilities.EnsureOutputDirectory(outputFolder, create: true);

                // Get all online depots (Item2 flags the depot as online)
                List<string> onlineDepots = _depots.Where(d => d.Value.Item2).Select(d => d.Key).ToList();

                // Rebuild from every online depot; copy mode writes TorrentGzip,
                // otherwise rebuilt files are written as TorrentZip
                datFile.RebuildDepot(onlineDepots, outputFolder, false /*date*/,
                                     false /*delete*/, false /*inverse*/, (copy ? OutputFormat.TorrentGzip : OutputFormat.TorrentZip), copy,
                                     false /*updateDat*/, null /*headerToCheckAgainst*/);
            }
        }
        /// <summary>
        /// Wrap verifying files using an input DAT
        /// </summary>
        /// <param name="datfiles">Names of the DATs to compare against</param>
        /// <param name="inputs">Input directories to compare against</param>
        /// <param name="depot">True if the input directories are treated as romba depots, false otherwise</param>
        /// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
        /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
        /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
        /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
        /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
        /// <param name="individual">True if DATs should be verified individually, false if they should be done in bulk</param>
        private static void InitVerify(
            List<string> datfiles,
            List<string> inputs,
            bool depot,
            bool hashOnly,
            bool quickScan,
            string headerToCheckAgainst,
            SplitType splitType,
            bool chdsAsFiles,
            bool individual)
        {
            // Get a list of files from the input datfiles
            datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);

            // If we are in individual mode, process each DAT on their own
            if (individual)
            {
                foreach (string datfile in datfiles)
                {
                    DatFile datdata = new DatFile();
                    datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);

                    // If we have the depot flag, respect it
                    if (depot)
                    {
                        datdata.VerifyDepot(inputs, headerToCheckAgainst);
                    }
                    else
                    {
                        datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles);
                    }
                }
            }
            // Otherwise, process all DATs into the same output
            else
            {
                InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

                // Add all of the input DATs into one huge internal DAT
                DatFile datdata = new DatFile();
                foreach (string datfile in datfiles)
                {
                    datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
                }

                watch.Stop();

                // If we have the depot flag, respect it
                if (depot)
                {
                    datdata.VerifyDepot(inputs, headerToCheckAgainst);
                }
                else
                {
                    datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles);
                }
            }
        }
        /// <summary>
        /// Wrap sorting files using an input DAT
        /// </summary>
        /// <param name="datfiles">Names of the DATs to compare against</param>
        /// <param name="inputs">List of input files/folders to check</param>
        /// <param name="outDir">Output directory to use to build to</param>
        /// <param name="depot">True if the input directories are treated as romba depots, false otherwise</param>
        /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
        /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
        /// <param name="delete">True if input files should be deleted, false otherwise</param>
        /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
        /// <param name="outputFormat">Output format that files should be written to</param>
        /// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
        /// <param name="sevenzip">Integer representing the archive handling level for 7z</param>
        /// <param name="gz">Integer representing the archive handling level for GZip</param>
        /// <param name="rar">Integer representing the archive handling level for RAR</param>
        /// <param name="zip">Integer representing the archive handling level for Zip</param>
        /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
        /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
        /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
        /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
        /// <param name="individual">True if DATs should be sorted individually, false if they should be done in bulk</param>
        private static void InitSort(
            List<string> datfiles,
            List<string> inputs,
            string outDir,
            bool depot,
            bool quickScan,
            bool date,
            bool delete,
            bool inverse,
            OutputFormat outputFormat,
            bool romba,
            int sevenzip,
            int gz,
            int rar,
            int zip,
            bool updateDat,
            string headerToCheckAgainst,
            SplitType splitType,
            bool chdsAsFiles,
            bool individual)
        {
            // Translate the per-format numbers into a single archive scanning level
            ArchiveScanLevel scanLevel = Utilities.GetArchiveScanLevelFromNumbers(sevenzip, gz, rar, zip);

            // Reduce the datfile inputs down to actual files only
            datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);

            if (!individual)
            {
                // Bulk mode: merge every DAT into one internal DAT, then rebuild once
                InternalStopwatch stopwatch = new InternalStopwatch("Populating internal DAT");

                DatFile combined = new DatFile();
                foreach (string datfile in datfiles)
                {
                    combined.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
                }

                stopwatch.Stop();

                // Depot inputs and generic inputs take different rebuild paths
                if (depot)
                {
                    combined.RebuildDepot(inputs, outDir, date, delete, inverse, outputFormat, romba,
                                          updateDat, headerToCheckAgainst);
                }
                else
                {
                    combined.RebuildGeneric(inputs, outDir, quickScan, date, delete, inverse, outputFormat, romba, scanLevel,
                                            updateDat, headerToCheckAgainst, chdsAsFiles);
                }
            }
            else
            {
                // Individual mode: every DAT rebuilds into its own subdirectory named after the DAT
                foreach (string datfile in datfiles)
                {
                    DatFile single = new DatFile();
                    single.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);

                    string destination = Path.Combine(outDir, single.FileName);
                    if (depot)
                    {
                        single.RebuildDepot(inputs, destination, date, delete, inverse, outputFormat, romba,
                                            updateDat, headerToCheckAgainst);
                    }
                    else
                    {
                        single.RebuildGeneric(inputs, destination, quickScan, date, delete, inverse, outputFormat, romba, scanLevel,
                                              updateDat, headerToCheckAgainst, chdsAsFiles);
                    }
                }
            }
        }
        /// <summary>
        /// Wrap adding files to the depots
        /// </summary>
        /// <param name="inputs">List of input folders to use</param>
        /// <param name="onlyNeeded">True if only files whose hashes are already indexed in the database are added, false otherwise</param>
        /// <param name="resume">Resume a previously interrupted operation from the specified path</param>
        /// <param name="includeZips">flag value == 0 means: add Zip files themselves into the depot in addition to their contents, flag value == 2 means add Zip files themselves but don't add content</param>
        /// <param name="workers">How many workers to launch for the job, default from config</param>
        /// <param name="includeGZips">flag value == 0 means: add GZip files themselves into the depot in addition to their contents, flag value == 2 means add GZip files themselves but don't add content</param>
        /// <param name="include7Zips">flag value == 0 means: add 7Zip files themselves into the depot in addition to their contents, flag value == 2 means add 7Zip files themselves but don't add content</param>
        /// <param name="skipInitialScan">True to skip the initial scan of the files to determine amount of work, false otherwise</param>
        /// <param name="useGolangZip">True to use go zip implementation instead of zlib, false otherwise</param>
        /// <param name="noDb">True to archive into depot but do not touch DB index and ignore only-needed flag, false otherwise</param>
        /// TODO: Add ability to update .romba files with proper size AND use the correct depot if it fills up
        /// TODO: Add ability correctly to mark which depot the files are being rebuilt to in the DB
        private static void InitArchive(
            List<string> inputs,
            bool onlyNeeded,
            string resume,
            int includeZips,
            int workers,
            int includeGZips,
            int include7Zips,
            bool skipInitialScan,
            bool useGolangZip, // Obsolete
            bool noDb)
        {
            // First we want to get just all directories from the inputs
            List<string> onlyDirs = new List<string>();
            foreach (string input in inputs)
            {
                if (Directory.Exists(input))
                {
                    onlyDirs.Add(Path.GetFullPath(input));
                }
            }

            // Then process all of the input directories into an internal DAT
            DatFile df = new DatFile();
            foreach (string dir in onlyDirs)
            {
                // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
                df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
                df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
            }

            // Create an empty Dat for files that need to be rebuilt
            DatFile need = new DatFile();

            // Base INSERT statements; value tuples get appended while scanning, and any
            // query still equal to its base is skipped entirely at execution time
            string crcquery     = "INSERT OR IGNORE INTO crc (crc) VALUES";
            string md5query     = "INSERT OR IGNORE INTO md5 (md5) VALUES";
            string sha1query    = "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES";
            string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
            string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";

            // Open the database connection; 'using' guarantees it is closed even on error
            // (the original leaked the connection, commands, and readers)
            using (SqliteConnection dbc = new SqliteConnection(_connectionString))
            {
                dbc.Open();

                // Now that we have the Dats, add the files to the database
                foreach (string key in df.Keys)
                {
                    List<DatItem> datItems = df[key];
                    foreach (Rom rom in datItems)
                    {
                        // If we only want needed files, skip any rom whose hashes are not already indexed
                        if (onlyNeeded && !noDb && !HashesInDatabase(dbc, rom))
                        {
                            continue;
                        }

                        // Queue the hash inserts unless the database is bypassed
                        if (!noDb)
                        {
                            AppendHashValues(rom, ref crcquery, ref md5query, ref sha1query, ref crcsha1query, ref md5sha1query);
                        }

                        // Add to the Dat
                        need.Add(key, rom);
                    }
                }

                // Now run the queries, if they're populated
                ExecuteIfPopulated(dbc, crcquery, "INSERT OR IGNORE INTO crc (crc) VALUES");
                ExecuteIfPopulated(dbc, md5query, "INSERT OR IGNORE INTO md5 (md5) VALUES");
                ExecuteIfPopulated(dbc, sha1query, "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES");
                ExecuteIfPopulated(dbc, crcsha1query, "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES");
                ExecuteIfPopulated(dbc, md5sha1query, "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES");
            }

            // Create the sorting object to use and rebuild the needed files
            ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(include7Zips, includeGZips, 2, includeZips);

            need.RebuildGeneric(onlyDirs, _depots.Keys.ToList()[0], false /*quickScan*/, false /*date*/,
                                false /*delete*/, false /*inverse*/, OutputFormat.TorrentGzip, true /*romba*/, asl, false /*updateDat*/,
                                null /*headerToCheckAgainst*/, true /* chdsAsFiles */);
        }

        /// <summary>
        /// Check whether any of a rom's CRC, MD5, or SHA-1 hashes already exist in the database
        /// </summary>
        /// <param name="dbc">Open database connection to query against</param>
        /// <param name="rom">Rom whose hashes should be looked up</param>
        /// <returns>True if at least one hash row matched, false otherwise</returns>
        private static bool HashesInDatabase(SqliteConnection dbc, Rom rom)
        {
            // Parameterized instead of string-built SQL
            // NOTE(review): assumes the Sqlite provider supports AddWithValue — confirm for this project's driver
            string query = "SELECT * FROM crcsha1 JOIN md5sha1 ON crcsha1.sha1=md5sha1.sha1"
                           + " WHERE crcsha1.crc=@crc OR md5sha1.md5=@md5 OR md5sha1.sha1=@sha1";
            using (SqliteCommand slc = new SqliteCommand(query, dbc))
            {
                slc.Parameters.AddWithValue("@crc", rom.CRC ?? String.Empty);
                slc.Parameters.AddWithValue("@md5", rom.MD5 ?? String.Empty);
                slc.Parameters.AddWithValue("@sha1", rom.SHA1 ?? String.Empty);
                using (SqliteDataReader sldr = slc.ExecuteReader())
                {
                    return sldr.HasRows;
                }
            }
        }

        /// <summary>
        /// Append a rom's non-empty hashes to the batched INSERT statements
        /// </summary>
        /// <param name="rom">Rom whose hashes should be appended</param>
        /// <param name="crcquery">Batched crc INSERT under construction</param>
        /// <param name="md5query">Batched md5 INSERT under construction</param>
        /// <param name="sha1query">Batched sha1 INSERT under construction</param>
        /// <param name="crcsha1query">Batched crc-to-sha1 INSERT under construction</param>
        /// <param name="md5sha1query">Batched md5-to-sha1 INSERT under construction</param>
        private static void AppendHashValues(Rom rom, ref string crcquery, ref string md5query,
            ref string sha1query, ref string crcsha1query, ref string md5sha1query)
        {
            if (!String.IsNullOrWhiteSpace(rom.CRC))
            {
                crcquery += " (\"" + rom.CRC + "\"),";
            }
            if (!String.IsNullOrWhiteSpace(rom.MD5))
            {
                md5query += " (\"" + rom.MD5 + "\"),";
            }
            if (!String.IsNullOrWhiteSpace(rom.SHA1))
            {
                // All new sha1 rows are attributed to the first configured depot
                sha1query += " (\"" + rom.SHA1 + "\", \"" + _depots.Keys.ToList()[0] + "\"),";

                // The crc/md5 cross-reference rows require a sha1 to pair with
                if (!String.IsNullOrWhiteSpace(rom.CRC))
                {
                    crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),";
                }
                if (!String.IsNullOrWhiteSpace(rom.MD5))
                {
                    md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),";
                }
            }
        }

        /// <summary>
        /// Execute a batched INSERT only if any values were appended past its base statement
        /// </summary>
        /// <param name="dbc">Open database connection to execute against</param>
        /// <param name="query">Fully built statement (may still equal its base)</param>
        /// <param name="baseQuery">The empty base statement to compare against</param>
        private static void ExecuteIfPopulated(SqliteConnection dbc, string query, string baseQuery)
        {
            if (query == baseQuery)
            {
                return;
            }

            using (SqliteCommand slc = new SqliteCommand(query.TrimEnd(','), dbc))
            {
                slc.ExecuteNonQuery();
            }
        }