Example #1
        public override bool ProcessFeatures(Dictionary <string, Feature> features)
        {
            // If the base fails, just fail out
            if (!base.ProcessFeatures(features))
            {
                return(false);
            }

            // Get feature flags
            string name        = GetString(features, NameStringValue);
            string description = GetString(features, DescriptionStringValue);
            string source      = GetString(features, SourceStringValue);
            string outdat      = GetString(features, OutStringValue);

            // Ensure the output directory
            outdat.Ensure(create: true);

            // Check that all required directories exist
            if (!Directory.Exists(source))
            {
                logger.Error($"File '{source}' does not exist!");
                return(false);
            }

            // Create and write the encapsulating datfile
            DatFile datfile = DatFile.Create();

            datfile.Header.Name        = string.IsNullOrWhiteSpace(name) ? "untitled" : name;
            datfile.Header.Description = description;
            DatFromDir.PopulateFromDir(datfile, source, asFiles: TreatAsFile.NonArchive, hashes: Hash.Standard);
            Writer.Write(datfile, outdat);
            return(true);
        }
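
A minimal call-site sketch for the directory-to-DAT flow above, assuming the same SabreTools helpers shown in the example (DatFile, DatFromDir, Writer, TreatAsFile, Hash) are in scope; the paths and names are illustrative:

        // Hypothetical usage: scan a directory into a new DAT and write it out
        DatFile sketch = DatFile.Create();
        sketch.Header.Name        = "example";
        sketch.Header.Description = "DAT built from a directory scan";
        DatFromDir.PopulateFromDir(sketch, @"C:\roms", asFiles: TreatAsFile.NonArchive, hashes: Hash.Standard);
        Writer.Write(sketch, @"C:\out");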
Example #2
        /// <summary>
        /// Output duplicate item diff
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">List of inputs to write out from</param>
        public static DatFile DiffDuplicates(DatFile datFile, List <ParentablePath> inputs)
        {
            InternalStopwatch watch = new InternalStopwatch("Initializing duplicate DAT");

            // Fill in any information not in the base DAT
            if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
            {
                datFile.Header.FileName = "All DATs";
            }

            if (string.IsNullOrWhiteSpace(datFile.Header.Name))
            {
                datFile.Header.Name = "All DATs";
            }

            if (string.IsNullOrWhiteSpace(datFile.Header.Description))
            {
                datFile.Header.Description = "All DATs";
            }

            string  post     = " (Duplicates)";
            DatFile dupeData = DatFile.Create(datFile.Header);

            dupeData.Header.FileName    += post;
            dupeData.Header.Name        += post;
            dupeData.Header.Description += post;
            dupeData.Items = new ItemDictionary();

            watch.Stop();

            // Now, loop through the dictionary and populate the correct DATs
            watch.Start("Populating duplicate DAT");

            Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
            {
                ConcurrentList <DatItem> items = DatItem.Merge(datFile.Items[key]);

                // If the rom list is empty or null, just skip it
                if (items == null || items.Count == 0)
                {
                    return;
                }

                // Loop through and add the items correctly
                foreach (DatItem item in items)
                {
                    if (item.DupeType.HasFlag(DupeType.External))
                    {
                        DatItem newrom       = item.Clone() as DatItem;
                        newrom.Machine.Name += $" ({Path.GetFileNameWithoutExtension(inputs[item.Source.Index].CurrentPath)})";

                        dupeData.Items.Add(key, newrom);
                    }
                }
            });

            watch.Stop();

            return(dupeData);
        }
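
A sketch of driving DiffDuplicates, using the Parser and Writer helpers from the other examples on this page. Merging both inputs into a single DatFile with distinct source indices before diffing is my assumption about the intended call pattern (the indices are what `item.Source.Index` resolves against); the paths are illustrative:

        // Hypothetical usage: merge two DATs, then extract externally-duplicated items
        List<ParentablePath> inputs = new List<ParentablePath>
        {
            new ParentablePath(@"C:\dats\first.dat"),
            new ParentablePath(@"C:\dats\second.dat"),
        };
        DatFile merged = DatFile.Create();
        Parser.ParseInto(merged, inputs[0], indexId: 0);
        Parser.ParseInto(merged, inputs[1], indexId: 1);
        DatFile dupes = DiffDuplicates(merged, inputs);
        Writer.Write(dupes, @"C:\out");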
Example #3
        /// <summary>
        /// Split a DAT by type of DatItem
        /// </summary>
        /// <param name="datFile">Current DatFile object to split</param>
        /// <returns>Dictionary of ItemType to DatFile mappings</returns>
        public static Dictionary <ItemType, DatFile> SplitByType(DatFile datFile)
        {
            // Create each of the respective output DATs
            InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by item type");

            // Create the set of type-to-dat mappings
            Dictionary <ItemType, DatFile> typeDats = new Dictionary <ItemType, DatFile>();

            // We only care about a subset of types
            List <ItemType> outputTypes = new List <ItemType>
            {
                ItemType.Disk,
                ItemType.Media,
                ItemType.Rom,
                ItemType.Sample,
            };

            // Setup all of the DatFiles
            foreach (ItemType itemType in outputTypes)
            {
                typeDats[itemType] = DatFile.Create(datFile.Header.CloneStandard());
                typeDats[itemType].Header.FileName    += $" ({itemType})";
                typeDats[itemType].Header.Name        += $" ({itemType})";
                typeDats[itemType].Header.Description += $" ({itemType})";
            }

            // Now populate each of the DAT objects in turn
            Parallel.ForEach(outputTypes, Globals.ParallelOptions, itemType =>
            {
                FillWithItemType(datFile, typeDats[itemType], itemType);
            });

            watch.Stop();
            return(typeDats);
        }
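
A short consumption sketch, assuming a populated `datFile` and the Writer helper used elsewhere in these examples; the output path is illustrative:

        // Hypothetical usage: split by item type, then write one DAT per type
        Dictionary<ItemType, DatFile> byType = SplitByType(datFile);
        foreach (KeyValuePair<ItemType, DatFile> pair in byType)
        {
            Writer.Write(pair.Value, @"C:\out");
        }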
Example #4
        /// <summary>
        /// Populate from multiple paths while returning the individual headers
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">Paths to DATs to parse</param>
        /// <returns>List of DatHeader objects representing headers</returns>
        public static List <DatHeader> PopulateUserData(DatFile datFile, List <ParentablePath> inputs)
        {
            DatFile[]         datFiles = new DatFile[inputs.Count];
            InternalStopwatch watch    = new InternalStopwatch("Processing individual DATs");

            // Parse all of the DATs into their own DatFiles in the array
            Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
            {
                var input = inputs[i];
                logger.User($"Adding DAT: {input.CurrentPath}");
                datFiles[i] = DatFile.Create(datFile.Header.CloneFiltering());
                Parser.ParseInto(datFiles[i], input, i, keep: true);
            });

            watch.Stop();

            watch.Start("Populating internal DAT");
            for (int i = 0; i < inputs.Count; i++)
            {
                AddFromExisting(datFile, datFiles[i], true);
            }

            watch.Stop();

            return(datFiles.Select(d => d.Header).ToList());
        }
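
A sketch of calling PopulateUserData; the input paths are illustrative, and the empty DatFile serving as the merge target mirrors how Example #6 creates one:

        // Hypothetical usage: merge several DATs into one while keeping their headers
        List<ParentablePath> paths = new List<ParentablePath>
        {
            new ParentablePath(@"C:\dats\a.dat"),
            new ParentablePath(@"C:\dats\b.dat"),
        };
        DatFile merged = DatFile.Create();
        List<DatHeader> headers = PopulateUserData(merged, paths);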
Example #5
        public override void ProcessFeatures(Dictionary <string, Feature> features)
        {
            base.ProcessFeatures(features);

            // Get feature flags
            string name        = GetString(features, NameStringValue);
            string description = GetString(features, DescriptionStringValue);
            string source      = GetString(features, SourceStringValue);
            string outdat      = GetString(features, OutStringValue);

            // Ensure the output directory
            DirectoryExtensions.Ensure(outdat, create: true);

            // Check that all required directories exist
            if (!Directory.Exists(source))
            {
                logger.Error($"File '{source}' does not exist!");
                return;
            }

            // Create and write the encapsulating datfile
            DatFile datfile = DatFile.Create();

            datfile.Header.Name        = string.IsNullOrWhiteSpace(name) ? "untitled" : name;
            datfile.Header.Description = description;
            datfile.PopulateFromDir(source, asFiles: TreatAsFile.NonArchive);
            datfile.ApplyCleaning(new Cleaner()
            {
                ExcludeFields = Hash.DeepHashes.AsFields()
            });
            datfile.Write(outdat);
        }
Example #6
        /// <summary>
        /// Create a DatFile and parse a file into it
        /// </summary>
        /// <param name="filename">Name of the file to be parsed</param>
        /// <param name="statsOnly">True to only add item statistics while parsing, false otherwise</param>
        /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
        public static DatFile CreateAndParse(string filename, bool statsOnly = false, bool throwOnError = false)
        {
            DatFile datFile = DatFile.Create();

            ParseInto(datFile, new ParentablePath(filename), statsOnly: statsOnly, throwOnError: throwOnError);
            return(datFile);
        }
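
Usage is a one-liner; opting to log rather than rethrow via `throwOnError: false` is shown here as an assumption about typical use, and the path is illustrative:

        // Hypothetical usage: parse a single DAT, logging rather than throwing on errors
        DatFile parsed = CreateAndParse(@"C:\dats\example.dat", throwOnError: false);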
Example #7
        /// <summary>
        /// Output cascading diffs
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="datHeaders">Dat headers used optionally</param>
        /// <returns>List of DatFiles representing the individually indexed items</returns>
        public static List <DatFile> DiffCascade(DatFile datFile, List <DatHeader> datHeaders)
        {
            // Create a list of DatData objects representing output files
            List <DatFile> outDats = new List <DatFile>();

            // Ensure the current DatFile is sorted optimally
            datFile.Items.BucketBy(ItemKey.CRC, DedupeType.None);

            // Loop through each of the inputs and get or create a new DatData object
            InternalStopwatch watch = new InternalStopwatch("Initializing and filling all output DATs");

            // Create the DatFiles from the set of headers
            DatFile[] outDatsArray = new DatFile[datHeaders.Count];
            Parallel.For(0, datHeaders.Count, Globals.ParallelOptions, j =>
            {
                DatFile diffData = DatFile.Create(datHeaders[j]);
                diffData.Items   = new ItemDictionary();
                FillWithSourceIndex(datFile, diffData, j);
                outDatsArray[j] = diffData;
            });

            outDats = outDatsArray.ToList();
            watch.Stop();

            return(outDats);
        }
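
A sketch that pairs DiffCascade with PopulateUserData (Example #4), which supplies the per-input headers the cascade is built from; the write loop assumes Writer.Write as elsewhere:

        // Hypothetical usage: cascade-diff a merged DAT back into per-input DATs
        List<DatHeader> headers = PopulateUserData(datFile, inputs);
        List<DatFile> cascaded = DiffCascade(datFile, headers);
        foreach (DatFile diff in cascaded)
        {
            Writer.Write(diff, @"C:\out");
        }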
Example #8
        /// <summary>
        /// Split a DAT by input extensions
        /// </summary>
        /// <param name="datFile">Current DatFile object to split</param>
        /// <param name="extA">List of extensions to split on (first DAT)</param>
        /// <param name="extB">List of extensions to split on (second DAT)</param>
        /// <returns>Extension Set A and Extension Set B DatFiles</returns>
        public static (DatFile extADat, DatFile extBDat) SplitByExtension(DatFile datFile, List <string> extA, List <string> extB)
        {
            // If roms is empty, return false
            if (datFile.Items.TotalCount == 0)
            {
                return(null, null);
            }

            InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by extension");

            // Make sure all of the extensions don't have a dot at the beginning
            var    newExtA       = extA.Select(s => s.TrimStart('.').ToLowerInvariant());
            string newExtAString = string.Join(",", newExtA);

            var    newExtB       = extB.Select(s => s.TrimStart('.').ToLowerInvariant());
            string newExtBString = string.Join(",", newExtB);

            // Set all of the appropriate outputs for each of the subsets
            DatFile extADat = DatFile.Create(datFile.Header.CloneStandard());

            extADat.Header.FileName    += $" ({newExtAString})";
            extADat.Header.Name        += $" ({newExtAString})";
            extADat.Header.Description += $" ({newExtAString})";

            DatFile extBDat = DatFile.Create(datFile.Header.CloneStandard());

            extBDat.Header.FileName    += $" ({newExtBString})";
            extBDat.Header.Name        += $" ({newExtBString})";
            extBDat.Header.Description += $" ({newExtBString})";

            // Now separate the roms accordingly
            Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
            {
                ConcurrentList <DatItem> items = datFile.Items[key];
                foreach (DatItem item in items)
                {
                    if (newExtA.Contains((item.GetName() ?? string.Empty).GetNormalizedExtension()))
                    {
                        extADat.Items.Add(key, item);
                    }
                    else if (newExtB.Contains((item.GetName() ?? string.Empty).GetNormalizedExtension()))
                    {
                        extBDat.Items.Add(key, item);
                    }
                    else
                    {
                        extADat.Items.Add(key, item);
                        extBDat.Items.Add(key, item);
                    }
                }
            });

            // Then return both DatFiles
            watch.Stop();
            return(extADat, extBDat);
        }
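
A sketch of an extension split; the extension lists are illustrative (note the method trims leading dots and lowercases itself, and items matching neither list land in both outputs):

        // Hypothetical usage: split disk-image entries from ROM entries by extension
        (DatFile extADat, DatFile extBDat) = SplitByExtension(
            datFile,
            new List<string> { "chd" },
            new List<string> { "bin", "rom" });
        Writer.Write(extADat, @"C:\out");
        Writer.Write(extBDat, @"C:\out");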
Example #9
        /// <summary>
        /// Split a DAT by size of Rom
        /// </summary>
        /// <param name="datFile">Current DatFile object to split</param>
        /// <param name="radix">Long value representing the split point</param>
        /// <returns>Less Than and Greater Than DatFiles</returns>
        public static (DatFile lessThan, DatFile greaterThan) SplitBySize(DatFile datFile, long radix)
        {
            // Create each of the respective output DATs
            InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by size");

            DatFile lessThan = DatFile.Create(datFile.Header.CloneStandard());

            lessThan.Header.FileName    += $" (less than {radix})";
            lessThan.Header.Name        += $" (less than {radix})";
            lessThan.Header.Description += $" (less than {radix})";

            DatFile greaterThan = DatFile.Create(datFile.Header.CloneStandard());

            greaterThan.Header.FileName    += $" (equal-greater than {radix})";
            greaterThan.Header.Name        += $" (equal-greater than {radix})";
            greaterThan.Header.Description += $" (equal-greater than {radix})";

            // Now populate each of the DAT objects in turn
            Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
            {
                ConcurrentList <DatItem> items = datFile.Items[key];
                foreach (DatItem item in items)
                {
                    // If the file is not a Rom, it automatically goes in the "lesser" dat
                    if (item.ItemType != ItemType.Rom)
                    {
                        lessThan.Items.Add(key, item);
                    }

                    // If the file is a Rom and has no size, put it in the "lesser" dat
                    else if (item.ItemType == ItemType.Rom && (item as Rom).Size == null)
                    {
                        lessThan.Items.Add(key, item);
                    }

                    // If the file is a Rom and less than the radix, put it in the "lesser" dat
                    else if (item.ItemType == ItemType.Rom && (item as Rom).Size < radix)
                    {
                        lessThan.Items.Add(key, item);
                    }

                    // If the file is a Rom and greater than or equal to the radix, put it in the "greater" dat
                    else if (item.ItemType == ItemType.Rom && (item as Rom).Size >= radix)
                    {
                        greaterThan.Items.Add(key, item);
                    }
                }
            });

            // Then return both DatFiles
            watch.Stop();
            return(lessThan, greaterThan);
        }
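
A size-split sketch; the 1 MiB radix is arbitrary, and per the branches above, non-Rom items and Roms with no size always land in the "less than" half:

        // Hypothetical usage: split at 1 MiB and write both halves
        (DatFile lessThan, DatFile greaterThan) = SplitBySize(datFile, 1024 * 1024);
        Writer.Write(lessThan, @"C:\out");
        Writer.Write(greaterThan, @"C:\out");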
Example #10
        /// <summary>
        /// Parse a DAT and return all found games and roms within
        /// </summary>
        /// <param name="datFile">Current DatFile object to add to</param>
        /// <param name="input">Name of the file to be parsed</param>
        /// <param name="indexId">Index ID for the DAT</param>
        /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
        /// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
        /// <param name="quotes">True if quotes are assumed in supported types (default), false otherwise</param>
        /// <param name="statsOnly">True to only add item statistics while parsing, false otherwise</param>
        /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
        public static void ParseInto(
            DatFile datFile,
            ParentablePath input,
            int indexId       = 0,
            bool keep         = false,
            bool keepext      = false,
            bool quotes       = true,
            bool statsOnly    = false,
            bool throwOnError = true)
        {
            // Get the current path from the filename
            string currentPath = input.CurrentPath;

            // Check the file extension first as a safeguard
            if (!Utilities.HasValidDatExtension(currentPath))
            {
                return;
            }

            // If the output filename isn't set already, get the internal filename
            datFile.Header.FileName = string.IsNullOrWhiteSpace(datFile.Header.FileName)
                ? (keepext
                    ? Path.GetFileName(currentPath)
                    : Path.GetFileNameWithoutExtension(currentPath))
                : datFile.Header.FileName;

            // If the output type isn't set already, get the internal output type
            DatFormat currentPathFormat = GetDatFormat(currentPath);

            datFile.Header.DatFormat = datFile.Header.DatFormat == 0 ? currentPathFormat : datFile.Header.DatFormat;
            datFile.Items.SetBucketedBy(ItemKey.CRC); // Setting this because it can reduce issues later

            InternalStopwatch watch = new InternalStopwatch($"Parsing '{currentPath}' into internal DAT");

            // Now parse the correct type of DAT
            try
            {
                var parsingDatFile = DatFile.Create(currentPathFormat, datFile, quotes);
                parsingDatFile?.ParseFile(currentPath, indexId, keep, statsOnly: statsOnly, throwOnError: throwOnError);
            }
            catch (Exception ex) when(!throwOnError)
            {
                logger.Error(ex, $"Error with file '{currentPath}'");
            }

            watch.Stop();
        }
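
A sketch of accumulating multiple files into one DatFile under distinct index IDs, which is what lets DiffCascade and DiffDuplicates attribute items back to their source input; the paths are illustrative:

        // Hypothetical usage: parse two DATs into the same DatFile under different indices
        DatFile combined = DatFile.Create();
        ParseInto(combined, new ParentablePath(@"C:\dats\a.dat"), indexId: 0, keep: true);
        ParseInto(combined, new ParentablePath(@"C:\dats\b.dat"), indexId: 1, keep: true);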
Example #11
        public override bool ProcessFeatures(Dictionary <string, Feature> features)
        {
            // If the base fails, just fail out
            if (!base.ProcessFeatures(features))
            {
                return(false);
            }

            // Get feature flags
            string name        = GetString(features, NameStringValue);
            string description = GetString(features, DescriptionStringValue);
            string newdat      = GetString(features, NewStringValue);
            string olddat      = GetString(features, OldStringValue);
            string outdat      = GetString(features, OutStringValue);

            // Ensure the output directory
            outdat.Ensure(create: true);

            // Check that all required files exist
            if (!File.Exists(olddat))
            {
                logger.Error($"File '{olddat}' does not exist!");
                return(false);
            }

            if (!File.Exists(newdat))
            {
                logger.Error($"File '{newdat}' does not exist!");
                return(false);
            }

            // Create the encapsulating datfile
            DatFile datfile = DatFile.Create();

            datfile.Header.Name        = name;
            datfile.Header.Description = description;
            Parser.ParseInto(datfile, olddat);

            // Diff against the new datfile
            DatFile intDat = Parser.CreateAndParse(newdat);

            DatFileTool.DiffAgainst(datfile, intDat, false);
            Writer.Write(intDat, outdat);
            return(true);
        }
Example #12
        /// <summary>
        /// Split a SuperDAT by lowest available directory level
        /// </summary>
        /// <param name="datFile">Current DatFile object to split</param>
        /// <param name="outDir">Name of the directory to write the DATs out to</param>
        /// <param name="shortname">True if short names should be used, false otherwise</param>
        /// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
        /// <returns>True if split succeeded, false otherwise</returns>
        public static bool SplitByLevel(DatFile datFile, string outDir, bool shortname, bool basedat)
        {
            InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by level");

            // First, bucket by games so that we can do the right thing
            datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, lower: false, norename: true);

            // Create a temporary DAT to add things to
            DatFile tempDat = DatFile.Create(datFile.Header);

            tempDat.Header.Name = null;

            // Sort the input keys
            List <string> keys = datFile.Items.Keys.ToList();

            keys.Sort(SplitByLevelSort);

            // Then, we loop over the games
            Parallel.ForEach(keys, Globals.ParallelOptions, key =>
            {
                // Here, the key is the name of the game to be used for comparison
                if (tempDat.Header.Name != null && tempDat.Header.Name != Path.GetDirectoryName(key))
                {
                    // Reset the DAT for the next items
                    tempDat             = DatFile.Create(datFile.Header);
                    tempDat.Header.Name = null;
                }

                // Clean the input list and set all games to be pathless
                ConcurrentList <DatItem> items                 = datFile.Items[key];
                items.ForEach(item => item.Machine.Name        = Path.GetFileName(item.Machine.Name));
                items.ForEach(item => item.Machine.Description = Path.GetFileName(item.Machine.Description));

                // Now add the game to the output DAT
                tempDat.Items.AddRange(key, items);

                // Then set the DAT name to be the parent directory name
                tempDat.Header.Name = Path.GetDirectoryName(key);
            });

            watch.Stop();
            return(true);
        }
Example #13
        public override void ProcessFeatures(Dictionary <string, Feature> features)
        {
            base.ProcessFeatures(features);

            // Get feature flags
            string name        = GetString(features, NameStringValue);
            string description = GetString(features, DescriptionStringValue);
            string newdat      = GetString(features, NewStringValue);
            string olddat      = GetString(features, OldStringValue);
            string outdat      = GetString(features, OutStringValue);

            // Ensure the output directory
            DirectoryExtensions.Ensure(outdat, create: true);

            // Check that all required files exist
            if (!File.Exists(olddat))
            {
                logger.Error($"File '{olddat}' does not exist!");
                return;
            }

            if (!File.Exists(newdat))
            {
                logger.Error($"File '{newdat}' does not exist!");
                return;
            }

            // Create the encapsulating datfile
            DatFile datfile = DatFile.Create();

            datfile.Header.Name        = name;
            datfile.Header.Description = description;
            datfile.Parse(olddat);

            // Diff against the new datfile
            DatFile intDat = DatFile.CreateAndParse(newdat);

            datfile.DiffAgainst(intDat, false);
            intDat.Write(outdat);
        }
Example #14
 /// <summary>
 /// Reset the current state
 /// </summary>
 public void Reset()
 {
     this.Index           = 0;
     this.DatFile         = DatFile.Create();
     this.OutputDirectory = null;
 }
Example #15
        public override bool ProcessFeatures(Dictionary <string, SabreTools.Help.Feature> features)
        {
            // If the base fails, just fail out
            if (!base.ProcessFeatures(features))
            {
                return(false);
            }

            // Get the archive scanning level
            // TODO: Remove usage
            int sevenzip = GetInt32(features, Include7ZipsInt32Value);
            int gz       = GetInt32(features, IncludeGZipsInt32Value);
            int zip      = GetInt32(features, IncludeZipsInt32Value);

            // Get feature flags
            bool noDb       = GetBoolean(features, NoDbValue);
            bool onlyNeeded = GetBoolean(features, OnlyNeededValue);

            // First we want to get just all directories from the inputs
            List <string> onlyDirs = new List <string>();

            foreach (string input in Inputs)
            {
                if (Directory.Exists(input))
                {
                    onlyDirs.Add(Path.GetFullPath(input));
                }
            }

            // Then process all of the input directories into an internal DAT
            DatFile df = DatFile.Create();

            foreach (string dir in onlyDirs)
            {
                DatFromDir.PopulateFromDir(df, dir, asFiles: TreatAsFile.NonArchive, hashes: Hash.Standard);
                DatFromDir.PopulateFromDir(df, dir, asFiles: TreatAsFile.All, hashes: Hash.Standard);
            }

            // Create an empty Dat for files that need to be rebuilt
            DatFile need = DatFile.Create();

            // Open the database connection
            SqliteConnection dbc = new SqliteConnection(_connectionString);

            dbc.Open();

            // Now that we have the Dats, add the files to the database
            string crcquery     = "INSERT OR IGNORE INTO crc (crc) VALUES";
            string md5query     = "INSERT OR IGNORE INTO md5 (md5) VALUES";
            string sha1query    = "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES";
            string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
            string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";

            foreach (string key in df.Items.Keys)
            {
                ConcurrentList <DatItem> datItems = df.Items[key];
                foreach (Rom rom in datItems)
                {
                    // If we care about whether the file exists, check the database first
                    if (onlyNeeded && !noDb)
                    {
                        string query = "SELECT * FROM crcsha1 JOIN md5sha1 ON crcsha1.sha1=md5sha1.sha1"
                                       + $" WHERE crcsha1.crc=\"{rom.CRC}\""
                                       + $" OR md5sha1.md5=\"{rom.MD5}\""
                                       + $" OR md5sha1.sha1=\"{rom.SHA1}\"";
                        SqliteCommand    slc  = new SqliteCommand(query, dbc);
                        SqliteDataReader sldr = slc.ExecuteReader();

                        if (sldr.HasRows)
                        {
                            // Add to the queries
                            if (!string.IsNullOrWhiteSpace(rom.CRC))
                            {
                                crcquery += $" (\"{rom.CRC}\"),";
                            }

                            if (!string.IsNullOrWhiteSpace(rom.MD5))
                            {
                                md5query += $" (\"{rom.MD5}\"),";
                            }

                            if (!string.IsNullOrWhiteSpace(rom.SHA1))
                            {
                                sha1query += $" (\"{rom.SHA1}\", \"{_depots.Keys.ToList()[0]}\"),";

                                if (!string.IsNullOrWhiteSpace(rom.CRC))
                                {
                                    crcsha1query += $" (\"{rom.CRC}\", \"{rom.SHA1}\"),";
                                }

                                if (!string.IsNullOrWhiteSpace(rom.MD5))
                                {
                                    md5sha1query += $" (\"{rom.MD5}\", \"{rom.SHA1}\"),";
                                }
                            }

                            // Add to the Dat
                            need.Items.Add(key, rom);
                        }
                    }
                    // Otherwise, just add the file to the list
                    else
                    {
                        // Add to the queries
                        if (!noDb)
                        {
                            if (!string.IsNullOrWhiteSpace(rom.CRC))
                            {
                                crcquery += $" (\"{rom.CRC}\"),";
                            }

                            if (!string.IsNullOrWhiteSpace(rom.MD5))
                            {
                                md5query += $" (\"{rom.MD5}\"),";
                            }

                            if (!string.IsNullOrWhiteSpace(rom.SHA1))
                            {
                                sha1query += $" (\"{rom.SHA1}\", \"{_depots.Keys.ToList()[0]}\"),";

                                if (!string.IsNullOrWhiteSpace(rom.CRC))
                                {
                                    crcsha1query += $" (\"{rom.CRC}\", \"{rom.SHA1}\"),";
                                }

                                if (!string.IsNullOrWhiteSpace(rom.MD5))
                                {
                                    md5sha1query += $" (\"{rom.MD5}\", \"{rom.SHA1}\"),";
                                }
                            }
                        }

                        // Add to the Dat
                        need.Items.Add(key, rom);
                    }
                }
            }

            // Now run the queries, if they're populated
            if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
            {
                SqliteCommand slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
                slc.ExecuteNonQuery();
                slc.Dispose();
            }

            if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
            {
                SqliteCommand slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
                slc.ExecuteNonQuery();
                slc.Dispose();
            }

            if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES")
            {
                SqliteCommand slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
                slc.ExecuteNonQuery();
                slc.Dispose();
            }

            if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
            {
                SqliteCommand slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
                slc.ExecuteNonQuery();
                slc.Dispose();
            }

            if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES")
            {
                SqliteCommand slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc);
                slc.ExecuteNonQuery();
                slc.Dispose();
            }

            // Create the sorting object to use and rebuild the needed files
            Rebuilder.RebuildGeneric(
                need,
                onlyDirs,
                outDir: _depots.Keys.ToList()[0],
                outputFormat: OutputFormat.TorrentGzipRomba,
                asFiles: TreatAsFile.NonArchive);

            return(true);
        }
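
The queries above interpolate hash strings straight into the SQL text. A minimal sketch of the same existence check using bound parameters instead (Microsoft.Data.Sqlite accepts $-prefixed named parameters; the table and column names are taken from the queries above, and `rom` and `dbc` are the loop variables from the example):

        // Hypothetical rewrite of the existence check with parameters instead of interpolation
        string query = "SELECT 1 FROM crcsha1 JOIN md5sha1 ON crcsha1.sha1 = md5sha1.sha1"
                       + " WHERE crcsha1.crc = $crc OR md5sha1.md5 = $md5 OR md5sha1.sha1 = $sha1";
        using (SqliteCommand slc = new SqliteCommand(query, dbc))
        {
            slc.Parameters.AddWithValue("$crc", rom.CRC ?? string.Empty);
            slc.Parameters.AddWithValue("$md5", rom.MD5 ?? string.Empty);
            slc.Parameters.AddWithValue("$sha1", rom.SHA1 ?? string.Empty);
            using (SqliteDataReader sldr = slc.ExecuteReader())
            {
                bool exists = sldr.HasRows;
            }
        }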
Example #16
        public override bool ProcessFeatures(Dictionary <string, Help.Feature> features)
        {
            // If the base fails, just fail out
            if (!base.ProcessFeatures(features))
            {
                return(false);
            }

            // Get the splitting mode
            SplittingMode splittingMode = GetSplittingMode(features);

            if (splittingMode == SplittingMode.None)
            {
                logger.Error("No valid splitting mode found!");
                return(false);
            }

            // Get only files from the inputs
            List <ParentablePath> files = PathTool.GetFilesOnly(Inputs, appendparent: true);

            // Loop over the input files
            foreach (ParentablePath file in files)
            {
                // Create and fill the new DAT
                DatFile internalDat = DatFile.Create(Header);
                Parser.ParseInto(internalDat, file);

                // Get the output directory
                OutputDir = OutputDir.Ensure();
                OutputDir = file.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                // Extension splitting
                if (splittingMode.HasFlag(SplittingMode.Extension))
                {
                    (DatFile extADat, DatFile extBDat) = DatTools.Splitter.SplitByExtension(internalDat, GetList(features, ExtAListValue), GetList(features, ExtBListValue));

                    InternalStopwatch watch = new InternalStopwatch("Outputting extension-split DATs");

                    // Output both possible DatFiles
                    Writer.Write(extADat, OutputDir);
                    Writer.Write(extBDat, OutputDir);

                    watch.Stop();
                }

                // Hash splitting
                if (splittingMode.HasFlag(SplittingMode.Hash))
                {
                    Dictionary <DatItemField, DatFile> typeDats = DatTools.Splitter.SplitByHash(internalDat);

                    InternalStopwatch watch = new InternalStopwatch("Outputting hash-split DATs");

                    // Loop through each type DatFile
                    Parallel.ForEach(typeDats.Keys, Globals.ParallelOptions, itemType =>
                    {
                        Writer.Write(typeDats[itemType], OutputDir);
                    });

                    watch.Stop();
                }

                // Level splitting
                if (splittingMode.HasFlag(SplittingMode.Level))
                {
                    logger.Warning("This feature is not implemented: level-split");
                    DatTools.Splitter.SplitByLevel(
                        internalDat,
                        OutputDir,
                        GetBoolean(features, ShortValue),
                        GetBoolean(features, BaseValue));
                }

                // Size splitting
                if (splittingMode.HasFlag(SplittingMode.Size))
                {
                    (DatFile lessThan, DatFile greaterThan) = DatTools.Splitter.SplitBySize(internalDat, GetInt64(features, RadixInt64Value));

                    InternalStopwatch watch = new InternalStopwatch("Outputting size-split DATs");

                    // Output both possible DatFiles
                    Writer.Write(lessThan, OutputDir);
                    Writer.Write(greaterThan, OutputDir);

                    watch.Stop();
                }

                // Total Size splitting
                if (splittingMode.HasFlag(SplittingMode.TotalSize))
                {
                    logger.Warning("This feature is not implemented: level-split");
                    List <DatFile> sizedDats = DatTools.Splitter.SplitByTotalSize(internalDat, GetInt64(features, ChunkSizeInt64Value));

                    InternalStopwatch watch = new InternalStopwatch("Outputting total-size-split DATs");

                    // Loop through each type DatFile
                    Parallel.ForEach(sizedDats, Globals.ParallelOptions, sizedDat =>
                    {
                        Writer.Write(sizedDat, OutputDir);
                    });

                    watch.Stop();
                }

                // Type splitting
                if (splittingMode.HasFlag(SplittingMode.Type))
                {
                    Dictionary <ItemType, DatFile> typeDats = DatTools.Splitter.SplitByType(internalDat);

                    InternalStopwatch watch = new InternalStopwatch("Outputting ItemType DATs");

                    // Loop through each type DatFile
                    Parallel.ForEach(typeDats.Keys, Globals.ParallelOptions, itemType =>
                    {
                        Writer.Write(typeDats[itemType], OutputDir);
                    });

                    watch.Stop();
                }
            }

            return(true);
        }
Example #17
        public override bool ProcessFeatures(Dictionary <string, SabreTools.Help.Feature> features)
        {
            // If the base fails, just fail out
            if (!base.ProcessFeatures(features))
            {
                return(false);
            }

            logger.Error("This feature is not yet implemented: rescan-depots");

            foreach (string depotname in Inputs)
            {
                // Check that it's a valid depot first
                if (!_depots.ContainsKey(depotname))
                {
                    logger.User($"'{depotname}' is not a recognized depot. Please add it to your configuration file and try again");
                    return(false);
                }

                // Then check that the depot is online
                if (!Directory.Exists(depotname))
                {
                    logger.User($"'{depotname}' does not appear to be online. Please check its status and try again");
                    return(false);
                }

                // Open the database connection
                SqliteConnection dbc = new SqliteConnection(_connectionString);
                dbc.Open();

                // If we have it, then check for all hashes that are in that depot
                List <string>    hashes = new List <string>();
                string           query  = $"SELECT sha1 FROM sha1 WHERE depot=\"{depotname}\"";
                SqliteCommand    slc    = new SqliteCommand(query, dbc);
                SqliteDataReader sldr   = slc.ExecuteReader();
                if (sldr.HasRows)
                {
                    while (sldr.Read())
                    {
                        hashes.Add(sldr.GetString(0));
                    }
                }

                // Now rescan the depot itself
                DatFile depot = DatFile.Create();
                DatFromDir.PopulateFromDir(depot, depotname, asFiles: TreatAsFile.NonArchive, hashes: Hash.Standard);
                depot.Items.BucketBy(ItemKey.SHA1, DedupeType.None);

                // Set the base queries to use
                string crcquery     = "INSERT OR IGNORE INTO crc (crc) VALUES";
                string md5query     = "INSERT OR IGNORE INTO md5 (md5) VALUES";
                string sha1query    = "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES";
                string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
                string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";

                // Once we have both, check for any new files
                List <string>        dupehashes = new List <string>();
                IEnumerable <string> keys       = depot.Items.Keys;
                foreach (string key in keys)
                {
                    ConcurrentList <DatItem> roms = depot.Items[key];
                    foreach (Rom rom in roms)
                    {
                        if (hashes.Contains(rom.SHA1))
                        {
                            dupehashes.Add(rom.SHA1);
                            hashes.Remove(rom.SHA1);
                        }
                        else if (!dupehashes.Contains(rom.SHA1))
                        {
                            if (!string.IsNullOrWhiteSpace(rom.CRC))
                            {
                                crcquery += $" (\"{rom.CRC}\"),";
                            }

                            if (!string.IsNullOrWhiteSpace(rom.MD5))
                            {
                                md5query += $" (\"{rom.MD5}\"),";
                            }

                            if (!string.IsNullOrWhiteSpace(rom.SHA1))
                            {
                                sha1query += $" (\"{rom.SHA1}\", \"{depotname}\"),";

                                if (!string.IsNullOrWhiteSpace(rom.CRC))
                                {
                                    crcsha1query += $" (\"{rom.CRC}\", \"{rom.SHA1}\"),";
                                }

                                if (!string.IsNullOrWhiteSpace(rom.MD5))
                                {
                                    md5sha1query += $" (\"{rom.MD5}\", \"{rom.SHA1}\"),";
                                }
                            }
                        }
                    }
                }

                // Now run the queries after fixing them
                if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
                {
                    slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }

                if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
                {
                    slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }

                if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES")
                {
                    slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }

                if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
                {
                    slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }

                if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES")
                {
                    slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }

                // Now that we've added the information, remove all of the stale hashes
                // (SQLite's DELETE does not support JOIN, so the link tables are
                // consulted through a subquery instead)
                query  = @"DELETE FROM sha1
WHERE sha1 IN (
    SELECT sha1.sha1 FROM sha1
    JOIN crcsha1
        ON sha1.sha1=crcsha1.sha1
    JOIN md5sha1
        ON sha1.sha1=md5sha1.sha1
    JOIN crc
        ON crcsha1.crc=crc.crc
    JOIN md5
        ON md5sha1.md5=md5.md5
    WHERE sha1.sha1 IN ";
                query += $"(\"{string.Join("\",\"", hashes)}\"))";
                slc    = new SqliteCommand(query, dbc);
                slc.ExecuteNonQuery();

                // Dispose of the database connection
                slc.Dispose();
                dbc.Dispose();
            }

            return(true);
        }
Example #18
        public override void ProcessFeatures(Dictionary <string, Library.Help.Feature> features)
        {
            base.ProcessFeatures(features);
            SplittingMode splittingMode = GetSplittingMode(features);

            // If we somehow have the "none" split type, return
            if (splittingMode == SplittingMode.None)
            {
                return;
            }

            // Get only files from the inputs
            List <ParentablePath> files = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);

            // Loop over the input files
            foreach (ParentablePath file in files)
            {
                // Create and fill the new DAT
                DatFile internalDat = DatFile.Create(Header);
                internalDat.Parse(file);

                // Get the output directory
                OutputDir = file.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                // Extension splitting
                if (splittingMode.HasFlag(SplittingMode.Extension))
                {
                    (DatFile extADat, DatFile extBDat) = internalDat.SplitByExtension(GetList(features, ExtAListValue), GetList(features, ExtBListValue));

                    InternalStopwatch watch = new InternalStopwatch("Outputting extension-split DATs");

                    // Output both possible DatFiles
                    extADat.Write(OutputDir);
                    extBDat.Write(OutputDir);

                    watch.Stop();
                }

                // Hash splitting
                if (splittingMode.HasFlag(SplittingMode.Hash))
                {
                    Dictionary <Field, DatFile> typeDats = internalDat.SplitByHash();

                    InternalStopwatch watch = new InternalStopwatch("Outputting hash-split DATs");

                    // Loop through each type DatFile
                    Parallel.ForEach(typeDats.Keys, Globals.ParallelOptions, itemType =>
                    {
                        typeDats[itemType].Write(OutputDir);
                    });

                    watch.Stop();
                }

                // Level splitting
                if (splittingMode.HasFlag(SplittingMode.Level))
                {
                    logger.Warning("This feature is not implemented: level-split");
                    internalDat.SplitByLevel(
                        OutputDir,
                        GetBoolean(features, ShortValue),
                        GetBoolean(features, BaseValue));
                }

                // Size splitting
                if (splittingMode.HasFlag(SplittingMode.Size))
                {
                    (DatFile lessThan, DatFile greaterThan) = internalDat.SplitBySize(GetInt64(features, RadixInt64Value));

                    InternalStopwatch watch = new InternalStopwatch("Outputting size-split DATs");

                    // Output both possible DatFiles
                    lessThan.Write(OutputDir);
                    greaterThan.Write(OutputDir);

                    watch.Stop();
                }

                // Type splitting
                if (splittingMode.HasFlag(SplittingMode.Type))
                {
                    Dictionary <ItemType, DatFile> typeDats = internalDat.SplitByType();

                    InternalStopwatch watch = new InternalStopwatch("Outputting ItemType DATs");

                    // Loop through each type DatFile
                    Parallel.ForEach(typeDats.Keys, Globals.ParallelOptions, itemType =>
                    {
                        typeDats[itemType].Write(OutputDir);
                    });

                    watch.Stop();
                }
            }
        }
Example #19
        public override void ProcessFeatures(Dictionary <string, Feature> features)
        {
            base.ProcessFeatures(features);

            // Get feature flags
            TreatAsFile asFiles      = GetTreatAsFiles(features);
            bool        date         = GetBoolean(features, AddDateValue);
            bool        delete       = GetBoolean(features, DeleteValue);
            bool        inverse      = GetBoolean(features, InverseValue);
            bool        quickScan    = GetBoolean(features, QuickValue);
            bool        updateDat    = GetBoolean(features, UpdateDatValue);
            var         outputFormat = GetOutputFormat(features);

            // If we have the romba flag
            if (Header.OutputDepot?.IsActive == true)
            {
                // Update TorrentGzip output
                if (outputFormat == OutputFormat.TorrentGzip)
                {
                    outputFormat = OutputFormat.TorrentGzipRomba;
                }

                // Update TorrentXz output
                else if (outputFormat == OutputFormat.TorrentXZ)
                {
                    outputFormat = OutputFormat.TorrentXZRomba;
                }
            }

            // Get a list of files from the input datfiles
            var datfiles     = GetList(features, DatListValue);
            var datfilePaths = DirectoryExtensions.GetFilesOnly(datfiles);

            // If we are in individual mode, process each DAT on its own, appending the DAT name to the output dir
            if (GetBoolean(features, IndividualValue))
            {
                foreach (ParentablePath datfile in datfilePaths)
                {
                    DatFile datdata = DatFile.Create();
                    datdata.Parse(datfile, int.MaxValue, keep: true);

                    // Set depot information
                    datdata.Header.InputDepot  = Header.InputDepot?.Clone() as DepotInformation;
                    datdata.Header.OutputDepot = Header.OutputDepot?.Clone() as DepotInformation;

                    // If we have overridden the header skipper, set it now
                    if (!string.IsNullOrEmpty(Header.HeaderSkipper))
                    {
                        datdata.Header.HeaderSkipper = Header.HeaderSkipper;
                    }

                    // If we have the depot flag, respect it
                    bool success;
                    if (Header.InputDepot?.IsActive ?? false)
                    {
                        success = datdata.RebuildDepot(Inputs, Path.Combine(OutputDir, datdata.Header.FileName), date, delete, inverse, outputFormat);
                    }
                    else
                    {
                        success = datdata.RebuildGeneric(Inputs, Path.Combine(OutputDir, datdata.Header.FileName), quickScan, date, delete, inverse, outputFormat, asFiles);
                    }

                    // If we have a success and we're updating the DAT, write it out
                    if (success && updateDat)
                    {
                        datdata.Header.FileName    = $"fixDAT_{Header.FileName}";
                        datdata.Header.Name        = $"fixDAT_{Header.Name}";
                        datdata.Header.Description = $"fixDAT_{Header.Description}";
                        datdata.Items.ClearMarked();
                        datdata.Write(OutputDir);
                    }
                }
            }

            // Otherwise, process all DATs into the same output
            else
            {
                InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

                // Add all of the input DATs into one huge internal DAT
                DatFile datdata = DatFile.Create();
                foreach (ParentablePath datfile in datfilePaths)
                {
                    datdata.Parse(datfile, int.MaxValue, keep: true);
                }

                // Set depot information
                datdata.Header.InputDepot  = Header.InputDepot?.Clone() as DepotInformation;
                datdata.Header.OutputDepot = Header.OutputDepot?.Clone() as DepotInformation;

                // If we have overridden the header skipper, set it now
                if (!string.IsNullOrEmpty(Header.HeaderSkipper))
                {
                    datdata.Header.HeaderSkipper = Header.HeaderSkipper;
                }

                watch.Stop();

                // If we have the depot flag, respect it
                bool success;
                if (Header.InputDepot?.IsActive ?? false)
                {
                    success = datdata.RebuildDepot(Inputs, OutputDir, date, delete, inverse, outputFormat);
                }
                else
                {
                    success = datdata.RebuildGeneric(Inputs, OutputDir, quickScan, date, delete, inverse, outputFormat, asFiles);
                }

                // If we have a success and we're updating the DAT, write it out
                if (success && updateDat)
                {
                    datdata.Header.FileName    = $"fixDAT_{Header.FileName}";
                    datdata.Header.Name        = $"fixDAT_{Header.Name}";
                    datdata.Header.Description = $"fixDAT_{Header.Description}";
                    datdata.Items.ClearMarked();
                    datdata.Write(OutputDir);
                }
            }
        }
Example #20
        static void Main(string[] args)
        {
            string[] selection = { "Create", "Open", "Edit" };
            string[] doctype   = { "txt", "dat" };

            while (true)
            {
                Console.WriteLine(new string('-', 30));
                Console.Write("Введіть шлях до документу: ");
                string path = Console.ReadLine();
                Console.WriteLine(new string('-', 30));


                // A file in .txt or .dat format
                AbstractHandler file;

                Console.WriteLine("Виберіть тип документу");
                for (int i = 0; i < doctype.Length; i++)
                {
                    Console.WriteLine($"{i + 1}.{doctype[i]}");
                }
                Console.Write("---> ");
                int n = Convert.ToInt32(Console.ReadLine());

                switch (n)
                {
                case 1:
                    file = new TxtFile(path);
                    break;

                case 2:
                    file = new DatFile(path);
                    break;

                default:
                    // No valid type was chosen; restart the loop instead of
                    // dereferencing a null handler below
                    Console.WriteLine("Unknown document type");
                    continue;
                }
                Console.WriteLine(new string('-', 30));

                Console.WriteLine("Виберіть, що зробити з документом");
                for (int i = 0; i < selection.Length; i++)
                {
                    Console.WriteLine($"{i + 1}.{selection[i]}");
                }
                Console.Write("---> ");
                int m = Convert.ToInt32(Console.ReadLine());

                switch (m)
                {
                case 1:
                    file.Create();
                    break;

                case 2:
                    file.Open();
                    break;

                case 3:
                    file.Edit();
                    break;
                }
                Console.WriteLine(new string('-', 30));

                Console.WriteLine("Для продовження натисніть ENTER, щоб увійти введіть Q");
                Console.Write("--->");
                string exit = Console.ReadLine();
                if (exit == "Q" || exit == "q")
                {
                    break;
                }
            }
        }
Example #21
        /// <summary>
        /// Split a DAT by best available hashes
        /// </summary>
        /// <param name="datFile">Current DatFile object to split</param>
        /// <returns>Dictionary of Field to DatFile mappings</returns>
        public static Dictionary <DatItemField, DatFile> SplitByHash(DatFile datFile)
        {
            // Create each of the respective output DATs
            InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by best available hashes");

            // Create the set of field-to-dat mappings
            Dictionary <DatItemField, DatFile> fieldDats = new Dictionary <DatItemField, DatFile>();

            // TODO: Can this be made into a loop?
            fieldDats[DatItemField.Status] = DatFile.Create(datFile.Header.CloneStandard());
            fieldDats[DatItemField.Status].Header.FileName    += " (Nodump)";
            fieldDats[DatItemField.Status].Header.Name        += " (Nodump)";
            fieldDats[DatItemField.Status].Header.Description += " (Nodump)";

            fieldDats[DatItemField.SHA512] = DatFile.Create(datFile.Header.CloneStandard());
            fieldDats[DatItemField.SHA512].Header.FileName    += " (SHA-512)";
            fieldDats[DatItemField.SHA512].Header.Name        += " (SHA-512)";
            fieldDats[DatItemField.SHA512].Header.Description += " (SHA-512)";

            fieldDats[DatItemField.SHA384] = DatFile.Create(datFile.Header.CloneStandard());
            fieldDats[DatItemField.SHA384].Header.FileName    += " (SHA-384)";
            fieldDats[DatItemField.SHA384].Header.Name        += " (SHA-384)";
            fieldDats[DatItemField.SHA384].Header.Description += " (SHA-384)";

            fieldDats[DatItemField.SHA256] = DatFile.Create(datFile.Header.CloneStandard());
            fieldDats[DatItemField.SHA256].Header.FileName    += " (SHA-256)";
            fieldDats[DatItemField.SHA256].Header.Name        += " (SHA-256)";
            fieldDats[DatItemField.SHA256].Header.Description += " (SHA-256)";

            fieldDats[DatItemField.SHA1] = DatFile.Create(datFile.Header.CloneStandard());
            fieldDats[DatItemField.SHA1].Header.FileName    += " (SHA-1)";
            fieldDats[DatItemField.SHA1].Header.Name        += " (SHA-1)";
            fieldDats[DatItemField.SHA1].Header.Description += " (SHA-1)";

            fieldDats[DatItemField.MD5] = DatFile.Create(datFile.Header.CloneStandard());
            fieldDats[DatItemField.MD5].Header.FileName    += " (MD5)";
            fieldDats[DatItemField.MD5].Header.Name        += " (MD5)";
            fieldDats[DatItemField.MD5].Header.Description += " (MD5)";

            fieldDats[DatItemField.CRC] = DatFile.Create(datFile.Header.CloneStandard());
            fieldDats[DatItemField.CRC].Header.FileName    += " (CRC)";
            fieldDats[DatItemField.CRC].Header.Name        += " (CRC)";
            fieldDats[DatItemField.CRC].Header.Description += " (CRC)";

            fieldDats[DatItemField.NULL] = DatFile.Create(datFile.Header.CloneStandard());
            fieldDats[DatItemField.NULL].Header.FileName    += " (Other)";
            fieldDats[DatItemField.NULL].Header.Name        += " (Other)";
            fieldDats[DatItemField.NULL].Header.Description += " (Other)";
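
            // One possible answer to the TODO above (a hedged sketch, not the
            // project's actual code): drive the repeated blocks with a
            // field-to-suffix table instead of copy-pasted assignments.
            //
            //     var suffixes = new Dictionary<DatItemField, string>
            //     {
            //         [DatItemField.Status] = " (Nodump)",
            //         [DatItemField.SHA512] = " (SHA-512)",
            //         [DatItemField.SHA384] = " (SHA-384)",
            //         [DatItemField.SHA256] = " (SHA-256)",
            //         [DatItemField.SHA1]   = " (SHA-1)",
            //         [DatItemField.MD5]    = " (MD5)",
            //         [DatItemField.CRC]    = " (CRC)",
            //         [DatItemField.NULL]   = " (Other)",
            //     };
            //     foreach (var pair in suffixes)
            //     {
            //         fieldDats[pair.Key] = DatFile.Create(datFile.Header.CloneStandard());
            //         fieldDats[pair.Key].Header.FileName    += pair.Value;
            //         fieldDats[pair.Key].Header.Name        += pair.Value;
            //         fieldDats[pair.Key].Header.Description += pair.Value;
            //     }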

            // Now populate each of the DAT objects in turn
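            // Each item is assigned to exactly one output DAT, checked in priority
            // order: nodump status first, then SHA-512, SHA-384, SHA-256, SHA-1,
            // MD5, CRC, and finally "Other" for anything without a usable hash.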
            Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
            {
                ConcurrentList <DatItem> items = datFile.Items[key];
                foreach (DatItem item in items)
                {
                    // If the file is not a Disk, Media, or Rom, continue
                    if (item.ItemType != ItemType.Disk && item.ItemType != ItemType.Media && item.ItemType != ItemType.Rom)
                    {
                        continue;
                    }

                    // If the file is a nodump
                    if ((item.ItemType == ItemType.Rom && (item as Rom).ItemStatus == ItemStatus.Nodump) ||
                        (item.ItemType == ItemType.Disk && (item as Disk).ItemStatus == ItemStatus.Nodump))
                    {
                        fieldDats[DatItemField.Status].Items.Add(key, item);
                    }

                    // If the file has a SHA-512
                    else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).SHA512)))
                    {
                        fieldDats[DatItemField.SHA512].Items.Add(key, item);
                    }

                    // If the file has a SHA-384
                    else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).SHA384)))
                    {
                        fieldDats[DatItemField.SHA384].Items.Add(key, item);
                    }

                    // If the file has a SHA-256
                    else if ((item.ItemType == ItemType.Media && !string.IsNullOrWhiteSpace((item as Media).SHA256)) ||
                             (item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).SHA256)))
                    {
                        fieldDats[DatItemField.SHA256].Items.Add(key, item);
                    }

                    // If the file has a SHA-1
                    else if ((item.ItemType == ItemType.Disk && !string.IsNullOrWhiteSpace((item as Disk).SHA1)) ||
                             (item.ItemType == ItemType.Media && !string.IsNullOrWhiteSpace((item as Media).SHA1)) ||
                             (item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).SHA1)))
                    {
                        fieldDats[DatItemField.SHA1].Items.Add(key, item);
                    }

                    // If the file has an MD5
                    else if ((item.ItemType == ItemType.Disk && !string.IsNullOrWhiteSpace((item as Disk).MD5)) ||
                             (item.ItemType == ItemType.Media && !string.IsNullOrWhiteSpace((item as Media).MD5)) ||
                             (item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).MD5)))
                    {
                        fieldDats[DatItemField.MD5].Items.Add(key, item);
                    }

                    // If the file has a CRC
                    else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).CRC)))
                    {
                        fieldDats[DatItemField.CRC].Items.Add(key, item);
                    }

                    else
                    {
                        fieldDats[DatItemField.NULL].Items.Add(key, item);
                    }
                }
            });

            watch.Stop();
            return(fieldDats);
        }
Exemplo n.º 22
        public override void ProcessFeatures(Dictionary <string, Library.Help.Feature> features)
        {
            base.ProcessFeatures(features);

            // Try to read each input as a batch run file
            foreach (string path in Inputs)
            {
                // If the file doesn't exist, warn but continue
                if (!File.Exists(path))
                {
                    logger.User($"{path} does not exist. Skipping...");
                    continue;
                }

                // Try to process the file now
                try
                {
                    // Every line is its own command
                    string[] lines = File.ReadAllLines(path);
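
                    // A hypothetical batch file, one command per line (paths and
                    // field names are illustrative only, not taken from the project):
                    //
                    //     # Build a filtered DAT from two inputs
                    //     input(C:\dats\first.dat, C:\dats\second.dat);
                    //     set(description, Combined and filtered);
                    //     filter(machine.name, ^bios.*, true);
                    //     format(xml);
                    //     output(C:\dats\out);
                    //     write();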

                    // Each batch file has its own state
                    int     index           = 0;
                    DatFile datFile         = DatFile.Create();
                    string  outputDirectory = null;

                    // Process each command line
                    foreach (string line in lines)
                    {
                        // Skip empty lines
                        if (string.IsNullOrWhiteSpace(line))
                        {
                            continue;
                        }

                        // Skip lines that start with REM or #
                        if (line.StartsWith("REM") || line.StartsWith("#"))
                        {
                            continue;
                        }

                        // Read the command in, if possible
                        var command = BatchCommand.Create(line);
                        if (command == null)
                        {
                            logger.User($"Could not process {path} due to the following line: {line}");
                            break;
                        }

                        // Now switch on the command
                        logger.User($"Attempting to invoke {command.Name} with {(command.Arguments.Count == 0 ? "no arguments" : "the following argument(s): " + string.Join(", ", command.Arguments))}");
                        switch (command.Name.ToLowerInvariant())
                        {
                        // Set a header field
                        case "set":
                            if (command.Arguments.Count != 2)
                            {
                                logger.User($"Invoked {command.Name} but no arguments were provided");
                                logger.User("Usage: set(header.field, value);");
                                continue;
                            }

                            // Read in the individual arguments
                            Field  field = command.Arguments[0].AsField();
                            string value = command.Arguments[1];

                            // If we had an invalid input, log and continue
                            if (field == Field.NULL)
                            {
                                logger.User($"{command.Arguments[0]} was an invalid field name");
                                continue;
                            }

                            // Set the header field
                            datFile.Header.SetFields(new Dictionary <Field, string> {
                                [field] = value
                            });

                            break;

                        // Parse in new input file(s)
                        case "input":
                            if (command.Arguments.Count == 0)
                            {
                                logger.User($"Invoked {command.Name} but no arguments were provided");
                                logger.User("Usage: input(datpath, ...);");
                                continue;
                            }

                            // Get only files from inputs
                            List <ParentablePath> datFilePaths = DirectoryExtensions.GetFilesOnly(command.Arguments);

                            // Assume there could be multiple
                            foreach (ParentablePath datFilePath in datFilePaths)
                            {
                                datFile.Parse(datFilePath, index++);
                            }

                            break;

                        // Run DFD/D2D on path(s)
                        case "d2d":
                        case "dfd":
                            if (command.Arguments.Count == 0)
                            {
                                logger.User($"Invoked {command.Name} but no arguments were provided");
                                logger.User("Usage: d2d(path, ...);");
                                continue;
                            }

                            // TODO: Should any of the other options be added for D2D?

                            // Assume there could be multiple
                            foreach (string input in command.Arguments)
                            {
                                datFile.PopulateFromDir(input);
                            }

                            // TODO: We might not want to remove higher order hashes in the future
                            // TODO: We might not want to remove dates in the future
                            Cleaner dfdCleaner = new Cleaner()
                            {
                                ExcludeFields = Hash.DeepHashes.AsFields()
                            };
                            dfdCleaner.ExcludeFields.Add(Field.DatItem_Date);
                            datFile.ApplyCleaning(dfdCleaner);

                            break;

                        // Apply a filter
                        case "filter":
                            if (command.Arguments.Count < 2 || command.Arguments.Count > 4)
                            {
                                logger.User($"Invoked {command.Name} and expected between 2-4 arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: filter(field, value, [remove = false, [perMachine = false]]);");
                                continue;
                            }

                            // Read in the individual arguments
                            Field  filterField  = command.Arguments[0].AsField();
                            string filterValue  = command.Arguments[1];
                            bool?  filterRemove = false;
                            if (command.Arguments.Count >= 3)
                            {
                                filterRemove = command.Arguments[2].AsYesNo();
                            }
                            bool? filterPerMachine = false;
                            if (command.Arguments.Count >= 4)
                            {
                                filterPerMachine = command.Arguments[3].AsYesNo();
                            }

                            // If we had an invalid input, log and continue
                            if (filterField == Field.NULL)
                            {
                                logger.User($"{command.Arguments[0]} was an invalid field name");
                                continue;
                            }
                            if (filterRemove == null)
                            {
                                logger.User($"{command.Arguments[2]} was an invalid true/false value");
                                continue;
                            }
                            if (filterPerMachine == null)
                            {
                                logger.User($"{command.Arguments[3]} was an invalid true/false value");
                                continue;
                            }

                            // Create a filter with this new set of fields
                            Filter filter = new Filter();
                            filter.SetFilter(filterField, filterValue, filterRemove.Value);

                            // Apply the filter blindly
                            datFile.ApplyFilter(filter, filterPerMachine.Value);

                            // Cleanup after the filter
                            // TODO: We might not want to remove immediately
                            datFile.Items.ClearMarked();
                            datFile.Items.ClearEmpty();

                            break;

                        // Apply an extra INI
                        case "extra":
                            if (command.Arguments.Count != 2)
                            {
                                logger.User($"Invoked {command.Name} and expected 2 arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: extra(field, inipath);");
                                continue;
                            }

                            // Read in the individual arguments
                            Field  extraField = command.Arguments[0].AsField();
                            string extraFile  = command.Arguments[1];

                            // If we had an invalid input, log and continue
                            if (extraField == Field.NULL)
                            {
                                logger.User($"{command.Arguments[0]} was an invalid field name");
                                continue;
                            }
                            if (!File.Exists(command.Arguments[1]))
                            {
                                logger.User($"{command.Arguments[1]} was an invalid file name");
                                continue;
                            }

                            // Create the extra INI
                            ExtraIni     extraIni     = new ExtraIni();
                            ExtraIniItem extraIniItem = new ExtraIniItem();
                            extraIniItem.PopulateFromFile(extraFile);
                            extraIniItem.Field = extraField;
                            extraIni.Items.Add(extraIniItem);

                            // Apply the extra INI blindly
                            datFile.ApplyExtras(extraIni);

                            break;

                        // Apply internal split/merge
                        case "merge":
                            if (command.Arguments.Count != 1)
                            {
                                logger.User($"Invoked {command.Name} and expected 1 argument, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: merge(split|merged|nonmerged|full|device);");
                                continue;
                            }

                            // Read in the individual arguments
                            MergingFlag mergingFlag = command.Arguments[0].AsMergingFlag();

                            // If we had an invalid input, log and continue
                            if (mergingFlag == MergingFlag.None)
                            {
                                logger.User($"{command.Arguments[0]} was an invalid merging flag");
                                continue;
                            }

                            // Apply the merging flag
                            datFile.ApplySplitting(mergingFlag, false);

                            break;

                        // Apply description-as-name logic
                        case "descname":
                            if (command.Arguments.Count != 0)
                            {
                                logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: descname();");
                                continue;
                            }

                            // Apply the logic
                            datFile.MachineDescriptionToName();

                            break;

                        // Apply 1G1R
                        case "1g1r":
                            if (command.Arguments.Count == 0)
                            {
                                logger.User($"Invoked {command.Name} but no arguments were provided");
                                logger.User("Usage: 1g1r(region, ...);");
                                continue;
                            }

                            // Run the 1G1R functionality
                            datFile.OneGamePerRegion(command.Arguments);

                            break;

                        // Apply one rom per game (ORPG)
                        case "orpg":
                            if (command.Arguments.Count != 0)
                            {
                                logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: orpg();");
                                continue;
                            }

                            // Apply the logic
                            datFile.OneRomPerGame();

                            break;

                        // Remove a field
                        case "remove":
                            if (command.Arguments.Count == 0)
                            {
                                logger.User($"Invoked {command.Name} but no arguments were provided");
                                logger.User("Usage: remove(field, ...);");
                                continue;
                            }

                            // Run the removal functionality
                            datFile.RemoveFieldsFromItems(command.Arguments.Select(s => s.AsField()).ToList());

                            break;

                        // Apply scene date stripping
                        case "sds":
                            if (command.Arguments.Count != 0)
                            {
                                logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: sds();");
                                continue;
                            }

                            // Apply the logic
                            datFile.StripSceneDatesFromItems();

                            break;

                        // Set new output format(s)
                        case "format":
                            if (command.Arguments.Count == 0)
                            {
                                logger.User($"Invoked {command.Name} but no arguments were provided");
                                logger.User("Usage: format(datformat, ...);");
                                continue;
                            }

                            // Assume there could be multiple
                            datFile.Header.DatFormat = 0x00;
                            foreach (string format in command.Arguments)
                            {
                                datFile.Header.DatFormat |= format.AsDatFormat();
                            }

                            // If we had an invalid input, log and continue
                            if (datFile.Header.DatFormat == 0x00)
                            {
                                logger.User($"No valid output format found");
                                continue;
                            }

                            break;

                        // Set output directory
                        case "output":
                            if (command.Arguments.Count != 1)
                            {
                                logger.User($"Invoked {command.Name} and expected exactly 1 argument, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: output(outdir);");
                                continue;
                            }

                            // Only set the first as the output directory
                            outputDirectory = command.Arguments[0];
                            break;

                        // Write out the current DatFile
                        case "write":
                            if (command.Arguments.Count > 1)
                            {
                                logger.User($"Invoked {command.Name} and expected 0-1 arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: write([overwrite = true]);");
                                continue;
                            }

                            // Get overwrite value, if possible
                            bool? overwrite = true;
                            if (command.Arguments.Count == 1)
                            {
                                overwrite = command.Arguments[0].AsYesNo();
                            }

                            // If we had an invalid input, log and continue
                            if (overwrite == null)
                            {
                                logger.User($"{command.Arguments[0]} was an invalid true/false value");
                                continue;
                            }

                            // Write out the dat with the current state
                            datFile.Write(outputDirectory, overwrite: overwrite.Value);
                            break;

                        // Reset the internal state
                        case "reset":
                            if (command.Arguments.Count != 0)
                            {
                                logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                                logger.User("Usage: reset();");
                                continue;
                            }

                            // Reset all state variables
                            index           = 0;
                            datFile         = DatFile.Create();
                            outputDirectory = null;
                            break;

                        default:
                            logger.User($"Could not find a match for '{command.Name}'. Please see the help text for more details.");
                            break;
                        }
                    }
                }
                catch (Exception ex)
                {
                    logger.Error(ex, $"There was an exception processing {path}");
                    continue;
                }
            }
        }
Exemplo n.º 23
        public override void ProcessFeatures(Dictionary <string, Library.Help.Feature> features)
        {
            base.ProcessFeatures(features);

            // Get feature flags
            var updateFields = GetUpdateFields(features);
            var updateMode   = GetUpdateMode(features);

            // Normalize the extensions
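            // e.g. an AddExtension of "txt" becomes ".txt"; values that already
            // start with '.' (or are empty) pass through unchanged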
            Header.AddExtension = (string.IsNullOrWhiteSpace(Header.AddExtension) || Header.AddExtension.StartsWith(".")
                ? Header.AddExtension
                : $".{Header.AddExtension}");
            Header.ReplaceExtension = (string.IsNullOrWhiteSpace(Header.ReplaceExtension) || Header.ReplaceExtension.StartsWith(".")
                ? Header.ReplaceExtension
                : $".{Header.ReplaceExtension}");

            // If we're in a special update mode and the names aren't set, set defaults
            if (updateMode != 0)
            {
                // Get the values that will be used
                if (string.IsNullOrWhiteSpace(Header.Date))
                {
                    Header.Date = DateTime.Now.ToString("yyyy-MM-dd");
                }

                if (string.IsNullOrWhiteSpace(Header.Name))
                {
                    Header.Name = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
                                  + (Header.Type == "SuperDAT" ? "-SuperDAT" : string.Empty)
                                  + (Cleaner.DedupeRoms != DedupeType.None ? "-deduped" : string.Empty);
                }

                if (string.IsNullOrWhiteSpace(Header.Description))
                {
                    Header.Description = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
                                         + (Header.Type == "SuperDAT" ? "-SuperDAT" : string.Empty)
                                         + (Cleaner.DedupeRoms != DedupeType.None ? " - deduped" : string.Empty);

                    if (!GetBoolean(features, NoAutomaticDateValue))
                    {
                        Header.Description += $" ({Header.Date})";
                    }
                }

                if (string.IsNullOrWhiteSpace(Header.Category) && updateMode != 0)
                {
                    Header.Category = "DiffDAT";
                }

                if (string.IsNullOrWhiteSpace(Header.Author))
                {
                    Header.Author = "SabreTools";
                }
            }

            // If no update fields are set, default to Names
            if (updateFields == null || updateFields.Count == 0)
            {
                updateFields = new List <Field>()
                {
                    Field.DatItem_Name
                };
            }

            // Ensure we only have files in the inputs
            List <ParentablePath> inputPaths = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);
            List <ParentablePath> basePaths  = DirectoryExtensions.GetFilesOnly(GetList(features, BaseDatListValue));

            // If we're in standard update mode, run through all of the inputs
            if (updateMode == UpdateMode.None)
            {
                // Loop through each input and update
                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                {
                    // Create a new base DatFile
                    DatFile datFile = DatFile.Create(Header);
                    logger.User($"Processing '{Path.GetFileName(inputPath.CurrentPath)}'");
                    datFile.Parse(inputPath, keep: true,
                                  keepext: datFile.Header.DatFormat.HasFlag(DatFormat.TSV) ||
                                  datFile.Header.DatFormat.HasFlag(DatFormat.CSV) ||
                                  datFile.Header.DatFormat.HasFlag(DatFormat.SSV));

                    // Perform additional processing steps
                    datFile.ApplyExtras(Extras);
                    datFile.ApplySplitting(GetSplitType(features), false);
                    datFile.ApplyFilter(Filter);
                    datFile.ApplyCleaning(Cleaner);

                    // Get the correct output path
                    string realOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                    // Try to output the file, overwriting only if it's not in the current directory
                    datFile.Write(realOutDir, overwrite: GetBoolean(features, InplaceValue));
                });

                return;
            }

            // Reverse inputs if we're in a required mode
            if (updateMode.HasFlag(UpdateMode.DiffReverseCascade))
            {
                updateMode |= UpdateMode.DiffCascade;
                inputPaths.Reverse();
            }
            if (updateMode.HasFlag(UpdateMode.ReverseBaseReplace))
            {
                updateMode |= UpdateMode.BaseReplace;
                basePaths.Reverse();
            }

            // Create a DAT to capture inputs
            DatFile userInputDat = DatFile.Create(Header);

            // Populate using the correct set
            List <DatHeader> datHeaders;

            if (updateMode.HasFlag(UpdateMode.DiffAgainst) || updateMode.HasFlag(UpdateMode.BaseReplace))
            {
                datHeaders = userInputDat.PopulateUserData(basePaths);
            }
            else
            {
                datHeaders = userInputDat.PopulateUserData(inputPaths);
            }

            // Perform additional processing steps
            userInputDat.ApplyExtras(Extras);
            userInputDat.ApplySplitting(GetSplitType(features), false);
            userInputDat.ApplyFilter(Filter);
            userInputDat.ApplyCleaning(Cleaner);

            // Output only DatItems that are duplicated across inputs
            if (updateMode.HasFlag(UpdateMode.DiffDupesOnly))
            {
                DatFile dupeData = userInputDat.DiffDuplicates(inputPaths);

                InternalStopwatch watch = new InternalStopwatch("Outputting duplicate DAT");
                dupeData.Write(OutputDir, overwrite: false);
                watch.Stop();
            }

            // Output only DatItems that are not duplicated across inputs
            if (updateMode.HasFlag(UpdateMode.DiffNoDupesOnly))
            {
                DatFile outerDiffData = userInputDat.DiffNoDuplicates(inputPaths);

                InternalStopwatch watch = new InternalStopwatch("Outputting no duplicate DAT");
                outerDiffData.Write(OutputDir, overwrite: false);
                watch.Stop();
            }

            // Output only DatItems that are unique to each input
            if (updateMode.HasFlag(UpdateMode.DiffIndividualsOnly))
            {
                // Get all of the output DatFiles
                List <DatFile> datFiles = userInputDat.DiffIndividuals(inputPaths);

                // Loop through and output the new DatFiles
                InternalStopwatch watch = new InternalStopwatch("Outputting all individual DATs");

                Parallel.For(0, inputPaths.Count, Globals.ParallelOptions, j =>
                {
                    string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                    // Try to output the file
                    datFiles[j].Write(path, overwrite: GetBoolean(features, InplaceValue));
                });

                watch.Stop();
            }

            // Output cascaded diffs
            if (updateMode.HasFlag(UpdateMode.DiffCascade))
            {
                // Preprocess the DatHeaders
                Parallel.For(0, datHeaders.Count, Globals.ParallelOptions, j =>
                {
                    // If we're outputting to the runtime folder, rename
                    if (!GetBoolean(features, InplaceValue) && OutputDir == Environment.CurrentDirectory)
                    {
                        string innerpost = $" ({j} - {inputPaths[j].GetNormalizedFileName(true)} Only)";

                        datHeaders[j]              = userInputDat.Header;
                        datHeaders[j].FileName    += innerpost;
                        datHeaders[j].Name        += innerpost;
                        datHeaders[j].Description += innerpost;
                    }
                });

                // Get all of the output DatFiles
                List <DatFile> datFiles = userInputDat.DiffCascade(datHeaders);

                // Loop through and output the new DatFiles
                InternalStopwatch watch = new InternalStopwatch("Outputting all created DATs");

                int startIndex = GetBoolean(features, SkipFirstOutputValue) ? 1 : 0;
                Parallel.For(startIndex, inputPaths.Count, Globals.ParallelOptions, j =>
                {
                    string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                    // Try to output the file
                    datFiles[j].Write(path, overwrite: GetBoolean(features, InplaceValue));
                });

                watch.Stop();
            }

            // Output differences against a base DAT
            if (updateMode.HasFlag(UpdateMode.DiffAgainst))
            {
                // Loop through each input and diff against the base
                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                {
                    // Parse the path to a new DatFile
                    DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
                    repDat.Parse(inputPath, indexId: 1, keep: true);

                    // Perform additional processing steps
                    repDat.ApplyExtras(Extras);
                    repDat.ApplySplitting(GetSplitType(features), false);
                    repDat.ApplyFilter(Filter);
                    repDat.ApplyCleaning(Cleaner);

                    // Now diff the input DatFile against the base items
                    userInputDat.DiffAgainst(repDat, GetBoolean(features, ByGameValue));

                    // Finally output the diffed DatFile
                    string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
                    repDat.Write(interOutDir, overwrite: GetBoolean(features, InplaceValue));
                });
            }

            // Output DATs after replacing fields from a base DatFile
            if (updateMode.HasFlag(UpdateMode.BaseReplace))
            {
                // Loop through each input and apply the base DatFile
                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                {
                    // Parse the path to a new DatFile
                    DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
                    repDat.Parse(inputPath, indexId: 1, keep: true);

                    // Perform additional processing steps
                    repDat.ApplyExtras(Extras);
                    repDat.ApplySplitting(GetSplitType(features), false);
                    repDat.ApplyFilter(Filter);
                    repDat.ApplyCleaning(Cleaner);

                    // Now replace the fields from the base DatFile
                    userInputDat.BaseReplace(repDat, updateFields, GetBoolean(features, OnlySameValue));

                    // Finally output the replaced DatFile
                    string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
                    repDat.Write(interOutDir, overwrite: GetBoolean(features, InplaceValue));
                });
            }

            // Merge all input files and write
            // This has to be last due to the SuperDAT handling
            if (updateMode.HasFlag(UpdateMode.Merge))
            {
                // If we're in SuperDAT mode, prefix all games with their respective DATs
                if (string.Equals(userInputDat.Header.Type, "SuperDAT", StringComparison.OrdinalIgnoreCase))
                {
                    userInputDat.ApplySuperDAT(inputPaths);
                }

                userInputDat.Write(OutputDir);
            }
        }
    }
Exemplo n.º 24
        public override void ProcessFeatures(Dictionary <string, Feature> features)
        {
            base.ProcessFeatures(features);

            // Get a list of files from the input datfiles
            var datfiles     = GetList(features, DatListValue);
            var datfilePaths = DirectoryExtensions.GetFilesOnly(datfiles);

            // Get feature flags
            TreatAsFile asFiles   = GetTreatAsFiles(features);
            bool        hashOnly  = GetBoolean(features, HashOnlyValue);
            bool        quickScan = GetBoolean(features, QuickValue);
            var         splitType = GetSplitType(features);

            // If we are in individual mode, process each DAT on their own
            if (GetBoolean(features, IndividualValue))
            {
                foreach (ParentablePath datfile in datfilePaths)
                {
                    // Parse in from the file
                    DatFile datdata = DatFile.Create();
                    datdata.Parse(datfile, int.MaxValue, keep: true);

                    // Perform additional processing steps
                    datdata.ApplyExtras(Extras);
                    datdata.ApplySplitting(splitType, true);
                    datdata.ApplyFilter(Filter);
                    datdata.ApplyCleaning(Cleaner);

                    // Set depot information
                    datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation;

                    // If we have overridden the header skipper, set it now
                    if (!string.IsNullOrEmpty(Header.HeaderSkipper))
                    {
                        datdata.Header.HeaderSkipper = Header.HeaderSkipper;
                    }

                    // If we have the depot flag, respect it
                    if (Header.InputDepot?.IsActive ?? false)
                    {
                        datdata.VerifyDepot(Inputs);
                    }
                    else
                    {
                        // Loop through and add the inputs to check against
                        logger.User("Processing files:\n");
                        foreach (string input in Inputs)
                        {
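                            // Quick scan only computes CRC-32, trading accuracy for
                            // speed; otherwise the standard hash set is computed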
                            datdata.PopulateFromDir(input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
                        }

                        datdata.VerifyGeneric(hashOnly);
                    }

                    // Now write out if there are any items left
                    datdata.WriteStatsToConsole();
                    datdata.Write(OutputDir);
                }
            }
            // Otherwise, process all DATs into the same output
            else
            {
                InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

                // Add all of the input DATs into one huge internal DAT
                DatFile datdata = DatFile.Create();
                foreach (ParentablePath datfile in datfilePaths)
                {
                    datdata.Parse(datfile, int.MaxValue, keep: true);
                }

                // Perform additional processing steps
                datdata.ApplyExtras(Extras);
                datdata.ApplySplitting(splitType, true);
                datdata.ApplyFilter(Filter);
                datdata.ApplyCleaning(Cleaner);

                // Set depot information
                datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation;

                // If we have overridden the header skipper, set it now
                if (!string.IsNullOrEmpty(Header.HeaderSkipper))
                {
                    datdata.Header.HeaderSkipper = Header.HeaderSkipper;
                }

                watch.Stop();

                // If we have the depot flag, respect it
                if (Header.InputDepot?.IsActive ?? false)
                {
                    datdata.VerifyDepot(Inputs);
                }
                else
                {
                    // Loop through and add the inputs to check against
                    logger.User("Processing files:\n");
                    foreach (string input in Inputs)
                    {
                        datdata.PopulateFromDir(input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
                    }

                    datdata.VerifyGeneric(hashOnly);
                }

                // Now write out if there are any items left
                datdata.WriteStatsToConsole();
                datdata.Write(OutputDir);
            }
        }
Exemplo n.º 25
        /// <summary>
        /// Output non-cascading diffs
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">List of inputs to write out from</param>
        public static List <DatFile> DiffIndividuals(DatFile datFile, List <ParentablePath> inputs)
        {
            InternalStopwatch watch = new InternalStopwatch("Initializing all individual DATs");

            // Fill in any information not in the base DAT
            if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
            {
                datFile.Header.FileName = "All DATs";
            }

            if (string.IsNullOrWhiteSpace(datFile.Header.Name))
            {
                datFile.Header.Name = "All DATs";
            }

            if (string.IsNullOrWhiteSpace(datFile.Header.Description))
            {
                datFile.Header.Description = "All DATs";
            }

            // Loop through each of the inputs and get or create a new DatData object
            DatFile[] outDatsArray = new DatFile[inputs.Count];

            Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
            {
                string innerpost             = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
                DatFile diffData             = DatFile.Create(datFile.Header);
                diffData.Header.FileName    += innerpost;
                diffData.Header.Name        += innerpost;
                diffData.Header.Description += innerpost;
                diffData.Items  = new ItemDictionary();
                outDatsArray[j] = diffData;
            });

            // Create a list of DatData objects representing individual output files
            List <DatFile> outDats = outDatsArray.ToList();

            watch.Stop();

            // Now, loop through the dictionary and populate the correct DATs
            watch.Start("Populating all individual DATs");

            Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
            {
                ConcurrentList <DatItem> items = DatItem.Merge(datFile.Items[key]);

                // If the rom list is empty or null, just skip it
                if (items == null || items.Count == 0)
                {
                    return;
                }

                // Loop through and add the items correctly
                foreach (DatItem item in items)
                {
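                    // Keep items that are internal duplicates (duplicated only within
                    // a single input) or that carry no duplicate flags at all (unique)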
                    if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00)
                    {
                        outDats[item.Source.Index].Items.Add(key, item);
                    }
                }
            });

            watch.Stop();

            return(outDats.ToList());
        }
Exemplo n.º 26
        public override void ProcessFeatures(Dictionary <string, Library.Help.Feature> features)
        {
            base.ProcessFeatures(features);

            // Get feature flags
            bool        addBlankFiles   = GetBoolean(features, AddBlankFilesValue);
            bool        addFileDates    = GetBoolean(features, AddDateValue);
            TreatAsFile asFiles         = GetTreatAsFiles(features);
            bool        noAutomaticDate = GetBoolean(features, NoAutomaticDateValue);
            var         includeInScan   = GetIncludeInScan(features);
            var         skipFileType    = GetSkipFileType(features);
            var         splitType       = GetSplitType(features);

            // Apply the specialized field removals to the cleaner
            if (Cleaner.ExcludeFields == null)
            {
                Cleaner.ExcludeFields = new List <Field>();
            }

            if (!addFileDates)
            {
                Cleaner.ExcludeFields.Add(Field.DatItem_Date);
            }

            // Create a new DATFromDir object and process the inputs
            DatFile basedat = DatFile.Create(Header);

            basedat.Header.Date = DateTime.Now.ToString("yyyy-MM-dd");

            // For each input directory, create a DAT
            foreach (string path in Inputs)
            {
                if (Directory.Exists(path) || File.Exists(path))
                {
                    // Clone the base Dat for information
                    DatFile datdata = DatFile.Create(basedat.Header);

                    // Get the base path and fill the header, if needed
                    string basePath = Path.GetFullPath(path);
                    datdata.FillHeaderFromPath(basePath, noAutomaticDate);

                    // Now populate from the path
                    bool success = datdata.PopulateFromDir(
                        basePath,
                        asFiles,
                        skipFileType,
                        addBlankFiles,
                        hashes: includeInScan);

                    if (success)
                    {
                        // Perform additional processing steps
                        datdata.ApplyExtras(Extras);
                        datdata.ApplySplitting(splitType, false);
                        datdata.ApplyFilter(Filter);
                        datdata.ApplyCleaning(Cleaner);

                        // Write out the file
                        datdata.Write(OutputDir);
                    }
                    else
                    {
                        Console.WriteLine();
                        OutputRecursive(0);
                    }
                }
            }
        }
Exemplo n.º 27
        public override bool ProcessFeatures(Dictionary <string, Help.Feature> features)
        {
            // If the base fails, just fail out
            if (!base.ProcessFeatures(features))
            {
                return(false);
            }

            // Get feature flags
            bool        addBlankFiles   = GetBoolean(features, AddBlankFilesValue);
            bool        addFileDates    = GetBoolean(features, AddDateValue);
            TreatAsFile asFiles         = GetTreatAsFiles(features);
            bool        noAutomaticDate = GetBoolean(features, NoAutomaticDateValue);
            var         includeInScan   = GetIncludeInScan(features);
            var         skipFileType    = GetSkipFileType(features);

            // Apply the specialized field removals to the cleaner
            if (!addFileDates)
            {
                Remover.PopulateExclusionsFromList(new List <string> {
                    "DatItem.Date"
                });
            }

            // Create a new DATFromDir object and process the inputs
            DatFile basedat = DatFile.Create(Header);

            basedat.Header.Date = DateTime.Now.ToString("yyyy-MM-dd");

            // For each input directory, create a DAT
            foreach (string path in Inputs)
            {
                if (Directory.Exists(path) || File.Exists(path))
                {
                    // Clone the base Dat for information
                    DatFile datdata = DatFile.Create(basedat.Header);

                    // Get the base path and fill the header, if needed
                    string basePath = Path.GetFullPath(path);
                    datdata.FillHeaderFromPath(basePath, noAutomaticDate);

                    // Now populate from the path
                    bool success = DatTools.DatFromDir.PopulateFromDir(
                        datdata,
                        basePath,
                        asFiles,
                        skipFileType,
                        addBlankFiles,
                        hashes: includeInScan);

                    if (success)
                    {
                        // Perform additional processing steps
                        Extras.ApplyExtras(datdata);
                        Splitter.ApplySplitting(datdata, false);
                        Filter.ApplyFilters(datdata);
                        Cleaner.ApplyCleaning(datdata);
                        Remover.ApplyRemovals(datdata);

                        // Write out the file
                        Writer.Write(datdata, OutputDir);
                    }
                    else
                    {
                        Console.WriteLine();
                        OutputRecursive(0);
                    }
                }
            }

            return(true);
        }
Exemplo n.º 28
        /// <summary>
        /// Split a DAT by size of Rom
        /// </summary>
        /// <param name="datFile">Current DatFile object to split</param>
        /// <param name="chunkSize">Long value representing the total size to split at</param>
        /// <returns>Less Than and Greater Than DatFiles</returns>
        public static List <DatFile> SplitByTotalSize(DatFile datFile, long chunkSize)
        {
            // If the size is invalid, just return
            if (chunkSize <= 0)
            {
                return(new List <DatFile>());
            }
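
            // Usage sketch (hypothetical values and output path): split a parsed DAT
            // into roughly 4 GiB chunks and write each piece out:
            //
            //     List<DatFile> chunks = SplitByTotalSize(datFile, 4L * 1024 * 1024 * 1024);
            //     foreach (DatFile chunk in chunks)
            //         Writer.Write(chunk, outputDirectory);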

            // Create each of the respective output DATs
            InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by total size");

            // Sort the DatFile by machine name
            datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None);

            // Get the keys in a known order for easier sorting
            var keys = datFile.Items.SortedKeys;

            // Get the output list
            List <DatFile> datFiles = new List <DatFile>();

            // Initialize everything
            long    currentSize  = 0;
            long    currentIndex = 0;
            DatFile currentDat   = DatFile.Create(datFile.Header.CloneStandard());

            currentDat.Header.FileName    += $"_{currentIndex}";
            currentDat.Header.Name        += $"_{currentIndex}";
            currentDat.Header.Description += $"_{currentIndex}";

            // Loop through each machine
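            // This is greedy first-fit packing: machines are taken in sorted key order
            // and appended to the current chunk until the next machine would push the
            // running total past chunkSize, at which point a new chunk is started.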
            foreach (string machine in keys)
            {
                // Get the current machine
                var items = datFile.Items[machine];
                if (items == null || !items.Any())
                {
                    logger.Error($"{machine} contains no items and will be skipped");
                    continue;
                }

                // Get the total size of the current machine
                long machineSize = 0;
                foreach (var item in items)
                {
                    if (item is Rom rom)
                    {
                        // TODO: Should there be more than just a log if a single item is larger than the chunksize?
                        machineSize += rom.Size ?? 0;
                        if ((rom.Size ?? 0) > chunkSize)
                        {
                            logger.Error($"{rom.GetName() ?? string.Empty} in {machine} is larger than {chunkSize}");
                        }
                    }
                }

                // If the current machine size is greater than the chunk size by itself, we want to log and skip
                // TODO: Should this eventually try to split the machine here?
                if (machineSize > chunkSize)
                {
                    logger.Error($"{machine} is larger than {chunkSize} and will be skipped");
                    continue;
                }

                // If the current machine size makes the current DatFile too big, split
                else if (currentSize + machineSize > chunkSize)
                {
                    datFiles.Add(currentDat);
                    currentSize = 0;
                    currentIndex++;
                    currentDat = DatFile.Create(datFile.Header.CloneStandard());
                    currentDat.Header.FileName    += $"_{currentIndex}";
                    currentDat.Header.Name        += $"_{currentIndex}";
                    currentDat.Header.Description += $"_{currentIndex}";
                }

                // Add the current machine to the current DatFile
                currentDat.Items[machine] = items;
                currentSize += machineSize;
            }

            // Add the final DatFile to the list
            datFiles.Add(currentDat);

            // Then return the list
            watch.Stop();
            return(datFiles);
        }
Exemplo n.º 29
        static void Main(string[] args)
        {
            string[] selection = { "Create", "Open", "Edit" };
            string[] doctype   = { "txt", "dat" };

            while (true)
            {
                Console.WriteLine(new string('-', 30));
                Console.Write("Введите путь к документу: ");
                string path = Console.ReadLine();
                Console.WriteLine(new string('-', 30));


                // A file in .txt or .dat format
                AbstractHandler file;

                Console.WriteLine("Выберите тип документа");
                for (int i = 0; i < doctype.Length; i++)
                {
                    Console.WriteLine($"{i + 1}.{doctype[i]}");
                }
                Console.Write("---> ");
                int n = Convert.ToInt32(Console.ReadLine());

                switch (n)
                {
                case 1:
                    file = new TxtFile(path);
                    break;

                case 2:
                    file = new DatFile(path);
                    break;

                default:
                    file = null;
                    break;
                }

                // Guard against an invalid menu choice before using the handler
                if (file == null)
                {
                    Console.WriteLine("Invalid document type selected.");
                    continue;
                }
                Console.WriteLine(new string('-', 30));

                Console.WriteLine("Выберите что сделать с документом");
                for (int i = 0; i < selection.Length; i++)
                {
                    Console.WriteLine($"{i + 1}.{selection[i]}");
                }
                Console.Write("---> ");
                int m = Convert.ToInt32(Console.ReadLine());

                switch (m)
                {
                case 1:
                    file.Create();
                    break;

                case 2:
                    file.Open();
                    break;

                case 3:
                    file.Edit();
                    break;
                }
                Console.WriteLine(new string('-', 30));

                Console.WriteLine("Для продолжения нажмите ENTER, чтобы выйти введите Q");
                Console.Write("--->");
                string exit = Console.ReadLine();
                if (exit == "Q" || exit == "q")
                {
                    break;
                }
            }
        }
Exemplo n.º 30
        public override bool ProcessFeatures(Dictionary <string, SabreTools.Help.Feature> features)
        {
            // If the base fails, just fail out
            if (!base.ProcessFeatures(features))
            {
                return(false);
            }

            // Get feature flags
            int    workers      = GetInt32(features, WorkersInt32Value);
            string missingSha1s = GetString(features, MissingSha1sStringValue);

            // Make sure the db is set
            if (string.IsNullOrWhiteSpace(_db))
            {
                _db = "db.sqlite";
                _connectionString = $"Data Source={_db};Version = 3;";
            }

            // Make sure the file exists
            if (!File.Exists(_db))
            {
                EnsureDatabase(_db, _connectionString);
            }

            // Make sure the dats dir is set
            if (string.IsNullOrWhiteSpace(_dats))
            {
                _dats = "dats";
            }

            _dats = Path.Combine(PathTool.GetRuntimeDirectory(), _dats);

            // Make sure the folder exists
            if (!Directory.Exists(_dats))
            {
                Directory.CreateDirectory(_dats);
            }

            // First get a list of SHA-1's from the input DATs
            DatFile datroot = DatFile.Create();

            datroot.Header.Type = "SuperDAT";
            DatFromDir.PopulateFromDir(datroot, _dats, asFiles: TreatAsFile.NonArchive, hashes: Hash.Standard);
            datroot.Items.BucketBy(ItemKey.SHA1, DedupeType.None);

            // Create a List of dat hashes in the database (SHA-1)
            List <string> databaseDats = new List <string>();
            List <string> unneeded     = new List <string>();

            SqliteConnection dbc = new SqliteConnection(_connectionString);

            dbc.Open();

            // Populate the List from the database
            InternalStopwatch watch = new InternalStopwatch("Populating the list of existing DATs");

            string           query = "SELECT DISTINCT hash FROM dat";
            SqliteCommand    slc   = new SqliteCommand(query, dbc);
            SqliteDataReader sldr  = slc.ExecuteReader();

            while (sldr.Read())
            {
                string hash = sldr.GetString(0);
                if (datroot.Items.ContainsKey(hash))
                {
                    datroot.Items.Remove(hash);
                    databaseDats.Add(hash);
                }
                else if (!databaseDats.Contains(hash))
                {
                    unneeded.Add(hash);
                }
            }

            datroot.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);

            watch.Stop();

            slc.Dispose();
            sldr.Dispose();

            // Loop through the Dictionary and add all data
            watch.Start("Adding new DAT information");
            foreach (string key in datroot.Items.Keys)
            {
                foreach (Rom value in datroot.Items[key])
                {
                    AddDatToDatabase(value, dbc);
                }
            }

            watch.Stop();

            // Now loop through and remove all references to old Dats
            if (unneeded.Count > 0)
            {
                watch.Start("Removing unmatched DAT information");

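                // Build a single DELETE covering every unneeded hash; the first
                // appended condition produces "WHERE OR", which is normalized back
                // to "WHERE" before executing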
                query = "DELETE FROM dat WHERE";
                foreach (string dathash in unneeded)
                {
                    query += $" OR hash=\"{dathash}\"";
                }

                query = query.Replace("WHERE OR", "WHERE");
                slc   = new SqliteCommand(query, dbc);
                slc.ExecuteNonQuery();
                slc.Dispose();

                watch.Stop();
            }

            dbc.Dispose();
            return(true);
        }