Example #1
        public override bool ProcessFeatures(Dictionary<string, Help.Feature> features)
        {
            // If the base fails, just fail out
            if (!base.ProcessFeatures(features))
            {
                return false;
            }

            // Get feature flags
            var updateDatItemFields = GetUpdateDatItemFields(features);
            var updateMachineFields = GetUpdateMachineFields(features);
            var updateMode          = GetUpdateMode(features);

            // Normalize the extensions
            Header.AddExtension = (string.IsNullOrWhiteSpace(Header.AddExtension) || Header.AddExtension.StartsWith(".")
                ? Header.AddExtension
                : $".{Header.AddExtension}");
            Header.ReplaceExtension = (string.IsNullOrWhiteSpace(Header.ReplaceExtension) || Header.ReplaceExtension.StartsWith(".")
                ? Header.ReplaceExtension
                : $".{Header.ReplaceExtension}");

            // If we're in a non-replacement special update mode and the names aren't set, set defaults
            if (updateMode != 0 &&
                !(updateMode.HasFlag(UpdateMode.DiffAgainst) || updateMode.HasFlag(UpdateMode.BaseReplace)))
            {
                // Get the values that will be used
                if (string.IsNullOrWhiteSpace(Header.Date))
                {
                    Header.Date = DateTime.Now.ToString("yyyy-MM-dd");
                }

                if (string.IsNullOrWhiteSpace(Header.Name))
                {
                    Header.Name = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
                                  + (Header.Type == "SuperDAT" ? "-SuperDAT" : string.Empty)
                                  + (Cleaner.DedupeRoms != DedupeType.None ? "-deduped" : string.Empty);
                }

                if (string.IsNullOrWhiteSpace(Header.Description))
                {
                    Header.Description = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
                                         + (Header.Type == "SuperDAT" ? "-SuperDAT" : string.Empty)
                                         + (Cleaner.DedupeRoms != DedupeType.None ? " - deduped" : string.Empty);

                    if (!GetBoolean(features, NoAutomaticDateValue))
                    {
                        Header.Description += $" ({Header.Date})";
                    }
                }

                if (string.IsNullOrWhiteSpace(Header.Category) && updateMode != 0)
                {
                    Header.Category = "DiffDAT";
                }

                if (string.IsNullOrWhiteSpace(Header.Author))
                {
                    Header.Author = $"SabreTools {Prepare.Version}";
                }

                if (string.IsNullOrWhiteSpace(Header.Comment))
                {
                    Header.Comment = $"Generated by SabreTools {Prepare.Version}";
                }
            }

            // If no update fields are set, default to Names
            if (updateDatItemFields == null || updateDatItemFields.Count == 0)
            {
                updateDatItemFields = new List<DatItemField>() { DatItemField.Name };
            }

            // Ensure we only have files in the inputs
            List<ParentablePath> inputPaths = PathTool.GetFilesOnly(Inputs, appendparent: true);
            List<ParentablePath> basePaths  = PathTool.GetFilesOnly(GetList(features, BaseDatListValue));

            // Ensure the output directory
            OutputDir = OutputDir.Ensure();

            // If we're in standard update mode, run through all of the inputs
            if (updateMode == UpdateMode.None)
            {
                // Loop through each input and update
                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                {
                    // Create a new base DatFile
                    DatFile datFile = DatFile.Create(Header);
                    logger.User($"Processing '{Path.GetFileName(inputPath.CurrentPath)}'");
                    Parser.ParseInto(datFile, inputPath, keep: true,
                                     keepext: datFile.Header.DatFormat.HasFlag(DatFormat.TSV) ||
                                     datFile.Header.DatFormat.HasFlag(DatFormat.CSV) ||
                                     datFile.Header.DatFormat.HasFlag(DatFormat.SSV));

                    // Perform additional processing steps
                    Extras.ApplyExtras(datFile);
                    Splitter.ApplySplitting(datFile, false);
                    Filter.ApplyFilters(datFile);
                    Cleaner.ApplyCleaning(datFile);
                    Remover.ApplyRemovals(datFile);

                    // Get the correct output path
                    string realOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                    // Try to output the file, overwriting only if it's not in the current directory
                    Writer.Write(datFile, realOutDir, overwrite: GetBoolean(features, InplaceValue));
                });

                return true;
            }

            // Reverse inputs if we're in a required mode
            if (updateMode.HasFlag(UpdateMode.DiffReverseCascade))
            {
                updateMode |= UpdateMode.DiffCascade;
                inputPaths.Reverse();
            }
            if (updateMode.HasFlag(UpdateMode.ReverseBaseReplace))
            {
                updateMode |= UpdateMode.BaseReplace;
                basePaths.Reverse();
            }

            // Create a DAT to capture inputs
            DatFile userInputDat = DatFile.Create(Header);

            // Populate using the correct set
            List<DatHeader> datHeaders;

            if (updateMode.HasFlag(UpdateMode.DiffAgainst) || updateMode.HasFlag(UpdateMode.BaseReplace))
            {
                datHeaders = DatFileTool.PopulateUserData(userInputDat, basePaths);
            }
            else
            {
                datHeaders = DatFileTool.PopulateUserData(userInputDat, inputPaths);
            }

            // Perform additional processing steps
            Extras.ApplyExtras(userInputDat);
            Splitter.ApplySplitting(userInputDat, false);
            Filter.ApplyFilters(userInputDat);
            Cleaner.ApplyCleaning(userInputDat);
            Remover.ApplyRemovals(userInputDat);

            // Output only DatItems that are duplicated across inputs
            if (updateMode.HasFlag(UpdateMode.DiffDupesOnly))
            {
                DatFile dupeData = DatFileTool.DiffDuplicates(userInputDat, inputPaths);

                InternalStopwatch watch = new InternalStopwatch("Outputting duplicate DAT");
                Writer.Write(dupeData, OutputDir, overwrite: false);
                watch.Stop();
            }

            // Output only DatItems that are not duplicated across inputs
            if (updateMode.HasFlag(UpdateMode.DiffNoDupesOnly))
            {
                DatFile outerDiffData = DatFileTool.DiffNoDuplicates(userInputDat, inputPaths);

                InternalStopwatch watch = new InternalStopwatch("Outputting no duplicate DAT");
                Writer.Write(outerDiffData, OutputDir, overwrite: false);
                watch.Stop();
            }

            // Output only DatItems that are unique to each input
            if (updateMode.HasFlag(UpdateMode.DiffIndividualsOnly))
            {
                // Get all of the output DatFiles
                List<DatFile> datFiles = DatFileTool.DiffIndividuals(userInputDat, inputPaths);

                // Loop through and output the new DatFiles
                InternalStopwatch watch = new InternalStopwatch("Outputting all individual DATs");

                Parallel.For(0, inputPaths.Count, Globals.ParallelOptions, j =>
                {
                    string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                    // Try to output the file
                    Writer.Write(datFiles[j], path, overwrite: GetBoolean(features, InplaceValue));
                });

                watch.Stop();
            }

            // Output cascaded diffs
            if (updateMode.HasFlag(UpdateMode.DiffCascade))
            {
                // Preprocess the DatHeaders
                Parallel.For(0, datHeaders.Count, Globals.ParallelOptions, j =>
                {
                    // If we're outputting to the runtime folder, rename
                    if (!GetBoolean(features, InplaceValue) && OutputDir == Environment.CurrentDirectory)
                    {
                        string innerpost = $" ({j} - {inputPaths[j].GetNormalizedFileName(true)} Only)";

                        datHeaders[j]              = userInputDat.Header;
                        datHeaders[j].FileName    += innerpost;
                        datHeaders[j].Name        += innerpost;
                        datHeaders[j].Description += innerpost;
                    }
                });

                // Get all of the output DatFiles
                List<DatFile> datFiles = DatFileTool.DiffCascade(userInputDat, datHeaders);

                // Loop through and output the new DatFiles
                InternalStopwatch watch = new InternalStopwatch("Outputting all created DATs");

                int startIndex = GetBoolean(features, SkipFirstOutputValue) ? 1 : 0;
                Parallel.For(startIndex, inputPaths.Count, Globals.ParallelOptions, j =>
                {
                    string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                    // Try to output the file
                    Writer.Write(datFiles[j], path, overwrite: GetBoolean(features, InplaceValue));
                });

                watch.Stop();
            }

            // Output differences against a base DAT
            if (updateMode.HasFlag(UpdateMode.DiffAgainst))
            {
                // Loop through each input and diff against the base
                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                {
                    // Parse the path to a new DatFile
                    DatFile repDat = DatFile.Create(Header);
                    Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true);

                    // Perform additional processing steps
                    Extras.ApplyExtras(repDat);
                    Splitter.ApplySplitting(repDat, false);
                    Filter.ApplyFilters(repDat);
                    Cleaner.ApplyCleaning(repDat);
                    Remover.ApplyRemovals(repDat);

                    // Now replace the fields from the base DatFile
                    DatFileTool.DiffAgainst(userInputDat, repDat, GetBoolean(features, ByGameValue));

                    // Finally output the diffed DatFile
                    string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
                    Writer.Write(repDat, interOutDir, overwrite: GetBoolean(features, InplaceValue));
                });
            }

            // Output DATs after replacing fields from a base DatFile
            if (updateMode.HasFlag(UpdateMode.BaseReplace))
            {
                // Loop through each input and apply the base DatFile
                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                {
                    // Parse the path to a new DatFile
                    DatFile repDat = DatFile.Create(Header);
                    Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true);

                    // Perform additional processing steps
                    Extras.ApplyExtras(repDat);
                    Splitter.ApplySplitting(repDat, false);
                    Filter.ApplyFilters(repDat);
                    Cleaner.ApplyCleaning(repDat);
                    Remover.ApplyRemovals(repDat);

                    // Now replace the fields from the base DatFile
                    DatFileTool.BaseReplace(
                        userInputDat,
                        repDat,
                        updateMachineFields,
                        updateDatItemFields,
                        GetBoolean(features, OnlySameValue));

                    // Finally output the replaced DatFile
                    string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
                    Writer.Write(repDat, interOutDir, overwrite: GetBoolean(features, InplaceValue));
                });
            }

            // Merge all input files and write
            // This has to be last due to the SuperDAT handling
            if (updateMode.HasFlag(UpdateMode.Merge))
            {
                // If we're in SuperDAT mode, prefix all games with their respective DATs
                if (string.Equals(userInputDat.Header.Type, "SuperDAT", StringComparison.OrdinalIgnoreCase))
                {
                    DatFileTool.ApplySuperDAT(userInputDat, inputPaths);
                }

                Writer.Write(userInputDat, OutputDir);
            }

            return true;
        }
    }
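A note on the extension handling at the top of Example #1: the two ternaries only prepend a dot when the value is non-blank and does not already start with one. Below is a minimal standalone sketch of that normalization, assuming nothing beyond the .NET base class library; NormalizeExtension is an illustrative helper name, not a SabreTools API.

using System;

public static class ExtensionHelper
{
    // Mirrors the normalization in Example #1: leave null/blank values and
    // values that already start with '.' untouched, otherwise prepend the dot.
    public static string NormalizeExtension(string extension)
    {
        return string.IsNullOrWhiteSpace(extension) || extension.StartsWith(".")
            ? extension
            : $".{extension}";
    }

    public static void Main()
    {
        Console.WriteLine(NormalizeExtension("zip"));    // .zip
        Console.WriteLine(NormalizeExtension(".7z"));    // .7z
        Console.WriteLine(NormalizeExtension("") == ""); // True (blank stays blank)
    }
}
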
Example #2
        public override bool ProcessFeatures(Dictionary<string, Help.Feature> features)
        {
            // If the base fails, just fail out
            if (!base.ProcessFeatures(features))
            {
                return false;
            }

            // Get feature flags
            bool        addBlankFiles   = GetBoolean(features, AddBlankFilesValue);
            bool        addFileDates    = GetBoolean(features, AddDateValue);
            TreatAsFile asFiles         = GetTreatAsFiles(features);
            bool        noAutomaticDate = GetBoolean(features, NoAutomaticDateValue);
            var         includeInScan   = GetIncludeInScan(features);
            var         skipFileType    = GetSkipFileType(features);

            // Apply the specialized field removals to the cleaner
            if (!addFileDates)
            {
                Remover.PopulateExclusionsFromList(new List<string> { "DatItem.Date" });
            }

            // Create a new DATFromDir object and process the inputs
            DatFile basedat = DatFile.Create(Header);

            basedat.Header.Date = DateTime.Now.ToString("yyyy-MM-dd");

            // For each input directory, create a DAT
            foreach (string path in Inputs)
            {
                if (Directory.Exists(path) || File.Exists(path))
                {
                    // Clone the base Dat for information
                    DatFile datdata = DatFile.Create(basedat.Header);

                    // Get the base path and fill the header, if needed
                    string basePath = Path.GetFullPath(path);
                    datdata.FillHeaderFromPath(basePath, noAutomaticDate);

                    // Now populate from the path
                    bool success = DatTools.DatFromDir.PopulateFromDir(
                        datdata,
                        basePath,
                        asFiles,
                        skipFileType,
                        addBlankFiles,
                        hashes: includeInScan);

                    if (success)
                    {
                        // Perform additional processing steps
                        Extras.ApplyExtras(datdata);
                        Splitter.ApplySplitting(datdata, false);
                        Filter.ApplyFilters(datdata);
                        Cleaner.ApplyCleaning(datdata);
                        Remover.ApplyRemovals(datdata);

                        // Write out the file
                        Writer.Write(datdata, OutputDir);
                    }
                    else
                    {
                        Console.WriteLine();
                        OutputRecursive(0);
                    }
                }
            }

            return true;
        }
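Example #2 only builds a DAT for inputs that exist on disk (Directory.Exists(path) || File.Exists(path)) and resolves each one to an absolute path before filling the header. Here is a minimal sketch of that input-filtering step in isolation, using only standard .NET APIs; CollectExistingPaths is an illustrative name, not part of SabreTools.

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

public static class InputFilter
{
    // Keep only inputs that exist as a file or directory, normalized to
    // absolute paths, mirroring the guard at the top of the loop in Example #2.
    public static List<string> CollectExistingPaths(IEnumerable<string> inputs)
    {
        return inputs
            .Where(path => Directory.Exists(path) || File.Exists(path))
            .Select(Path.GetFullPath)
            .ToList();
    }

    public static void Main()
    {
        var inputs = new[] { ".", "does-not-exist.dat" };
        foreach (string path in CollectExistingPaths(inputs))
            Console.WriteLine(path);
    }
}
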
Example #3
        public override bool ProcessFeatures(Dictionary<string, Feature> features)
        {
            // If the base fails, just fail out
            if (!base.ProcessFeatures(features))
            {
                return false;
            }

            // Get a list of files from the input datfiles
            var datfiles     = GetList(features, DatListValue);
            var datfilePaths = PathTool.GetFilesOnly(datfiles);

            // Get feature flags
            TreatAsFile asFiles   = GetTreatAsFiles(features);
            bool        hashOnly  = GetBoolean(features, HashOnlyValue);
            bool        quickScan = GetBoolean(features, QuickValue);

            // If we are in individual mode, process each DAT on its own
            if (GetBoolean(features, IndividualValue))
            {
                foreach (ParentablePath datfile in datfilePaths)
                {
                    // Parse in from the file
                    DatFile datdata = DatFile.Create();
                    Parser.ParseInto(datdata, datfile, int.MaxValue, keep: true);

                    // Perform additional processing steps
                    Extras.ApplyExtras(datdata);
                    Splitter.ApplySplitting(datdata, true);
                    Filter.ApplyFilters(datdata);
                    Cleaner.ApplyCleaning(datdata);
                    Remover.ApplyRemovals(datdata);

                    // Set depot information
                    datdata.Header.InputDepot = Header.InputDepot?.Clone() as DepotInformation;

                    // If we have overridden the header skipper, set it now
                    if (!string.IsNullOrEmpty(Header.HeaderSkipper))
                    {
                        datdata.Header.HeaderSkipper = Header.HeaderSkipper;
                    }

                    // If we have the depot flag, respect it
                    if (Header.InputDepot?.IsActive ?? false)
                    {
                        Verification.VerifyDepot(datdata, Inputs);
                    }
                    else
                    {
                        // Loop through and add the inputs to check against
                        logger.User("Processing files:\n");
                        foreach (string input in Inputs)
                        {
                            DatTools.DatFromDir.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
                        }

                        Verification.VerifyGeneric(datdata, hashOnly);
                    }

                    // Now write out if there are any items left
                    Writer.WriteStatsToConsole(datdata);
                    Writer.Write(datdata, OutputDir);
                }
            }
            // Otherwise, process all DATs into the same output
            else
            {
                InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

                // Add all of the input DATs into one huge internal DAT
                DatFile datdata = DatFile.Create();
                foreach (ParentablePath datfile in datfilePaths)
                {
                    Parser.ParseInto(datdata, datfile, int.MaxValue, keep: true);
                }

                // Perform additional processing steps
                Extras.ApplyExtras(datdata);
                Splitter.ApplySplitting(datdata, true);
                Filter.ApplyFilters(datdata);
                Cleaner.ApplyCleaning(datdata);
                Remover.ApplyRemovals(datdata);

                // Set depot information
                datdata.Header.InputDepot = Header.InputDepot?.Clone() as DepotInformation;

                // If we have overridden the header skipper, set it now
                if (!string.IsNullOrEmpty(Header.HeaderSkipper))
                {
                    datdata.Header.HeaderSkipper = Header.HeaderSkipper;
                }

                watch.Stop();

                // If we have the depot flag, respect it
                if (Header.InputDepot?.IsActive ?? false)
                {
                    Verification.VerifyDepot(datdata, Inputs);
                }
                else
                {
                    // Loop through and add the inputs to check against
                    logger.User("Processing files:\n");
                    foreach (string input in Inputs)
                    {
                        DatTools.DatFromDir.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
                    }

                    Verification.VerifyGeneric(datdata, hashOnly);
                }

                // Now write out if there are any items left
                Writer.WriteStatsToConsole(datdata);
                Writer.Write(datdata, OutputDir);
            }

            return true;
        }
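Both branches of Example #3 copy depot settings with Header.InputDepot?.Clone() as DepotInformation, i.e. a null-conditional Clone() followed by a safe cast. The sketch below shows that pattern on its own, assuming only that the type implements ICloneable; DepotInfo is a stand-in type for illustration, not the SabreTools DepotInformation class.

using System;

// Stand-in for a cloneable settings object such as DepotInformation.
public class DepotInfo : ICloneable
{
    public bool IsActive { get; set; }
    public int Depth { get; set; }

    public object Clone() => new DepotInfo { IsActive = IsActive, Depth = Depth };
}

public static class CloneDemo
{
    public static void Main()
    {
        DepotInfo source = new DepotInfo { IsActive = true, Depth = 4 };

        // Null-conditional clone plus safe cast, as in Example #3:
        // a null source yields null, otherwise an independent copy.
        DepotInfo copy = source?.Clone() as DepotInfo;

        copy.Depth = 8;
        Console.WriteLine($"source={source.Depth}, copy={copy.Depth}"); // source=4, copy=8
    }
}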