// Group: Functions
// __________________________________________________________________________


/* Constructor: BuildState
 */
public BuildState ()
	{
	// NOTE(review): a second BuildState() constructor with additional home page fields appears
	// later in this chunk — confirm only one version is actually compiled.

	accessLock = new object();

	// Empty sets; they get filled in as the build discovers content.
	sourceFilesWithContent = new NumberSet();
	classesWithContent = new NumberSet();

	usedImageFiles = new NumberSet();
	usedMenuDataFiles = new NumberSetTable<Hierarchy>();
	}
// Group: Functions
// __________________________________________________________________________


/* Constructor: BuildState
 */
public BuildState ()
	{
	accessLock = new object();

	// Empty sets; they get filled in as the build discovers content.
	sourceFilesWithContent = new NumberSet();
	classesWithContent = new NumberSet();

	usedImageFiles = new NumberSet();
	usedMenuDataFiles = new NumberSetTable<Hierarchy>();

	homePage = null;
	generatedTimestamp = null;

	// Start out assuming the timestamp is used, since the default home page includes it.  The cost of
	// getting this wrong is asymmetric: an incorrect true only causes one extra file to be rebuilt,
	// whereas an incorrect false means a file that should be rebuilt isn't.
	homePageUsesTimestamp = true;
	}
/* Function: AssignDataFiles
 *
 * Segments the menu into smaller pieces and generates data file names.
 *
 * Returns:
 *
 *		A table mapping each <Hierarchy> to the data file numbers used for it, such as Files -> {1-4}.
 */
protected NumberSetTable<Hierarchy> AssignDataFiles ()
	{
	var dataFileAssignments = new NumberSetTable<Hierarchy>();

	// Walk whichever menu roots exist.  The per-container overload records every file number it hands
	// out in the shared table, so later roots see the assignments made by earlier ones.

	if (rootFileMenu != null)
		{  AssignDataFiles(rootFileMenu, ref dataFileAssignments);  }

	if (rootClassMenu != null)
		{  AssignDataFiles(rootClassMenu, ref dataFileAssignments);  }

	if (rootDatabaseMenu != null)
		{  AssignDataFiles(rootDatabaseMenu, ref dataFileAssignments);  }

	return dataFileAssignments;
	}
/* Function: AssignDataFiles
 *
 * Segments the menu into smaller pieces and generates data file names.  This container is always
 * assigned its own data file; its subcontainers are then greedily inlined into it, smallest first,
 * until the segment length limit is reached, and any that don't fit recursively get files of their own.
 *
 * Parameters:
 *
 *		container - The container to segment.  This will always be assigned a data file name.
 *		usedDataFiles - A table mapping each <Hierarchy> to the data file numbers already in use for it, such as Files -> {1-4}.
 *						It will be used to determine which numbers are available to assign, and new numbers will be added to it
 *						as they are assigned by this function.
 */
protected void AssignDataFiles (JSONMenuEntries.Container container, ref NumberSetTable<Hierarchy> usedDataFiles)
	{
	// Generate the data file name for this container.  Numbers are never released, so the lowest
	// available one is simply the next unused one for this hierarchy.

	Hierarchy hierarchy = container.MenuEntry.Hierarchy;

	int dataFileNumber = usedDataFiles.LowestAvailable(hierarchy);
	usedDataFiles.Add(hierarchy, dataFileNumber);

	container.DataFileName = Paths.Menu.OutputFile(Target.OutputFolder, hierarchy, dataFileNumber, fileNameOnly: true);


	// The data file has to include all the members in this container no matter what, so we don't check the size against the limit
	// yet.  Sizes here are JSON string lengths in characters; SegmentLength is presumably the same unit — TODO confirm.

	int containerJSONSize = container.JSONBeforeMembers.Length + container.JSONAfterMembers.Length +
									container.JSONLengthOfMembers;


	// Now find all the subcontainers, which are now candidates for inlining.  The list is created
	// lazily so leaf containers allocate nothing.

	List<JSONMenuEntries.Container> inliningCandidates = null;

	foreach (var member in container.Members)
		{
		if (member is JSONMenuEntries.Container)
			{
			var containerMember = (JSONMenuEntries.Container)member;

			if (inliningCandidates == null)
				{  inliningCandidates = new List<JSONMenuEntries.Container>();  }

			inliningCandidates.Add(containerMember);
			}
		}


	// If there's no subcontainers we're done.

	if (inliningCandidates == null)
		{  return;  }


	// Go through all our candidates and inline them smallest to largest.  This prevents one very large container early in the list
	// from causing all the other ones to be broken out into separate files.

	// Keep track of which containers were inlined so we can possibly inline their members as well.

	List<JSONMenuEntries.Container> inlinedContainers = new List<JSONMenuEntries.Container>();

	while (inliningCandidates.Count > 0)
		{
		// Find the smallest of the candidates.  Linear scan per iteration; candidate lists are small
		// enough in practice that this is fine.  Ties resolve to the earliest candidate.

		int smallestInliningCandidateIndex = 0;
		int smallestInliningCandidateSize = inliningCandidates[0].JSONLengthOfMembers;

		for (int i = 1; i < inliningCandidates.Count; i++)
			{
			if (inliningCandidates[i].JSONLengthOfMembers < smallestInliningCandidateSize)
				{
				smallestInliningCandidateIndex = i;
				smallestInliningCandidateSize = inliningCandidates[i].JSONLengthOfMembers;
				}
			}

		// If the smallest candidate fits into the segment length limits, inline it

		if (containerJSONSize + smallestInliningCandidateSize <= SegmentLength)
			{
			containerJSONSize += smallestInliningCandidateSize;
			inlinedContainers.Add(inliningCandidates[smallestInliningCandidateIndex]);
			inliningCandidates.RemoveAt(smallestInliningCandidateIndex);
			}

		// If the smallest candidate doesn't fit, that means it and all the remaining candidates need to get their own files

		else
			{
			foreach (var inliningCandidate in inliningCandidates)
				{  AssignDataFiles(inliningCandidate, ref usedDataFiles);  }

			inliningCandidates.Clear();
			}

		// If there's no more candidates, go through the list of inlined containers and add their subcontainers to the candidates
		// list.  This allows us to continue inlining for multiple levels as long as we have space for it.

		// This algorithm causes inlining to happen breadth-first instead of depth-first, which we want, but it also allows lower
		// depths to continue to be inlined even if the parent level couldn't be done completely.  It's possible that when there's
		// no room for all the top-level containers a few more lower level ones could still be squeezed in.

		if (inliningCandidates.Count == 0 && inlinedContainers.Count > 0)
			{
			foreach (var inlinedContainer in inlinedContainers)
				{
				foreach (var member in inlinedContainer.Members)
					{
					if (member is JSONMenuEntries.Container)
						{  inliningCandidates.Add((JSONMenuEntries.Container)member);  }
					}
				}

			inlinedContainers.Clear();
			}
		}
	}