Example #1
0
        /// <summary>
        /// Starts this worker by launching a managed task thread appropriate for the probe type.
        /// </summary>
        /// <param name="Log">Writer for log messages</param>
        public void Start(LineBasedTextWriter Log)
        {
            // Only one managed process may be driven by this worker at a time
            if (ActiveInstance != null)
            {
                throw new Exception("Cannot start a new worker while a managed process is already active");
            }

            switch (Type)
            {
                case SequenceProbeType.Optimize:
                    // Fold in any dependencies discovered since the last run
                    UpdateDependencies(Log);

                    // Persist the task state, then create a thread that searches for the next dependency
                    Worker.Serialize(TaskStateFile.FullName);
                    ActiveInstance = new ManagedTaskThread(Writer => FindNextDependency(TaskStateFile.FullName, Writer));
                    break;

                case SequenceProbeType.Verify:
                    // Persist the task state, then create a thread that verifies it
                    Worker.Serialize(TaskStateFile.FullName);
                    ActiveInstance = new ManagedTaskThread(Writer => Verify(TaskStateFile.FullName, Writer));
                    break;

                default:
                    throw new NotImplementedException();
            }

            ActiveInstance.Start();
        }
Example #2
0
        /// <summary>
        /// Runs the FindDependencyTask with the given state file
        /// </summary>
        /// <param name="StateFile">Path to the state file</param>
        /// <param name="Writer">Writer for any messages</param>
        /// <returns>Zero on success</returns>
        static int FindNextDependency(string StateFile, LineBasedTextWriter Writer)
        {
            // Restore the serialized worker, advance it by one dependency-search step,
            // and write the updated state next to the input file
            SequenceWorker RestoredWorker = SequenceWorker.Deserialize(StateFile);
            RestoredWorker.FindNextDependency(Writer);
            RestoredWorker.Serialize(StateFile + ".out");
            return 0;
        }
Example #3
0
        /// <summary>
        /// Update the list of known dependencies to include dependencies of our already known dependencies.
        /// Warns (once per fragment) about dependencies which are not available to this fragment.
        /// </summary>
        /// <param name="Log">Writer for warning messages</param>
        void UpdateDependencies(LineBasedTextWriter Log)
        {
            // Update the task to account for any new dependencies that may have been found. Loop back through all the fragments so we include
            // dependencies we find along the way.
            for (int FragmentIdx = Worker.FragmentCount - 1; FragmentIdx > 0; FragmentIdx--)
            {
                SourceFragment Fragment = Fragments[FragmentIdx];
                if (Worker.KnownDependencies.Contains(FragmentIdx) && Fragment.Dependencies != null)
                {
                    // Include the dependencies of this fragment in our dependencies list
                    List <SourceFragment> MissingDependencies = new List <SourceFragment>();
                    foreach (SourceFragment Dependency in Fragment.Dependencies)
                    {
                        int DependencyIdx = Array.IndexOf(Fragments, Dependency);
                        if (DependencyIdx == -1 || DependencyIdx > FragmentIdx)
                        {
                            // Dependency is not part of this sequence, or appears after the fragment that needs it
                            MissingDependencies.Add(Dependency);
                        }
                        else
                        {
                            Worker.KnownDependencies.Add(DependencyIdx);
                        }
                    }

                    // If any were missing (because they were calculated by a different derivation), provide a useful diagnostic. Only emit the
                    // warning once for any given set of missing dependencies.
                    if (MissingDependencies.Count > 0 && MissingDependencies.Any(x => !WarnedMissingDependencies.Contains(x)))
                    {
                        Log.WriteLine("warning: could not find dependencies of '{0}':", Fragment.Location.FullName);
                        foreach (SourceFragment MissingDependency in MissingDependencies)
                        {
                            Log.WriteLine("    {0}", MissingDependency.Location);
                        }
                        Log.WriteLine("In fragments for '{0}':", LastFragment.Location);
                        for (int Idx = 0; Idx < Worker.RemainingFragmentCount; Idx++)
                        {
                            Log.WriteLine("    {0,3}: {1}", Idx, Fragments[Idx].Location.FullName);
                        }
                        // Fragments past RemainingFragmentCount are shown parenthesized to mark them as already eliminated
                        for (int Idx = Worker.RemainingFragmentCount; Idx < Fragments.Length; Idx++)
                        {
                            Log.WriteLine("    {0,3}: ({1})", Idx, Fragments[Idx].Location.FullName);
                        }
                        WarnedMissingDependencies.UnionWith(MissingDependencies);
                    }
                }
            }
        }
Example #4
0
 /// <summary>
 /// Prints all the symbols with conflicting definitions
 /// </summary>
 /// <param name="Log">Writer for log messages</param>
 public void PrintConflicts(LineBasedTextWriter Log)
 {
     foreach (string SymbolName in Lookup.Keys)
     {
         IReadOnlyCollection <Symbol> Symbols = Lookup[SymbolName];
         if (Symbols.Count > 0)
         {
             // Count the distinct non-macro symbol types registered under this name; more than one indicates a conflict
             int NumTypes = Symbols.Select(x => x.Type).Where(x => x != SymbolType.Macro).Distinct().Count();
             if (NumTypes > 1)
             {
                 Log.WriteLine("warning: conflicting declarations of '{0}':", SymbolName);
                 foreach (Symbol Symbol in Symbols)
                 {
                     Log.WriteLine("  {0} in {1}", Symbol.Type, Symbol.Fragment);
                 }
             }
         }
     }
 }
        /// <summary>
        /// Generates a report listing the most frequently referenced opaque symbols in each module
        /// </summary>
        /// <param name="ReportFileLocation">Output file for the report</param>
        /// <param name="InputDir">Input directory for the files being reported on</param>
        /// <param name="PreprocessedFiles">The files to include in the report</param>
        /// <param name="Log">Writer for log output</param>
        public static void Generate(FileReference ReportFileLocation, DirectoryReference InputDir, HashSet <SourceFile> PreprocessedFiles, LineBasedTextWriter Log)
        {
            Log.WriteLine("Writing {0}...", ReportFileLocation.FullName);

            // Count, per symbol, how many files reference it at all and how many reference it only opaquely
            Dictionary<Symbol, int> RefCounts = new Dictionary<Symbol, int>();
            Dictionary<Symbol, int> OpaqueRefCounts = new Dictionary<Symbol, int>();

            foreach (SourceFile PreprocessedFile in PreprocessedFiles)
            {
                if (PreprocessedFile.Fragments == null)
                {
                    continue;
                }

                // Gather every symbol this file references, noting which have at least one non-opaque reference
                HashSet<Symbol> ReferencedSymbols = new HashSet<Symbol>();
                HashSet<Symbol> NonOpaqueSymbols = new HashSet<Symbol>();
                foreach (SourceFragment Fragment in PreprocessedFile.Fragments)
                {
                    foreach (KeyValuePair<Symbol, SymbolReferenceType> Pair in Fragment.ReferencedSymbols)
                    {
                        ReferencedSymbols.Add(Pair.Key);
                        if (Pair.Value != SymbolReferenceType.Opaque)
                        {
                            NonOpaqueSymbols.Add(Pair.Key);
                        }
                    }
                }

                // Fold this file's references into the totals; the opaque count only grows
                // when the file never references the symbol non-opaquely
                foreach (Symbol ReferencedSymbol in ReferencedSymbols)
                {
                    int Count;
                    RefCounts.TryGetValue(ReferencedSymbol, out Count);
                    RefCounts[ReferencedSymbol] = Count + 1;

                    int OpaqueCount;
                    OpaqueRefCounts.TryGetValue(ReferencedSymbol, out OpaqueCount);
                    OpaqueRefCounts[ReferencedSymbol] = NonOpaqueSymbols.Contains(ReferencedSymbol) ? OpaqueCount : OpaqueCount + 1;
                }
            }

            // Group the referenced symbols by the module that declares them
            MultiValueDictionary<BuildModule, Symbol> ModuleToSymbols = new MultiValueDictionary<BuildModule, Symbol>();
            foreach (Symbol ReferencedSymbol in RefCounts.Keys)
            {
                SourceFile DeclaringFile = ReferencedSymbol.Fragment.File;
                if (DeclaringFile.Module != null)
                {
                    ModuleToSymbols.Add(DeclaringFile.Module, ReferencedSymbol);
                }
            }

            // Write out a CSV report containing the list of symbols and number of files referencing them
            using (StreamWriter Writer = new StreamWriter(ReportFileLocation.FullName))
            {
                Writer.WriteLine("Module,Symbol,Fwd,RefCount,OpaqueRefCount");
                foreach (BuildModule Module in ModuleToSymbols.Keys)
                {
                    foreach (Symbol Symbol in ModuleToSymbols[Module].OrderByDescending(x => OpaqueRefCounts[x]))
                    {
                        Writer.WriteLine("{0},{1},{2},{3},{4}", Module.Name, Symbol.Name, Symbol.ForwardDeclaration, RefCounts[Symbol], OpaqueRefCounts[Symbol]);
                    }
                }
            }
        }
Example #6
0
        /// <summary>
        /// Format an include of the given file
        /// </summary>
        /// <param name="FromDirectory">The directory containing the file with the #include directive</param>
        /// <param name="IncludeFile">File to include</param>
        /// <param name="IncludePaths">Directories to base relative include paths from</param>
        /// <param name="SystemIncludePaths">Directories to base system include paths from</param>
        /// <param name="Log">Writer for warnings when no relative path can be produced</param>
        /// <returns>Formatted include path, with surrounding quotes</returns>
        public static string FormatInclude(DirectoryReference FromDirectory, FileReference IncludeFile, IEnumerable <DirectoryReference> IncludePaths, IEnumerable <DirectoryReference> SystemIncludePaths, LineBasedTextWriter Log)
        {
            // Try to express the include relative to one of the given search paths
            string IncludeText;
            if (TryFormatInclude(FromDirectory, IncludeFile, IncludePaths, SystemIncludePaths, out IncludeText))
            {
                return IncludeText;
            }

            // Fall back to a system-style include of the bare filename
            Log.WriteLine("warning: cannot create relative path for {0}; assuming <{1}>", IncludeFile.FullName, IncludeFile.GetFileName());
            return "<" + IncludeFile.GetFileName() + ">";
        }
        /// <summary>
        /// Generate a report showing the number of preprocessed lines in the selected files
        /// </summary>
        /// <param name="ReportFileLocation">Output file for the report</param>
        /// <param name="Files">The files to include in the report</param>
        /// <param name="Log">Writer for log output</param>
        public static void Generate(FileReference ReportFileLocation, IEnumerable <SourceFile> Files, LineBasedTextWriter Log)
        {
            Log.WriteLine("Writing {0}...", ReportFileLocation.FullName);

            // Expand the input set to every file transitively included by it
            SourceFile[] AllFiles = Files.SelectMany(x => FindIncludedFiles(x)).Distinct().ToArray();

            // Accumulate per-file complexity data: preprocessed line counts and which files include each file
            Dictionary<SourceFile, ComplexityData> FileToReportData = new Dictionary<SourceFile, ComplexityData>();
            foreach (SourceFile File in AllFiles)
            {
                // Create the complexity data for this file; it must not have been counted yet
                ComplexityData ReportData = FindOrAddComplexityData(File, FileToReportData);
                Debug.Assert(ReportData.NumPreprocessedLines == 0);

                // Sum the line counts of everything this file pulls in, and record the reverse (indirect) include edges
                foreach (SourceFile IncludedFile in FindIncludedFiles(File))
                {
                    ComplexityData IncludedData = FindOrAddComplexityData(IncludedFile, FileToReportData);
                    IncludedData.IncludedBy.Add(File);
                    ReportData.NumPreprocessedLines += IncludedFile.Text.Lines.Length;
                }

                // Record the direct include edges from this file's active #include markup
                foreach (PreprocessorMarkup Markup in File.Markup)
                {
                    if (Markup.Type == PreprocessorMarkupType.Include && Markup.IsActive)
                    {
                        foreach (SourceFile IncludedFile in Markup.OutputIncludedFiles)
                        {
                            FindOrAddComplexityData(IncludedFile, FileToReportData).DirectlyIncludedBy.Add(File);
                        }
                    }
                }
            }

            // Write out a CSV report containing the list of files and their line counts, largest first
            using (StreamWriter Writer = new StreamWriter(ReportFileLocation.FullName))
            {
                Writer.WriteLine("File,Line Count,Num Indirect Includes,Num Direct Includes,Direct Includes");
                foreach (KeyValuePair<SourceFile, ComplexityData> Pair in FileToReportData.OrderByDescending(x => x.Value.NumPreprocessedLines))
                {
                    string IncludedByList = String.Join(", ", Pair.Value.DirectlyIncludedBy.Select(x => GetDisplayName(x)).OrderBy(x => x));
                    Writer.WriteLine("{0},{1},{2},{3},\"{4}\"", GetDisplayName(Pair.Key), Pair.Value.NumPreprocessedLines, Pair.Value.IncludedBy.Count, Pair.Value.DirectlyIncludedBy.Count, IncludedByList);
                }
            }
        }
Example #8
0
        /// <summary>
        /// Creates an optimized output file
        /// </summary>
        /// <param name="InputFile">The input file that this output file corresponds to</param>
        /// <param name="HeaderFile">The corresponding header file</param>
        /// <param name="PreviousFiles">List of files parsed before this one</param>
        /// <param name="Includes">The active set of includes parsed for this file</param>
        /// <param name="InputFileStack">The active include stack</param>
        /// <param name="FwdSymbolToHeader">Map from forward-declared symbol to the output header which declares it</param>
        /// <param name="bMakeStandalone">Whether to make this output file standalone</param>
        /// <param name="Log">Writer for log messages</param>
        /// <returns>The optimized output file</returns>
        public static OutputFile CreateOptimizedOutputFile(SourceFile InputFile, OutputFile HeaderFile, List <OutputFile> PreviousFiles, List <OutputFileInclude> Includes, List <SourceFile> InputFileStack, Dictionary <Symbol, OutputFile> FwdSymbolToHeader, bool bMakeStandalone, LineBasedTextWriter Log)
        {
            Debug.Assert(HeaderFile == null || (InputFile.Flags & SourceFileFlags.TranslationUnit) != 0);

            // Write the state
            InputFile.LogVerbose("InputFile={0}", InputFile.Location.FullName);
            InputFile.LogVerbose("InputFile.Flags={0}", InputFile.Flags.ToString());
            if (HeaderFile != null)
            {
                InputFile.LogVerbose("HeaderFile={0}", HeaderFile.InputFile.Location.FullName);
                InputFile.LogVerbose("HeaderFile.Flags={0}", HeaderFile.InputFile.Flags.ToString());
            }
            InputFile.LogVerbose("");
            for (int Idx = 0; Idx < InputFileStack.Count; Idx++)
            {
                InputFile.LogVerbose("InputFileStack[{0}]={1}", Idx, InputFileStack[Idx].Location.FullName);
            }
            InputFile.LogVerbose("");
            for (int Idx = 0; Idx < PreviousFiles.Count; Idx++)
            {
                InputFile.LogVerbose("PreviousFiles[{0}]={1}", Idx, PreviousFiles[Idx].InputFile.Location.FullName);
            }
            InputFile.LogVerbose("");
            InputFile.LogVerbose("");
            for (int Idx = 0; Idx < Includes.Count; Idx++)
            {
                OutputFileInclude Include = Includes[Idx];
                InputFile.LogVerbose("Includes[{0}]={1}", Idx, Includes[Idx].TargetFile.InputFile.Location.FullName);
                foreach (SourceFragment Fragment in Include.FinalFiles.SelectMany(x => x.IncludedFragments))
                {
                    InputFile.LogVerbose("Includes[{0}].FinalFiles.IncludedFragments={1}", Idx, Fragment);
                }
            }

            // Traverse through all the included headers, figuring out the first unique include for each file and fragment
            HashSet <OutputFile>     VisitedFiles     = new HashSet <OutputFile>();
            HashSet <SourceFragment> VisitedFragments = new HashSet <SourceFragment>();

            // Go through the standalone headers first
            OutputFile MonolithicHeader = null;

            if (HeaderFile == null && (InputFile.Flags & SourceFileFlags.Standalone) != 0 && (InputFile.Flags & SourceFileFlags.External) == 0 && (InputFile.Flags & SourceFileFlags.Aggregate) == 0)
            {
                // Insert a dummy include to receive all the inserted headers
                OutputFileInclude ImplicitInclude = new OutputFileInclude(-1, null);
                ImplicitInclude.ExpandedReferences = new List <OutputFileReference>();
                Includes.Insert(0, ImplicitInclude);

                // Determine which monolithic header to use
                IEnumerable <OutputFile> PotentialMonolithicHeaders = PreviousFiles.Union(Includes.Select(x => x.TargetFile).Where(x => x != null).SelectMany(x => x.IncludedFiles));
                if (InputFile.Module != null && InputFile.Module.PublicDependencyModules.Union(InputFile.Module.PrivateDependencyModules).Any(x => x.Name == "Core"))
                {
                    MonolithicHeader = PotentialMonolithicHeaders.FirstOrDefault(x => (x.InputFile.Flags & SourceFileFlags.IsCoreMinimal) != 0);
                }
                else
                {
                    MonolithicHeader = PotentialMonolithicHeaders.FirstOrDefault(x => (x.InputFile.Flags & SourceFileFlags.IsCoreTypes) != 0);
                }

                // Update the dependencies to treat all the contents of a monolithic header as pinned
                if (MonolithicHeader != null)
                {
                    SourceFragment[] UniqueFragments = MonolithicHeader.IncludedFragments.Except(VisitedFragments).ToArray();
                    ImplicitInclude.ExpandedReferences.Add(new OutputFileReference(MonolithicHeader, UniqueFragments));
                    VisitedFragments.UnionWith(UniqueFragments);
                    VisitedFiles.Add(MonolithicHeader);
                }

                // Insert all the forward declaration headers, but only treat them as supplying the forward declarations themselves. They may happen to include
                // some utility classes (eg. TSharedPtr), and we don't want to include an unrelated header to satisfy that dependency.
                foreach (OutputFile FwdHeader in FwdSymbolToHeader.Values)
                {
                    FindExpandedReferences(FwdHeader, ImplicitInclude.ExpandedReferences, VisitedFiles, VisitedFragments, true);
                }

                // Add all the other files
                if (bMakeStandalone)
                {
                    foreach (OutputFile PreviousFile in PreviousFiles)
                    {
                        if ((InputFile.Flags & SourceFileFlags.Standalone) != 0 && (PreviousFile.InputFile.Flags & SourceFileFlags.Inline) == 0 && (PreviousFile.InputFile.Flags & SourceFileFlags.Pinned) == 0 && VisitedFiles.Add(PreviousFile))
                        {
                            SourceFragment[] UniqueFragments = PreviousFile.IncludedFragments.Except(VisitedFragments).ToArray();
                            ImplicitInclude.ExpandedReferences.Add(new OutputFileReference(PreviousFile, UniqueFragments));
                            VisitedFragments.UnionWith(UniqueFragments);
                        }
                    }
                }
            }

            // Figure out a list of files which are uniquely reachable through each include. Force an include of the matching header as the first thing.
            OutputFileReference ForcedHeaderFileReference = null;

            foreach (OutputFileInclude Include in Includes)
            {
                if (Include.ExpandedReferences == null)
                {
                    Include.ExpandedReferences = new List <OutputFileReference>();
                    if (Include == Includes[0] && HeaderFile != null)
                    {
                        ForcedHeaderFileReference = new OutputFileReference(HeaderFile, HeaderFile.IncludedFragments);
                        Include.ExpandedReferences.Add(ForcedHeaderFileReference);
                        VisitedFragments.UnionWith(HeaderFile.IncludedFragments);
                    }
                    FindExpandedReferences(Include.TargetFile, Include.ExpandedReferences, VisitedFiles, VisitedFragments, true);
                }
            }

            // Find all the symbols which are referenced by this file
            HashSet <SourceFragment> FragmentsWithReferencedSymbols = new HashSet <SourceFragment>();

            foreach (SourceFragment Fragment in InputFile.Fragments)
            {
                foreach (KeyValuePair <Symbol, SymbolReferenceType> ReferencedSymbol in Fragment.ReferencedSymbols)
                {
                    if (ReferencedSymbol.Value == SymbolReferenceType.RequiresDefinition)
                    {
                        FragmentsWithReferencedSymbols.Add(ReferencedSymbol.Key.Fragment);
                    }
                }
            }

            // Aggregate headers are designed to explicitly include headers from the current module. Expand out a list of them, so they can be included when encountered.
            HashSet <OutputFile> ExplicitIncludes = new HashSet <OutputFile>();

            if ((InputFile.Flags & SourceFileFlags.Aggregate) != 0)
            {
                foreach (OutputFileInclude Include in Includes)
                {
                    ExplicitIncludes.UnionWith(Include.ExpandedReferences.Where(x => x.File.InputFile.Location.IsUnderDirectory(InputFile.Location.Directory)).Select(x => x.File));
                }
                foreach (OutputFileInclude Include in Includes)
                {
                    ExplicitIncludes.Remove(Include.TargetFile);
                }
            }

            // Create the list of remaining dependencies for this file, and add any forward declarations
            HashSet <SourceFragment> Dependencies        = new HashSet <SourceFragment>();
            List <Symbol>            ForwardDeclarations = new List <Symbol>();

            AddForwardDeclarations(InputFile, ForwardDeclarations, Dependencies, FwdSymbolToHeader);

            // Reduce the list of includes to those that are required.
            for (int FragmentIdx = InputFile.Fragments.Length - 1, IncludeIdx = Includes.Count - 1; FragmentIdx >= 0; FragmentIdx--)
            {
                // Update the dependency lists for this fragment
                SourceFragment InputFragment = InputFile.Fragments[FragmentIdx];
                if (InputFragment.Dependencies != null)
                {
                    Dependencies.UnionWith(InputFragment.Dependencies);
                }
                Dependencies.Remove(InputFragment);

                // Scan backwards through the list of includes, expanding each include to those which are required
                int MarkupMin = (FragmentIdx == 0)? -1 : InputFragment.MarkupMin;
                for (; IncludeIdx >= 0 && Includes[IncludeIdx].MarkupIdx >= MarkupMin; IncludeIdx--)
                {
                    OutputFileInclude Include = Includes[IncludeIdx];

                    // Always include the same header for aggregates
                    if ((InputFile.Flags & SourceFileFlags.Aggregate) != 0)
                    {
                        Include.FinalFiles.Insert(0, Include.TargetFile);
                        Dependencies.ExceptWith(Include.TargetFile.IncludedFragments);
                        Dependencies.UnionWith(Include.TargetFile.Dependencies);
                    }

                    // Include any indirectly included files
                    for (int Idx = Include.ExpandedReferences.Count - 1; Idx >= 0; Idx--)
                    {
                        // Make sure we haven't already added it above
                        OutputFileReference Reference = Include.ExpandedReferences[Idx];
                        if (!Include.FinalFiles.Contains(Reference.File))
                        {
                            if (Dependencies.Any(x => Reference.UniqueFragments.Contains(x)) ||
                                (Reference.File.InputFile.Flags & SourceFileFlags.Pinned) != 0 ||
                                Reference == ForcedHeaderFileReference ||
                                Reference.File == MonolithicHeader ||
                                ExplicitIncludes.Contains(Reference.File) ||
                                ((InputFile.Flags & SourceFileFlags.Aggregate) != 0 && Reference.File == Include.TargetFile) ||                                 // Always include the original header for aggregates. They are written explicitly to include certain files.
                                Reference.UniqueFragments.Any(x => FragmentsWithReferencedSymbols.Contains(x)))
                            {
                                Include.FinalFiles.Insert(0, Reference.File);
                                Dependencies.ExceptWith(Reference.File.IncludedFragments);
                                Dependencies.UnionWith(Reference.File.Dependencies);
                            }
                        }
                    }
                }
            }

            // Remove any includes that are already included by the matching header
            if (HeaderFile != null)
            {
                HashSet <OutputFile> HeaderIncludedFiles = new HashSet <OutputFile>(HeaderFile.Includes.SelectMany(x => x.FinalFiles));
                foreach (OutputFileInclude Include in Includes)
                {
                    Include.FinalFiles.RemoveAll(x => HeaderIncludedFiles.Contains(x));
                }
            }

            // Check that all the dependencies have been satisfied
            if (Dependencies.Count > 0)
            {
                // Find those which are completely invalid
                List <SourceFragment> InvalidDependencies = Dependencies.Where(x => !InputFileStack.Contains(x.File)).ToList();
                if (InvalidDependencies.Count > 0)
                {
                    Log.WriteLine("warning: {0} does not include {1}{2}; may have missing dependencies.", InputFile, String.Join(", ", InvalidDependencies.Select(x => x.Location.FullName).Take(3)), (InvalidDependencies.Count > 3)? String.Format(" and {0} others", InvalidDependencies.Count - 3) : "");
                }
                Dependencies.ExceptWith(InvalidDependencies);

                // Otherwise warn about those which were not pinned
                foreach (SourceFile DependencyFile in Dependencies.Select(x => x.File))
                {
                    Log.WriteLine("warning: {0} is included by {1} ({2}), but depends on it and should be marked as pinned.", InputFile, DependencyFile, String.Join(" -> ", InputFileStack.SkipWhile(x => x != DependencyFile).Select(x => x.Location.GetFileName())));
                }

                // Mark it as non-standalone and pinned
                InputFile.Flags = (InputFile.Flags | SourceFileFlags.Pinned) & ~SourceFileFlags.Standalone;
            }

            // Do one more forward pass through all the headers, and remove anything that's included more than once. That can happen if we have a referenced symbol as well as
            // an explicit include, for example.
            HashSet <OutputFile> FinalIncludes = new HashSet <OutputFile>();

            foreach (OutputFileInclude Include in Includes)
            {
                for (int Idx = 0; Idx < Include.FinalFiles.Count; Idx++)
                {
                    if (!FinalIncludes.Add(Include.FinalFiles[Idx]))
                    {
                        Include.FinalFiles.RemoveAt(Idx);
                        Idx--;
                    }
                }
            }

            // Create the optimized file
            OutputFile OptimizedFile = new OutputFile(InputFile, Includes, Dependencies, ForwardDeclarations);

            // Write the verbose log
            InputFile.LogVerbose("");
            foreach (OutputFile IncludedFile in OptimizedFile.IncludedFiles)
            {
                InputFile.LogVerbose("Output: {0}", IncludedFile.InputFile.Location.FullName);
            }

            // Return the optimized file
            return(OptimizedFile);
        }
Example #9
0
        /// <summary>
        /// Create optimized output files from the given input files, and attach the results to the source files
        /// </summary>
        /// <param name="CppFiles">Input files to optimize</param>
        /// <param name="CppFileToHeaderFile">Map from each translation unit to its matching header file</param>
        /// <param name="FwdSymbolToInputHeader">Map from forward-declared symbol to the input header declaring it</param>
        /// <param name="bMakeStandalone">Whether to make the output files standalone</param>
        /// <param name="bUseOriginalIncludes">Whether to keep the original include directives</param>
        /// <param name="Log">Writer for log messages</param>
        public static void PrepareFilesForOutput(IEnumerable <SourceFile> CppFiles, Dictionary <SourceFile, SourceFile> CppFileToHeaderFile, Dictionary <Symbol, SourceFile> FwdSymbolToInputHeader, bool bMakeStandalone, bool bUseOriginalIncludes, LineBasedTextWriter Log)
        {
            // Cache of all the created output files
            Dictionary<SourceFile, OutputFile> OutputFileLookup = new Dictionary<SourceFile, OutputFile>();

            // Build an output file for each forward-declaration header first, so later files can reference them
            Dictionary<Symbol, OutputFile> FwdSymbolToHeader = new Dictionary<Symbol, OutputFile>();
            foreach (KeyValuePair<Symbol, SourceFile> Pair in FwdSymbolToInputHeader)
            {
                List<SourceFile> FileStack = new List<SourceFile>();
                FileStack.Add(Pair.Value);

                HashList<OutputFile> Included = new HashList<OutputFile>();
                FwdSymbolToHeader[Pair.Key] = FindOrCreateOutputFile(FileStack, CppFileToHeaderFile, Included, OutputFileLookup, FwdSymbolToHeader, bMakeStandalone, bUseOriginalIncludes, Log);
            }

            // Create all the placeholder output files for the translation units
            foreach (SourceFile CppFile in CppFiles)
            {
                List<SourceFile> FileStack = new List<SourceFile>();
                FileStack.Add(CppFile);

                HashList<OutputFile> Included = new HashList<OutputFile>();
                FindOrCreateOutputFile(FileStack, CppFileToHeaderFile, Included, OutputFileLookup, FwdSymbolToHeader, bMakeStandalone, bUseOriginalIncludes, Log);
            }

            // Push the computed includes and forward declarations back onto each source file
            foreach (SourceFile File in OutputFileLookup.Keys)
            {
                OutputFile Output = OutputFileLookup[File];
                foreach (OutputFileInclude Include in Output.Includes)
                {
                    if (Include.MarkupIdx < 0)
                    {
                        // Synthetic include with no matching markup; record its files as missing includes
                        File.MissingIncludes.AddRange(Include.FinalFiles.Select(x => x.InputFile));
                    }
                    else
                    {
                        File.Markup[Include.MarkupIdx].OutputIncludedFiles = Include.FinalFiles.Select(x => x.InputFile).ToList();
                    }
                }
                foreach (Symbol Symbol in Output.ForwardDeclarations)
                {
                    if (!String.IsNullOrEmpty(Symbol.ForwardDeclaration))
                    {
                        File.ForwardDeclarations.Add(Symbol.ForwardDeclaration);
                    }
                }
            }
        }
Example #10
0
        /// <summary>
        /// Find a mapping from PCH to the most included files by the files using it
        /// </summary>
        /// <param name="Target">The target being built; used to find the module that owns each PCH file</param>
        /// <param name="SourceFileToCompileEnvironment">Files being compiled</param>
        /// <param name="FileToPchInfo">Receives a mapping from PCH file to information about its usage</param>
        /// <param name="Log">Writer for log messages</param>
        static void FindPchInfo(BuildTarget Target, Dictionary <SourceFile, CompileEnvironment> SourceFileToCompileEnvironment, Dictionary <FileReference, PchInfo> FileToPchInfo, LineBasedTextWriter Log)
        {
            // Create a map of module to the shared PCH it uses (caches the per-module resolution below)
            Dictionary <BuildModule, FileReference> ModuleToPch = new Dictionary <BuildModule, FileReference>();

            // Recurse through all the includes for each source file
            // NOTE(review): UsingPchCount is never read or written in this method; looks like dead code
            Dictionary <FileReference, int> UsingPchCount = new Dictionary <FileReference, int>();

            foreach (KeyValuePair <SourceFile, CompileEnvironment> Pair in SourceFileToCompileEnvironment)
            {
                // Figure out which module it's in
                BuildModule Module = Pair.Key.Module;

                // Determine which PCH it's using; resolved once per module, then cached in ModuleToPch
                FileReference UsingPch;
                if (!ModuleToPch.TryGetValue(Module, out UsingPch))
                {
                    if (Module.PrivatePCH != null)
                    {
                        // A private PCH takes precedence over any shared PCH
                        UsingPch = Module.PrivatePCH;
                    }
                    else if (Module.PCHUsage == BuildModulePCHUsage.UseExplicitOrSharedPCHs || Module.PCHUsage == BuildModulePCHUsage.UseSharedPCHs || Module.PCHUsage == BuildModulePCHUsage.Default)
                    {
                        // Gather candidate modules that provide a shared PCH, then remove any candidate
                        // which another candidate already depends on, keeping only the "deepest" options
                        HashSet <BuildModule> PossibleModules = new HashSet <BuildModule>(Module.NonCircularDependencies.Where(x => x.SharedPCH != null));
                        foreach (BuildModule PossibleModule in PossibleModules.ToArray())
                        {
                            PossibleModules.ExceptWith(PossibleModule.NonCircularDependencies);
                        }
                        if (PossibleModules.Count == 0)
                        {
                            Log.WriteLine("warning: No valid PCH found for {0}", Module);
                        }
                        else if (PossibleModules.Count == 1)
                        {
                            UsingPch = PossibleModules.First().SharedPCH;
                        }
                        else
                        {
                            // Ambiguous; UsingPch stays null and the module is skipped below
                            Log.WriteLine("warning: Multiple valid PCHs for {0}: {1}", Module, String.Join(",", PossibleModules.Select(x => x.Name)));
                        }
                    }
                    else
                    {
                        Log.WriteLine("warning: Unknown PCH for {0}", Module);
                    }
                    // Cache the result, even when null, so the warnings above are only emitted once per module
                    ModuleToPch[Module] = UsingPch;
                }

                // Make sure we're using a PCH
                if (UsingPch != null)
                {
                    // Get the info for this PCH, creating it on first use
                    PchInfo Info;
                    if (!FileToPchInfo.TryGetValue(UsingPch, out Info))
                    {
                        Info = new PchInfo(UsingPch);

                        // Seed with the module containing the PCH file, then expand with non-circular public dependencies.
                        // NOTE(review): the collection is indexed while also supporting UnionWith and grows while being
                        // iterated — presumably an insertion-ordered hash list; confirm before restructuring this loop.
                        Info.PublicIncludePathModules.Add(Target.Modules.First(x => UsingPch.IsUnderDirectory(x.Directory)));
                        for (int Idx = 0; Idx < Info.PublicIncludePathModules.Count; Idx++)
                        {
                            BuildModule NextModule = Info.PublicIncludePathModules[Idx];
                            Info.PublicIncludePathModules.UnionWith(NextModule.PublicDependencyModules.Except(NextModule.CircularlyReferencedModules));
                        }

                        FileToPchInfo.Add(UsingPch, Info);
                    }

                    // Increment the number of files using this PCH
                    Info.SourceFiles.Add(Pair.Key.Location);

                    // Find all the included files
                    HashSet <SourceFile> IncludedFiles = new HashSet <SourceFile>();
                    FindIncludedFiles(Pair.Key, IncludedFiles);

                    // Update the counts for each one (TryGetValue leaves IncludeCount at 0 when the key is absent)
                    foreach (SourceFile IncludedFile in IncludedFiles)
                    {
                        int IncludeCount;
                        Info.IncludedFiles.TryGetValue(IncludedFile, out IncludeCount);
                        Info.IncludedFiles[IncludedFile] = IncludeCount + 1;
                    }
                }
            }
        }
Example #11
0
 /// <summary>
 /// Registers a forward declaration of the named symbol against the given header file
 /// </summary>
 /// <param name="HeaderFile">Header file containing the forward declaration</param>
 /// <param name="SymbolName">Name of the symbol being forward-declared</param>
 /// <param name="Type">Type of symbol the declaration must match</param>
 /// <param name="SymbolToHeader">Map from symbol to the header file that forward-declares it</param>
 /// <param name="Log">Writer for warnings</param>
 void AddForwardDeclaration(SourceFile HeaderFile, string SymbolName, SymbolType Type, Dictionary <Symbol, SourceFile> SymbolToHeader, LineBasedTextWriter Log)
 {
     foreach (Symbol Candidate in Lookup.WithKey(SymbolName))
     {
         // Only symbols of the matching type are affected
         if (Candidate.Type != Type)
         {
             continue;
         }

         // Warn if a different header already forward-declares this symbol; otherwise record this one
         SourceFile PreviousFile;
         bool bAlreadyDeclared = SymbolToHeader.TryGetValue(Candidate, out PreviousFile);
         if (bAlreadyDeclared && PreviousFile != HeaderFile)
         {
             Log.WriteLine("warning: Symbol '{0}' was forward declared in '{1}' and '{2}'", Candidate.Name, HeaderFile.Location.GetFileName(), PreviousFile.Location.GetFileName());
         }
         else
         {
             SymbolToHeader[Candidate] = HeaderFile;
         }
     }
 }
Example #12
0
        /// <summary>
        /// Attempts to parse a forward declaration of a template class or struct (e.g. "template&lt;...&gt; class X;")
        /// </summary>
        /// <param name="OriginalReader">The current token reader; only advanced when a declaration is parsed</param>
        /// <param name="HeaderFile">The file being read</param>
        /// <param name="SymbolToHeader">Map of symbol to the file containing it</param>
        /// <param name="Log">Writer for warnings and errors</param>
        /// <returns>True if a forward declaration was read, false otherwise</returns>
        bool ReadTemplateClassOrStructForwardDeclaration(TokenReader OriginalReader, SourceFile HeaderFile, Dictionary <Symbol, SourceFile> SymbolToHeader, LineBasedTextWriter Log)
        {
            // Only consider sequences starting with the 'template' keyword
            if (OriginalReader.Current.Text != "template")
            {
                return false;
            }

            // Parse ahead on a copy of the reader, committing only on success
            TokenReader Reader = new TokenReader(OriginalReader);
            if (!Reader.MoveNext(TokenReaderContext.IgnoreNewlines))
            {
                return false;
            }

            // Skip over the template argument list
            bool bMoveNext = true;
            if (!SkipTemplateArguments(Reader, ref bMoveNext) || !bMoveNext)
            {
                return false;
            }

            // Map the following keyword to a symbol type
            SymbolType Type;
            switch (Reader.Current.Text)
            {
                case "class":
                    Type = SymbolType.TemplateClass;
                    break;
                case "struct":
                    Type = SymbolType.TemplateStruct;
                    break;
                default:
                    return false;
            }

            // Expect an identifier naming the type
            if (!Reader.MoveNext(TokenReaderContext.IgnoreNewlines) || Reader.Current.Type != TokenType.Identifier)
            {
                return false;
            }
            Token NameToken = Reader.Current;

            // A forward declaration must end immediately with a semicolon
            if (!Reader.MoveNext(TokenReaderContext.IgnoreNewlines) || Reader.Current.Text != ";")
            {
                return false;
            }

            // Advance the caller's reader past the declaration and record it
            OriginalReader.Set(Reader);
            AddForwardDeclaration(HeaderFile, NameToken.Text, Type, SymbolToHeader, Log);
            return true;
        }
Example #13
0
        /// <summary>
        /// Attempts to parse a forward declaration of an enum or enum class
        /// </summary>
        /// <param name="OriginalReader">The current token reader; only advanced when a declaration is parsed</param>
        /// <param name="HeaderFile">The file being read</param>
        /// <param name="SymbolToHeader">Map of symbol to the file containing it</param>
        /// <param name="Log">Writer for warnings and errors</param>
        /// <returns>True if a forward declaration was read, false otherwise</returns>
        bool ReadEnumClassForwardDeclaration(TokenReader OriginalReader, SourceFile HeaderFile, Dictionary <Symbol, SourceFile> SymbolToHeader, LineBasedTextWriter Log)
        {
            // Only consider sequences starting with the 'enum' keyword
            if (OriginalReader.Current.Text != "enum")
            {
                return false;
            }

            // Parse ahead on a copy of the reader, committing only on success
            TokenReader Reader = new TokenReader(OriginalReader);
            if (!Reader.MoveNext(TokenReaderContext.IgnoreNewlines))
            {
                return false;
            }

            // Skip the optional 'class' keyword
            if (Reader.Current.Text == "class" && !Reader.MoveNext(TokenReaderContext.IgnoreNewlines))
            {
                return false;
            }

            // Expect an identifier naming the enumeration
            if (Reader.Current.Type != TokenType.Identifier)
            {
                return false;
            }
            Token NameToken = Reader.Current;

            // Scan to the terminating semicolon; an opening brace means this is a definition, not a forward declaration
            while (true)
            {
                string Text = Reader.Current.Text;
                if (Text == ";")
                {
                    break;
                }
                if (Text == "{" || !Reader.MoveNext(TokenReaderContext.IgnoreNewlines))
                {
                    return false;
                }
            }

            // Record the declaration, then advance the caller's reader past it
            AddForwardDeclaration(HeaderFile, NameToken.Text, SymbolType.Enumeration, SymbolToHeader, Log);
            OriginalReader.Set(Reader);
            return true;
        }
Example #14
0
        /// <summary>
        /// Attempts to parse a plain class or struct forward declaration ("class X;" / "struct X;")
        /// </summary>
        /// <param name="OriginalReader">The current token reader; only advanced when a declaration is parsed</param>
        /// <param name="HeaderFile">The file being read</param>
        /// <param name="SymbolToHeader">Map of symbol to the file containing it</param>
        /// <param name="Log">Writer for warnings and errors</param>
        /// <returns>True if a forward declaration was read, false otherwise</returns>
        bool ReadClassOrStructForwardDeclaration(TokenReader OriginalReader, SourceFile HeaderFile, Dictionary <Symbol, SourceFile> SymbolToHeader, LineBasedTextWriter Log)
        {
            // Map the leading keyword to a symbol type
            SymbolType Type;
            switch (OriginalReader.Current.Text)
            {
                case "class":
                    Type = SymbolType.Class;
                    break;
                case "struct":
                    Type = SymbolType.Struct;
                    break;
                default:
                    return false;
            }

            // Parse ahead on a copy of the reader, committing only on success
            TokenReader Reader = new TokenReader(OriginalReader);

            // Expect an identifier naming the type
            if (!Reader.MoveNext(TokenReaderContext.IgnoreNewlines) || Reader.Current.Type != TokenType.Identifier)
            {
                return false;
            }
            Token NameToken = Reader.Current;

            // A forward declaration must end immediately with a semicolon
            if (!Reader.MoveNext(TokenReaderContext.IgnoreNewlines) || Reader.Current.Text != ";")
            {
                return false;
            }

            // Record the declaration, then advance the caller's reader past it
            AddForwardDeclaration(HeaderFile, NameToken.Text, Type, SymbolToHeader, Log);
            OriginalReader.Set(Reader);
            return true;
        }
Example #15
0
 /// <summary>
 /// Read all the forward declarations from a file
 /// </summary>
 /// <param name="HeaderFile">The file to read from</param>
 /// <param name="SymbolToHeader">Map of symbol to the file containing it</param>
 /// <param name="Log">Writer for warnings and errors</param>
 /// <returns>True if every token sequence parsed as a valid forward declaration, false otherwise</returns>
 public bool ReadForwardDeclarations(SourceFile HeaderFile, Dictionary <Symbol, SourceFile> SymbolToHeader, LineBasedTextWriter Log)
 {
     foreach (PreprocessorMarkup Markup in HeaderFile.Markup)
     {
         // Only plain text markup can contain forward declarations
         if (Markup.Type != PreprocessorMarkupType.Text)
         {
             continue;
         }

         // Try each parser in turn on every token sequence; fail if none of them recognizes it
         TokenReader Reader = new TokenReader(HeaderFile.Text, Markup.Location, Markup.EndLocation);
         while (Reader.MoveNext(TokenReaderContext.IgnoreNewlines))
         {
             bool bParsed = ReadClassOrStructForwardDeclaration(Reader, HeaderFile, SymbolToHeader, Log)
                 || ReadTemplateClassOrStructForwardDeclaration(Reader, HeaderFile, SymbolToHeader, Log)
                 || ReadEnumClassForwardDeclaration(Reader, HeaderFile, SymbolToHeader, Log)
                 || SkipAliasDeclaration(Reader);
             if (!bParsed)
             {
                 Log.WriteLine("{0}({1}): error: invalid forward declaration - '{2}'", HeaderFile.Location, Reader.CurrentLine + 1, HeaderFile.Text[Reader.CurrentLine]);
                 return false;
             }
         }
     }
     return true;
 }
Example #16
0
        /// <summary>
        /// Generate a report showing the number of preprocessed lines in the selected files
        /// </summary>
        /// <param name="ReportFileLocation">Output file for the report</param>
        /// <param name="InputDir">Base directory that file paths in the report are written relative to</param>
        /// <param name="Target">The target being built; passed through to FindPchInfo</param>
        /// <param name="SourceFileToCompileEnvironment">Files being compiled, with their compile environments</param>
        /// <param name="Log">Writer for log output</param>
        public static void Generate(FileReference ReportFileLocation, DirectoryReference InputDir, BuildTarget Target, Dictionary <SourceFile, CompileEnvironment> SourceFileToCompileEnvironment, LineBasedTextWriter Log)
        {
            Log.WriteLine("Writing {0}...", ReportFileLocation.FullName);

            // Create a map from source file to the number of times it's included
            Dictionary <FileReference, PchInfo> FileToPchInfo = new Dictionary <FileReference, PchInfo>();

            FindPchInfo(Target, SourceFileToCompileEnvironment, FileToPchInfo, Log);

            // Write out a CSV report containing the list of files and their line counts
            using (StreamWriter Writer = new StreamWriter(ReportFileLocation.FullName))
            {
                Writer.WriteLine("PCH,File,Num Includes,Pct Includes");

                // Iterate key/value pairs directly, rather than Keys plus the indexer, to avoid a second lookup per PCH
                foreach (KeyValuePair <FileReference, PchInfo> PchPair in FileToPchInfo)
                {
                    FileReference PchFile = PchPair.Key;
                    PchInfo PchInfo = PchPair.Value;
                    foreach (KeyValuePair <SourceFile, int> Pair in PchInfo.IncludedFiles.OrderByDescending(x => x.Value))
                    {
                        // Skip pinned, external and inline files
                        if ((Pair.Key.Flags & SourceFileFlags.Pinned) == 0 && (Pair.Key.Flags & SourceFileFlags.External) == 0 && (Pair.Key.Flags & SourceFileFlags.Inline) == 0)
                        {
                            Writer.WriteLine("{0},{1},{2},{3:0.00}", PchFile.GetFileName(), Pair.Key.Location.MakeRelativeTo(InputDir), Pair.Value, (Pair.Value * 100.0) / PchInfo.SourceFiles.Count);
                        }
                    }
                }
            }
        }
Example #17
0
        /// <summary>
        /// Wait for this worker to complete, harvest its output state, and append its logs to the local log files
        /// </summary>
        /// <param name="Writer">Writer for log output</param>
        /// <returns>Result of the probe: Failed (abort), FailedAllowRetry, Updated (more work to do), or Completed</returns>
        public SequenceProbeResult Join(LineBasedTextWriter Writer)
        {
            // Finish the task instance, capturing its output into a buffer
            BufferedTextWriter BufferedWriter = new BufferedTextWriter();
            int ExitCode = ActiveInstance.Join(BufferedWriter);

            // Release the instance; a subsequent Start() requires ActiveInstance to be null
            ActiveInstance.Dispose();
            ActiveInstance = null;

            // Read the new state written by the worker, then remove the intermediate file
            FileReference  OutputFile = new FileReference(TaskStateFile.FullName + ".out");
            SequenceWorker NewWorker  = SequenceWorker.Deserialize(OutputFile.FullName);

            OutputFile.Delete();

            // Make sure the exit code reflects the failure state. XGE can sometimes fail transferring back.
            if (ExitCode == 0 && !NewWorker.bResult)
            {
                ExitCode = -1;
            }

            // If it's a hard failure, print the compile log to the regular log
            if (ExitCode == -1)
            {
                Writer.WriteLine("Failed to compile {0}, exit code {1}:", UniqueName, ExitCode);
                foreach (string Line in NewWorker.CompileLog)
                {
                    // NOTE(review): "error:" is rewritten to "err:" — presumably to stop downstream log parsers
                    // treating these echoed lines as real build errors; confirm before changing
                    Writer.WriteLine("    > {0}", Line.Replace("error:", "err:"));
                }
            }

            // Annotate the log data if this is from a failed attempt. It still may be useful for debugging purposes.
            if (ExitCode != 0)
            {
                NewWorker.SummaryLog.Insert(0, String.Format("ExitCode={0}", ExitCode));
                NewWorker.SummaryLog = NewWorker.SummaryLog.Select(x => "FAIL > " + x).ToList();

                NewWorker.CompileLog.Insert(0, String.Format("ExitCode={0}", ExitCode));
                NewWorker.CompileLog = NewWorker.CompileLog.Select(x => "FAIL > " + x).ToList();
            }

            // Append the log data back to the local output, then clear it so it isn't written twice
            File.AppendAllLines(SummaryLogFile.FullName, NewWorker.SummaryLog);
            NewWorker.SummaryLog.Clear();

            File.AppendAllLines(CompileLogFile.FullName, NewWorker.CompileLog);
            NewWorker.CompileLog.Clear();

            // If we failed, return the appropriate result: -1 is a hard failure, anything else may be retried
            if (ExitCode != 0)
            {
                if (ExitCode == -1)
                {
                    Writer.WriteLine("Warning: Failed to compile {0}, exit code {1}. Aborting.", UniqueName, ExitCode);
                    return(SequenceProbeResult.Failed);
                }
                else
                {
                    Writer.WriteLine("Warning: Failed to compile {0}; exit code {1}. Will retry.", UniqueName, ExitCode);
                    return(SequenceProbeResult.FailedAllowRetry);
                }
            }

            // Update the task with the state returned by the worker
            Worker = NewWorker;

            // Check if this is just an incremental update
            if (Type == SequenceProbeType.Verify)
            {
                // Save the task
                Worker.Serialize(TaskStateFile.FullName);

                // Return that we're done
                return(SequenceProbeResult.Completed);
            }
            else if (Type == SequenceProbeType.Optimize)
            {
                if (Worker.RemainingFragmentCount > 0)
                {
                    // Get the top-most fragment - the one we've just established is a dependency for this leaf node - and add it to the list of known dependencies
                    // NOTE(review): NextFragment is never used below; the Fragments[...] lookup is repeated inline instead
                    SourceFragment NextFragment = Fragments[Worker.RemainingFragmentCount - 1];
                    AddDependency(Worker, Fragments, Worker.RemainingFragmentCount - 1);
                    Worker.SummaryLog.Add(String.Format("         [Added {0}: {1}]", Worker.RemainingFragmentCount - 1, Fragments[Worker.RemainingFragmentCount - 1].Location));

                    // Save the task
                    Worker.Serialize(TaskStateFile.FullName);

                    // Otherwise, return that we've just updated
                    return(SequenceProbeResult.Updated);
                }
                else
                {
                    // Save the task
                    Worker.Serialize(TaskStateFile.FullName);

                    // Return that we're done
                    SetCompletedDependencies();
                    return(SequenceProbeResult.Completed);
                }
            }
            else
            {
                throw new NotImplementedException();
            }
        }
Example #18
0
        /// <summary>
        /// Generate optimized PCHs which include headers used by a ratio of the source files
        /// </summary>
        /// <param name="Target">The target being built</param>
        /// <param name="SourceFileToCompileEnvironment">Source files to consider</param>
        /// <param name="IncludePaths">Include paths to base output includes from</param>
        /// <param name="SystemIncludePaths">System include paths to base output includes from</param>
        /// <param name="OutputFileContents">Dictionary which receives the files to output</param>
        /// <param name="MinRatio">Ratio of source files which must include a header for it to be included in the pch</param>
        /// <param name="Log">Writer for log output</param>
        public static void GeneratePCHs(BuildTarget Target, Dictionary <SourceFile, CompileEnvironment> SourceFileToCompileEnvironment, IEnumerable <DirectoryReference> IncludePaths, IEnumerable <DirectoryReference> SystemIncludePaths, Dictionary <FileReference, string> OutputFileContents, float MinRatio, LineBasedTextWriter Log)
        {
            Log.WriteLine("Optimizing precompiled headers...");

            // Create a map from source file to the number of times it's included
            Dictionary <FileReference, PchInfo> FileToPchInfo = new Dictionary <FileReference, PchInfo>();

            FindPchInfo(Target, SourceFileToCompileEnvironment, FileToPchInfo, Log);

            // Create an ordering of all the modules, used below to group the output includes
            Dictionary <BuildModule, int> ModuleToIndex = new Dictionary <BuildModule, int>();

            FindModuleOrder(SourceFileToCompileEnvironment.Keys.Select(x => x.Module).Distinct(), ModuleToIndex, new HashSet <BuildModule>());

            // Create the output files. Iterate key/value pairs directly, rather than Keys plus the indexer,
            // to avoid a second dictionary lookup per PCH.
            foreach (KeyValuePair <FileReference, PchInfo> PchPair in FileToPchInfo)
            {
                FileReference PchFile = PchPair.Key;
                PchInfo PchInfo = PchPair.Value;

                // Get the minimum number of includes to use in the PCH
                int MinIncludes = (int)(MinRatio * PchInfo.SourceFiles.Count);

                // Get a list of all the files to include above that threshold, most frequently included first.
                // Pinned, external and inline files are excluded, as are files from modules not on the PCH's public include path.
                List <SourceFile> IncludeFiles = new List <SourceFile>();
                foreach (SourceFile IncludedFile in PchInfo.IncludedFiles.Where(x => x.Value >= MinIncludes).OrderByDescending(x => x.Value).Select(x => x.Key))
                {
                    if ((IncludedFile.Flags & SourceFileFlags.Pinned) == 0 && (IncludedFile.Flags & SourceFileFlags.External) == 0 && (IncludedFile.Flags & SourceFileFlags.Inline) == 0 && PchInfo.PublicIncludePathModules.Contains(IncludedFile.Module))
                    {
                        IncludeFiles.Add(IncludedFile);
                    }
                }

                // Generate the output file text, grouping includes by module in module order
                StringBuilder Contents = new StringBuilder();
                using (StringWriter Writer = new StringWriter(Contents))
                {
                    Writer.WriteLine("// Copyright 1998-{0} Epic Games, Inc. All Rights Reserved.", DateTime.Now.Year);
                    Writer.WriteLine();
                    Writer.WriteLine("#pragma once");
                    foreach (IGrouping <BuildModule, SourceFile> Group in IncludeFiles.GroupBy(x => x.Module).OrderBy(x => ModuleToIndex[x.Key]))
                    {
                        Writer.WriteLine();
                        Writer.WriteLine("// From {0}:", Group.Key.Name);
                        foreach (SourceFile IncludeFile in Group)
                        {
                            // Only emit includes which can be expressed relative to one of the given include paths
                            string Include;
                            if (SourceFile.TryFormatInclude(PchFile.Directory, IncludeFile.Location, IncludePaths, SystemIncludePaths, out Include))
                            {
                                Writer.WriteLine("#include {0}", Include);
                            }
                        }
                    }
                }

                // Add it to the output map
                OutputFileContents.Add(PchFile, Contents.ToString());
            }
        }
Example #19
0
        /// <summary>
        /// Find or create an output file for a corresponding input file
        /// </summary>
        /// <param name="InputFileStack">The current include stack; the file at the top of the stack is the one to process</param>
        /// <param name="CppFileToHeaderFile">Map from translation unit to its matching header file</param>
        /// <param name="PreviousFiles">Working set of output files already included at this point; updated as files are processed</param>
        /// <param name="OutputFileLookup">Mapping from source file to output file; a null entry marks a file currently being processed</param>
        /// <param name="FwdSymbolToHeader">Map from symbol to the output header that forward-declares it</param>
        /// <param name="bMakeStandalone">Passed through to CreateOptimizedOutputFile</param>
        /// <param name="bUseOriginalIncludes">Whether to keep the original include list rather than optimizing it</param>
        /// <param name="Log">Writer for log messages</param>
        /// <returns>The new or existing output file</returns>
        static OutputFile FindOrCreateOutputFile(List <SourceFile> InputFileStack, Dictionary <SourceFile, SourceFile> CppFileToHeaderFile, HashList <OutputFile> PreviousFiles, Dictionary <SourceFile, OutputFile> OutputFileLookup, Dictionary <Symbol, OutputFile> FwdSymbolToHeader, bool bMakeStandalone, bool bUseOriginalIncludes, LineBasedTextWriter Log)
        {
            // Get the file at the top of the stack
            SourceFile InputFile = InputFileStack[InputFileStack.Count - 1];

            // Try to find an existing file
            OutputFile OutputFile;

            if (OutputFileLookup.TryGetValue(InputFile, out OutputFile))
            {
                // A null entry means the file is still being processed further up the stack, i.e. a circular include
                if (OutputFile == null)
                {
                    throw new Exception("Circular include dependencies are not allowed.");
                }
                // Merge the existing file's original includes into the working set, skipping duplicates
                foreach (OutputFile IncludedFile in OutputFile.OriginalIncludedFiles.Where(x => !PreviousFiles.Contains(x)))
                {
                    PreviousFiles.Add(IncludedFile);
                }
            }
            else
            {
                // Add a placeholder entry in the output file lookup, so we can detect circular include dependencies
                OutputFileLookup[InputFile] = null;

                // Duplicate the list of previously included files, since the working set is mutated by the
                // recursion below but the original set is needed when creating the optimized output file
                List <OutputFile> PreviousFilesCopy = new List <OutputFile>(PreviousFiles);

                // Build a list of includes for this file. First include is a placeholder for any missing includes that need to be inserted.
                List <OutputFileInclude> Includes = new List <OutputFileInclude>();
                if ((InputFile.Flags & SourceFileFlags.External) == 0)
                {
                    for (int MarkupIdx = 0; MarkupIdx < InputFile.Markup.Length; MarkupIdx++)
                    {
                        // Recurse into each active, non-inline include which doesn't have a counterpart
                        PreprocessorMarkup Markup = InputFile.Markup[MarkupIdx];
                        if (Markup.IsActive && Markup.IncludedFile != null && (Markup.IncludedFile.Flags & SourceFileFlags.Inline) == 0 && Markup.IncludedFile.Counterpart == null)
                        {
                            InputFileStack.Add(Markup.IncludedFile);
                            OutputFile IncludeFile = FindOrCreateOutputFile(InputFileStack, CppFileToHeaderFile, PreviousFiles, OutputFileLookup, FwdSymbolToHeader, bMakeStandalone, bUseOriginalIncludes, Log);
                            InputFileStack.RemoveAt(InputFileStack.Count - 1);
                            Includes.Add(new OutputFileInclude(MarkupIdx, IncludeFile));
                        }
                    }
                }

                // Find the matching header file for a translation unit, if it's standalone and already processed
                OutputFile HeaderFile = null;
                if ((InputFile.Flags & SourceFileFlags.TranslationUnit) != 0)
                {
                    SourceFile CandidateHeaderFile;
                    if (CppFileToHeaderFile.TryGetValue(InputFile, out CandidateHeaderFile) && (CandidateHeaderFile.Flags & SourceFileFlags.Standalone) != 0)
                    {
                        OutputFileLookup.TryGetValue(CandidateHeaderFile, out HeaderFile);
                    }
                }

                // Create the output file; optimized for output files, passthrough otherwise
                if ((InputFile.Flags & SourceFileFlags.Output) != 0 && !bUseOriginalIncludes)
                {
                    OutputFile = CreateOptimizedOutputFile(InputFile, HeaderFile, PreviousFilesCopy, Includes, InputFileStack, FwdSymbolToHeader, bMakeStandalone, Log);
                }
                else
                {
                    OutputFile = CreatePassthroughOutputFile(InputFile, Includes, Log);
                }

                // Replace the null entry in the output file lookup that we added earlier
                OutputFileLookup[InputFile] = OutputFile;

                // Add this file to the list of included files
                PreviousFiles.Add(OutputFile);

                // If the output file depends on something on the stack, make sure it's marked as pinned
                if ((InputFile.Flags & SourceFileFlags.Pinned) == 0)
                {
                    SourceFragment Dependency = OutputFile.Dependencies.FirstOrDefault(x => InputFileStack.Contains(x.File) && x.File != InputFile);
                    if (Dependency != null)
                    {
                        throw new Exception(String.Format("'{0}' is not marked as pinned, but depends on '{1}' which includes it", InputFile.Location.GetFileName(), Dependency.UniqueName));
                    }
                }
            }
            return(OutputFile);
        }
Example #20
0
 /// <summary>
 /// Compiles the permutation in its current state, storing the outcome in bResult
 /// </summary>
 /// <param name="Writer">Writer for diagnostic messages (not used by this method)</param>
 public void Verify(LineBasedTextWriter Writer)
 {
     // Compile with a single dependency count covering all remaining fragments
     List <int> DependencyCounts = new List <int>();
     DependencyCounts.Add(RemainingFragmentCount);
     bResult = CompilePermutation(DependencyCounts, 0, 0, 0);
 }
Example #21
0
        /// <summary>
        /// Creates an output file which represents the same includes as the input file
        /// </summary>
        /// <param name="InputFile">The input file that this output file corresponds to</param>
        /// <param name="Includes">The active set of includes parsed for this file; each include's FinalFiles is populated here</param>
        /// <param name="Log">Writer for log messages</param>
        /// <returns>The new output file</returns>
        public static OutputFile CreatePassthroughOutputFile(SourceFile InputFile, List <OutputFileInclude> Includes, LineBasedTextWriter Log)
        {
            // Write the state
            InputFile.LogVerbose("InputFile={0}", InputFile.Location.FullName);
            InputFile.LogVerbose("Duplicate.");

            // Reduce the list of includes to those that are required.
            // NOTE(review): Dependencies is built up below, but the returned OutputFile is constructed with a
            // fresh empty set; confirm whether the computed set was meant to be passed through.
            HashSet <SourceFragment> Dependencies = new HashSet <SourceFragment>();

            // Walk the fragments and includes in reverse together, so each include is resolved against the
            // dependencies accumulated from the fragments that follow it
            for (int FragmentIdx = InputFile.Fragments.Length - 1, IncludeIdx = Includes.Count - 1; FragmentIdx >= 0; FragmentIdx--)
            {
                // Update the dependency lists for this fragment
                SourceFragment InputFragment = InputFile.Fragments[FragmentIdx];
                if (InputFragment.Dependencies != null)
                {
                    Dependencies.UnionWith(InputFragment.Dependencies);
                }
                Dependencies.Remove(InputFragment);

                // Scan backwards through the list of includes, expanding each include to those which are required.
                // The first fragment consumes all remaining includes (MarkupMin of -1).
                int MarkupMin = (FragmentIdx == 0)? -1 : InputFragment.MarkupMin;
                for (; IncludeIdx >= 0 && Includes[IncludeIdx].MarkupIdx >= MarkupMin; IncludeIdx--)
                {
                    OutputFileInclude Include = Includes[IncludeIdx];
                    Include.FinalFiles.Add(Include.TargetFile);
                    Dependencies.ExceptWith(Include.TargetFile.IncludedFragments);
                    Dependencies.UnionWith(Include.TargetFile.Dependencies);
                }
            }

            // Create the optimized file
            return(new OutputFile(InputFile, Includes, new HashSet <SourceFragment>(), new List <Symbol>()));
        }
Example #22
0
        /// <summary>
        /// Runs one optimization step: binary-searches the remaining fragments for the
        /// highest-numbered one that is still required for the probe to compile.
        /// </summary>
        /// <param name="Writer">Writer for diagnostic messages</param>
        public void FindNextDependency(LineBasedTextWriter Writer)
        {
            // Fold any trailing fragments that are already known dependencies into the required count
            while (KnownDependencies.Contains(RemainingFragmentCount - 1))
            {
                RemainingFragmentCount--;
            }

            // Enumerate every dependency count still worth testing. Zero (nothing required) is
            // always a candidate, plus one entry per fragment not already known to be required.
            List <int> CandidateCounts = new List <int> {
                0
            };

            for (int FragmentIdx = 0; FragmentIdx < RemainingFragmentCount; FragmentIdx++)
            {
                if (!KnownDependencies.Contains(FragmentIdx))
                {
                    CandidateCounts.Add(FragmentIdx + 1);
                }
            }

            // Upper bound of the search range, as an index into CandidateCounts
            int UpperBound = CandidateCounts.Count - 1;

            // On the very first compile, check that the probe builds with everything enabled
            if (CompileCount == 0)
            {
                bResult = CompilePermutation(CandidateCounts, 0, UpperBound, UpperBound);
                if (!bResult)
                {
                    return;
                }
            }

            // Binary-search for the smallest candidate count that still compiles
            int LowerBound = 0;
            while (LowerBound < UpperBound)
            {
                // Rounds towards zero; LowerBound <= Midpoint < UpperBound holds throughout
                int Midpoint = (LowerBound + UpperBound) / 2;

                // In single-step mode, probe just below the current upper bound instead; this is
                // faster when there's a consecutive sequence of dependencies.
                if (bSingleStepMode)
                {
                    Midpoint        = UpperBound - 1;
                    bSingleStepMode = false;
                }

                bResult = CompilePermutation(CandidateCounts, LowerBound, Midpoint, UpperBound);

                // Narrow the range according to whether the permutation compiled
                if (bResult)
                {
                    UpperBound = Midpoint;
                }
                else
                {
                    LowerBound = Midpoint + 1;
                }
            }

            // Always finish on a successful compile, so stale errors can't leak into the next iteration
            if (!bResult)
            {
                bResult = CompilePermutation(CandidateCounts, UpperBound, UpperBound, UpperBound);
                if (!bResult)
                {
                    return;
                }
            }

            // If we settled on the last viable dependency, try single-stepping on the next pass
            if (UpperBound == CandidateCounts.Count - 1)
            {
                bSingleStepMode = true;
            }

            // Store off the new maximum number of required dependencies
            RemainingFragmentCount = CandidateCounts[UpperBound];

            // Once nothing remains, emit the final include order to the summary log
            if (RemainingFragmentCount == 0)
            {
                Log(SummaryLog, "");
                Log(SummaryLog, "Final include order:");
                foreach (int Idx in KnownDependencies)
                {
                    Log(SummaryLog, "{0,8}: {1}", Idx, FragmentFileNames[Idx]);
                }
            }
        }
        /// <summary>
        /// Gather compile time telemetry for the given files. Builds a compile "sequence" per unique
        /// fragment prefix, resumes from any shard CSVs already on disk, compiles the missing samples
        /// in parallel, then writes either a per-shard CSV or the final timing report.
        /// </summary>
        /// <param name="InputDir">Input directory. NOTE(review): not referenced in the visible body — confirm whether it is still needed.</param>
        /// <param name="WorkingDir">The working directory for output files</param>
        /// <param name="FileToCompileEnvironment">Mapping of source file to the environment used to compile it</param>
        /// <param name="NumSamples">Number of samples to take per fragment</param>
        /// <param name="Shard">Index of this shard; 1-based (compared against hash % NumShards below)</param>
        /// <param name="NumShards">Total number of shards the fragments are divided between</param>
        /// <param name="MaxParallel">Maximum number of tasks to run in parallel.</param>
        /// <param name="Log">Log writer</param>
        public static void Generate(DirectoryReference InputDir, DirectoryReference WorkingDir, Dictionary <SourceFile, CompileEnvironment> FileToCompileEnvironment, int NumSamples, int Shard, int NumShards, int MaxParallel, LineBasedTextWriter Log)
        {
            // NOTE(review): Timer is started but never read in the visible body — confirm it is still wanted
            Stopwatch Timer = Stopwatch.StartNew();

            // Create an intermediate directory
            DirectoryReference IntermediateDir = DirectoryReference.Combine(WorkingDir, "Timing");

            IntermediateDir.CreateDirectory();

            // Map of unique fragment to timing data
            Dictionary <SourceFragment, FragmentTimingData> FragmentToTimingData = new Dictionary <SourceFragment, FragmentTimingData>();

            // Map of unique fragment key (digest of the fragment-path prefix) to timing data
            Dictionary <string, FragmentTimingData> DigestToTimingData = new Dictionary <string, FragmentTimingData>();

            // Names already assigned to sequences, used to keep intermediate filenames unique
            HashSet <string> UniqueNames = new HashSet <string>();

            foreach (KeyValuePair <SourceFile, CompileEnvironment> Pair in FileToCompileEnvironment)
            {
                // Find all the fragments in this file
                List <SourceFragment>           Fragments      = new List <SourceFragment>();
                List <Tuple <int, SourceFile> > IncludeHistory = new List <Tuple <int, SourceFile> >();
                Pair.Key.FindIncludedFragments(Fragments, IncludeHistory, new HashSet <SourceFile>());

                // Create a sequence for each unique fragment
                FragmentTimingData PrevTimingData = null;
                for (int Idx = 0; Idx < Fragments.Count; Idx++)
                {
                    FragmentTimingData TimingData = null;
                    // Build a sequence if this fragment is new, or if the next fragment is new and
                    // will need this sequence as its predecessor (for the "exclusive" time delta)
                    if (!FragmentToTimingData.ContainsKey(Fragments[Idx]) || (Idx + 1 < Fragments.Count && !FragmentToTimingData.ContainsKey(Fragments[Idx + 1])))
                    {
                        // Create a sequence for this fragment
                        SourceFragment LastFragment = Fragments[Idx];

                        // Create a unique key for this sequence by concatenating all the fragment names
                        string Digest = Utility.ComputeDigest(String.Join("\n", Fragments.Take(Idx + 1).Select(x => x.Location.FullName)));

                        // Try to get an existing sequence for this key, otherwise create a new one;
                        if (!DigestToTimingData.TryGetValue(Digest, out TimingData))
                        {
                            // Find a unique name for this sequence
                            string UniqueName = LastFragment.Location.GetFileName();
                            for (int NameIdx = 2; !UniqueNames.Add(UniqueName); NameIdx++)
                            {
                                UniqueName = String.Format("{0}_{1}{2}", LastFragment.Location.GetFileNameWithoutExtension(), NameIdx, LastFragment.Location.GetExtension());
                            }

                            // Add the object for this sequence
                            FileReference IntermediateFile = FileReference.Combine(IntermediateDir, UniqueName);
                            TimingData = new FragmentTimingData(UniqueName, Digest, PrevTimingData, Fragments.Take(Idx + 1).ToArray(), IncludeHistory, IntermediateFile, Pair.Value);
                            DigestToTimingData.Add(Digest, TimingData);
                        }

                        // Add it to the unique mapping of fragments
                        if (!FragmentToTimingData.ContainsKey(LastFragment))
                        {
                            FragmentToTimingData[LastFragment] = TimingData;
                        }
                    }
                    PrevTimingData = TimingData;
                }
            }

            // Read any existing shard timing data in the output folder, so completed samples are not recompiled
            foreach (FileReference IntermediateFile in IntermediateDir.EnumerateFileReferences("*.csv"))
            {
                string[] Lines = File.ReadAllLines(IntermediateFile.FullName);
                foreach (string Line in Lines.Skip(1))
                {
                    string[] Tokens = Line.Split(',');
                    // Expected columns: Name,Digest,TotalTime,FrontendTime,BackendTime (see header written below)
                    if (Tokens.Length == 5)
                    {
                        FragmentTimingData TimingData;
                        if (DigestToTimingData.TryGetValue(Tokens[1], out TimingData) && TimingData.Samples.Count < NumSamples)
                        {
                            FragmentTimingSample Sample = new FragmentTimingSample();
                            Sample.TotalTime    = Double.Parse(Tokens[2]);
                            Sample.FrontendTime = Double.Parse(Tokens[3]);
                            Sample.BackendTime  = Double.Parse(Tokens[4]);
                            TimingData.Samples.Add(Sample);
                        }
                    }
                }
            }

            // Find all the remaining fragments, and repeat each one by the number of times it has to be executed
            List <FragmentTimingData> FilteredFragments = DigestToTimingData.Values.ToList();

            // Keep only the fragments belonging to this (1-based) shard, partitioned by digest hash
            FilteredFragments.RemoveAll(x => (int)(Math.Abs((long)x.Digest.GetHashCode()) % NumShards) != (Shard - 1));

            // Get the initial number of compile times for each fragment. We avoid saving before this number.
            List <int> InitialCompileCount = FilteredFragments.Select(x => x.Samples.Count).ToList();

            // Create all the actions to execute; one per missing sample
            List <Action> Actions = new List <Action>();

            foreach (FragmentTimingData Fragment in FilteredFragments)
            {
                // Copy loop variables into locals so each closure captures its own values
                FragmentTimingData FragmentCopy = Fragment;
                for (int SampleIdx = Fragment.Samples.Count; SampleIdx < NumSamples; SampleIdx++)
                {
                    int SampleIdxCopy = SampleIdx;
                    Actions.Add(() => FragmentCopy.Compile(IntermediateDir, SampleIdxCopy));
                }
            }

            // Randomize the order to ensure that compile times are not consistently affected by other files being compiled simultaneously.
            Random Random = new Random();

            Actions = Actions.OrderBy(x => Random.Next()).ToList();

            // Compile them all
            if (Actions.Count > 0)
            {
                Utility.ParallelForWithStatus("Compiling fragments...", 0, Actions.Count, new ParallelOptions {
                    MaxDegreeOfParallelism = MaxParallel
                }, Idx => Actions[Idx](), Log);
            }

            // Write out the results
            if (NumShards > 1)
            {
                // If we're running a sharded build, write out intermediate files containing the results
                FileReference OutputFile = FileReference.Combine(IntermediateDir, String.Format("Shard{0}.csv", Shard));
                using (StreamWriter Writer = new StreamWriter(OutputFile.FullName))
                {
                    Writer.WriteLine("Name,Digest,TotalTime,FrontendTime,BackendTime");
                    for (int Idx = 0; Idx < FilteredFragments.Count; Idx++)
                    {
                        FragmentTimingData FilteredFragment = FilteredFragments[Idx];
                        // Only write samples gathered during this run; pre-existing ones are already on disk
                        for (int SampleIdx = InitialCompileCount[Idx]; SampleIdx < FilteredFragment.Samples.Count; SampleIdx++)
                        {
                            FragmentTimingSample Sample = FilteredFragment.Samples[SampleIdx];
                            Writer.WriteLine("{0},{1},{2},{3},{4}", FilteredFragment.UniqueName, FilteredFragment.Digest, Sample.TotalTime, Sample.FrontendTime, Sample.BackendTime);
                        }
                    }
                }
            }
            else
            {
                // Write out the fragment report
                FileReference FragmentReport = FileReference.Combine(WorkingDir, "Timing.csv");
                Log.WriteLine("Writing {0}...", FragmentReport);
                using (StreamWriter Writer = new StreamWriter(FragmentReport.FullName))
                {
                    // Write the header
                    Writer.Write("Fragment,MinLine,MaxLine");

                    // Write the labels for each sample type
                    string[] Types = new string[] { "Total", "Frontend", "Backend" };
                    for (int Idx = 0; Idx < Types.Length; Idx++)
                    {
                        for (int SampleIdx = 0; SampleIdx < NumSamples; SampleIdx++)
                        {
                            Writer.Write(",{0}{1}", Types[Idx], SampleIdx + 1);
                        }
                        Writer.Write(",{0}Min,{0}Max,{0}Avg,{0}Exc", Types[Idx]);
                    }
                    Writer.WriteLine();

                    // Write all the results. One accessor per sample-type column group, in the same order as Types.
                    Func <FragmentTimingSample, double>[] TimeFieldDelegates = new Func <FragmentTimingSample, double>[] { x => x.TotalTime, x => x.FrontendTime, x => x.BackendTime };
                    foreach (FragmentTimingData TimingData in FragmentToTimingData.Values)
                    {
                        Writer.Write("{0},{1},{2}", TimingData.Fragment.Location.GetFileName(), TimingData.Fragment.MarkupMin + 1, TimingData.Fragment.MarkupMax + 1);
                        foreach (Func <FragmentTimingSample, double> TimeFieldDelegate in TimeFieldDelegates)
                        {
                            foreach (FragmentTimingSample Sample in TimingData.Samples)
                            {
                                Writer.Write(",{0:0.000}", TimeFieldDelegate(Sample));
                            }

                            Writer.Write(",{0:0.000}", TimingData.Samples.Min(x => TimeFieldDelegate(x)));
                            Writer.Write(",{0:0.000}", TimingData.Samples.Max(x => TimeFieldDelegate(x)));
                            Writer.Write(",{0:0.000}", TimingData.Samples.Average(x => TimeFieldDelegate(x)));

                            // "Exc" column: exclusive time, i.e. average minus the predecessor sequence's average.
                            // With no predecessor, the full average is reported.
                            if (TimingData.PrevFragmentData == null)
                            {
                                Writer.Write(",{0:0.000}", TimingData.Samples.Average(x => TimeFieldDelegate(x)));
                            }
                            else
                            {
                                Writer.Write(",{0:0.000}", TimingData.Samples.Average(x => TimeFieldDelegate(x)) - TimingData.PrevFragmentData.Samples.Average(x => TimeFieldDelegate(x)));
                            }
                        }
                        Writer.WriteLine();
                    }
                }
            }
        }
Example #24
0
        /// <summary>
        /// Write out an optimized file to the given location: inserts any missing standalone includes,
        /// merges and emits forward declarations, and rewrites active include directives with their
        /// optimized replacements, copying all other text through unchanged.
        /// </summary>
        /// <param name="IncludePaths">Base directories for relative include paths</param>
        /// <param name="SystemIncludePaths">Base directories for system include paths</param>
        /// <param name="Writer">Writer for the output text</param>
        /// <param name="bRemoveForwardDeclarations">If true, forward declarations already present in the file are dropped rather than merged with the new set</param>
        /// <param name="Log">Writer for diagnostic messages</param>
        public void Write(IEnumerable <DirectoryReference> IncludePaths, IEnumerable <DirectoryReference> SystemIncludePaths, TextWriter Writer, bool bRemoveForwardDeclarations, LineBasedTextWriter Log)
        {
            // Cursor marking how far into the original text we've copied so far
            TextLocation LastLocation = Text.Start;

            // Write the standalone includes
            if (MissingIncludes.Count > 0)
            {
                // Insert point for the missing includes: after the preceding markup unless the first
                // body markup is itself an include (presumably to group them together — TODO confirm)
                TextLocation BoilerplateLocation = (BodyMinIdx == Markup.Length || (Markup[BodyMinIdx].Type != PreprocessorMarkupType.Include && BodyMinIdx > 0))? Markup[BodyMinIdx - 1].EndLocation : Markup[BodyMinIdx].Location;
                WriteLines(Text, LastLocation, BoilerplateLocation, Writer);
                LastLocation = BoilerplateLocation;

                // If the previous line has content, emit a separating blank line; skip one existing
                // blank line in the source when two follow, to avoid doubling up
                if (LastLocation.LineIdx > 0 && Text.Lines[LastLocation.LineIdx - 1].TrimEnd().Length > 0)
                {
                    if (LastLocation.LineIdx + 1 < Text.Lines.Length && Text.Lines[LastLocation.LineIdx].TrimEnd().Length == 0 && Text.Lines[LastLocation.LineIdx + 1].TrimEnd().Length == 0)
                    {
                        LastLocation.LineIdx++;
                    }
                    Writer.WriteLine();
                }
                foreach (SourceFile MissingInclude in MissingIncludes)
                {
                    string IncludeText = FormatInclude(Location.Directory, MissingInclude.Location, IncludePaths, SystemIncludePaths, Log);
                    Writer.WriteLine("#include {0}", IncludeText);
                }
            }

            // Figure out before which markup object to write forward declarations, skipping over all the includes at the start of the file
            int ForwardDeclarationsBeforeMarkupIdx = -1;

            if ((Flags & SourceFileFlags.TranslationUnit) == 0)
            {
                // Advance past leading includes/directives, tracking #if nesting so the insert point
                // is never left inside a conditional block; stop at the first text markup
                int ConditionDepth = 0;
                for (int MarkupIdx = BodyMinIdx; MarkupIdx < Markup.Length; MarkupIdx++)
                {
                    if (ConditionDepth == 0)
                    {
                        ForwardDeclarationsBeforeMarkupIdx = MarkupIdx;
                    }
                    if (Markup[MarkupIdx].Type == PreprocessorMarkupType.Text)
                    {
                        break;
                    }
                    ConditionDepth += Markup[MarkupIdx].GetConditionDepthDelta();
                }
            }

            // Write all the other markup
            for (int MarkupIdx = BodyMinIdx; MarkupIdx < Markup.Length; MarkupIdx++)
            {
                PreprocessorMarkup ThisMarkup = Markup[MarkupIdx];

                // Write the forward declarations
                if (MarkupIdx == ForwardDeclarationsBeforeMarkupIdx)
                {
                    // Write out at least up to the end of the last markup
                    if (MarkupIdx > 0 && LastLocation <= Markup[MarkupIdx - 1].EndLocation)
                    {
                        WriteLines(Text, LastLocation, Markup[MarkupIdx - 1].EndLocation, Writer);
                        LastLocation = Markup[MarkupIdx - 1].EndLocation;
                    }

                    // Skip a blank line in the existing text.
                    TextLocation NewLastLocation = LastLocation;
                    if (LastLocation.LineIdx < Text.Lines.Length && String.IsNullOrWhiteSpace(Text.Lines[LastLocation.LineIdx]))
                    {
                        NewLastLocation = new TextLocation(LastLocation.LineIdx + 1, 0);
                    }

                    // Merge all the existing forward declarations with the new set. Scans forward
                    // consuming lines that parse as forward declarations, stopping at the first that doesn't.
                    HashSet <string> PreviousForwardDeclarations = new HashSet <string>();
                    while (NewLastLocation.LineIdx < Text.Lines.Length)
                    {
                        string TrimLine = Text.Lines[NewLastLocation.LineIdx].Trim();
                        // Blank lines and a "// Forward declarations" banner comment are skipped over
                        if (TrimLine.Length > 0 && !TrimLine.Equals("// Forward declarations", StringComparison.OrdinalIgnoreCase) && !TrimLine.Equals("// Forward declarations.", StringComparison.OrdinalIgnoreCase))
                        {
                            // Create a token reader for the current line
                            TokenReader Reader = new TokenReader(Text, new TextLocation(NewLastLocation.LineIdx, 0), new TextLocation(NewLastLocation.LineIdx, Text.Lines[NewLastLocation.LineIdx].Length));

                            // Read it into a buffer
                            List <Token> Tokens = new List <Token>();
                            while (Reader.MoveNext())
                            {
                                Tokens.Add(Reader.Current);
                            }

                            // Check it matches the syntax for a forward declaration, and add it to the list if it does
                            // Recognized forms: "struct X;" / "class X;"
                            if (Tokens.Count == 3 && (Tokens[0].Text == "struct" || Tokens[0].Text == "class") && Tokens[1].Type == TokenType.Identifier && Tokens[2].Text == ";")
                            {
                                PreviousForwardDeclarations.Add(String.Format("{0} {1};", Tokens[0].Text, Tokens[1].Text));
                            }
                            // "enum class X;"
                            else if (Tokens.Count == 4 && Tokens[0].Text == "enum" && Tokens[1].Text == "class" && Tokens[2].Type == TokenType.Identifier && Tokens[3].Text == ";")
                            {
                                PreviousForwardDeclarations.Add(String.Format("enum class {0};", Tokens[2].Text));
                            }
                            // "enum class X : UnderlyingType;"
                            else if (Tokens.Count == 6 && Tokens[0].Text == "enum" && Tokens[1].Text == "class" && Tokens[2].Type == TokenType.Identifier && Tokens[3].Text == ":" && Tokens[4].Type == TokenType.Identifier && Tokens[5].Text == ";")
                            {
                                PreviousForwardDeclarations.Add(String.Format("enum class {0} : {1};", Tokens[2].Text, Tokens[4].Text));
                            }
                            // Any line already in the computed set of forward declarations is also accepted verbatim
                            else if (ForwardDeclarations.Contains(Text.Lines[NewLastLocation.LineIdx]))
                            {
                                PreviousForwardDeclarations.Add(Text.Lines[NewLastLocation.LineIdx]);
                            }
                            else
                            {
                                break;
                            }
                        }
                        NewLastLocation = new TextLocation(NewLastLocation.LineIdx + 1, 0);
                    }

                    // Create a full list of new forward declarations, combining with the ones that are already there. Normally we optimize with the forward declarations present,
                    // so we shouldn't remove any unless running a specific pass designed to do so.
                    HashSet <string> MergedForwardDeclarations = new HashSet <string>(ForwardDeclarations);
                    if (!bRemoveForwardDeclarations)
                    {
                        MergedForwardDeclarations.UnionWith(PreviousForwardDeclarations);
                    }

                    // Write them out, sorted for a stable output order
                    if (MergedForwardDeclarations.Count > 0)
                    {
                        Writer.WriteLine();
                        foreach (string ForwardDeclaration in MergedForwardDeclarations.Distinct().OrderBy(x => GetForwardDeclarationSortKey(x)).ThenBy(x => x))
                        {
                            Writer.WriteLine("{0}{1}", GetIndent(MarkupIdx), ForwardDeclaration);
                        }
                        Writer.WriteLine();
                        LastLocation = NewLastLocation;
                    }
                    // Nothing to write but existing declarations were consumed: keep a blank separator
                    else if (PreviousForwardDeclarations.Count > 0)
                    {
                        Writer.WriteLine();
                        LastLocation = NewLastLocation;
                    }
                }

                // Write the includes
                if (ThisMarkup.Type == PreprocessorMarkupType.Include && ThisMarkup.IsActive && !ThisMarkup.IsInlineInclude())
                {
                    // Write up to the start of this include
                    WriteLines(Text, LastLocation, ThisMarkup.Location, Writer);

                    // Get the original include text. Some modules - particularly editor modules - include headers from other modules based from Engine/Source which are not listed as dependencies. If
                    // the original include is based from a shallower directory than the one we would include otherwise, we'll use that instead.
                    string OriginalIncludeText = null;
                    if (ThisMarkup.Tokens.Count == 1)
                    {
                        OriginalIncludeText = ThisMarkup.Tokens[0].Text.Replace('\\', '/');
                    }

                    // Write the replacement includes
                    foreach (SourceFile OutputIncludedFile in ThisMarkup.OutputIncludedFiles)
                    {
                        string IncludeText = FormatInclude(Location.Directory, OutputIncludedFile.Location, IncludePaths, SystemIncludePaths, Log);
                        // Includes expressed via an external macro are preserved exactly as written
                        if (OutputIncludedFile == ThisMarkup.IncludedFile && Rules.IsExternalIncludeMacro(ThisMarkup.Tokens))
                        {
                            IncludeText = Token.Format(ThisMarkup.Tokens);
                        }
                        // External files keep their original include text
                        else if (OutputIncludedFile == ThisMarkup.IncludedFile && (OutputIncludedFile.Flags & SourceFileFlags.External) != 0)
                        {
                            IncludeText = OriginalIncludeText;
                        }
                        // Prefer the original Runtime/Developer/Editor-rooted path when it resolves to the same file (see comment above)
                        else if (OriginalIncludeText != null && (Flags & SourceFileFlags.TranslationUnit) == 0 && OriginalIncludeText.EndsWith(IncludeText.TrimStart('\"'), StringComparison.OrdinalIgnoreCase) && (OriginalIncludeText.StartsWith("\"Runtime/", StringComparison.InvariantCultureIgnoreCase) || OriginalIncludeText.StartsWith("\"Developer/", StringComparison.InvariantCultureIgnoreCase) || OriginalIncludeText.StartsWith("\"Editor/", StringComparison.InvariantCultureIgnoreCase)))
                        {
                            IncludeText = OriginalIncludeText;
                        }
                        Writer.WriteLine("{0}#include {1}", GetIndent(MarkupIdx), IncludeText);
                    }

                    // Copy any formatting that followed the directive on subsequent lines
                    if (ThisMarkup.EndLocation.LineIdx > ThisMarkup.Location.LineIdx + 1)
                    {
                        WriteLines(Text, new TextLocation(ThisMarkup.Location.LineIdx + 1, 0), ThisMarkup.EndLocation, Writer);
                    }

                    // Update the location to the start of the next line
                    LastLocation = new TextLocation(ThisMarkup.Location.LineIdx + 1, 0);
                }
            }

            // Write to the end of the file
            WriteLines(Text, LastLocation, Text.End, Writer);
        }