/// <summary>
        /// Execute the task.
        /// </summary>
        /// <param name="Job">Information about the current job</param>
        /// <param name="BuildProducts">Set of build products produced by this node.</param>
        /// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
        /// <returns>True if the task succeeded</returns>
        public override bool Execute(JobContext Job, HashSet <FileReference> BuildProducts, Dictionary <string, HashSet <FileReference> > TagNameToFileSet)
        {
            DirectoryReference FromDir = ResolveDirectory(Parameters.FromDir);

            // Find all the input files
            IEnumerable <FileReference> Files;

            if (Parameters.Files == null)
            {
                Files = FromDir.EnumerateFileReferences("*", System.IO.SearchOption.AllDirectories);
            }
            else
            {
                Files = ResolveFilespec(FromDir, Parameters.Files, TagNameToFileSet);
            }

            // Create the zip file
            FileReference ArchiveFile = ResolveFile(Parameters.ZipFile);

            CommandUtils.ZipFiles(ArchiveFile, FromDir, Files);

            // Apply the optional tag to the produced archive
            if (!String.IsNullOrEmpty(Parameters.Tag))
            {
                FindOrAddTagSet(TagNameToFileSet, Parameters.Tag).Add(ArchiveFile);
            }

            // Add the archive to the set of build products
            BuildProducts.Add(ArchiveFile);
            return(true);
        }
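        // FindOrAddTagSet (used above) is assumed to return the file set registered under a tag name,
        // creating an empty set on first use. A minimal sketch of such a helper, under that assumption
        // (hypothetical; the real helper in the task base class may differ):
        static HashSet<FileReference> FindOrAddTagSet(Dictionary<string, HashSet<FileReference>> TagNameToFileSet, string TagName)
        {
            HashSet<FileReference> FileSet;
            if (!TagNameToFileSet.TryGetValue(TagName, out FileSet))
            {
                FileSet = new HashSet<FileReference>();
                TagNameToFileSet.Add(TagName, FileSet);
            }
            return FileSet;
        }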
        /// <summary>
        /// Execute the task and run the cooker.
        /// </summary>
        /// <param name="BuildProducts">List of build products for the current node. Cooking build products will be appended to this list.</param>
        /// <returns>True if the node succeeded</returns>
        public override bool Execute(List <string> BuildProducts)
        {
            // Figure out the project that this target belongs to
            FileReference ProjectFile;

            if (!UProjectInfo.TryGetProjectForTarget(Target, out ProjectFile))
            {
                ProjectFile = null;
            }

            // Execute the cooker
            using (TelemetryStopwatch CookStopwatch = new TelemetryStopwatch("Cook.{0}.{1}", ProjectFile.GetFileNameWithoutExtension(), CookPlatform))
            {
                CommandUtils.CookCommandlet(ProjectFile, "UE4Editor-Cmd.exe", Maps, null, null, null, CookPlatform, Arguments);
            }

            // Find all the cooked files
            DirectoryReference   CookedDirectory = DirectoryReference.Combine(ProjectFile.Directory, "Saved", "Cooked", CookPlatform);
            List <FileReference> CookedFiles     = CookedDirectory.EnumerateFileReferences("*", System.IO.SearchOption.AllDirectories).ToList();

            if (CookedFiles.Count == 0)
            {
                throw new AutomationException("Cooking did not produce any files in {0}", CookedDirectory.FullName);
            }
            BuildProducts.AddRange(CookedFiles.Select(x => x.FullName));
            return(true);
        }
		static void FindFilesInternal(DirectoryReference Directory, string[] ExcludedDirectorySuffixes, List<FileReference> FoundFiles)
		{
			// Recurse into subdirectories, skipping any whose path ends in one of the excluded suffixes
			foreach (DirectoryReference SubDirectory in Directory.EnumerateDirectoryReferences())
			{
				if (ShouldInclude(SubDirectory, ExcludedDirectorySuffixes))
				{
					FindFilesInternal(SubDirectory, ExcludedDirectorySuffixes, FoundFiles);
				}
			}
			// Add any files in this directory that are not excluded by suffix
			foreach (FileReference File in Directory.EnumerateFileReferences())
			{
				if (ShouldInclude(File, DefaultExcludedFileSuffixes))
				{
					FoundFiles.Add(File);
				}
			}
		}
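		// ShouldInclude is assumed to test a file or directory path against a list of excluded suffixes.
		// A minimal sketch under that assumption (hypothetical; the real helper may differ):
		static bool ShouldInclude(FileSystemReference Reference, string[] ExcludedSuffixes)
		{
			foreach (string ExcludedSuffix in ExcludedSuffixes)
			{
				if (Reference.FullName.EndsWith(ExcludedSuffix, StringComparison.OrdinalIgnoreCase))
				{
					return false;
				}
			}
			return true;
		}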
        /// <summary>
        /// Execute the task.
        /// </summary>
        /// <param name="Job">Information about the current job</param>
        /// <param name="BuildProducts">Set of build products produced by this node.</param>
        /// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
        /// <returns>True if the task succeeded</returns>
        public override bool Execute(JobContext Job, HashSet <FileReference> BuildProducts, Dictionary <string, HashSet <FileReference> > TagNameToFileSet)
        {
            // Figure out the project that this target belongs to
            FileReference ProjectFile = null;

            if (Parameters.Project != null)
            {
                ProjectFile = new FileReference(Parameters.Project);
                if (!ProjectFile.Exists())
                {
                    CommandUtils.LogError("Missing project file - {0}", ProjectFile.FullName);
                    return(false);
                }
            }

            // Execute the cooker
            using (TelemetryStopwatch CookStopwatch = new TelemetryStopwatch("Cook.{0}.{1}", (ProjectFile == null)? "UE4" : ProjectFile.GetFileNameWithoutExtension(), Parameters.Platform))
            {
                string[] Maps      = (Parameters.Maps == null)? null : Parameters.Maps.Split(new char[] { '+' });
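                // e.g. with Versioned=false and Arguments="-Compressed", the string below becomes: -Unversioned -LogCmds="LogSavePackage Warning" -Compressed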
                string   Arguments = (Parameters.Versioned ? "" : "-Unversioned ") + "-LogCmds=\"LogSavePackage Warning\" " + Parameters.Arguments;
                CommandUtils.CookCommandlet(ProjectFile, "UE4Editor-Cmd.exe", Maps, null, null, null, Parameters.Platform, Arguments);
            }

            // Find all the cooked files
            List <FileReference> CookedFiles = new List <FileReference>();

            foreach (string Platform in Parameters.Platform.Split('+'))
            {
                DirectoryReference PlatformCookedDirectory = DirectoryReference.Combine(ProjectFile.Directory, "Saved", "Cooked", Platform);
                if (!PlatformCookedDirectory.Exists())
                {
                    CommandUtils.LogError("Cook output directory not found ({0})", PlatformCookedDirectory.FullName);
                    return(false);
                }
                List <FileReference> PlatformCookedFiles = PlatformCookedDirectory.EnumerateFileReferences("*", System.IO.SearchOption.AllDirectories).ToList();
                if (PlatformCookedFiles.Count == 0)
                {
                    CommandUtils.LogError("Cooking did not produce any files in {0}", PlatformCookedDirectory.FullName);
                    return(false);
                }
                CookedFiles.AddRange(PlatformCookedFiles);
            }

            // Apply the optional tag to the build products
            foreach (string TagName in FindTagNamesFromList(Parameters.Tag))
            {
                FindOrAddTagSet(TagNameToFileSet, TagName).UnionWith(CookedFiles);
            }

            // Add them to the set of build products
            BuildProducts.UnionWith(CookedFiles);
            return(true);
        }
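        // FindTagNamesFromList (used above) is assumed to split a delimited list of tag names, skipping
        // empty entries. A minimal sketch assuming ';' as the separator (hypothetical; the real helper
        // and its delimiter may differ):
        static IEnumerable<string> FindTagNamesFromList(string TagList)
        {
            if (!String.IsNullOrEmpty(TagList))
            {
                foreach (string TagName in TagList.Split(';'))
                {
                    if (TagName.Trim().Length > 0)
                    {
                        yield return TagName.Trim();
                    }
                }
            }
        }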
        /// <summary>
        /// Gather compile time telemetry for the given files
        /// </summary>
        /// <param name="InputDir">The input directory</param>
        /// <param name="WorkingDir">The working directory for output files</param>
        /// <param name="FileToCompileEnvironment">Mapping of source file to the environment used to compile it</param>
        /// <param name="NumSamples">Number of samples to take</param>
        /// <param name="Shard">The shard index (1-based) to run</param>
        /// <param name="NumShards">Total number of shards that the work is divided into</param>
        /// <param name="MaxParallel">Maximum number of tasks to run in parallel</param>
        /// <param name="Log">Log writer</param>
        public static void Generate(DirectoryReference InputDir, DirectoryReference WorkingDir, Dictionary <SourceFile, CompileEnvironment> FileToCompileEnvironment, int NumSamples, int Shard, int NumShards, int MaxParallel, LineBasedTextWriter Log)
        {
            Stopwatch Timer = Stopwatch.StartNew();

            // Create an intermediate directory
            DirectoryReference IntermediateDir = DirectoryReference.Combine(WorkingDir, "Timing");

            IntermediateDir.CreateDirectory();

            // Map of unique fragment to timing data
            Dictionary <SourceFragment, FragmentTimingData> FragmentToTimingData = new Dictionary <SourceFragment, FragmentTimingData>();

            // Map of unique fragment key to timing data
            Dictionary <string, FragmentTimingData> DigestToTimingData = new Dictionary <string, FragmentTimingData>();

            // List of all the sequences to time
            HashSet <string> UniqueNames = new HashSet <string>();

            foreach (KeyValuePair <SourceFile, CompileEnvironment> Pair in FileToCompileEnvironment)
            {
                // Find all the fragments in this file
                List <SourceFragment>           Fragments      = new List <SourceFragment>();
                List <Tuple <int, SourceFile> > IncludeHistory = new List <Tuple <int, SourceFile> >();
                Pair.Key.FindIncludedFragments(Fragments, IncludeHistory, new HashSet <SourceFile>());

                // Create a sequence for each unique fragment
                FragmentTimingData PrevTimingData = null;
                for (int Idx = 0; Idx < Fragments.Count; Idx++)
                {
                    FragmentTimingData TimingData = null;
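                    // Time this fragment if it has not been measured yet, or if the next fragment still needs
                    // a measurement (the next fragment uses this one's sequence as its baseline)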
                    if (!FragmentToTimingData.ContainsKey(Fragments[Idx]) || (Idx + 1 < Fragments.Count && !FragmentToTimingData.ContainsKey(Fragments[Idx + 1])))
                    {
                        // Create a sequence for this fragment
                        SourceFragment LastFragment = Fragments[Idx];

                        // Create a unique key for this sequence by concatenating all the fragment names
                        string Digest = Utility.ComputeDigest(String.Join("\n", Fragments.Take(Idx + 1).Select(x => x.Location.FullName)));

                        // Try to get an existing sequence for this key, otherwise create a new one
                        if (!DigestToTimingData.TryGetValue(Digest, out TimingData))
                        {
                            // Find a unique name for this sequence
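                            // e.g. "Foo.cpp", then "Foo_2.cpp", "Foo_3.cpp", ... if the name is already taken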
                            string UniqueName = LastFragment.Location.GetFileName();
                            for (int NameIdx = 2; !UniqueNames.Add(UniqueName); NameIdx++)
                            {
                                UniqueName = String.Format("{0}_{1}{2}", LastFragment.Location.GetFileNameWithoutExtension(), NameIdx, LastFragment.Location.GetExtension());
                            }

                            // Add the object for this sequence
                            FileReference IntermediateFile = FileReference.Combine(IntermediateDir, UniqueName);
                            TimingData = new FragmentTimingData(UniqueName, Digest, PrevTimingData, Fragments.Take(Idx + 1).ToArray(), IncludeHistory, IntermediateFile, Pair.Value);
                            DigestToTimingData.Add(Digest, TimingData);
                        }

                        // Add it to the unique mapping of fragments
                        if (!FragmentToTimingData.ContainsKey(LastFragment))
                        {
                            FragmentToTimingData[LastFragment] = TimingData;
                        }
                    }
                    PrevTimingData = TimingData;
                }
            }

            // Read any existing shard timing data in the output folder
            foreach (FileReference IntermediateFile in IntermediateDir.EnumerateFileReferences("*.csv"))
            {
                string[] Lines = File.ReadAllLines(IntermediateFile.FullName);
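                // Each data line has the form "Name,Digest,TotalTime,FrontendTime,BackendTime", matching the shard output written below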
                foreach (string Line in Lines.Skip(1))
                {
                    string[] Tokens = Line.Split(',');
                    if (Tokens.Length == 5)
                    {
                        FragmentTimingData TimingData;
                        if (DigestToTimingData.TryGetValue(Tokens[1], out TimingData) && TimingData.Samples.Count < NumSamples)
                        {
                            FragmentTimingSample Sample = new FragmentTimingSample();
                            Sample.TotalTime    = Double.Parse(Tokens[2]);
                            Sample.FrontendTime = Double.Parse(Tokens[3]);
                            Sample.BackendTime  = Double.Parse(Tokens[4]);
                            TimingData.Samples.Add(Sample);
                        }
                    }
                }
            }

            // Find the fragments assigned to this shard; each will be compiled once for every sample it is still missing
            List <FragmentTimingData> FilteredFragments = DigestToTimingData.Values.ToList();

            FilteredFragments.RemoveAll(x => (int)(Math.Abs((long)x.Digest.GetHashCode()) % NumShards) != (Shard - 1));
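            // (e.g. with NumShards=3 and Shard=2, only fragments whose |digest hash| % 3 == 1 are kept)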

            // Record the initial number of samples for each fragment; samples below this count were loaded from disk and are not written out again
            List <int> InitialCompileCount = FilteredFragments.Select(x => x.Samples.Count).ToList();

            // Create all the actions to execute
            List <Action> Actions = new List <Action>();

            foreach (FragmentTimingData Fragment in FilteredFragments)
            {
                FragmentTimingData FragmentCopy = Fragment;
                for (int SampleIdx = Fragment.Samples.Count; SampleIdx < NumSamples; SampleIdx++)
                {
                    int SampleIdxCopy = SampleIdx;
                    Actions.Add(() => FragmentCopy.Compile(IntermediateDir, SampleIdxCopy));
                }
            }

            // Randomize the order to ensure that compile times are not consistently affected by other files being compiled simultaneously.
            Random Random = new Random();

            Actions = Actions.OrderBy(x => Random.Next()).ToList();

            // Compile them all
            if (Actions.Count > 0)
            {
                Utility.ParallelForWithStatus("Compiling fragments...", 0, Actions.Count, new ParallelOptions {
                    MaxDegreeOfParallelism = MaxParallel
                }, Idx => Actions[Idx](), Log);
            }

            // Write out the results
            if (NumShards > 1)
            {
                // If we're running a sharded build, write out intermediate files containing the results
                FileReference OutputFile = FileReference.Combine(IntermediateDir, String.Format("Shard{0}.csv", Shard));
                using (StreamWriter Writer = new StreamWriter(OutputFile.FullName))
                {
                    Writer.WriteLine("Name,Digest,TotalTime,FrontendTime,BackendTime");
                    for (int Idx = 0; Idx < FilteredFragments.Count; Idx++)
                    {
                        FragmentTimingData FilteredFragment = FilteredFragments[Idx];
                        for (int SampleIdx = InitialCompileCount[Idx]; SampleIdx < FilteredFragment.Samples.Count; SampleIdx++)
                        {
                            FragmentTimingSample Sample = FilteredFragment.Samples[SampleIdx];
                            Writer.WriteLine("{0},{1},{2},{3},{4}", FilteredFragment.UniqueName, FilteredFragment.Digest, Sample.TotalTime, Sample.FrontendTime, Sample.BackendTime);
                        }
                    }
                }
            }
            else
            {
                // Write out the fragment report
                FileReference FragmentReport = FileReference.Combine(WorkingDir, "Timing.csv");
                Log.WriteLine("Writing {0}...", FragmentReport);
                using (StreamWriter Writer = new StreamWriter(FragmentReport.FullName))
                {
                    // Write the header
                    Writer.Write("Fragment,MinLine,MaxLine");

                    // Write the labels for each sample type
                    string[] Types = new string[] { "Total", "Frontend", "Backend" };
                    for (int Idx = 0; Idx < Types.Length; Idx++)
                    {
                        for (int SampleIdx = 0; SampleIdx < NumSamples; SampleIdx++)
                        {
                            Writer.Write(",{0}{1}", Types[Idx], SampleIdx + 1);
                        }
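                        // "Exc" is the exclusive time: this fragment's average minus the previous fragment's average (computed below)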
                        Writer.Write(",{0}Min,{0}Max,{0}Avg,{0}Exc", Types[Idx]);
                    }
                    Writer.WriteLine();

                    // Write all the results
                    Func <FragmentTimingSample, double>[] TimeFieldDelegates = new Func <FragmentTimingSample, double>[] { x => x.TotalTime, x => x.FrontendTime, x => x.BackendTime };
                    foreach (FragmentTimingData TimingData in FragmentToTimingData.Values)
                    {
                        Writer.Write("{0},{1},{2}", TimingData.Fragment.Location.GetFileName(), TimingData.Fragment.MarkupMin + 1, TimingData.Fragment.MarkupMax + 1);
                        foreach (Func <FragmentTimingSample, double> TimeFieldDelegate in TimeFieldDelegates)
                        {
                            foreach (FragmentTimingSample Sample in TimingData.Samples)
                            {
                                Writer.Write(",{0:0.000}", TimeFieldDelegate(Sample));
                            }

                            Writer.Write(",{0:0.000}", TimingData.Samples.Min(x => TimeFieldDelegate(x)));
                            Writer.Write(",{0:0.000}", TimingData.Samples.Max(x => TimeFieldDelegate(x)));
                            Writer.Write(",{0:0.000}", TimingData.Samples.Average(x => TimeFieldDelegate(x)));

                            if (TimingData.PrevFragmentData == null)
                            {
                                Writer.Write(",{0:0.000}", TimingData.Samples.Average(x => TimeFieldDelegate(x)));
                            }
                            else
                            {
                                Writer.Write(",{0:0.000}", TimingData.Samples.Average(x => TimeFieldDelegate(x)) - TimingData.PrevFragmentData.Samples.Average(x => TimeFieldDelegate(x)));
                            }
                        }
                        Writer.WriteLine();
                    }
                }
            }
        }
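        // Utility.ComputeDigest (used above) is assumed to hash a string into a stable hex digest.
        // A minimal sketch of such a helper using MD5 (hypothetical; the real implementation may differ):
        static string ComputeDigest(string Input)
        {
            using (System.Security.Cryptography.MD5 Hasher = System.Security.Cryptography.MD5.Create())
            {
                byte[] Hash = Hasher.ComputeHash(System.Text.Encoding.UTF8.GetBytes(Input));
                return BitConverter.ToString(Hash).Replace("-", "");
            }
        }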
    private static void FindOutputFilesHelper(HashSet<FileReference> OutputFiles, DirectoryReference BaseDir, string SearchPrefix, PhysXTargetLib TargetLib)
    {
        if(!BaseDir.Exists())
        {
            return;
        }

        foreach (FileReference FoundFile in BaseDir.EnumerateFileReferences(SearchPrefix))
        {
            string FileNameUpper = FoundFile.GetFileName().ToString().ToUpper();
            
            bool bIncludeFile = false;
            if(TargetLib == PhysXTargetLib.APEX)
            {
                bIncludeFile = FileGeneratedByAPEX(FileNameUpper);
            }
            else
            {
                bIncludeFile = !FileGeneratedByAPEX(FileNameUpper);
            }

            if(bIncludeFile)
            {
                OutputFiles.Add(FoundFile);
            }
        }
    }
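    // A hypothetical usage sketch: collect APEX and non-APEX build products separately from an assumed
    // library directory. The method name, directory layout, search pattern and the PhysXTargetLib.PhysX
    // value are illustrative assumptions, not taken from the source:
    private static void FindOutputFilesExample(DirectoryReference RootDir, HashSet<FileReference> ApexFiles, HashSet<FileReference> PhysXFiles)
    {
        DirectoryReference LibDir = DirectoryReference.Combine(RootDir, "Lib");
        FindOutputFilesHelper(ApexFiles, LibDir, "*.dll", PhysXTargetLib.APEX);
        FindOutputFilesHelper(PhysXFiles, LibDir, "*.dll", PhysXTargetLib.PhysX);
    }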