/// <summary>
/// Execute the task. Deletes the resolved set of files, and optionally prunes any parent
/// directories that are left empty afterwards.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	// Find all the referenced files and delete them
	HashSet<FileReference> Files = ResolveFilespec(CommandUtils.RootDirectory, Parameters.Files, TagNameToFileSet);
	foreach(FileReference File in Files)
	{
		InternalUtils.SafeDeleteFile(File.FullName, true);
	}

	// Try to delete all the parent directories. Keep track of the directories we've already deleted to avoid hitting the disk.
	if(Parameters.DeleteEmptyDirectories)
	{
		// Find all the directories that we're touching
		HashSet<DirectoryReference> ParentDirectories = new HashSet<DirectoryReference>();
		foreach(FileReference File in Files)
		{
			ParentDirectories.Add(File.Directory);
		}

		// Recurse back up from each of those directories to the root folder. Guard against
		// CurrentDirectory becoming null: for a file that is not under RootDirectory, the walk
		// would otherwise run off the top of the tree and dereference a null ParentDirectory.
		foreach(DirectoryReference ParentDirectory in ParentDirectories)
		{
			for(DirectoryReference CurrentDirectory = ParentDirectory; CurrentDirectory != null && CurrentDirectory != CommandUtils.RootDirectory; CurrentDirectory = CurrentDirectory.ParentDirectory)
			{
				if(!TryDeleteEmptyDirectory(CurrentDirectory))
				{
					break;
				}
			}
		}
	}
	return true;
}
/// <summary>
/// Execute the task. Runs the named commandlet, optionally against a specific project file and
/// a specific editor executable.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	// Resolve the project file, if one was specified
	FileReference ProjectFile = null;
	if(!String.IsNullOrEmpty(Parameters.Project))
	{
		ProjectFile = ResolveFile(Parameters.Project);
	}

	// Figure out which editor binary to run; default to the stock UE4Editor-Cmd executable
	FileReference EditorExe = String.IsNullOrEmpty(Parameters.EditorExe)
		? new FileReference(HostPlatform.Current.GetUE4ExePath("UE4Editor-Cmd.exe"))
		: ResolveFile(Parameters.EditorExe);

	// Bail out if the editor binary is missing
	if(!EditorExe.Exists())
	{
		CommandUtils.LogError("{0} does not exist", EditorExe.FullName);
		return false;
	}

	// Invoke the commandlet
	CommandUtils.RunCommandlet(ProjectFile, EditorExe.FullName, Parameters.Name, Parameters.Arguments);
	return true;
}
/// <summary>
/// Execute the task. Prints an optional message, then optionally lists the files in a tag
/// (and, if requested, each file's contents).
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	// Emit the optional message
	if(!String.IsNullOrEmpty(Parameters.Message))
	{
		CommandUtils.Log(Parameters.Message);
	}

	// List the resolved files for the given tag, in a stable (sorted) order
	if(!String.IsNullOrEmpty(Parameters.Files))
	{
		HashSet<FileReference> TaggedFiles = ResolveFilespec(CommandUtils.RootDirectory, Parameters.Files, TagNameToFileSet);
		foreach(FileReference TaggedFile in TaggedFiles.OrderBy(x => x.FullName))
		{
			CommandUtils.Log(" {0}", TaggedFile.FullName);

			// Optionally dump each file's contents line by line
			if(Parameters.IncludeContents)
			{
				foreach(string ContentLine in System.IO.File.ReadAllLines(TaggedFile.FullName))
				{
					CommandUtils.Log(" {0}", ContentLine);
				}
			}
		}
	}
	return true;
}
/// <summary>
/// Execute the task. Resolves a file set (honoring exclusion rules and an optional filter),
/// then applies one or more tags to the result.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	// Base directory that all the patterns are relative to
	DirectoryReference RootDir = ResolveDirectory(Parameters.BaseDir);

	// Build the exclusion rules up front (an absent 'Except' parameter means no exclusions)
	List<string> ExceptRules = ParseRules(RootDir, Parameters.Except ?? "", TagNameToFileSet);

	// Gather the candidate files, minus anything excluded
	HashSet<FileReference> MatchingFiles = ResolveFilespecWithExcludePatterns(RootDir, Parameters.Files, ExceptRules, TagNameToFileSet);

	// Narrow the set down further with the 'Filter' parameter, if set
	if(Parameters.Filter != null)
	{
		FileFilter FilterRules = new FileFilter();
		FilterRules.AddRules(ParseRules(RootDir, Parameters.Filter, TagNameToFileSet));
		MatchingFiles.RemoveWhere(x => !FilterRules.Matches(x.FullName));
	}

	// Add everything that survived to each requested tag set
	foreach(string Name in FindTagNamesFromList(Parameters.With))
	{
		FindOrAddTagSet(TagNameToFileSet, Name).UnionWith(MatchingFiles);
	}
	return true;
}
/// <summary>
/// Execute the task. Zips a set of files from a source directory into an archive, optionally
/// tagging the archive, and records it as a build product.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	DirectoryReference SourceDir = ResolveDirectory(Parameters.FromDir);

	// Gather the input files; with no explicit file spec, take everything under the source directory
	IEnumerable<FileReference> InputFiles;
	if(Parameters.Files != null)
	{
		InputFiles = ResolveFilespec(SourceDir, Parameters.Files, TagNameToFileSet);
	}
	else
	{
		InputFiles = SourceDir.EnumerateFileReferences("*", System.IO.SearchOption.AllDirectories);
	}

	// Write the archive
	FileReference ZipFile = ResolveFile(Parameters.ZipFile);
	CommandUtils.ZipFiles(ZipFile, SourceDir, InputFiles);

	// Tag the produced archive, if requested
	foreach(string TagName in FindTagNamesFromList(Parameters.Tag))
	{
		FindOrAddTagSet(TagNameToFileSet, TagName).Add(ZipFile);
	}

	// Record the archive as a build product
	BuildProducts.Add(ZipFile);
	return true;
}
/// <summary>
/// Execute the task. Copies a set of files from a source directory to a target directory,
/// preserving their relative paths, then records the copies as build products.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	// Get the source and target directories
	DirectoryReference FromDir = ResolveDirectory(Parameters.FromDir);
	DirectoryReference ToDir = ResolveDirectory(Parameters.ToDir);

	// Resolve the source files and compute the matching target paths. Materialize both lists once
	// up front: the previous deferred IEnumerables were re-enumerated several times (the emptiness
	// check, the copy, the build products, and each tag), repeating the LINQ work each time.
	List<FileReference> SourceFiles = ResolveFilespec(FromDir, Parameters.Files, TagNameToFileSet).ToList();
	List<FileReference> TargetFiles = SourceFiles.Select(x => FileReference.Combine(ToDir, x.MakeRelativeTo(FromDir))).ToList();
	if(!FromDir.Exists() && SourceFiles.Count == 0)
	{
		// Nothing to do - the source directory doesn't exist and the filespec matched nothing
		CommandUtils.Log("Skipping copy of files from '{0}' - directory does not exist.", FromDir.FullName);
	}
	else if(FromDir == ToDir)
	{
		// Copying a directory onto itself is a no-op
		CommandUtils.Log("Skipping copy of files in '{0}' - source directory is same as target directory", FromDir.FullName);
	}
	else
	{
		CommandUtils.ThreadedCopyFiles(SourceFiles.Select(x => x.FullName).ToList(), TargetFiles.Select(x => x.FullName).ToList());
	}
	BuildProducts.UnionWith(TargetFiles);

	// Apply the optional output tag to them
	foreach(string TagName in FindTagNamesFromList(Parameters.Tag))
	{
		FindOrAddTagSet(TagNameToFileSet, TagName).UnionWith(TargetFiles);
	}
	return true;
}
/// <summary>
/// Execute the task. Runs the cook commandlet for one or more platforms, then collects the
/// cooked output files as build products.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	// Figure out the project that this target belongs to (the Project parameter is optional)
	FileReference ProjectFile = null;
	if(Parameters.Project != null)
	{
		ProjectFile = new FileReference(Parameters.Project);
		if(!ProjectFile.Exists())
		{
			CommandUtils.LogError("Missing project file - {0}", ProjectFile.FullName);
			return false;
		}
	}

	// Execute the cooker, timing it with a telemetry stopwatch. Maps are '+'-separated; cooks
	// are unversioned unless the Versioned parameter is set.
	using(TelemetryStopwatch CookStopwatch = new TelemetryStopwatch("Cook.{0}.{1}", (ProjectFile == null)? "UE4" : ProjectFile.GetFileNameWithoutExtension(), Parameters.Platform))
	{
		string[] Maps = (Parameters.Maps == null)? null : Parameters.Maps.Split(new char[]{ '+' });
		string Arguments = (Parameters.Versioned ? "" : "-Unversioned ") + "-LogCmds=\"LogSavePackage Warning\" " + Parameters.Arguments;
		CommandUtils.CookCommandlet(ProjectFile, "UE4Editor-Cmd.exe", Maps, null, null, null, Parameters.Platform, Arguments);
	}

	// Find all the cooked files under <Project>/Saved/Cooked/<Platform> for each platform.
	// NOTE(review): ProjectFile may still be null here (it is optional above, and the stopwatch
	// explicitly handles the null case), in which case ProjectFile.Directory throws a
	// NullReferenceException - confirm whether project-less cooks are expected to reach this point.
	List<FileReference> CookedFiles = new List<FileReference>();
	foreach(string Platform in Parameters.Platform.Split('+'))
	{
		DirectoryReference PlatformCookedDirectory = DirectoryReference.Combine(ProjectFile.Directory, "Saved", "Cooked", Platform);
		if(!PlatformCookedDirectory.Exists())
		{
			CommandUtils.LogError("Cook output directory not found ({0})", PlatformCookedDirectory.FullName);
			return false;
		}
		// An empty output directory is treated as a cook failure
		List<FileReference> PlatformCookedFiles = PlatformCookedDirectory.EnumerateFileReferences("*", System.IO.SearchOption.AllDirectories).ToList();
		if(PlatformCookedFiles.Count == 0)
		{
			CommandUtils.LogError("Cooking did not produce any files in {0}", PlatformCookedDirectory.FullName);
			return false;
		}
		CookedFiles.AddRange(PlatformCookedFiles);
	}

	// Apply the optional tag to the build products
	foreach(string TagName in FindTagNamesFromList(Parameters.Tag))
	{
		FindOrAddTagSet(TagNameToFileSet, TagName).UnionWith(CookedFiles);
	}

	// Add them to the set of build products
	BuildProducts.UnionWith(CookedFiles);
	return true;
}
/// <summary>
/// Execute the task. Submits the resolved set of files to Perforce in a new changelist,
/// optionally creating a dedicated workspace to submit from. Does nothing unless submits
/// are globally enabled and at least one file matched.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	HashSet<FileReference> Files = ResolveFilespec(CommandUtils.RootDirectory, Parameters.Files, TagNameToFileSet);
	// Only touch Perforce when submits are allowed and there is something to submit
	if (CommandUtils.AllowSubmit && Files.Count > 0)
	{
		// Get the connection that we're going to submit with
		P4Connection SubmitP4 = CommandUtils.P4;
		if (Parameters.Workspace != null)
		{
			// Create a brand new workspace, owned by the current user on the current host.
			// Stream and root path fall back to the ambient P4 environment when not specified.
			P4ClientInfo Client = new P4ClientInfo();
			Client.Owner = Environment.UserName;
			Client.Host = Environment.MachineName;
			Client.Stream = Parameters.Stream ?? CommandUtils.P4Env.BuildRootP4;
			Client.RootPath = Parameters.RootDir ?? CommandUtils.RootDirectory.FullName;
			Client.Name = Parameters.Workspace;
			Client.Options = P4ClientOption.NoAllWrite | P4ClientOption.Clobber | P4ClientOption.NoCompress | P4ClientOption.Unlocked | P4ClientOption.NoModTime | P4ClientOption.RmDir;
			Client.LineEnd = P4LineEnd.Local;
			CommandUtils.P4.CreateClient(Client, AllowSpew: false);

			// Create a new connection for it
			SubmitP4 = new P4Connection(Client.Owner, Client.Name);
		}

		// Open a new changelist, then open each file in it. Each file is reverted and synced
		// with -k first, then opened for both add (covers new files) and edit (covers existing
		// files) so the same code path works either way.
		int NewCL = SubmitP4.CreateChange(Description: Parameters.Description);
		foreach(FileReference File in Files)
		{
			SubmitP4.Revert(String.Format("-k \"{0}\"", File.FullName));
			SubmitP4.Sync(String.Format("-k \"{0}\"", File.FullName), AllowSpew: false);
			SubmitP4.Add(NewCL, String.Format("\"{0}\"", File.FullName));
			SubmitP4.Edit(NewCL, String.Format("\"{0}\"", File.FullName));
			if (Parameters.FileType != null)
			{
				// Reopen with an explicit Perforce filetype when one was specified
				SubmitP4.P4(String.Format("reopen -t \"{0}\" \"{1}\"", Parameters.FileType, File.FullName), AllowSpew: false);
			}
		}

		// Submit it; a non-positive submitted changelist number indicates failure
		int SubmittedCL;
		SubmitP4.Submit(NewCL, out SubmittedCL);
		if (SubmittedCL <= 0)
		{
			throw new AutomationException("Submit failed.");
		}
		CommandUtils.Log("Submitted in changelist {0}", SubmittedCL);
	}
	return true;
}
/// <summary>
/// Execute the task. Updates the engine version files with the given change/branch/build
/// information, then records the updated files as build products.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	// Write the new version information into the engine version files (unless SkipWrite is set),
	// and convert the returned paths into file references
	List<FileReference> VersionFiles = UE4Build.StaticUpdateVersionFiles(Parameters.Change, Parameters.CompatibleChange, Parameters.Branch, Parameters.Build, Parameters.Licensee, !Parameters.SkipWrite)
		.Select(x => new FileReference(x))
		.ToList();

	// Apply the optional tag to them
	foreach(string TagName in FindTagNamesFromList(Parameters.Tag))
	{
		FindOrAddTagSet(TagNameToFileSet, TagName).UnionWith(VersionFiles);
	}

	// Add them to the list of build products
	BuildProducts.UnionWith(VersionFiles);
	return true;
}
/// <summary>
/// Execute the task. Generates a SRCSRV.INI source-indexing file mapping local source files to
/// Perforce paths, then embeds it into each matching PDB file using PDBSTR.EXE.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	// Find the matching binaries; only .pdb files can have source information embedded
	FileReference[] PdbFiles = ResolveFilespec(CommandUtils.RootDirectory, Parameters.BinaryFiles, TagNameToFileSet).Where(x => x.HasExtension(".pdb")).ToArray();

	// Find all the matching source files
	FileReference[] SourceFiles = ResolveFilespec(CommandUtils.RootDirectory, Parameters.SourceFiles, TagNameToFileSet).ToArray();

	// Get the PDBSTR.EXE path, using the latest SDK version we can find.
	FileReference PdbStrExe;
	if (!TryGetPdbStrExe("v10.0", out PdbStrExe) && !TryGetPdbStrExe("v8.1", out PdbStrExe) && !TryGetPdbStrExe("v8.0", out PdbStrExe))
	{
		CommandUtils.LogError("Couldn't find PDBSTR.EXE in any Windows SDK installation");
		return false;
	}

	// Get the path to the generated SRCSRV.INI file
	FileReference SrcSrvIni = FileReference.Combine(CommandUtils.RootDirectory, "Engine", "Intermediate", "SrcSrv.ini");
	SrcSrvIni.Directory.CreateDirectory();

	// Generate the SRCSRV.INI file. The command/target templates fetch each indexed file from
	// Perforce at the branch and changelist given in the task parameters; the "source files"
	// section lists each local path alongside its branch-relative path.
	using (StreamWriter Writer = new StreamWriter(SrcSrvIni.FullName))
	{
		Writer.WriteLine("SRCSRV: ini------------------------------------------------");
		Writer.WriteLine("VERSION=1");
		Writer.WriteLine("VERCTRL=Perforce");
		Writer.WriteLine("SRCSRV: variables------------------------------------------");
		Writer.WriteLine("SRCSRVTRG=%sdtrg%");
		Writer.WriteLine("SRCSRVCMD=%sdcmd%");
		Writer.WriteLine("SDCMD=p4.exe print -o %srcsrvtrg% \"{0}/%var2%@{1}\"", Parameters.Branch.TrimEnd('/'), Parameters.Change);
		Writer.WriteLine("SDTRG=%targ%\\{0}\\{1}\\%fnbksl%(%var2%)", Parameters.Branch.Replace('/', '+'), Parameters.Change);
		Writer.WriteLine("SRCSRV: source files ---------------------------------------");
		foreach (FileReference SourceFile in SourceFiles)
		{
			string RelativeSourceFile = SourceFile.MakeRelativeTo(CommandUtils.RootDirectory);
			Writer.WriteLine("{0}*{1}", SourceFile.FullName, RelativeSourceFile.Replace('\\', '/'));
		}
		Writer.WriteLine("SRCSRV: end------------------------------------------------");
	}

	// Embed the data in the PDB files by writing the ini into each PDB's 'srcsrv' stream
	foreach(FileReference PdbFile in PdbFiles)
	{
		CommandUtils.Run(PdbStrExe.FullName, String.Format("-w -p:\"{0}\" -i:\"{1}\" -s:srcsrv", PdbFile.FullName, SrcSrvIni.FullName));
	}
	return true;
}
/// <summary>
/// Decides the fate of a failed job: if it is still within its configured retry budget it is
/// marked failed and queued for a retry, otherwise it is poisoned.
/// </summary>
/// <param name="job">The job that failed.</param>
/// <param name="exception">The exception that caused the failure.</param>
/// <param name="context">The job context; receives the exception when the job is poisoned.</param>
public void RetryOrPoison(Job job, Exception exception, JobContext context)
{
	if (job == null) throw new ArgumentNullException("job");
	if (exception == null) throw new ArgumentNullException("exception");
	if (context == null) throw new ArgumentNullException("context");

	var config = _configuration.For(job.Type);
	if (job.DispatchCount > config.RetryCount)
	{
		// Retry budget exhausted: record the exception and poison the job
		context.Exception = exception;
		_jobCoordinator.Run(job, () => _statusChanger.Change(job, JobStatus.Poisoned));
	}
	else
	{
		// Still within the retry budget: mark the job failed, then queue it for a retry
		_recoverableAction.Run(() => job = _statusChanger.Change(job, JobStatus.Failed), then: () => _failedJobQueue.Add(job));
	}
}
/// <summary>
/// Execute the task. Strips debug symbols from a set of binaries, writing the stripped copies
/// to a separate output directory (in-place stripping is not supported), and records the
/// stripped files as build products.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	// Source and destination roots for the stripped binaries
	DirectoryReference InputDir = ResolveDirectory(Parameters.BaseDir);
	DirectoryReference StrippedDir = ResolveDirectory(Parameters.OutputDir);

	// The two directories must differ; we can't strip in-place at the moment
	if(InputDir == StrippedDir)
	{
		CommandUtils.LogError("Output directory for stripped files is the same as source directory ({0})", InputDir.FullName);
		return false;
	}

	// Resolve the input files, and mirror their relative paths under the output directory
	FileReference[] InputFiles = ResolveFilespec(InputDir, Parameters.Files, TagNameToFileSet).OrderBy(x => x.FullName).ToArray();
	FileReference[] StrippedFiles = InputFiles.Select(x => FileReference.Combine(StrippedDir, x.MakeRelativeTo(InputDir))).ToArray();

	// Strip each file using the toolchain for the requested platform
	UEBuildPlatform BuildPlatform = UEBuildPlatform.GetBuildPlatform(Parameters.Platform);
	UEToolChain ToolChain = BuildPlatform.CreateContext(null).CreateToolChainForDefaultCppPlatform();
	for(int FileIdx = 0; FileIdx < InputFiles.Length; FileIdx++)
	{
		StrippedFiles[FileIdx].Directory.CreateDirectory();
		CommandUtils.Log("Stripping symbols: {0} -> {1}", InputFiles[FileIdx].FullName, StrippedFiles[FileIdx].FullName);
		ToolChain.StripSymbols(InputFiles[FileIdx].FullName, StrippedFiles[FileIdx].FullName);
	}

	// Tag the stripped binaries, if requested
	foreach(string TagName in FindTagNamesFromList(Parameters.Tag))
	{
		FindOrAddTagSet(TagNameToFileSet, TagName).UnionWith(StrippedFiles);
	}

	// Record the stripped binaries as build products
	BuildProducts.UnionWith(StrippedFiles);
	return true;
}
/// <summary>
/// Invokes a single exception filter: resolves the filter instance from the dependency scope,
/// fills in any ExceptionContext arguments from the current job context, then calls the
/// filter method by reflection.
/// </summary>
/// <param name="filter">The filter to invoke.</param>
/// <param name="exception">The exception being dispatched.</param>
/// <param name="context">The job context to copy into ExceptionContext arguments.</param>
/// <param name="scope">Dependency scope used to resolve the filter instance.</param>
static void DispatchCore(ExceptionFilter filter, Exception exception, JobContext context, IDependencyScope scope)
{
	var instance = scope.GetService(filter.Type);

	// Populate every ExceptionContext argument with the details of the current job and exception
	foreach (var argument in filter.Arguments)
	{
		var exceptionContext = argument as ExceptionContext;
		if (exceptionContext == null)
		{
			continue;
		}

		exceptionContext.ActivityType = context.ActivityType;
		exceptionContext.Method = context.Method;
		exceptionContext.Arguments = context.Arguments;
		exceptionContext.Exception = exception;
		exceptionContext.DispatchCount = context.DispatchCount;
	}

	// Invoke the named filter method with the (possibly updated) arguments
	filter.Type.GetMethod(filter.Method).Invoke(instance, filter.Arguments);
}
/// <summary>
/// Execute the task. Runs a child UAT command, optionally capturing the telemetry it produces
/// and merging it into this process's telemetry under a given prefix.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	// When telemetry merging is enabled, pick a fixed temp file for the child process to write to
	FileReference TelemetryFile = null;
	if (Parameters.MergeTelemetryWithPrefix != null)
	{
		TelemetryFile = FileReference.Combine(CommandUtils.RootDirectory, "Engine", "Intermediate", "UAT", "Telemetry.json");
		TelemetryFile.Directory.CreateDirectory();
	}

	// Assemble the child command line: command name, optional arguments, optional telemetry path
	string CommandLine = Parameters.Name;
	if (!String.IsNullOrEmpty(Parameters.Arguments))
	{
		CommandLine += String.Format(" {0}", Parameters.Arguments);
	}
	if (TelemetryFile != null)
	{
		CommandLine += String.Format(" -Telemetry={0}", CommandUtils.MakePathSafeToUseWithCommandLine(TelemetryFile.FullName));
	}

	// Run the child UAT instance, treating a command failure as task failure
	try
	{
		CommandUtils.RunUAT(CommandUtils.CmdEnv, CommandLine);
	}
	catch (CommandUtils.CommandFailedException)
	{
		return false;
	}

	// Fold any telemetry data the child produced into our own, under the requested prefix
	if (TelemetryFile != null)
	{
		TelemetryData NewTelemetry;
		if (TelemetryData.TryRead(TelemetryFile.FullName, out NewTelemetry))
		{
			CommandUtils.Telemetry.Merge(Parameters.MergeTelemetryWithPrefix, NewTelemetry);
		}
	}
	return true;
}
/// <summary>
/// Dispatches an exception to every filter attached to the job. A filter that throws does not
/// prevent the remaining filters from running: non-fatal filter exceptions are published to the
/// event stream, while fatal ones propagate.
/// </summary>
/// <param name="job">The job whose filters should run.</param>
/// <param name="exception">The exception being dispatched.</param>
/// <param name="context">The current job context.</param>
/// <param name="scope">Dependency scope used to resolve filter instances.</param>
public void Dispatch(Job job, Exception exception, JobContext context, IDependencyScope scope)
{
	if (job == null) throw new ArgumentNullException("job");
	if (exception == null) throw new ArgumentNullException("exception");
	if (context == null) throw new ArgumentNullException("context");
	if (scope == null) throw new ArgumentNullException("scope");

	foreach (var exceptionFilter in job.ExceptionFilters)
	{
		try
		{
			DispatchCore(exceptionFilter, exception, context, scope);
		}
		catch (Exception error)
		{
			// Fatal exceptions always propagate; everything else is published for diagnostics
			if (error.IsFatal()) throw;

			_eventStream.Publish<ExceptionFilterDispatcher>(error);
		}
	}
}
/// <summary>
/// Execute the task. Extracts one or more zip archives into a target directory, then records
/// everything that was extracted as build products.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	DirectoryReference TargetDir = ResolveDirectory(Parameters.ToDir);

	// Resolve the set of archives to extract
	IEnumerable<FileReference> Archives = ResolveFilespec(CommandUtils.RootDirectory, Parameters.ZipFile, TagNameToFileSet);

	// Extract each archive into the target directory, collecting everything that was written out
	HashSet<FileReference> ExtractedFiles = new HashSet<FileReference>();
	foreach(FileReference Archive in Archives)
	{
		foreach(string ExtractedFile in CommandUtils.UnzipFiles(Archive.FullName, TargetDir.FullName))
		{
			ExtractedFiles.Add(new FileReference(ExtractedFile));
		}
	}

	// Tag the extracted files, if requested
	foreach(string TagName in FindTagNamesFromList(Parameters.Tag))
	{
		FindOrAddTagSet(TagNameToFileSet, TagName).UnionWith(ExtractedFiles);
	}

	// Record the extracted files as build products
	BuildProducts.UnionWith(ExtractedFiles);
	return true;
}
/// <summary>
/// Execute the task. Publishes a set of symbol files to a symbol store, holding the store's
/// lock file for the duration of the operation.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	// Create the toolchain for the requested platform
	UEBuildPlatform BuildPlatform = UEBuildPlatform.GetBuildPlatform(Parameters.Platform);
	UEToolChain ToolChain = BuildPlatform.CreateContext(null).CreateToolChainForDefaultCppPlatform();

	// Resolve the symbol files to publish
	List<FileReference> SymbolFiles = ResolveFilespec(CommandUtils.RootDirectory, Parameters.Files, TagNameToFileSet).ToList();

	// Resolve the symbol store location
	DirectoryReference StoreDir = ResolveDirectory(Parameters.StoreDir);

	// Publish while holding the store's lock file (up to a 15-minute wait to acquire it)
	bool bPublished = false;
	LockFile.TakeLock(StoreDir, TimeSpan.FromMinutes(15), () =>
	{
		bPublished = ToolChain.PublishSymbols(StoreDir, SymbolFiles, Parameters.Product);
	});

	if (!bPublished)
		CommandUtils.LogError("Failure publishing symbol files.");

	return bPublished;
}
/// <summary>
/// Execute the task. Builds a pak file with UnrealPak: writes (or reuses) a response file
/// listing each input file and its rebased in-pak path, assembles the UnrealPak command line
/// (create/sign/order/installed/UTF8 options), runs the tool, and records the output pak as a
/// build product.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	// Find the directories we're going to rebase relative to (the root directory is always included)
	HashSet<DirectoryReference> RebaseDirs = new HashSet<DirectoryReference>{ CommandUtils.RootDirectory };
	if(Parameters.RebaseDir != null)
	{
		RebaseDirs.UnionWith(SplitDelimitedList(Parameters.RebaseDir).Select(x => ResolveDirectory(x)));
	}

	// Get the output parameter
	FileReference OutputFile = ResolveFile(Parameters.Output);

	// Check for a ResponseFile parameter; when given, it is used as-is instead of being generated
	FileReference ResponseFile = null;
	if (!String.IsNullOrEmpty(Parameters.ResponseFile))
	{
		ResponseFile = ResolveFile(Parameters.ResponseFile);
	}

	if (ResponseFile == null)
	{
		// Get a unique filename for the response file in the log folder, appending _2, _3, ...
		// until an unused name is found
		ResponseFile = FileReference.Combine(new DirectoryReference(CommandUtils.CmdEnv.LogFolder), String.Format("PakList_{0}.txt", OutputFile.GetFileNameWithoutExtension()));
		for (int Idx = 2; ResponseFile.Exists(); Idx++)
		{
			ResponseFile = FileReference.Combine(ResponseFile.Directory, String.Format("PakList_{0}_{1}.txt", OutputFile.GetFileNameWithoutExtension(), Idx));
		}

		// Write out the response file (UTF-8 with BOM): one '"<source>" "<in-pak path>"' line per
		// file, with an optional -compress suffix. Every file must be under one of the rebase
		// directories so that a relative in-pak path can be computed.
		HashSet<FileReference> Files = ResolveFilespec(CommandUtils.RootDirectory, Parameters.Files, TagNameToFileSet);
		using (StreamWriter Writer = new StreamWriter(ResponseFile.FullName, false, new System.Text.UTF8Encoding(true)))
		{
			foreach (FileReference File in Files)
			{
				string RelativePath = FindShortestRelativePath(File, RebaseDirs);
				if (RelativePath == null)
				{
					CommandUtils.LogError("Couldn't find relative path for '{0}' - not under any rebase directories", File.FullName);
					return false;
				}
				Writer.WriteLine("\"{0}\" \"{1}\"{2}", File.FullName, RelativePath, Parameters.Compress ? " -compress" : "");
			}
		}
	}

	// Format the command line
	StringBuilder CommandLine = new StringBuilder();
	CommandLine.AppendFormat("{0} -create={1}", CommandUtils.MakePathSafeToUseWithCommandLine(OutputFile.FullName), CommandUtils.MakePathSafeToUseWithCommandLine(ResponseFile.FullName));
	if(Parameters.Sign != null)
	{
		CommandLine.AppendFormat(" -sign={0}", CommandUtils.MakePathSafeToUseWithCommandLine(ResolveFile(Parameters.Sign).FullName));
	}
	if(Parameters.Order != null)
	{
		CommandLine.AppendFormat(" -order={0}", CommandUtils.MakePathSafeToUseWithCommandLine(ResolveFile(Parameters.Order).FullName));
	}
	if (GlobalCommandLine.Installed)
	{
		CommandLine.Append(" -installed");
	}
	if (GlobalCommandLine.UTF8Output)
	{
		CommandLine.AppendFormat(" -UTF8Output");
	}

	// Get the executable path; Windows uses the .exe, other host platforms the bare binary
	FileReference UnrealPakExe;
	if(HostPlatform.Current.HostEditorPlatform == UnrealTargetPlatform.Win64)
	{
		UnrealPakExe = ResolveFile("Engine/Binaries/Win64/UnrealPak.exe");
	}
	else
	{
		UnrealPakExe = ResolveFile(String.Format("Engine/Binaries/{0}/UnrealPak", HostPlatform.Current.HostEditorPlatform.ToString()));
	}

	// Run it
	CommandUtils.Log("Running '{0} {1}'", CommandUtils.MakePathSafeToUseWithCommandLine(UnrealPakExe.FullName), CommandLine.ToString());
	CommandUtils.RunAndLog(CommandUtils.CmdEnv, UnrealPakExe.FullName, CommandLine.ToString(), Options: CommandUtils.ERunOptions.Default | CommandUtils.ERunOptions.UTF8Output);
	BuildProducts.Add(OutputFile);

	// Apply the optional tag to the output file
	foreach(string TagName in FindTagNamesFromList(Parameters.Tag))
	{
		FindOrAddTagSet(TagNameToFileSet, TagName).Add(OutputFile);
	}
	return true;
}
/// <summary>
/// Build all the tasks for this node.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include. Should be set to contain the node inputs on entry.</param>
/// <returns>Whether the task succeeded or not. Exiting with an exception will be caught and treated as a failure.</returns>
public bool Build(JobContext Job, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
	// Give adjacent tasks a chance to combine before running anything
	MergeTasks();

	// Run every task in order, accumulating outputs into the node's default output set
	HashSet<FileReference> BuildProducts = TagNameToFileSet[DefaultOutput.TagName];
	foreach(CustomTask Task in Tasks)
	{
		bool bResult = Task.Execute(Job, BuildProducts, TagNameToFileSet);
		if(!bResult)
		{
			CommandUtils.Log("Failed to execute task.");
			return false;
		}
	}

	// Drop anything that doesn't exist on disk, since these files weren't explicitly tagged
	BuildProducts.RemoveWhere(x => !x.Exists());
	return true;
}
/// <summary>
/// Recursively processes a folder hierarchy: for each folder, processes its files, then
/// descends into its subfolders, building up the physical path as it goes.
/// </summary>
/// <param name="folders">The folders to process at this level.</param>
/// <param name="path">The physical path accumulated so far.</param>
/// <param name="context">The current job context.</param>
private async Task ProcessFolders(List<RepositoryFolder> folders, string path, JobContext context)
{
	foreach (var folder in folders)
	{
		// Extend the path with this folder's physical name, then handle files before subfolders
		var folderPath = Path.Combine(path, folder.PhysicalName);
		await ProcessFiles(folder.Files, folderPath, context);
		await ProcessFolders(folder.Folders, folderPath, context);
	}
}
/// <summary>
/// Converts a set of repository files in parallel, then merges split files back together and
/// updates the package metadata for the converted results.
/// </summary>
/// <param name="id">ArchiveRecordId or OrderId the files belong to (used for logging).</param>
/// <param name="files">The repository files to convert.</param>
/// <param name="paket">The package metadata to update after conversion.</param>
/// <param name="rootFolder">Root folder passed through to the metadata updater.</param>
/// <param name="tempFolder">Temporary working folder for the conversion files.</param>
/// <param name="context">The current job context.</param>
private async Task ConvertFiles(string id, List<RepositoryFile> files, PaketDIP paket, string rootFolder, string tempFolder, JobContext context)
{
	// Skip empty collections
	if (files.Count == 0)
	{
		return;
	}

	// Create the list with conversion files.
	// This list will contain the splitted file names for processing
	// This list does not contain files that didn't have the flag exported or should be skipped
	var conversionFiles = pdfManipulator.ConvertToConversionFiles(files.ToList(), tempFolder, true);

	var sw = new Stopwatch();
	sw.Start();
	var parallelism = Settings.Default.DocumentTransformParallelism;
	Log.Information("Starting parallel document transform for-each-loop with parallelism of {parallelism} for {Count} files of archiveRecordId or orderId {id}", parallelism, files.Count, id);

	// Convert each file concurrently, bounded by the configured degree of parallelism
	var supportedFileTypesForRendering = await renderEngine.GetSupportedFileTypes();
	await conversionFiles.ParallelForEachAsync(async conversionFile =>
	{
		var file = new FileInfo(conversionFile.FullName);
		Log.Information("Start conversion for file: {file} for archive record or order id {id}", file, id);
		conversionFile.ConvertedFile = await ConvertFile(file, supportedFileTypesForRendering, context);
	}, parallelism, true);

	// Now stich back files that were possibly splitted
	pdfManipulator.MergeSplittedFiles(conversionFiles);

	// Update the metadata.xml for all the converted files
	// As speed is not an issue, we're not doing it in parallel.
	// Only top-level files (no ParentId, i.e. not a split part) get their metadata entry updated.
	foreach (var conversionFile in conversionFiles)
	{
		var file = new FileInfo(conversionFile.FullName);
		if (string.IsNullOrEmpty(conversionFile.ParentId))
		{
			MetadataXmlUpdater.UpdateFile(file, new FileInfo(conversionFile.ConvertedFile), paket, rootFolder);
		}

		// Delete the original file, if the convertedFile exists and is not the same as the original file.
		// In case of PDF the name of the original and converted file could be the same. --> PDF to PDF with OCR
		if (file.Exists && conversionFile.ConvertedFile != file.FullName)
		{
			file.Delete();
		}
	}

	sw.Stop();
	Log.Information("Finished parallel document transform for-each-loop with parallelism of {parallelism} for {Count} files of archiveRecordId or orderId {id} in {TotalSeconds}", parallelism, files.Count, id, sw.Elapsed.TotalSeconds);
}
/// <summary>
/// Converts a package to a usage copy ("Gebrauchskopie"): converts all contained files,
/// updates the package metadata, adds readme/design/index files and re-zips the result.
/// </summary>
/// <param name="id">ArchiveRecordId or OrderItemId.</param>
/// <param name="assetType">The asset type.</param>
/// <param name="protectWithPassword">Whether the resulting zip is password protected.</param>
/// <param name="package">The package to convert.</param>
/// <returns>PackageConversionResult describing success/failure and the produced zip file.</returns>
public async Task<PackageConversionResult> ConvertPackage(string id, AssetType assetType, bool protectWithPassword, RepositoryPackage package)
{
    var retVal = new PackageConversionResult { Valid = true };
    var packageFileName = Path.Combine(Settings.Default.PickupPath, package.PackageFileName);
    var fi = new FileInfo(packageFileName);

    // Make sure Gebrauchskopien have a packageId
    if (assetType == AssetType.Gebrauchskopie && string.IsNullOrEmpty(package.PackageId))
    {
        throw new InvalidOperationException("Assets of type <Gebrauchskopie> require a packageId");
    }

    if (File.Exists(fi.FullName))
    {
        Log.Information("Found zip file {Name}. File is already unzipped.", fi.Name);

        // The working folder sits next to the zip and is named after it (extension stripped).
        // NOTE(review): DirectoryName is null only for root-less paths; the throw-expression
        // guards that edge case.
        var tempFolder = Path.Combine(fi.DirectoryName ?? throw new InvalidOperationException(), fi.Name.Remove(fi.Name.Length - fi.Extension.Length));
        try
        {
            var metadataFile = Path.Combine(tempFolder, "header", "metadata.xml");
            var paket = (PaketDIP)Paket.LoadFromFile(metadataFile);
            var contentFolder = Path.Combine(tempFolder, "content");
            var context = new JobContext { ArchiveRecordId = package.ArchiveRecordId, PackageId = package.PackageId };

            // Convert top-level files, then recurse through the folder tree.
            await ConvertFiles(id, package.Files, paket, tempFolder, contentFolder, context);
            await ConvertFolders(id, package.Folders, paket, tempFolder, contentFolder, context);

            paket.Generierungsdatum = DateTime.Today;
            ((Paket)paket).SaveToFile(metadataFile);

            AddReadmeFile(tempFolder);
            AddDesignFiles(tempFolder);
            CreateIndexHtml(tempFolder, package.PackageId);

            // Create zip file with the name of the archive
            var finalZipFolder = Path.Combine(fi.DirectoryName, assetType.ToString(), id);
            var finalZipFile = finalZipFolder + ".zip";
            CreateZipFile(finalZipFolder, finalZipFile, tempFolder, protectWithPassword, id);

            retVal.FileName = finalZipFile;

            // if we are here everything is groovy
            Log.Information("Successfully processed (converted formats) zip file {Name}", fi.Name);
        }
        catch (Exception ex)
        {
            Log.Error(ex, "Unexpected exception while converting the package.");
            retVal.Valid = false;
            retVal.ErrorMessage = $"Unexpected exception while converting the package.\nException:\n{ex}";
            return (retVal);
        }
        finally
        {
            // Delete the temp files regardless of success or failure.
            if (Directory.Exists(tempFolder))
            {
                Directory.Delete(tempFolder, true);
            }
        }
    }
    else
    {
        Log.Warning("Unable to find the zip file {packageFileName} for conversion.", packageFileName);
        retVal.Valid = false;
        retVal.ErrorMessage = $"Unable to find the zip file {packageFileName} for conversion.";
        return (retVal);
    }

    return (retVal);
}
/// <summary>
/// Sends reminder mails for orders that have been pending for more than 10 days.
/// Runs only on Mondays. Orders are split into "not yet audited" (status 0, 2 or 3) and
/// "not yet shipped" (status 1); each group is mailed in parts of at most 250 orders.
/// </summary>
/// <param name="jobContext">Context of the current job (not used by this check).</param>
/// <returns>A human-readable summary of the reminder mails that were sent.</returns>
public static string Check(JobContext jobContext)
{
    // Maximum number of orders per reminder mail; longer lists are sent in parts.
    const int BatchSize = 250;

    string CompanyCode = ConfigurationManager.AppSettings["CompanyCode"];
    string messageInfo = string.Empty;

    // Reminder mails are only sent on Mondays. Compare the enum directly instead of
    // the former ToString().Trim().Equals("Monday") round-trip.
    if (DateTime.Now.DayOfWeek != DayOfWeek.Monday)
    {
        return "今天不是周一 不发送提醒邮件";
    }

    List<MoreThanTenDaysOrderInfoEntity> UnauditedOrNotOutStockList = SendAlarmMailDA.GetMoreThanTenDaysUnauditedOrNotOutStockOrderInfoList(CompanyCode);
    if (UnauditedOrNotOutStockList.Count > 0)
    {
        #region 发送超过10天未审核提醒邮件
        // Orders that are still waiting for audit (status 0, 2 or 3).
        List<MoreThanTenDaysOrderInfoEntity> UnauditedList = UnauditedOrNotOutStockList.FindAll(x => x.OrderStatus == 0 || x.OrderStatus == 3 || x.OrderStatus == 2);
        if (UnauditedList.Count > 0)
        {
            string strMailFrom = ConfigurationManager.AppSettings["UnauditedMailFrom"];
            string strMailAddress = ConfigurationManager.AppSettings["UnauditedMailTo"];
            string strCCMailAddress = ConfigurationManager.AppSettings["UnauditedMailCC"];
            if (UnauditedList.Count <= BatchSize)
            {
                string strMailSubject = "超过10天未审核订单明细";
                string strMailTitle = "请关注以下超过10天未审核的订单,请尽快处理";
                bool sendResult = SendAlarmMailDA.SendAlarmMail(UnauditedList, strMailFrom, strMailAddress, strCCMailAddress, strMailSubject, strMailTitle);
                if (sendResult)
                {
                    messageInfo = messageInfo + "\n" + "超时10天未审核订单邮件已经发送至 " + strMailAddress + " 请注意查收";
                }
            }
            else
            {
                int startIndex = 0;
                int listCount = BatchSize;
                List<MoreThanTenDaysOrderInfoEntity> UnauditedListPart;
                int partIndex = 1;
                // Send complete parts of BatchSize orders each.
                for (int i = 0; i < UnauditedList.Count / BatchSize; i++)
                {
                    UnauditedListPart = UnauditedList.GetRange(startIndex, listCount);
                    string strMailSubject = "超过10天未审核订单明细 Part:" + partIndex;
                    string strMailTitle = "请关注以下超过10天未审核的订单 Part:" + partIndex + ",请尽快处理";
                    bool sendResult = SendAlarmMailDA.SendAlarmMail(UnauditedListPart, strMailFrom, strMailAddress, strCCMailAddress, strMailSubject, strMailTitle);
                    if (sendResult)
                    {
                        messageInfo = messageInfo + "\n" + "超时10天未审核订单邮件 Part:" + partIndex + " 已经发送至" + strMailAddress + " 请注意查收";
                    }
                    startIndex = startIndex + BatchSize;
                    partIndex = partIndex + 1;
                }
                // Send the remaining orders that did not fill a complete part.
                if (startIndex < UnauditedList.Count)
                {
                    UnauditedListPart = UnauditedList.GetRange(startIndex, UnauditedList.Count - startIndex);
                    string strMailSubject = "超过10天未审核订单明细 Part:" + partIndex;
                    string strMailTitle = "请关注以下超过10天未审核的订单 Part:" + partIndex + ",请尽快处理";
                    bool sendResult = SendAlarmMailDA.SendAlarmMail(UnauditedListPart, strMailFrom, strMailAddress, strCCMailAddress, strMailSubject, strMailTitle);
                    if (sendResult)
                    {
                        messageInfo = messageInfo + "\n" + "超时10天未审核订单邮件 Part:" + partIndex + " 已经发送至" + strMailAddress + " 请注意查收";
                    }
                }
            }
        }
        #endregion

        #region 发送超过10天未出库提醒邮件
        // Orders that are audited but not yet shipped (status 1).
        List<MoreThanTenDaysOrderInfoEntity> NotOutStockList = UnauditedOrNotOutStockList.FindAll(x => x.OrderStatus == 1);
        if (NotOutStockList.Count > 0)
        {
            string strMailFrom = ConfigurationManager.AppSettings["NotOutStockMailFrom"];
            string strMailAddress = ConfigurationManager.AppSettings["NotOutStockMailTo"];
            string strCCMailAddress = ConfigurationManager.AppSettings["NotOutStockMailCC"];
            if (NotOutStockList.Count <= BatchSize)
            {
                string strMailSubject = "超过10天未出库订单明细";
                string strMailTitle = "请关注以下超过10天未出库的订单,请尽快处理";
                bool sendResult = SendAlarmMailDA.SendAlarmMail(NotOutStockList, strMailFrom, strMailAddress, strCCMailAddress, strMailSubject, strMailTitle);
                if (sendResult)
                {
                    // FIX: previously this notice was only appended when an unaudited-order
                    // mail had already been sent (guarded by messageInfo != string.Empty),
                    // silently dropping the confirmation otherwise.
                    messageInfo = messageInfo + "\n" + "超时10天未出库邮件已经发送至 " + strMailAddress + " 请注意查收";
                }
            }
            else
            {
                int startIndex = 0;
                int listCount = BatchSize;
                List<MoreThanTenDaysOrderInfoEntity> NotOutStockListPart;
                int partIndex = 1;
                // FIX: this loop previously iterated and sliced by UnauditedList.Count and
                // mailed the complete NotOutStockList on every iteration (copy-paste from the
                // region above), which sent duplicate/wrong batches and could throw in
                // GetRange when the two lists differ in size. It now batches NotOutStockList.
                for (int i = 0; i < NotOutStockList.Count / BatchSize; i++)
                {
                    NotOutStockListPart = NotOutStockList.GetRange(startIndex, listCount);
                    string strMailSubject = "超过10天未出库订单明细 Part:" + partIndex;
                    string strMailTitle = "请关注以下超过10天未出库的订单 Part:" + partIndex + ",请尽快处理";
                    bool sendResult = SendAlarmMailDA.SendAlarmMail(NotOutStockListPart, strMailFrom, strMailAddress, strCCMailAddress, strMailSubject, strMailTitle);
                    if (sendResult)
                    {
                        messageInfo = messageInfo + "\n" + "超时10天未出库邮件 Part:" + partIndex + " 已经发送至 " + strMailAddress + " 请注意查收";
                    }
                    startIndex = startIndex + BatchSize;
                    partIndex = partIndex + 1;
                }
                // Send the remaining orders that did not fill a complete part.
                if (startIndex < NotOutStockList.Count)
                {
                    NotOutStockListPart = NotOutStockList.GetRange(startIndex, NotOutStockList.Count - startIndex);
                    string strMailSubject = "超过10天未出库订单明细 Part:" + partIndex;
                    string strMailTitle = "请关注以下超过10天未出库的订单 Part:" + partIndex + ",请尽快处理";
                    bool sendResult = SendAlarmMailDA.SendAlarmMail(NotOutStockListPart, strMailFrom, strMailAddress, strCCMailAddress, strMailSubject, strMailTitle);
                    if (sendResult)
                    {
                        messageInfo = messageInfo + "\n" + "超时10天未出库邮件 Part:" + partIndex + " 已经发送至" + strMailAddress + "请注意查收";
                    }
                }
            }
        }
        #endregion
    }

    return messageInfo;
}
public void Execute(JobContext context) { // NOP }
public void Execute(JobContext jobContext) { Instance.Execute(jobContext); }
/// <summary> /// Sets job's context /// </summary> /// <param name="context"></param> internal void SetContext(JobContext context) { this.context = context; }
public override IList <InputSplit> GetSplits(JobContext context) { return(new AList <InputSplit>()); }
public void Run(JobContext context) { CurrentContext = context; Start(); }
/// <summary>
/// Execute the task: reads one or more .target receipt files and tags the build products
/// and/or dependencies they list.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
public override void Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
    // Set the Engine directory
    DirectoryReference EngineDir = DirectoryReference.Combine(CommandUtils.RootDirectory, "Engine");
    if (!String.IsNullOrEmpty(Parameters.EngineDir))
    {
        EngineDir = DirectoryReference.Combine(CommandUtils.RootDirectory, Parameters.EngineDir);
    }

    // Set the Project directory.
    // NOTE(review): the default project directory is the "Engine" folder, which looks
    // copy-pasted from the EngineDir default above — confirm this is intended.
    DirectoryReference ProjectDir = DirectoryReference.Combine(CommandUtils.RootDirectory, "Engine");
    if (!String.IsNullOrEmpty(Parameters.ProjectDir))
    {
        ProjectDir = DirectoryReference.Combine(CommandUtils.RootDirectory, Parameters.ProjectDir);
    }

    // Resolve the input list
    IEnumerable<FileReference> TargetFiles = ResolveFilespec(CommandUtils.RootDirectory, Parameters.Files, TagNameToFileSet);
    HashSet<FileReference> Files = new HashSet<FileReference>();
    foreach (FileReference TargetFile in TargetFiles)
    {
        // check all files are .target files
        if (TargetFile.GetExtension() != ".target")
        {
            throw new AutomationException("Invalid file passed to TagReceipt task ({0})", TargetFile.FullName);
        }

        // Read the receipt; unreadable receipts are skipped with a warning rather than failing the task.
        TargetReceipt Receipt;
        if (!TargetReceipt.TryRead(TargetFile, EngineDir, ProjectDir, out Receipt))
        {
            CommandUtils.LogWarning("Unable to load file using TagReceipt task ({0})", TargetFile.FullName);
            continue;
        }

        // Collect build products, optionally filtered by type.
        if (Parameters.BuildProducts)
        {
            foreach (BuildProduct BuildProduct in Receipt.BuildProducts)
            {
                if (String.IsNullOrEmpty(Parameters.BuildProductType) || BuildProduct.Type == BuildProductType)
                {
                    Files.Add(BuildProduct.Path);
                }
            }
        }

        // Collect runtime dependencies, optionally filtered by staged file type.
        if (Parameters.RuntimeDependencies)
        {
            foreach (RuntimeDependency RuntimeDependency in Receipt.RuntimeDependencies)
            {
                if (String.IsNullOrEmpty(Parameters.StagedFileType) || RuntimeDependency.Type == StagedFileType)
                {
                    // Only add files that exist as dependencies are assumed to always exist
                    FileReference DependencyPath = RuntimeDependency.Path;
                    if (FileReference.Exists(DependencyPath))
                    {
                        Files.Add(DependencyPath);
                    }
                    else
                    {
                        CommandUtils.LogWarning("File listed as RuntimeDependency in {0} does not exist ({1})", TargetFile.FullName, DependencyPath.FullName);
                    }
                }
            }
        }

        // Collect precompiled build dependencies (no type filter applies here).
        if (Parameters.PrecompiledBuildDependencies)
        {
            foreach (FileReference PrecompiledBuildDependency in Receipt.PrecompiledBuildDependencies)
            {
                // Only add files that exist as dependencies are assumed to always exist
                FileReference DependencyPath = PrecompiledBuildDependency;
                if (FileReference.Exists(DependencyPath))
                {
                    Files.Add(DependencyPath);
                }
                else
                {
                    CommandUtils.LogWarning("File listed as PrecompiledBuildDependency in {0} does not exist ({1})", TargetFile.FullName, DependencyPath.FullName);
                }
            }
        }

        // Collect precompiled runtime dependencies (no type filter applies here).
        if (Parameters.PrecompiledRuntimeDependencies)
        {
            foreach (FileReference PrecompiledRuntimeDependency in Receipt.PrecompiledRuntimeDependencies)
            {
                // Only add files that exist as dependencies are assumed to always exist
                FileReference DependencyPath = PrecompiledRuntimeDependency;
                if (FileReference.Exists(DependencyPath))
                {
                    Files.Add(DependencyPath);
                }
                else
                {
                    CommandUtils.LogWarning("File listed as PrecompiledRuntimeDependency in {0} does not exist ({1})", TargetFile.FullName, DependencyPath.FullName);
                }
            }
        }
    }

    // Apply the tag to all the matching files
    FindOrAddTagSet(TagNameToFileSet, Parameters.With).UnionWith(Files);
}
public void Run(JobContext context) { SendAmbassadorPointsBP.jobContext = context; SendAmbassadorPointsBP.CheckAmbassadorOrder(); }
/// <summary>
/// Execute the task: reads one or more .target receipt files and tags the build products
/// and/or runtime dependencies they list, filtered by build-product and staged-file type.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
public override void Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
    // Output a warning if the project directory is specified
    if (Parameters.ProjectDir != null)
    {
        CommandUtils.LogWarning("The ProjectDir argument to the TagReceipt parameter is deprecated. This path is now determined automatically from the receipt.");
    }

    // Set the Engine directory
    DirectoryReference EngineDir = Parameters.EngineDir ?? CommandUtils.EngineDirectory;

    // Resolve the input list
    IEnumerable<FileReference> TargetFiles = ResolveFilespec(CommandUtils.RootDirectory, Parameters.Files, TagNameToFileSet);
    HashSet<FileReference> Files = new HashSet<FileReference>();
    foreach (FileReference TargetFile in TargetFiles)
    {
        // check all files are .target files
        if (TargetFile.GetExtension() != ".target")
        {
            throw new AutomationException("Invalid file passed to TagReceipt task ({0})", TargetFile.FullName);
        }

        // Read the receipt; unreadable receipts are skipped with a warning rather than failing the task.
        TargetReceipt Receipt;
        if (!TargetReceipt.TryRead(TargetFile, EngineDir, out Receipt))
        {
            CommandUtils.LogWarning("Unable to load file using TagReceipt task ({0})", TargetFile.FullName);
            continue;
        }

        if (Parameters.BuildProducts)
        {
            foreach (BuildProduct BuildProduct in Receipt.BuildProducts)
            {
                // Skip products that don't match the optional type filters.
                if (BuildProductType.HasValue && BuildProduct.Type != BuildProductType.Value)
                {
                    continue;
                }
                if (StagedFileType.HasValue && TargetReceipt.GetStageTypeFromBuildProductType(BuildProduct) != StagedFileType.Value)
                {
                    continue;
                }
                Files.Add(BuildProduct.Path);
            }
        }

        if (Parameters.RuntimeDependencies)
        {
            foreach (RuntimeDependency RuntimeDependency in Receipt.RuntimeDependencies)
            {
                // Skip anything that doesn't match the files we want.
                // Runtime dependencies carry no build-product type, so any BuildProductType
                // filter excludes all of them.
                if (BuildProductType.HasValue)
                {
                    continue;
                }
                if (StagedFileType.HasValue && RuntimeDependency.Type != StagedFileType.Value)
                {
                    continue;
                }

                // Check which files exist, and warn about any that don't. Ignore debug files, as
                // they are frequently excluded for size (eg. UE4 on GitHub). This matches logic
                // during staging.
                FileReference DependencyPath = RuntimeDependency.Path;
                if (FileReference.Exists(DependencyPath))
                {
                    Files.Add(DependencyPath);
                }
                else if (RuntimeDependency.Type != UnrealBuildTool.StagedFileType.DebugNonUFS)
                {
                    CommandUtils.LogWarning("File listed as RuntimeDependency in {0} does not exist ({1})", TargetFile.FullName, DependencyPath.FullName);
                }
            }
        }
    }

    // Apply the tag to all the matching files
    FindOrAddTagSet(TagNameToFileSet, Parameters.With).UnionWith(Files);
}
public override void OnException(JobContext jobContext, Exception ex) { _logger.LogError(ex, nameof(OnException) + (ex.Data["Method"] ?? string.Empty)); }
/// <summary> /// Execute the task. /// </summary> /// <param name="Job">Information about the current job</param> /// <param name="BuildProducts">Set of build products produced by this node.</param> /// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param> /// <returns>True if the task succeeded</returns> public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet) { // Get the list of symbol file name patterns from the platform. UEBuildPlatform Platform = UEBuildPlatform.GetBuildPlatform(Parameters.Platform); UEToolChain ToolChain = Platform.CreateContext(null).CreateToolChainForDefaultCppPlatform(); var DirectoryStructure = ToolChain.SymbolServerDirectoryStructure; if (DirectoryStructure == null) { CommandUtils.LogError("Platform does not specify the symbol server structure. Cannot age the symbol server."); return false; } string Filter = string.IsNullOrWhiteSpace(Parameters.Filter) ? string.Empty : Parameters.Filter.Trim(); // Get the time at which to expire files DateTime ExpireTimeUtc = DateTime.UtcNow - TimeSpan.FromDays(Parameters.Days); CommandUtils.Log("Expiring all files before {0}...", ExpireTimeUtc); // Scan the store directory and delete old symbol files DirectoryReference SymbolServerDirectory = ResolveDirectory(Parameters.StoreDir); LockFile.TakeLock(SymbolServerDirectory, TimeSpan.FromMinutes(15), () => { RecurseDirectory(ExpireTimeUtc, new DirectoryInfo(SymbolServerDirectory.FullName), DirectoryStructure, 0, Filter); }); return true; }
/// <exception cref="System.IO.IOException"/> /// <exception cref="System.Exception"/> public override IList <InputSplit> GetSplits(JobContext context) { return(splits); }
/// <summary>
/// Execute the task: compiles all configured targets in one build agenda, tags their
/// outputs per target, and registers everything as build products.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
    // Create the agenda
    UE4Build.BuildAgenda Agenda = new UE4Build.BuildAgenda();
    Agenda.Targets.AddRange(Targets);

    // Build everything
    Dictionary<UE4Build.BuildTarget, BuildManifest> TargetToManifest = new Dictionary<UE4Build.BuildTarget, BuildManifest>();
    UE4Build Builder = new UE4Build(Job.OwnerCommand);
    try
    {
        bool bCanUseParallelExecutor = (BuildHostPlatform.Current.Platform == UnrealTargetPlatform.Win64); // parallel executor is only available on Windows as of 2016-09-22
        Builder.Build(Agenda, InDeleteBuildProducts: null, InUpdateVersionFiles: false, InForceNoXGE: false, InUseParallelExecutor: bCanUseParallelExecutor, InTargetToManifest: TargetToManifest);
    }
    catch (CommandUtils.CommandFailedException)
    {
        // A failed compile is reported as task failure, not as an unhandled exception.
        return false;
    }
    UE4Build.CheckBuildProducts(Builder.BuildProductFiles);

    // Tag all the outputs: each target may map to several (delimited) tag names.
    foreach (KeyValuePair<UE4Build.BuildTarget, string> TargetTagName in TargetToTagName)
    {
        BuildManifest Manifest;
        if (!TargetToManifest.TryGetValue(TargetTagName.Key, out Manifest))
        {
            throw new AutomationException("Missing manifest for target {0} {1} {2}", TargetTagName.Key.TargetName, TargetTagName.Key.Platform, TargetTagName.Key.Config);
        }
        foreach (string TagName in SplitDelimitedList(TargetTagName.Value))
        {
            HashSet<FileReference> FileSet = FindOrAddTagSet(TagNameToFileSet, TagName);
            FileSet.UnionWith(Manifest.BuildProducts.Select(x => new FileReference(x)));
            FileSet.UnionWith(Manifest.LibraryBuildProducts.Select(x => new FileReference(x)));
        }
    }

    // Add everything to the list of build products
    BuildProducts.UnionWith(Builder.BuildProductFiles.Select(x => new FileReference(x)));
    BuildProducts.UnionWith(Builder.LibraryBuildProductFiles.Select(x => new FileReference(x)));
    return true;
}
public void Run(JobContext jobContext) { VIPCustomerPresentedPointsBP.PresentedPoints(jobContext); }
/// <summary>
/// Execute the task: stages the build products and non-UFS runtime dependencies listed in a
/// target's receipt into a flattened output directory, optionally tagging the staged files.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
public override void Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
    // Get the project path, and check it exists
    FileReference ProjectFile = null;
    if (Parameters.Project != null)
    {
        ProjectFile = ResolveFile(Parameters.Project);
        if (!FileReference.Exists(ProjectFile))
        {
            throw new AutomationException("Couldn't find project '{0}'", ProjectFile.FullName);
        }
    }

    // Get the directories used for staging this project
    DirectoryReference SourceEngineDir = CommandUtils.EngineDirectory;
    DirectoryReference SourceProjectDir = (ProjectFile == null) ? SourceEngineDir : ProjectFile.Directory;

    // Get the output directories. We flatten the directory structure on output.
    DirectoryReference TargetDir = ResolveDirectory(Parameters.ToDir);
    DirectoryReference TargetEngineDir = DirectoryReference.Combine(TargetDir, "Engine");
    // FIX: when no project is specified, fall back to the engine output directory instead of
    // dereferencing the null ProjectFile (which previously threw a NullReferenceException).
    // With no project, SourceProjectDir equals SourceEngineDir, so every file maps through the
    // engine branch below and the fallback value is never used for a real project file.
    DirectoryReference TargetProjectDir = (ProjectFile == null) ? TargetEngineDir : DirectoryReference.Combine(TargetDir, ProjectFile.GetFileNameWithoutExtension());

    // Get the path to the receipt
    FileReference ReceiptFileName = TargetReceipt.GetDefaultPath(SourceProjectDir, Parameters.Target, Parameters.Platform, Parameters.Configuration, Parameters.Architecture);

    // Try to load it
    TargetReceipt Receipt;
    if (!TargetReceipt.TryRead(ReceiptFileName, SourceEngineDir, SourceProjectDir, out Receipt))
    {
        throw new AutomationException("Couldn't read receipt '{0}'", ReceiptFileName);
    }

    // Stage all the build products needed at runtime. Static/import libraries are link-time
    // only; UFS runtime dependencies are staged through the pak file instead of loose copies.
    HashSet<FileReference> SourceFiles = new HashSet<FileReference>();
    foreach (BuildProduct BuildProduct in Receipt.BuildProducts.Where(x => x.Type != BuildProductType.StaticLibrary && x.Type != BuildProductType.ImportLibrary))
    {
        SourceFiles.Add(BuildProduct.Path);
    }
    foreach (RuntimeDependency RuntimeDependency in Receipt.RuntimeDependencies.Where(x => x.Type != StagedFileType.UFS))
    {
        SourceFiles.Add(RuntimeDependency.Path);
    }

    // Get all the target files
    List<FileReference> TargetFiles = new List<FileReference>();
    foreach (FileReference SourceFile in SourceFiles)
    {
        // Get the destination file to copy to, mapping to the new engine and project directories as appropriate
        FileReference TargetFile;
        if (SourceFile.IsUnderDirectory(SourceEngineDir))
        {
            TargetFile = FileReference.Combine(TargetEngineDir, SourceFile.MakeRelativeTo(SourceEngineDir));
        }
        else
        {
            TargetFile = FileReference.Combine(TargetProjectDir, SourceFile.MakeRelativeTo(SourceProjectDir));
        }

        // Fixup the case of the output file. Would expect Platform.DeployLowerCaseFilenames()
        // to return true here, but seems not to be the case.
        if (Parameters.Platform == UnrealTargetPlatform.PS4)
        {
            TargetFile = FileReference.Combine(TargetDir, TargetFile.MakeRelativeTo(TargetDir).ToLowerInvariant());
        }

        // Only copy the output file if it doesn't already exist. We can stage multiple targets to the same output directory.
        if (Parameters.Overwrite || !FileReference.Exists(TargetFile))
        {
            DirectoryReference.CreateDirectory(TargetFile.Directory);
            CommandUtils.CopyFile(SourceFile.FullName, TargetFile.FullName);
            // Force all destination files to not readonly.
            CommandUtils.SetFileAttributes(TargetFile.FullName, ReadOnly: false);
        }

        // Add it to the list of target files
        TargetFiles.Add(TargetFile);
    }

    // Apply the optional tag to the build products
    foreach (string TagName in FindTagNamesFromList(Parameters.Tag))
    {
        FindOrAddTagSet(TagNameToFileSet, TagName).UnionWith(TargetFiles);
    }

    // Add the target file to the list of build products
    BuildProducts.UnionWith(TargetFiles);
}
public abstract void Execute(JobContext context);
private async Task ConvertFolders(string id, List <RepositoryFolder> folders, PaketDIP paket, string rootFolder, string tempFolder, JobContext context) { foreach (var repositoryFolder in folders) { var newPath = Path.Combine(tempFolder, repositoryFolder.PhysicalName); await ConvertFiles(id, repositoryFolder.Files, paket, rootFolder, newPath, context); await ConvertFolders(id, repositoryFolder.Folders, paket, rootFolder, newPath, context); } }
/// <summary>
/// Execute the task: stages the build products and non-UFS runtime dependencies listed in a
/// target's receipt into a flattened output directory, optionally tagging the staged files.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
/// <returns>True if the task succeeded</returns>
public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
    // Get the project path, and check it exists
    FileReference ProjectFile = null;
    if (Parameters.Project != null)
    {
        ProjectFile = ResolveFile(Parameters.Project);
        if (!ProjectFile.Exists())
        {
            CommandUtils.LogError("Couldn't find project '{0}'", ProjectFile.FullName);
            return false;
        }
    }

    // Get the directories used for staging this project
    DirectoryReference SourceEngineDir = UnrealBuildTool.UnrealBuildTool.EngineDirectory;
    DirectoryReference SourceProjectDir = (ProjectFile == null) ? SourceEngineDir : ProjectFile.Directory;

    // Get the output directories. We flatten the directory structure on output.
    DirectoryReference TargetDir = ResolveDirectory(Parameters.ToDir);
    DirectoryReference TargetEngineDir = DirectoryReference.Combine(TargetDir, "Engine");
    // FIX: when no project is specified, fall back to the engine output directory instead of
    // dereferencing the null ProjectFile (which previously threw a NullReferenceException).
    // With no project, SourceProjectDir equals SourceEngineDir, so every file maps through the
    // engine branch below and the fallback value is never used for a real project file.
    DirectoryReference TargetProjectDir = (ProjectFile == null) ? TargetEngineDir : DirectoryReference.Combine(TargetDir, ProjectFile.GetFileNameWithoutExtension());

    // Get the path to the receipt
    string ReceiptFileName = TargetReceipt.GetDefaultPath(SourceProjectDir.FullName, Parameters.Target, Parameters.Platform, Parameters.Configuration, Parameters.Architecture);

    // Try to load it
    TargetReceipt Receipt;
    if (!TargetReceipt.TryRead(ReceiptFileName, out Receipt))
    {
        CommandUtils.LogError("Couldn't read receipt '{0}'", ReceiptFileName);
        return false;
    }

    // Expand all the paths from the receipt
    Receipt.ExpandPathVariables(SourceEngineDir, SourceProjectDir);

    // Stage all the build products needed at runtime. Static/import libraries are link-time
    // only; UFS runtime dependencies are staged through the pak file instead of loose copies.
    HashSet<FileReference> SourceFiles = new HashSet<FileReference>();
    foreach (BuildProduct BuildProduct in Receipt.BuildProducts.Where(x => x.Type != BuildProductType.StaticLibrary && x.Type != BuildProductType.ImportLibrary))
    {
        SourceFiles.Add(new FileReference(BuildProduct.Path));
    }
    foreach (RuntimeDependency RuntimeDependency in Receipt.RuntimeDependencies.Where(x => x.Type != StagedFileType.UFS))
    {
        // Dependency entries may be wildcards; resolve them, excluding cooked content.
        SourceFiles.UnionWith(CommandUtils.ResolveFilespec(CommandUtils.RootDirectory, RuntimeDependency.Path, new string[] { ".../*.umap", ".../*.uasset" }));
    }

    // Get all the target files
    List<FileReference> TargetFiles = new List<FileReference>();
    foreach (FileReference SourceFile in SourceFiles)
    {
        // Get the destination file to copy to, mapping to the new engine and project directories as appropriate
        FileReference TargetFile;
        if (SourceFile.IsUnderDirectory(SourceEngineDir))
        {
            TargetFile = FileReference.Combine(TargetEngineDir, SourceFile.MakeRelativeTo(SourceEngineDir));
        }
        else
        {
            TargetFile = FileReference.Combine(TargetProjectDir, SourceFile.MakeRelativeTo(SourceProjectDir));
        }

        // Only copy the output file if it doesn't already exist. We can stage multiple targets to the same output directory.
        if (Parameters.Overwrite || !TargetFile.Exists())
        {
            TargetFile.Directory.CreateDirectory();
            CommandUtils.CopyFile(SourceFile.FullName, TargetFile.FullName);
        }

        // Add it to the list of target files
        TargetFiles.Add(TargetFile);
    }

    // Apply the optional tag to the build products
    foreach (string TagName in FindTagNamesFromList(Parameters.Tag))
    {
        FindOrAddTagSet(TagNameToFileSet, TagName).UnionWith(TargetFiles);
    }

    // Add the target file to the list of build products
    BuildProducts.UnionWith(TargetFiles);
    return true;
}
private async Task <string> ConvertFile(FileInfo file, string[] supportedFileTypesForRendering, JobContext context) { if (!file.Exists) { throw new FileNotFoundException($"Unable to find file {file.FullName}", file.FullName); } if (!supportedFileTypesForRendering.Contains(file.Extension.Replace(".", "").ToLowerInvariant())) { return(file.FullName); } var targetExtension = GetTargetExtension(file); var convertedFile = await renderEngine.ConvertFile(file.FullName, targetExtension, context); return(convertedFile); }
public static JobContext CreateJobContext(global::System.Guid jobContextID, global::System.Guid runbookVersionID, global::System.Guid tenantID) { JobContext jobContext = new JobContext(); jobContext.JobContextID = jobContextID; jobContext.RunbookVersionID = runbookVersionID; jobContext.TenantID = tenantID; return jobContext; }
/// <summary>
/// Execute the task: maps files from a source pattern to a target pattern and copies them,
/// optionally filtered by a file spec, skipping existing files unless Overwrite is set.
/// Retries creation of the first target directory to tolerate flaky network shares.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
public override void Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
    // Parse all the source patterns
    FilePattern SourcePattern = new FilePattern(CommandUtils.RootDirectory, Parameters.From);

    // Parse the target pattern
    FilePattern TargetPattern = new FilePattern(CommandUtils.RootDirectory, Parameters.To);

    // Apply the filter to the source files
    HashSet<FileReference> Files = null;
    if (!String.IsNullOrEmpty(Parameters.Files))
    {
        SourcePattern = SourcePattern.AsDirectoryPattern();
        Files = ResolveFilespec(SourcePattern.BaseDirectory, Parameters.Files, TagNameToFileSet);
    }

    // Build the file mapping
    Dictionary<FileReference, FileReference> TargetFileToSourceFile = FilePattern.CreateMapping(Files, ref SourcePattern, ref TargetPattern);

    // If we're not overwriting, remove any files where the destination file already exists.
    if (!Parameters.Overwrite)
    {
        TargetFileToSourceFile = TargetFileToSourceFile.Where(File =>
        {
            if (FileReference.Exists(File.Key))
            {
                CommandUtils.LogInformation("Not copying existing file {0}", File.Key);
                return (false);
            }
            return (true);
        }).ToDictionary(Pair => Pair.Key, Pair => Pair.Value);
    }

    // Check we got some files
    if (TargetFileToSourceFile.Count == 0)
    {
        CommandUtils.LogInformation("No files found matching '{0}'", SourcePattern);
        return;
    }

    // If the target is on a network share, retry creating the first directory until it succeeds
    DirectoryReference FirstTargetDirectory = TargetFileToSourceFile.First().Key.Directory;
    if (!DirectoryReference.Exists(FirstTargetDirectory))
    {
        const int MaxNumRetries = 15;
        for (int NumRetries = 0;; NumRetries++)
        {
            try
            {
                DirectoryReference.CreateDirectory(FirstTargetDirectory);
                if (NumRetries == 1)
                {
                    Log.TraceInformation("Created target directory {0} after 1 retry.", FirstTargetDirectory);
                }
                else if (NumRetries > 1)
                {
                    Log.TraceInformation("Created target directory {0} after {1} retries.", FirstTargetDirectory, NumRetries);
                }
                break;
            }
            catch (Exception Ex)
            {
                if (NumRetries == 0)
                {
                    Log.TraceInformation("Unable to create directory '{0}' on first attempt. Retrying {1} times...", FirstTargetDirectory, MaxNumRetries);
                }
                Log.TraceLog("  {0}", Ex);
                // FIX: compare against the named MaxNumRetries constant rather than the magic
                // literal 15, so changing the constant changes the actual retry behavior.
                if (NumRetries >= MaxNumRetries)
                {
                    throw new AutomationException(Ex, "Unable to create target directory '{0}' after {1} retries.", FirstTargetDirectory, NumRetries);
                }
                Thread.Sleep(2000);
            }
        }
    }

    // Copy them all
    KeyValuePair<FileReference, FileReference>[] FilePairs = TargetFileToSourceFile.ToArray();
    CommandUtils.LogInformation("Copying {0} file{1} from {2} to {3}...", FilePairs.Length, (FilePairs.Length == 1)? "" : "s", SourcePattern.BaseDirectory, TargetPattern.BaseDirectory);
    foreach (KeyValuePair<FileReference, FileReference> FilePair in FilePairs)
    {
        CommandUtils.LogLog("  {0} -> {1}", FilePair.Value, FilePair.Key);
    }
    CommandUtils.ThreadedCopyFiles(FilePairs.Select(x => x.Value.FullName).ToList(), FilePairs.Select(x => x.Key.FullName).ToList(), bQuiet: true);

    // Update the list of build products
    BuildProducts.UnionWith(TargetFileToSourceFile.Keys);

    // Apply the optional output tag to them
    foreach (string TagName in FindTagNamesFromList(Parameters.Tag))
    {
        FindOrAddTagSet(TagNameToFileSet, TagName).UnionWith(TargetFileToSourceFile.Keys);
    }
}
/// <summary>处理一个数据对象</summary> /// <param name="ctx">上下文</param> /// <param name="message">消息</param> /// <returns></returns> protected virtual Boolean ProcessItem(JobContext ctx, String message) => true;
async Task Run(Job job, bool force = false) { using (var scope = _dependencyResolver.BeginScope()) { var context = new JobContext(job); JobResult result = null; try { if (!force && !_jobRootValidator.IsValid(job.RootId)) { _statusChanger.Change(job, JobStatus.Cancelled); _eventStream.Publish<JobCoordinator>(EventType.JobCancelled, EventProperty.Named("Reason", "Cancelled"), EventProperty.JobSnapshot(job)); return; } job = _statusChanger.Change(job, JobStatus.Running); var instance = scope.GetService(job.Type); result = await _methodBinder.Run(instance, job); if (result == null) { _eventStream.Publish<Dispatcher>(EventType.JobAbandoned, EventProperty.Named("Reason", "ReturnedNullTask"), EventProperty.JobSnapshot(job)); } } catch (Exception e) { if (e.IsFatal()) throw; if (!force) { _exceptionFilterDispatcher.Dispatch(job, e, context, scope); _eventStream.Publish<Dispatcher>(e, EventProperty.JobSnapshot(job)); _errorHandlingPolicy.RetryOrPoison(job, e, context); } } if (result != null) CompleteJob(job, result); } }
// Finds groups of stacks sharing the same duplicate signature, merges each group into a
// single "target" stack, soft-deletes the duplicates, and (via an async update-by-query)
// repoints the duplicates' events at the target. Processes in batches of up to 10,000
// signatures, re-querying after each batch until no duplicates remain.
// NOTE(review): 'lastStatus' is initialized from SystemClock.Now but later compared with
// SystemClock.UtcNow — confirm SystemClock.Now is UTC, otherwise the 5s status throttle is skewed.
public async Task FixDuplicateStacks(JobContext context)
{
    _logger.LogInformation("Getting duplicate stacks");
    // Aggregate non-deleted stacks by duplicate signature; MinimumDocumentCount(2) keeps
    // only signatures that occur on more than one stack.
    var duplicateStackAgg = await _elasticClient.SearchAsync<Stack>(q => q
        .QueryOnQueryString("is_deleted:false")
        .Size(0)
        .Aggregations(a => a.Terms("stacks", t => t.Field(f => f.DuplicateSignature).MinimumDocumentCount(2).Size(10000))));
    _logger.LogRequest(duplicateStackAgg, LogLevel.Trace);

    var buckets = duplicateStackAgg.Aggregations.Terms("stacks").Buckets;
    int total = buckets.Count;          // running total of signatures seen (grows per batch)
    int processed = 0;                  // signatures successfully merged
    int error = 0;                      // signatures that threw during merge
    long totalUpdatedEventCount = 0;    // NOTE(review): accumulated but never reported — confirm intended
    var lastStatus = SystemClock.Now;   // last time a progress line was emitted
    int batch = 1;

    while (buckets.Count > 0)
    {
        _logger.LogInformation($"Found {buckets.Count} duplicate stacks in batch #{batch}.");
        await RenewLockAsync(context);

        foreach (var duplicateSignature in buckets)
        {
            string projectId = null;
            string signature = null;
            try
            {
                // Bucket key format is "<projectId>:<signatureHash>".
                var parts = duplicateSignature.Key.Split(':');
                if (parts.Length != 2)
                {
                    _logger.LogError("Error parsing duplicate signature {DuplicateSignature}", duplicateSignature.Key);
                    continue;
                }
                projectId = parts[0];
                signature = parts[1];

                var stacks = await _stackRepository.FindAsync(q => q.Project(projectId).FilterExpression($"signature_hash:{signature}"));
                if (stacks.Documents.Count < 2)
                {
                    _logger.LogError("Did not find multiple stacks with signature {SignatureHash} and project {ProjectId}", signature, projectId);
                    continue;
                }

                // Count events per stack so we can pick the merge target that minimizes event updates.
                var eventCounts = await _eventRepository.CountAsync(q => q.Stack(stacks.Documents.Select(s => s.Id)).AggregationsExpression("terms:stack_id"));
                var eventCountBuckets = eventCounts.Aggregations.Terms("terms_stack_id")?.Buckets ?? new List<Foundatio.Repositories.Models.KeyedBucket<string>>();

                // we only need to update events if more than one stack has events associated to it
                bool shouldUpdateEvents = eventCountBuckets.Count > 1;

                // default to using the oldest stack
                var targetStack = stacks.Documents.OrderBy(s => s.CreatedUtc).First();
                var duplicateStacks = stacks.Documents.OrderBy(s => s.CreatedUtc).Skip(1).ToList();

                // use the stack that has the most events on it so we can reduce the number of updates
                if (eventCountBuckets.Count > 0)
                {
                    var targetStackId = eventCountBuckets.OrderByDescending(b => b.Total).First().Key;
                    targetStack = stacks.Documents.Single(d => d.Id == targetStackId);
                    duplicateStacks = stacks.Documents.Where(d => d.Id != targetStackId).ToList();
                }

                // Fold the duplicates' state into the target stack.
                targetStack.CreatedUtc = stacks.Documents.Min(d => d.CreatedUtc);
                // Prefer any non-Open status found among the group; otherwise stay Open.
                targetStack.Status = stacks.Documents.FirstOrDefault(d => d.Status != StackStatus.Open)?.Status ?? StackStatus.Open;
                targetStack.LastOccurrence = stacks.Documents.Max(d => d.LastOccurrence);
                targetStack.SnoozeUntilUtc = stacks.Documents.Max(d => d.SnoozeUntilUtc);
                targetStack.DateFixed = stacks.Documents.Max(d => d.DateFixed);; // NOTE(review): stray extra ';'
                targetStack.TotalOccurrences += duplicateStacks.Sum(d => d.TotalOccurrences);
                targetStack.Tags.AddRange(duplicateStacks.SelectMany(d => d.Tags));
                targetStack.References = stacks.Documents.SelectMany(d => d.References).Distinct().ToList();
                targetStack.OccurrencesAreCritical = stacks.Documents.Any(d => d.OccurrencesAreCritical);

                // Soft-delete the duplicates, then persist both sides of the merge.
                duplicateStacks.ForEach(s => s.IsDeleted = true);
                await _stackRepository.SaveAsync(duplicateStacks);
                await _stackRepository.SaveAsync(targetStack);
                processed++;

                long eventsToMove = eventCountBuckets.Where(b => b.Key != targetStack.Id).Sum(b => b.Total) ?? 0;
                _logger.LogInformation("De-duped stack: Target={TargetId} Events={EventCount} Dupes={DuplicateIds} HasEvents={HasEvents}", targetStack.Id, eventsToMove, duplicateStacks.Select(s => s.Id), shouldUpdateEvents);

                if (shouldUpdateEvents)
                {
                    // Repoint all events on the duplicate stacks to the target via a fire-and-forget
                    // update-by-query task (WaitForCompletion(false)); we poll the task below.
                    var response = await _elasticClient.UpdateByQueryAsync<PersistentEvent>(u => u
                        .Query(q => q.Bool(b => b.Must(m => m
                            .Terms(t => t.Field(f => f.StackId).Terms(duplicateStacks.Select(s => s.Id)))
                        )))
                        .Script(s => s.Source($"ctx._source.stack_id = '{targetStack.Id}'").Lang(ScriptLang.Painless))
                        .Conflicts(Elasticsearch.Net.Conflicts.Proceed)
                        .WaitForCompletion(false));
                    _logger.LogRequest(response, LogLevel.Trace);

                    var taskStartedTime = SystemClock.Now;
                    var taskId = response.Task;
                    int attempts = 0;
                    long affectedRecords = 0;
                    // Poll the server-side task until it completes, backing off as attempts grow.
                    do
                    {
                        attempts++;
                        var taskStatus = await _elasticClient.Tasks.GetTaskAsync(taskId);
                        var status = taskStatus.Task.Status;
                        if (taskStatus.Completed)
                        {
                            // TODO: need to check to see if the task failed or completed successfully. Throw if it failed.
                            if (SystemClock.Now.Subtract(taskStartedTime) > TimeSpan.FromSeconds(30))
                            {
                                _logger.LogInformation("Script operation task ({TaskId}) completed: Created: {Created} Updated: {Updated} Deleted: {Deleted} Conflicts: {Conflicts} Total: {Total}", taskId, status.Created, status.Updated, status.Deleted, status.VersionConflicts, status.Total);
                            }
                            affectedRecords += status.Created + status.Updated + status.Deleted;
                            break;
                        }

                        // Long-running task: keep the job lock alive and report progress.
                        if (SystemClock.Now.Subtract(taskStartedTime) > TimeSpan.FromSeconds(30))
                        {
                            await RenewLockAsync(context);
                            _logger.LogInformation("Checking script operation task ({TaskId}) status: Created: {Created} Updated: {Updated} Deleted: {Deleted} Conflicts: {Conflicts} Total: {Total}", taskId, status.Created, status.Updated, status.Deleted, status.VersionConflicts, status.Total);
                        }

                        // Staged backoff: 50ms -> 250ms (>5) -> 1s (>10) -> 5s (>20).
                        var delay = TimeSpan.FromMilliseconds(50);
                        if (attempts > 20)
                        {
                            delay = TimeSpan.FromSeconds(5);
                        }
                        else if (attempts > 10)
                        {
                            delay = TimeSpan.FromSeconds(1);
                        }
                        else if (attempts > 5)
                        {
                            delay = TimeSpan.FromMilliseconds(250);
                        }
                        await Task.Delay(delay);
                    } while (true);

                    _logger.LogInformation("Migrated stack events: Target={TargetId} Events={UpdatedEvents} Dupes={DuplicateIds}", targetStack.Id, affectedRecords, duplicateStacks.Select(s => s.Id));
                    totalUpdatedEventCount += affectedRecords;
                }

                // Throttled progress report; also drops any cached Stack entries that may be stale.
                if (SystemClock.UtcNow.Subtract(lastStatus) > TimeSpan.FromSeconds(5))
                {
                    lastStatus = SystemClock.UtcNow;
                    _logger.LogInformation("Total={Processed}/{Total} Errors={ErrorCount}", processed, total, error);
                    await _cacheClient.RemoveByPrefixAsync(nameof(Stack));
                }
            }
            catch (Exception ex)
            {
                // Per-signature failures are counted and logged but do not abort the batch.
                error++;
                _logger.LogError(ex, "Error fixing duplicate stack {ProjectId} {SignatureHash}", projectId, signature);
            }
        }

        // Refresh the index so the follow-up aggregation sees this batch's writes, then re-query.
        await _elasticClient.Indices.RefreshAsync(_config.Stacks.VersionedName);
        duplicateStackAgg = await _elasticClient.SearchAsync<Stack>(q => q
            .QueryOnQueryString("is_deleted:false")
            .Size(0)
            .Aggregations(a => a.Terms("stacks", t => t.Field(f => f.DuplicateSignature).MinimumDocumentCount(2).Size(10000))));
        _logger.LogRequest(duplicateStackAgg, LogLevel.Trace);

        buckets = duplicateStackAgg.Aggregations.Terms("stacks").Buckets;
        total += buckets.Count;
        batch++;
        // NOTE(review): this "Done" line is inside the while loop, so it is emitted once per
        // batch rather than once at the end — confirm whether it was meant to sit after the loop.
        _logger.LogInformation("Done de-duping stacks: Total={Processed}/{Total} Errors={ErrorCount}", processed, total, error);
        await _cacheClient.RemoveByPrefixAsync(nameof(Stack));
    }
}
/// <summary>Simulates number crunching by waiting out the duration specified on the job.</summary>
/// <param name="context">Typed job context carrying the <c>CrunchTheNumbers</c> payload.</param>
public async Task Run(JobContext<CrunchTheNumbers> context)
{
    var workDuration = context.Job.Duration;
    await Task.Delay(workDuration);
}
/// <summary>Renews the job's lock, recording the current UTC time as the last-run marker first.</summary>
/// <param name="context">The job context whose lock is renewed.</param>
/// <returns>The renewal task returned by the context.</returns>
private Task RenewLockAsync(JobContext context)
{
    // Stamp the moment we were last active before delegating the actual renewal.
    _lastRun = SystemClock.UtcNow;
    return context.RenewLockAsync();
}
/// <summary>Logs that the job is stopping; no other teardown is performed.</summary>
/// <param name="jobContext">The context of the stopping job (unused).</param>
public override void OnStop(JobContext jobContext) => _logger.LogInformation(nameof(OnStop));
/// <summary>
/// Execute the task.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
public override void Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
    // Get the project file
    HashSet<FileReference> ProjectFiles = ResolveFilespec(CommandUtils.RootDirectory, Parameters.Project, TagNameToFileSet);
    // Validate every resolved file before building anything: it must exist and be a .csproj.
    foreach (FileReference ProjectFile in ProjectFiles)
    {
        if (!FileReference.Exists(ProjectFile))
        {
            throw new AutomationException("Couldn't find project file '{0}'", ProjectFile.FullName);
        }
        if (!ProjectFile.HasExtension(".csproj"))
        {
            throw new AutomationException("File '{0}' is not a C# project", ProjectFile.FullName);
        }
    }

    // Get the default properties. Case-insensitive keys so "platform" and "Platform" collide
    // the same way MSBuild treats property names.
    Dictionary<string, string> Properties = new Dictionary<string, string>(StringComparer.InvariantCultureIgnoreCase);
    if (!String.IsNullOrEmpty(Parameters.Platform))
    {
        Properties["Platform"] = Parameters.Platform;
    }
    if (!String.IsNullOrEmpty(Parameters.Configuration))
    {
        Properties["Configuration"] = Parameters.Configuration;
    }

    // Build the arguments and run the build (skipped entirely when only enumerating outputs).
    if (!Parameters.EnumerateOnly)
    {
        List<string> Arguments = new List<string>();
        foreach (KeyValuePair<string, string> PropertyPair in Properties)
        {
            Arguments.Add(String.Format("/property:{0}={1}", CommandUtils.MakePathSafeToUseWithCommandLine(PropertyPair.Key), CommandUtils.MakePathSafeToUseWithCommandLine(PropertyPair.Value)));
        }
        if (!String.IsNullOrEmpty(Parameters.Arguments))
        {
            Arguments.Add(Parameters.Arguments);
        }
        if (!String.IsNullOrEmpty(Parameters.Target))
        {
            Arguments.Add(String.Format("/target:{0}", CommandUtils.MakePathSafeToUseWithCommandLine(Parameters.Target)));
        }
        Arguments.Add("/verbosity:minimal");
        Arguments.Add("/nologo");
        // Invoke MSBuild once per project with the shared argument list.
        foreach (FileReference ProjectFile in ProjectFiles)
        {
            CommandUtils.MsBuild(CommandUtils.CmdEnv, ProjectFile.FullName, String.Join(" ", Arguments), null);
        }
    }

    // Try to figure out the output files
    HashSet<FileReference> ProjectBuildProducts;
    HashSet<FileReference> ProjectReferences;
    FindBuildProductsAndReferences(ProjectFiles, Properties, out ProjectBuildProducts, out ProjectReferences);

    // Apply the optional tag to the produced archive
    foreach (string TagName in FindTagNamesFromList(Parameters.Tag))
    {
        FindOrAddTagSet(TagNameToFileSet, TagName).UnionWith(ProjectBuildProducts);
    }

    // Apply the optional tag to any references
    if (!String.IsNullOrEmpty(Parameters.TagReferences))
    {
        foreach (string TagName in FindTagNamesFromList(Parameters.TagReferences))
        {
            FindOrAddTagSet(TagNameToFileSet, TagName).UnionWith(ProjectReferences);
        }
    }

    // Merge them into the standard set of build products
    BuildProducts.UnionWith(ProjectBuildProducts);
    BuildProducts.UnionWith(ProjectReferences);
}
/// <summary>Triggers the email send service, then records and prints a completion message.</summary>
/// <param name="context">The job context; its <c>Message</c> is set to the completion text.</param>
public void Run(JobContext context)
{
    var mailService = new EmailSendService();
    mailService.StartSendMail();

    context.Message = " The Job run finish, status is OK.";
    Console.Write(context.Message);
}
/// <summary>Creates the service backed by the given job database context.</summary>
/// <param name="context">Data context used by all service operations; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="context"/> is null.</exception>
public EmployeeService(JobContext context)
{
    // Fail fast on a null context instead of surfacing a NullReferenceException on first use;
    // this matches the guard style already used by JobRepository's constructor.
    _context = context ?? throw new ArgumentNullException(nameof(context));
}
/// <summary>Signals the wait handle that the caller smuggled in through the context's Parameter.</summary>
/// <param name="context">Job context whose <c>Parameter</c> holds an <see cref="EventWaitHandle"/>.</param>
public void Execute(JobContext context)
{
    var waitHandle = (EventWaitHandle)context.Parameter;
    waitHandle.Set();
}
/// <summary>Records the supplied text on the job context so callers can read it back later.</summary>
/// <param name="context">Context whose <c>Message</c> property is updated.</param>
/// <param name="message">The text to record.</param>
public void ShowMessage(JobContext context, string message)
{
    // Intentionally stores the message without echoing it to the console.
    context.Message = message;
}
/// <summary>
/// Execute the task.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
public override void Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
    // Find the directories we're going to rebase relative to
    HashSet<DirectoryReference> RebaseDirs = new HashSet<DirectoryReference> { CommandUtils.RootDirectory };
    if (Parameters.RebaseDir != null)
    {
        RebaseDirs.UnionWith(SplitDelimitedList(Parameters.RebaseDir).Select(x => ResolveDirectory(x)));
    }

    // Get the output parameter
    FileReference OutputFile = ResolveFile(Parameters.Output);

    // Check for a ResponseFile parameter
    FileReference ResponseFile = null;
    if (!String.IsNullOrEmpty(Parameters.ResponseFile))
    {
        ResponseFile = ResolveFile(Parameters.ResponseFile);
    }
    if (ResponseFile == null)
    {
        // Get a unique filename for the response file (append _2, _3, ... until unused)
        ResponseFile = FileReference.Combine(new DirectoryReference(CommandUtils.CmdEnv.LogFolder), String.Format("PakList_{0}.txt", OutputFile.GetFileNameWithoutExtension()));
        for (int Idx = 2; FileReference.Exists(ResponseFile); Idx++)
        {
            ResponseFile = FileReference.Combine(ResponseFile.Directory, String.Format("PakList_{0}_{1}.txt", OutputFile.GetFileNameWithoutExtension(), Idx));
        }

        // Write out the response file. UTF8Encoding(true) emits a BOM so UnrealPak reads it as UTF-8.
        HashSet<FileReference> Files = ResolveFilespec(CommandUtils.RootDirectory, Parameters.Files, TagNameToFileSet);
        using (StreamWriter Writer = new StreamWriter(ResponseFile.FullName, false, new System.Text.UTF8Encoding(true)))
        {
            foreach (FileReference File in Files)
            {
                // Each file must live under one of the rebase dirs; the shortest relative path wins.
                string RelativePath = FindShortestRelativePath(File, RebaseDirs);
                if (RelativePath == null)
                {
                    throw new AutomationException("Couldn't find relative path for '{0}' - not under any rebase directories", File.FullName);
                }
                // Response-file line format: "<source path>" "<path inside pak>"[ -compress]
                Writer.WriteLine("\"{0}\" \"{1}\"{2}", File.FullName, RelativePath, Parameters.Compress ? " -compress" : "");
            }
        }
    }

    // Format the command line
    StringBuilder CommandLine = new StringBuilder();
    CommandLine.AppendFormat("{0} -create={1}", CommandUtils.MakePathSafeToUseWithCommandLine(OutputFile.FullName), CommandUtils.MakePathSafeToUseWithCommandLine(ResponseFile.FullName));
    if (Parameters.Sign != null)
    {
        CommandLine.AppendFormat(" -sign={0}", CommandUtils.MakePathSafeToUseWithCommandLine(ResolveFile(Parameters.Sign).FullName));
    }
    if (Parameters.Order != null)
    {
        CommandLine.AppendFormat(" -order={0}", CommandUtils.MakePathSafeToUseWithCommandLine(ResolveFile(Parameters.Order).FullName));
    }
    if (GlobalCommandLine.Installed)
    {
        CommandLine.Append(" -installed");
    }
    if (GlobalCommandLine.UTF8Output)
    {
        CommandLine.AppendFormat(" -UTF8Output");
    }

    // Get the executable path (Windows carries the .exe suffix; other hosts do not)
    FileReference UnrealPakExe;
    if (HostPlatform.Current.HostEditorPlatform == UnrealTargetPlatform.Win64)
    {
        UnrealPakExe = ResolveFile("Engine/Binaries/Win64/UnrealPak.exe");
    }
    else
    {
        UnrealPakExe = ResolveFile(String.Format("Engine/Binaries/{0}/UnrealPak", HostPlatform.Current.HostEditorPlatform.ToString()));
    }

    // Run it
    CommandUtils.Log("Running '{0} {1}'", CommandUtils.MakePathSafeToUseWithCommandLine(UnrealPakExe.FullName), CommandLine.ToString());
    CommandUtils.RunAndLog(CommandUtils.CmdEnv, UnrealPakExe.FullName, CommandLine.ToString(), Options: CommandUtils.ERunOptions.Default | CommandUtils.ERunOptions.UTF8Output);
    BuildProducts.Add(OutputFile);

    // Apply the optional tag to the output file
    foreach (string TagName in FindTagNamesFromList(Parameters.Tag))
    {
        FindOrAddTagSet(TagNameToFileSet, TagName).Add(OutputFile);
    }
}
// NOTE: the identifier-sequence dependency is currently unused; the commented-out field
// and assignment are kept for when it is reinstated.
//private readonly IJobIdentifierSequenceRepository _jobIdentifierSequenceRepository;

/// <summary>Creates the repository, guarding against a missing database context.</summary>
/// <param name="jobContext">Database context; must not be null.</param>
/// <param name="jobIdentifierSequenceRepository">Currently unused (see note above).</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="jobContext"/> is null.</exception>
public JobRepository(JobContext jobContext, IJobIdentifierSequenceRepository jobIdentifierSequenceRepository)
{
    if (jobContext == null)
        throw new ArgumentNullException(nameof(jobContext));
    _jobContext = jobContext;
    //_jobIdentifierSequenceRepository = jobIdentifierSequenceRepository;
}
/// <summary>Invokes the callback stored in the context's Parameter, handing it the context itself.</summary>
/// <param name="context">Job context whose <c>Parameter</c> holds an <c>Action&lt;JobContext&gt;</c>.</param>
public void Execute(JobContext context)
{
    var callback = (Action<JobContext>)context.Parameter;
    callback(context);
}
/// <summary>Ignores the string argument and returns the integer stored under "DUMMY_VALUE" in the context.</summary>
/// <param name="arg">Unused.</param>
/// <param name="context">Context providing the typed lookup.</param>
/// <returns>The context's "DUMMY_VALUE" entry as an int.</returns>
public override int Process(string arg, JobContext context) => context.Get<int>("DUMMY_VALUE");
/// <summary>Adds the given context to the "JobContexts" entity set via the base object store.</summary>
/// <param name="jobContext">The context to register.</param>
public void AddToJobContexts(JobContext jobContext) => base.AddObject("JobContexts", jobContext);
/// <summary>Returns the decimal string form of the argument, upper-cased.</summary>
/// <param name="arg">Value to render.</param>
/// <param name="context">Unused.</param>
/// <returns>The argument's string representation (ToUpper is effectively a no-op on digits and sign).</returns>
public override string Process(int arg, JobContext context)
{
    string rendered = arg.ToString();
    return rendered.ToUpper();
}