        public S3Configuration(GameServerManager manager)
        {
            CredentialsFilePath = CommandUtils.CombinePaths(BuildPatchToolStagingInfo.GetBuildRootPath(), "Utilities", "S3Credentials.txt");
            CredentialsKey      = string.Format("{0}_server", manager.GameName).ToLower();
            AWSRegion           = "us-east-1";
            CommandUtils.Log("Using credentials key {0} for region {1} from {2}", CredentialsKey, AWSRegion, CredentialsFilePath);
        }
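        // Typical use (see DeployLinuxServerS3 below): construct an S3Configuration with the owning
        // GameServerManager, then call GetConnection() to obtain the CloudStorageBase client used for uploads.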
        /// <summary>
        /// Execute the task.
        /// </summary>
        /// <param name="Job">Information about the current job</param>
        /// <param name="BuildProducts">Set of build products produced by this node.</param>
        /// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
        /// <returns>True if the task succeeded</returns>
        public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
        {
            DirectoryReference        CloudDir    = new DirectoryReference(Parameters.CloudDir);
            BuildPatchToolStagingInfo StagingInfo = new BuildPatchToolStagingInfo(Job.OwnerCommand, Parameters.AppName, 1, Parameters.BuildVersion, Parameters.Platform, null, CloudDir);

            // Post the new build's metadata to the build info backend identified by McpConfig.
            BuildInfoPublisherBase.Get().PostBuildInfo(StagingInfo, Parameters.McpConfig);
            return true;
        }
        /// <summary>
        /// Execute the task.
        /// </summary>
        /// <param name="Job">Information about the current job</param>
        /// <param name="BuildProducts">Set of build products produced by this node.</param>
        /// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
        /// <returns>True if the task succeeded</returns>
        public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
        {
            BuildPatchToolStagingInfo StagingInfo = new BuildPatchToolStagingInfo(Job.OwnerCommand, Parameters.AppName, 1, Parameters.BuildVersion, Parameters.Platform, null, null);
            string LabelWithPlatform = BuildInfoPublisherBase.Get().GetLabelWithPlatform(Parameters.Label, Parameters.Platform);

            // Apply the platform-qualified label to the build via the backend specified by McpConfig.
            BuildInfoPublisherBase.Get().LabelBuild(StagingInfo, LabelWithPlatform, Parameters.McpConfig);
            return true;
        }
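        // Example (hypothetical caller): a deployment step might run DeployLinuxServerS3(BuildVersion)
        // after staging; it zips <BuildRoot>/<GameName>/<BuildVersion>/LinuxServer and uploads the archive
        // to the configured S3 bucket as LinuxServer-<Changelist>.zip.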
        public void DeployLinuxServerS3(string BuildVersion)
        {
            string BuildBaseDir = CommandUtils.CombinePaths(
                BuildPatchToolStagingInfo.GetBuildRootPath(),
                GameName,
                BuildVersion
                );
            string LinuxServerBaseDir = CommandUtils.CombinePaths(BuildBaseDir, "LinuxServer");
            string ServerZipFilePath  = CommandUtils.CombinePaths(BuildBaseDir, "LinuxServer.zip");

            if (CommandUtils.FileExists_NoExceptions(ServerZipFilePath))
            {
                CommandUtils.Log("Skipping creating server zip file {0}, as it already exists.", ServerZipFilePath);
            }
            else
            {
                CommandUtils.Log("Compressing Linux server binaries to {0}", ServerZipFilePath);
                CommandUtils.ZipFiles(ServerZipFilePath, LinuxServerBaseDir, new FileFilter(FileFilterType.Include));
                CommandUtils.Log("Completed compressing Linux server binaries.");
            }

            S3Configuration  S3Config = new S3Configuration(this);
            CloudStorageBase S3       = S3Config.GetConnection();

            // The uploaded object is named per changelist, so successive uploads of the same build version are distinct.
            string S3Filename = string.Format("LinuxServer-{0}.zip", Changelist);
            bool   bSuccess   = false;
            int    Retries    = 0;
            int    MaxRetries = 5;

            do
            {
                CommandUtils.Log("Uploading server binaries zip file to Amazon S3 bucket {0}.", S3BucketName);
                bSuccess = S3.PostFile(S3BucketName, S3Filename, ServerZipFilePath, "application/zip").bSuccess;
                if (!bSuccess)
                {
                    bool bDoRetry = Retries + 1 < MaxRetries;
                    CommandUtils.LogWarning("Failed to post server binaries to S3 (attempt {0} of {1}). {2}.",
                                            Retries + 1,
                                            MaxRetries,
                                            bDoRetry ? "Sleeping for ten seconds before retrying" : "Not retrying"
                                            );
                    if (bDoRetry)
                    {
                        Thread.Sleep(10000);
                    }
                }
            } while (!bSuccess && ++Retries < MaxRetries);

            if (!bSuccess)
            {
                throw new AutomationException("Could not upload server binaries to S3.");
            }

            CommandUtils.Log("Server binaries uploaded successfully to S3.");
        }
        /// <summary>
        /// Execute the task.
        /// </summary>
        /// <param name="Job">Information about the current job</param>
        /// <param name="BuildProducts">Set of build products produced by this node.</param>
        /// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
        /// <returns>True if the task succeeded</returns>
        public override bool Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
        {
            // Get the build directory
            DirectoryReference InputDir = ResolveDirectory(Parameters.InputDir);

            // If there's a set of files specified, generate a temporary ignore list.
            FileReference IgnoreList = null;

            if (Parameters.Files != null)
            {
                // Find the files which are to be included
                HashSet<FileReference> IncludeFiles = ResolveFilespec(InputDir, Parameters.Files, TagNameToFileSet);

                // Create a file to store the ignored file list
                IgnoreList = new FileReference(LogUtils.GetUniqueLogName(Path.Combine(CommandUtils.CmdEnv.LogFolder, Parameters.AppName + "-Ignore")));
                using (StreamWriter Writer = new StreamWriter(IgnoreList.FullName))
                {
                    // Each entry is the file's path relative to InputDir (quoted), a tab, and its
                    // last-write time in UTC (ISO 8601). Files resolved into the include set are skipped.
                    const string Iso8601DateTimeFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'.'fffZ";
                    DirectoryInfo InputDirInfo = new DirectoryInfo(InputDir.FullName);
                    foreach (FileInfo File in InputDirInfo.EnumerateFiles("*", SearchOption.AllDirectories))
                    {
                        FileReference FileRef = new FileReference(File);
                        if (!IncludeFiles.Contains(FileRef))
                        {
                            string RelativePath = FileRef.MakeRelativeTo(InputDir);
                            Writer.WriteLine("\"{0}\"\t{1}", RelativePath, File.LastWriteTimeUtc.ToString(Iso8601DateTimeFormat));
                        }
                    }
                }
            }

            // Create the staging info
            BuildPatchToolStagingInfo StagingInfo = new BuildPatchToolStagingInfo(Job.OwnerCommand, Parameters.AppName, 1, Parameters.BuildVersion, Parameters.Platform, Parameters.CloudDir);

            // Set the patch generation options
            BuildPatchToolBase.PatchGenerationOptions Options = new BuildPatchToolBase.PatchGenerationOptions();
            Options.StagingInfo      = StagingInfo;
            Options.BuildRoot        = ResolveDirectory(Parameters.InputDir).FullName;
            Options.FileIgnoreList   = (IgnoreList != null) ? IgnoreList.FullName : null;
            Options.AppLaunchCmd     = Parameters.Launch ?? "";
            Options.AppLaunchCmdArgs = Parameters.LaunchArgs ?? "";
            Options.AppChunkType     = BuildPatchToolBase.ChunkType.Chunk;

            // Run the chunking
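            // BuildPatchTool is expected to emit the generated chunk data and manifest under the cloud
            // directory referenced by the staging info (Parameters.CloudDir).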
            BuildPatchToolBase.Get().Execute(Options);
            return true;
        }
        private string GetUtilitiesFilePath(string Filename)
        {
            return CommandUtils.CombinePaths(BuildPatchToolStagingInfo.GetBuildRootPath(), "Utilities", Filename);
        }
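        // For example, GetUtilitiesFilePath("S3Credentials.txt") resolves to <BuildRoot>/Utilities/S3Credentials.txt,
        // the same path built explicitly in the S3Configuration constructor above.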