/// <summary> /// Creates a pre-configured <see cref="Process"/> pointing to dmm-tools.exe /// </summary> /// <param name="output">A <see cref="StringBuilder"/> used to accept stdout</param> /// <param name="errorOutput">A <see cref="StringBuilder"/> used to accept stderr</param> /// <returns>A pre-configured <see cref="Process"/> pointing to dmm-tools.exe</returns> Process CreateDMMToolsProcess(StringBuilder output, StringBuilder errorOutput) { var P = new Process(); P.StartInfo.RedirectStandardOutput = true; P.StartInfo.RedirectStandardError = true; P.StartInfo.UseShellExecute = false; P.StartInfo.WorkingDirectory = ioManager.ResolvePath("."); P.OutputDataReceived += new DataReceivedEventHandler( delegate(object sender, DataReceivedEventArgs e) { output.Append(Environment.NewLine); output.Append(e.Data); } ); P.ErrorDataReceived += new DataReceivedEventHandler( delegate(object sender, DataReceivedEventArgs e) { errorOutput.Append(Environment.NewLine); errorOutput.Append(e.Data); } ); try { P.StartInfo.FileName = ioManager.ConcatPath(ioManager.GetDirectoryName(Assembly.GetExecutingAssembly().Location), DMMToolsPath); } catch { P.Dispose(); throw; } return(P); }
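// Illustrative usage sketch, not part of the original source: starting the returned Process and
// collecting dmm-tools output. The argument string is hypothetical; BeginOutputReadLine and
// BeginErrorReadLine must be called or the OutputDataReceived/ErrorDataReceived handlers wired up
// above will never fire.
var output = new StringBuilder();
var errorOutput = new StringBuilder();
using (var process = CreateDMMToolsProcess(output, errorOutput))
{
    process.StartInfo.Arguments = "minimap maps/example.dmm"; // hypothetical arguments
    process.Start();
    process.BeginOutputReadLine();
    process.BeginErrorReadLine();
    process.WaitForExit();
}
// output and errorOutput now hold the accumulated stdout/stderr of dmm-tools.exe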
/// <inheritdoc /> public Task InstallByond(string path, Version version, CancellationToken cancellationToken) { if (path == null) { throw new ArgumentNullException(nameof(path)); } if (version == null) { throw new ArgumentNullException(nameof(version)); } // write the scripts for running the thing // need to add $ORIGIN to LD_LIBRARY_PATH const string StandardScript = "#!/bin/sh\nexport LD_LIBRARY_PATH=\"\\$ORIGIN:$LD_LIBRARY_PATH\"\nBASEDIR=$(dirname \"$0\")\nexec \"$BASEDIR/{0}\" \"$@\"\n"; var dreamDaemonScript = String.Format(CultureInfo.InvariantCulture, StandardScript, DreamDaemonExecutableName); var dreamMakerScript = String.Format(CultureInfo.InvariantCulture, StandardScript, DreamMakerExecutableName); async Task WriteAndMakeExecutable(string fullPath, string script) { await ioManager.WriteAllBytes(fullPath, Encoding.ASCII.GetBytes(script), cancellationToken).ConfigureAwait(false); postWriteHandler.HandleWrite(fullPath); } var basePath = ioManager.ConcatPath(path, ByondManager.BinPath); var task = Task.WhenAll(WriteAndMakeExecutable(ioManager.ConcatPath(basePath, DreamDaemonName), dreamDaemonScript), WriteAndMakeExecutable(ioManager.ConcatPath(basePath, DreamMakerName), dreamMakerScript)); postWriteHandler.HandleWrite(ioManager.ConcatPath(basePath, DreamDaemonExecutableName)); postWriteHandler.HandleWrite(ioManager.ConcatPath(basePath, DreamMakerExecutableName)); return(task); }
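// Illustrative sketch, not part of the original source: what the wrapper script written by
// InstallByond looks like once StandardScript is formatted, assuming a hypothetical executable
// name of "DreamDaemon". The backslash keeps $ORIGIN out of the shell's hands so the dynamic
// loader, rather than the shell, expands it when resolving bundled libraries.
var exampleScript = String.Format(CultureInfo.InvariantCulture, StandardScript, "DreamDaemon");
// exampleScript now contains:
//   #!/bin/sh
//   export LD_LIBRARY_PATH="\$ORIGIN:$LD_LIBRARY_PATH"
//   BASEDIR=$(dirname "$0")
//   exec "$BASEDIR/DreamDaemon" "$@"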
/// <inheritdoc /> public async Task <IJsonTrackingContext> TrackJsons(string basePath, string channelsJsonName, string commandsJsonName, CancellationToken cancellationToken) { if (customCommandHandler == null) { throw new InvalidOperationException("RegisterCommandHandler() hasn't been called!"); } JsonTrackingContext context = null; context = new JsonTrackingContext(ioManager, customCommandHandler, () => { lock (trackingContexts) trackingContexts.Remove(context); }, ioManager.ConcatPath(basePath, commandsJsonName), ioManager.ConcatPath(basePath, channelsJsonName)); Task task; lock (trackingContexts) { trackingContexts.Add(context); lock (mappedChannels) task = Task.WhenAll(trackingContexts.Select(x => x.SetChannels(mappedChannels.Select(y => y.Value.Channel), cancellationToken))); } await task.ConfigureAwait(false); return(context); }
/// <inheritdoc /> public async Task <ServerSideModifications> CopyDMFilesTo(string dmeFile, string destination, CancellationToken cancellationToken) { using (await SemaphoreSlimContext.Lock(semaphore, cancellationToken).ConfigureAwait(false)) { await EnsureDirectories(cancellationToken).ConfigureAwait(false); // just assume no other fs race conditions here var dmeExistsTask = ioManager.FileExists(ioManager.ConcatPath(CodeModificationsSubdirectory, dmeFile), cancellationToken); var headFileExistsTask = ioManager.FileExists(ioManager.ConcatPath(CodeModificationsSubdirectory, CodeModificationsHeadFile), cancellationToken); var tailFileExistsTask = ioManager.FileExists(ioManager.ConcatPath(CodeModificationsSubdirectory, CodeModificationsTailFile), cancellationToken); var copyTask = ioManager.CopyDirectory(CodeModificationsSubdirectory, destination, null, cancellationToken); await Task.WhenAll(dmeExistsTask, headFileExistsTask, tailFileExistsTask, copyTask).ConfigureAwait(false); if (!dmeExistsTask.Result && !headFileExistsTask.Result && !tailFileExistsTask.Result) { return(null); } if (dmeExistsTask.Result) { return(new ServerSideModifications(null, null, true)); } if (!headFileExistsTask.Result && !tailFileExistsTask.Result) { return(null); }
/// <summary> /// Adds server side includes to the .dme being compiled /// </summary> /// <param name="job">The <see cref="CompileJob"/> for the operation</param> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the operation</param> /// <returns>A <see cref="Task"/> representing the running operation</returns> async Task ModifyDme(Models.CompileJob job, CancellationToken cancellationToken) { var dmeFileName = String.Join('.', job.DmeName, DmeExtension); var dmePath = ioManager.ConcatPath(job.DirectoryName.ToString(), dmeFileName); var dmeReadTask = ioManager.ReadAllBytes(dmePath, cancellationToken); var dmeModificationsTask = configuration.CopyDMFilesTo(dmeFileName, ioManager.ResolvePath(job.DirectoryName.ToString()), cancellationToken); var dmeBytes = await dmeReadTask.ConfigureAwait(false); var dme = Encoding.UTF8.GetString(dmeBytes); var dmeModifications = await dmeModificationsTask.ConfigureAwait(false); if (dmeModifications == null || dmeModifications.TotalDmeOverwrite) { if (dmeModifications != null) { logger.LogDebug(".dme replacement configured!"); } else { logger.LogTrace("No .dme modifications required."); } return; } var dmeLines = new List <string>(dme.Split('\n', StringSplitOptions.None)); for (var I = 0; I < dmeLines.Count; ++I) { var line = dmeLines[I]; if (line.Contains("BEGIN_INCLUDE", StringComparison.Ordinal) && dmeModifications.HeadIncludeLine != null) { var headIncludeLineNumber = I + 1; logger.LogDebug( "Inserting HeadInclude.dm at line {0}: {1}", headIncludeLineNumber, dmeModifications.HeadIncludeLine); dmeLines.Insert(headIncludeLineNumber, dmeModifications.HeadIncludeLine); ++I; } else if (line.Contains("END_INCLUDE", StringComparison.Ordinal) && dmeModifications.TailIncludeLine != null) { logger.LogDebug( "Inserting TailInclude.dm at line {0}: {1}", I, dmeModifications.TailIncludeLine); dmeLines.Insert(I, dmeModifications.TailIncludeLine); break; } } dmeBytes = Encoding.UTF8.GetBytes(String.Join(Environment.NewLine, dmeLines)); await ioManager.WriteAllBytes(dmePath, dmeBytes, cancellationToken).ConfigureAwait(false); }
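// Illustrative sketch, not part of the original source: the .dme layout ModifyDme expects. Only
// the BEGIN_INCLUDE/END_INCLUDE markers and the HeadInclude.dm/TailInclude.dm names come from the
// method above (the markers are matched with Contains(), so their exact line formatting is up to
// the .dme); the other include path is hypothetical.
//
//   // BEGIN_INCLUDE
//   #include "HeadInclude.dm"        <- HeadIncludeLine is inserted on the line after the marker
//   #include "code\example\thing.dm" <- pre-existing include (hypothetical)
//   #include "TailInclude.dm"        <- TailIncludeLine is inserted just before the marker
//   // END_INCLUDE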
/// <inheritdoc /> public async Task CleanCache(CancellationToken cancellationToken) { try { await ioManager.DeleteDirectory(ioManager.ConcatPath(Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments), "byond/cache"), cancellationToken).ConfigureAwait(false); } catch (Exception e) { logger.LogWarning("Error deleting BYOND cache! Exception: {0}", e); } }
/// <inheritdoc /> public async Task <IByondExecutableLock> UseExecutables(Version requiredVersion, CancellationToken cancellationToken) { var versionToUse = requiredVersion ?? ActiveVersion; if (versionToUse == null) { throw new JobException(ErrorCode.ByondNoVersionsInstalled); } await InstallVersion(versionToUse, null, cancellationToken).ConfigureAwait(false); var versionKey = VersionKey(versionToUse, true); var binPathForVersion = ioManager.ConcatPath(versionKey, BinPath); logger.LogTrace("Creating ByondExecutableLock lock for version {0}", versionToUse); return(new ByondExecutableLock( ioManager, semaphore, versionToUse, ioManager.ResolvePath( ioManager.ConcatPath( binPathForVersion, byondInstaller.DreamDaemonName)), ioManager.ResolvePath( ioManager.ConcatPath( binPathForVersion, byondInstaller.DreamMakerName)), ioManager.ResolvePath( ioManager.ConcatPath( byondInstaller.PathToUserByondFolder, CfgDirectoryName, TrustedDmbFileName)))); }
/// <inheritdoc /> public async Task <IDmbProvider> FromCompileJob(CompileJob compileJob, CancellationToken cancellationToken) { logger.LogTrace("Loading compile job {0}...", compileJob.Id); var providerSubmitted = false; var newProvider = new DmbProvider(compileJob, ioManager, () => { if (providerSubmitted) { CleanJob(compileJob); } }); try { var primaryCheckTask = ioManager.FileExists(ioManager.ConcatPath(newProvider.PrimaryDirectory, newProvider.DmbName), cancellationToken); var secondaryCheckTask = ioManager.FileExists(ioManager.ConcatPath(newProvider.SecondaryDirectory, newProvider.DmbName), cancellationToken); if (!(await primaryCheckTask.ConfigureAwait(false) && await secondaryCheckTask.ConfigureAwait(false))) { logger.LogWarning("Error loading compile job, .dmb missing!"); return(null); //omae wa mou shinderu } lock (this) { if (!jobLockCounts.TryGetValue(compileJob.Id, out int value)) { value = 1; jobLockCounts.Add(compileJob.Id, 1); } else { jobLockCounts[compileJob.Id] = ++value; } logger.LogTrace("Compile job {0} lock count now: {1}", compileJob.Id, value); providerSubmitted = true; return(newProvider); } } finally { if (!providerSubmitted) { newProvider.Dispose(); } } }
/// <summary> /// Gets the evaluated log <see cref="Directory"/>. /// </summary> /// <param name="ioManager">The <see cref="IIOManager"/> to use.</param> /// <param name="assemblyInformationProvider">The <see cref="IAssemblyInformationProvider"/> to use.</param> /// <param name="platformIdentifier">The <see cref="IPlatformIdentifier"/> to use</param> /// <returns>The evaluated log <see cref="Directory"/>.</returns> public string GetFullLogDirectory( IIOManager ioManager, IAssemblyInformationProvider assemblyInformationProvider, IPlatformIdentifier platformIdentifier) { if (ioManager == null) { throw new ArgumentNullException(nameof(ioManager)); } if (assemblyInformationProvider == null) { throw new ArgumentNullException(nameof(assemblyInformationProvider)); } if (platformIdentifier == null) { throw new ArgumentNullException(nameof(platformIdentifier)); } var directoryToUse = platformIdentifier.IsWindows ? Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData) // C:/ProgramData : "/var/log"; // :pain: return(!String.IsNullOrEmpty(Directory) ? Directory : ioManager.ConcatPath( directoryToUse, assemblyInformationProvider.VersionPrefix)); }
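// Illustrative sketch, not part of the original source: what GetFullLogDirectory evaluates to
// when no Directory is configured, assuming ConcatPath joins segments like Path.Combine and a
// hypothetical VersionPrefix of "tgstation-server".
//
//   Windows: C:\ProgramData\tgstation-server
//   Linux:   /var/log/tgstation-server
//
// When Directory is set in configuration it is returned verbatim and no concatenation happens.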
public Task <IActionResult> ListLogs([FromQuery] int?page, [FromQuery] int?pageSize, CancellationToken cancellationToken) => Paginated( async() => { var path = fileLoggingConfiguration.GetFullLogDirectory(ioManager, assemblyInformationProvider, platformIdentifier); try { var files = await ioManager.GetFiles(path, cancellationToken).ConfigureAwait(false); var tasks = files.Select( async file => new LogFileResponse { Name = ioManager.GetFileName(file), LastModified = await ioManager .GetLastModified( ioManager.ConcatPath(path, file), cancellationToken) .ConfigureAwait(false), }) .ToList(); await Task.WhenAll(tasks).ConfigureAwait(false); return(new PaginatableResult <LogFileResponse>( tasks .AsQueryable() .Select(x => x.Result) .OrderByDescending(x => x.Name))); } catch (IOException ex) { return(new PaginatableResult <LogFileResponse>( Conflict(new ErrorMessageResponse(ErrorCode.IOError) { AdditionalData = ex.ToString(), }))); } }, null, page, pageSize, cancellationToken);
public async Task <IActionResult> ListLogs(CancellationToken cancellationToken) { var path = fileLoggingConfiguration.GetFullLogDirectory(ioManager, assemblyInformationProvider, platformIdentifier); try { var files = await ioManager.GetFiles(path, cancellationToken).ConfigureAwait(false); var tasks = files.Select( async file => new LogFile { Name = ioManager.GetFileName(file), LastModified = await ioManager.GetLastModified( ioManager.ConcatPath(path, file), cancellationToken) .ConfigureAwait(false) }) .ToList(); await Task.WhenAll(tasks).ConfigureAwait(false); var result = tasks .Select(x => x.Result) .OrderByDescending(x => x.Name) .ToList(); return(Ok(result)); } catch (IOException ex) { return(Conflict(new ErrorMessage(ErrorCode.IOError) { AdditionalData = ex.ToString() })); } }
/// <inheritdoc /> public async Task <string> ModifyMerge(PullRequest pullRequest, string workingCommit, CancellationToken cancellationToken) { if (pullRequest == null) { throw new ArgumentNullException(nameof(pullRequest)); } if (workingCommit == null) { throw new ArgumentNullException(nameof(workingCommit)); } var changelog = Changelog.GetChangelog(pullRequest, out bool malformed); if (changelog == null) { return(workingCommit); } var result = new List <Dictionary <string, string> >(); result.AddRange(changelog.Changes.Select(x => new Dictionary <string, string> { { x.Type.ToString().ToLowerInvariant(), x.Text } })); //create the object graph var graph = new { author = changelog.Author, delete_after_temporary_for_replacement = true, changes = result }; //hack because '-' isn't a valid identifier in c# var yaml = new Serializer().Serialize(graph).Replace("delete_after_temporary_for_replacement", "delete-after"); var title = String.Format(CultureInfo.InvariantCulture, "AutoChangeLog-pr-{0}.yml", pullRequest.Number); var pathToWrite = ioManager.ConcatPath(repository.Path, "html", "changelogs", title); await ioManager.WriteAllText(pathToWrite, yaml, cancellationToken).ConfigureAwait(false); return(await repository.CommitChanges(new List <string> { pathToWrite }, cancellationToken)); }
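// Illustrative sketch, not part of the original source: roughly the YAML ModifyMerge writes for a
// hypothetical pull request #1234 with a single bugfix entry, assuming YamlDotNet's default
// serialization of the anonymous object graph. The placeholder property name is string-replaced
// after serialization because "delete-after" is not a legal C# identifier.
//
//   File: <repository.Path>/html/changelogs/AutoChangeLog-pr-1234.yml
//
//   author: ExampleAuthor
//   delete-after: true
//   changes:
//   - bugfix: Fixed a hypothetical bug.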
/// <inheritdoc /> public async Task <ILocalRepository> GetRepository(Octokit.Repository repository, Func <int, Task> onCloneProgress, Func <Task> onOperationBlocked, CancellationToken cancellationToken) { if (repository == null) { throw new ArgumentNullException(nameof(repository)); } var repoPath = ioManager.ConcatPath(repository.Owner.Login, repository.Name); TaskCompletionSource <object> usageTask = null; try { return(await TryLoadRepository(repoPath, onOperationBlocked, tcs => usageTask = tcs, cancellationToken).ConfigureAwait(false)); } catch (LibGit2SharpException e) { logger.LogWarning(e, "Failed to load repository {0}/{1}! Cloning...", repository.Owner.Login, repository.Name); } //so the repo failed to load and now we're holding our queue spot in usageTask //reclone it try { await ioManager.DeleteDirectory(repoPath, cancellationToken).ConfigureAwait(false); await ioManager.CreateDirectory(repoPath, cancellationToken).ConfigureAwait(false); await repositoryOperations.Clone(repository.CloneUrl, ioManager.ResolvePath(repoPath), onCloneProgress, cancellationToken).ConfigureAwait(false); return(await localRepositoryFactory.CreateLocalRepository(ioManager.ResolvePath(repoPath), usageTask, cancellationToken).ConfigureAwait(false)); } catch { //ok we can't do anything else, clear our queue spot usageTask.SetResult(null); throw; } }
/// <summary> /// Run a quick DD instance to test the DMAPI is installed on the target code /// </summary> /// <param name="timeout">The timeout in seconds for validation</param> /// <param name="securityLevel">The <see cref="DreamDaemonSecurity"/> level to use to validate the API</param> /// <param name="job">The <see cref="Models.CompileJob"/> for the operation</param> /// <param name="byondLock">The current <see cref="IByondExecutableLock"/></param> /// <param name="portToUse">The port to use for API validation</param> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the operation</param> /// <returns>A <see cref="Task{TResult}"/> resulting in <see langword="true"/> if the DMAPI was successfully validated, <see langword="false"/> otherwise</returns> async Task <bool> VerifyApi(uint timeout, DreamDaemonSecurity securityLevel, Models.CompileJob job, IByondExecutableLock byondLock, ushort portToUse, CancellationToken cancellationToken) { logger.LogTrace("Verifying DMAPI..."); var launchParameters = new DreamDaemonLaunchParameters { AllowWebClient = false, PrimaryPort = portToUse, SecurityLevel = securityLevel, //all it needs to read the file and exit StartupTimeout = timeout }; var dirA = ioManager.ConcatPath(job.DirectoryName.ToString(), ADirectoryName); var provider = new TemporaryDmbProvider(ioManager.ResolvePath(dirA), String.Concat(job.DmeName, DmbExtension), job); var timeoutAt = DateTimeOffset.Now.AddSeconds(timeout); using (var controller = await sessionControllerFactory.LaunchNew(launchParameters, provider, byondLock, true, true, true, cancellationToken).ConfigureAwait(false)) { var launchResult = await controller.LaunchResult.ConfigureAwait(false); var now = DateTimeOffset.Now; if (now < timeoutAt && launchResult.StartupTime.HasValue) { var timeoutTask = Task.Delay(timeoutAt - now, cancellationToken); await Task.WhenAny(controller.Lifetime, timeoutTask).ConfigureAwait(false); cancellationToken.ThrowIfCancellationRequested(); } if (!controller.Lifetime.IsCompleted) { logger.LogDebug("API validation timed out!"); return(false); } var validated = controller.ApiValidated; logger.LogTrace("API valid: {0}", validated); return(validated); } }
/// <inheritdoc /> public async Task <ServerSideModifications> CopyDMFilesTo(string dmeFile, string destination, CancellationToken cancellationToken) { await EnsureDirectories(cancellationToken).ConfigureAwait(false); //just assume no other fs race conditions here var dmeExistsTask = ioManager.FileExists(ioManager.ConcatPath(CodeModificationsSubdirectory, dmeFile), cancellationToken); var headFileExistsTask = ioManager.FileExists(ioManager.ConcatPath(CodeModificationsSubdirectory, CodeModificationsHeadFile), cancellationToken); var tailFileExistsTask = ioManager.FileExists(ioManager.ConcatPath(CodeModificationsSubdirectory, CodeModificationsTailFile), cancellationToken); await Task.WhenAll(dmeExistsTask, headFileExistsTask, tailFileExistsTask).ConfigureAwait(false); if (!dmeExistsTask.Result && !headFileExistsTask.Result && !tailFileExistsTask.Result) { return(null); } var copyTask = ioManager.CopyDirectory(CodeModificationsSubdirectory, destination, null, cancellationToken); if (dmeExistsTask.Result) { await copyTask.ConfigureAwait(false); return(new ServerSideModifications(null, null, true)); } if (!headFileExistsTask.Result && !tailFileExistsTask.Result) { await copyTask.ConfigureAwait(false); return(null); } string IncludeLine(string filePath) => String.Format(CultureInfo.InvariantCulture, "#include \"{0}\"", filePath); await copyTask.ConfigureAwait(false); return(new ServerSideModifications(headFileExistsTask.Result ? IncludeLine(CodeModificationsHeadFile) : null, tailFileExistsTask.Result ? IncludeLine(CodeModificationsTailFile) : null, false)); }
/// <inheritdoc /> #pragma warning disable CA1506 // TODO: Decomplexify public async Task <ISessionController> LaunchNew(DreamDaemonLaunchParameters launchParameters, IDmbProvider dmbProvider, IByondExecutableLock currentByondLock, bool primaryPort, bool primaryDirectory, bool apiValidate, CancellationToken cancellationToken) { var portToUse = primaryPort ? launchParameters.PrimaryPort : launchParameters.SecondaryPort; if (!portToUse.HasValue) { throw new InvalidOperationException("Given port is null!"); } var accessIdentifier = cryptographySuite.GetSecureString(); const string JsonPostfix = "tgs.json"; var basePath = primaryDirectory ? dmbProvider.PrimaryDirectory : dmbProvider.SecondaryDirectory; // delete all previous tgs json files var files = await ioManager.GetFilesWithExtension(basePath, JsonPostfix, cancellationToken).ConfigureAwait(false); await Task.WhenAll(files.Select(x => ioManager.DeleteFile(x, cancellationToken))).ConfigureAwait(false); // i changed this back from guids, hopefully i don't regret that string JsonFile(string name) => String.Format(CultureInfo.InvariantCulture, "{0}.{1}", name, JsonPostfix); var securityLevelToUse = launchParameters.SecurityLevel.Value; switch (dmbProvider.CompileJob.MinimumSecurityLevel) { case DreamDaemonSecurity.Ultrasafe: break; case DreamDaemonSecurity.Safe: if (securityLevelToUse == DreamDaemonSecurity.Ultrasafe) { securityLevelToUse = DreamDaemonSecurity.Safe; } break; case DreamDaemonSecurity.Trusted: securityLevelToUse = DreamDaemonSecurity.Trusted; break; default: throw new InvalidOperationException(String.Format(CultureInfo.InvariantCulture, "Invalid DreamDaemonSecurity value: {0}", dmbProvider.CompileJob.MinimumSecurityLevel)); } // setup interop files var interopInfo = new JsonFile { AccessIdentifier = accessIdentifier, ApiValidateOnly = apiValidate, ChatChannelsJson = JsonFile("chat_channels"), ChatCommandsJson = JsonFile("chat_commands"), ServerCommandsJson = JsonFile("server_commands"), InstanceName = instance.Name, SecurityLevel = securityLevelToUse, Revision = new Api.Models.Internal.RevisionInformation { CommitSha = dmbProvider.CompileJob.RevisionInformation.CommitSha, OriginCommitSha = dmbProvider.CompileJob.RevisionInformation.OriginCommitSha } }; interopInfo.TestMerges.AddRange(dmbProvider.CompileJob.RevisionInformation.ActiveTestMerges.Select(x => x.TestMerge).Select(x => new Interop.TestMerge(x, interopInfo.Revision))); var interopJsonFile = JsonFile("interop"); var interopJson = JsonConvert.SerializeObject(interopInfo, new JsonSerializerSettings { ContractResolver = new CamelCasePropertyNamesContractResolver(), ReferenceLoopHandling = ReferenceLoopHandling.Ignore }); var chatJsonTrackingTask = chat.TrackJsons(basePath, interopInfo.ChatChannelsJson, interopInfo.ChatCommandsJson, cancellationToken); await ioManager.WriteAllBytes(ioManager.ConcatPath(basePath, interopJsonFile), Encoding.UTF8.GetBytes(interopJson), cancellationToken).ConfigureAwait(false); var chatJsonTrackingContext = await chatJsonTrackingTask.ConfigureAwait(false); try { // get the byond lock var byondLock = currentByondLock ?? 
await byond.UseExecutables(Version.Parse(dmbProvider.CompileJob.ByondVersion), cancellationToken).ConfigureAwait(false); try { // create interop context var context = new CommContext(ioManager, loggerFactory.CreateLogger <CommContext>(), basePath, interopInfo.ServerCommandsJson); try { // set command line options // more sanitization here cause it uses the same scheme var parameters = String.Format(CultureInfo.InvariantCulture, "{2}={0}&{3}={1}", byondTopicSender.SanitizeString(application.Version.ToString()), byondTopicSender.SanitizeString(interopJsonFile), byondTopicSender.SanitizeString(Constants.DMParamHostVersion), byondTopicSender.SanitizeString(Constants.DMParamInfoJson)); var visibility = apiValidate ? "invisible" : "public"; // important to run on all ports to allow port changing var arguments = String.Format(CultureInfo.InvariantCulture, "{0} -port {1} -ports 1-65535 {2}-close -{3} -{5} -public -params \"{4}\"", dmbProvider.DmbName, primaryPort ? launchParameters.PrimaryPort : launchParameters.SecondaryPort, launchParameters.AllowWebClient.Value ? "-webclient " : String.Empty, SecurityWord(securityLevelToUse), parameters, visibility); // See https://github.com/tgstation/tgstation-server/issues/719 var noShellExecute = !platformIdentifier.IsWindows; // launch dd var process = processExecutor.LaunchProcess(byondLock.DreamDaemonPath, basePath, arguments, noShellExecute: noShellExecute); try { networkPromptReaper.RegisterProcess(process); // return the session controller for it var result = new SessionController(new ReattachInformation { AccessIdentifier = accessIdentifier, Dmb = dmbProvider, IsPrimary = primaryDirectory, Port = portToUse.Value, ProcessId = process.Id, ChatChannelsJson = interopInfo.ChatChannelsJson, ChatCommandsJson = interopInfo.ChatCommandsJson, ServerCommandsJson = interopInfo.ServerCommandsJson, }, process, byondLock, byondTopicSender, chatJsonTrackingContext, context, chat, loggerFactory.CreateLogger <SessionController>(), launchParameters.SecurityLevel, launchParameters.StartupTimeout); // writeback launch parameter's fixed security level launchParameters.SecurityLevel = securityLevelToUse; return(result); } catch { process.Dispose(); throw; } } catch { context.Dispose(); throw; } } catch { if (currentByondLock == null) { byondLock.Dispose(); } throw; } } catch { chatJsonTrackingContext.Dispose(); throw; } }
/// <summary> /// Get the proper path to <see cref="StaticIgnoreFile"/> /// </summary> /// <returns>The <see cref="ioManager"/> relative path to <see cref="StaticIgnoreFile"/></returns> string StaticIgnorePath() => ioManager.ConcatPath(GameStaticFilesSubdirectory, StaticIgnoreFile);
/// <inheritdoc /> #pragma warning disable CA1506 // TODO: Decomplexify public async Task <IDmbProvider> FromCompileJob(CompileJob compileJob, CancellationToken cancellationToken) { if (compileJob == null) { throw new ArgumentNullException(nameof(compileJob)); } // ensure we have the entire compile job tree await databaseContextFactory.UseContext(async db => compileJob = await db.CompileJobs.Where(x => x.Id == compileJob.Id) .Include(x => x.Job).ThenInclude(x => x.StartedBy) .Include(x => x.RevisionInformation).ThenInclude(x => x.PrimaryTestMerge).ThenInclude(x => x.MergedBy) .Include(x => x.RevisionInformation).ThenInclude(x => x.ActiveTestMerges).ThenInclude(x => x.TestMerge).ThenInclude(x => x.MergedBy) .FirstAsync(cancellationToken).ConfigureAwait(false)).ConfigureAwait(false); // can't wait to see that query logger.LogTrace("Loading compile job {0}...", compileJob.Id); var providerSubmitted = false; var newProvider = new DmbProvider(compileJob, ioManager, () => { if (providerSubmitted) { CleanJob(compileJob); } }); try { var primaryCheckTask = ioManager.FileExists(ioManager.ConcatPath(newProvider.PrimaryDirectory, newProvider.DmbName), cancellationToken); var secondaryCheckTask = ioManager.FileExists(ioManager.ConcatPath(newProvider.SecondaryDirectory, newProvider.DmbName), cancellationToken); if (!(await primaryCheckTask.ConfigureAwait(false) && await secondaryCheckTask.ConfigureAwait(false))) { logger.LogWarning("Error loading compile job, .dmb missing!"); return(null); // omae wa mou shinderu } lock (this) { if (!jobLockCounts.TryGetValue(compileJob.Id, out int value)) { value = 1; jobLockCounts.Add(compileJob.Id, 1); } else { jobLockCounts[compileJob.Id] = ++value; } logger.LogTrace("Compile job {0} lock count now: {1}", compileJob.Id, value); providerSubmitted = true; return(newProvider); } } finally { if (!providerSubmitted) { newProvider.Dispose(); } } }
/// <summary> /// Installs a BYOND <paramref name="version"/> if it isn't already /// </summary> /// <param name="version">The BYOND <see cref="Version"/> to install</param> /// <param name="customVersionStream">Custom zip file <see cref="Stream"/> to use. Will cause a <see cref="Version.Build"/> number to be added.</param> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the operation</param> /// <returns>A <see cref="Task"/> representing the running operation</returns> async Task <string> InstallVersion(Version version, Stream customVersionStream, CancellationToken cancellationToken) { var ourTcs = new TaskCompletionSource <object>(); Task inProgressTask; string versionKey; bool installed; lock (installedVersions) { if (customVersionStream != null) { int customInstallationNumber = 1; do { versionKey = $"{VersionKey(version, false)}.{customInstallationNumber++}"; }while (installedVersions.ContainsKey(versionKey)); } else { versionKey = VersionKey(version, true); } installed = installedVersions.TryGetValue(versionKey, out inProgressTask); if (!installed) { installedVersions.Add(versionKey, ourTcs.Task); } } if (installed) { using (cancellationToken.Register(() => ourTcs.SetCanceled())) { await Task.WhenAny(ourTcs.Task, inProgressTask).ConfigureAwait(false); cancellationToken.ThrowIfCancellationRequested(); return(versionKey); } } if (customVersionStream != null) { logger.LogInformation("Installing custom BYOND version as {0}...", versionKey); } else if (version.Build > 0) { throw new JobException(ErrorCode.ByondNonExistentCustomVersion); } else { logger.LogDebug("Requested BYOND version {0} not currently installed. Doing so now...", versionKey); } // okay up to us to install it then try { await eventConsumer.HandleEvent(EventType.ByondInstallStart, new List <string> { versionKey }, cancellationToken).ConfigureAwait(false); var extractPath = ioManager.ResolvePath(versionKey); async Task DirectoryCleanup() { await ioManager.DeleteDirectory(extractPath, cancellationToken).ConfigureAwait(false); await ioManager.CreateDirectory(extractPath, cancellationToken).ConfigureAwait(false); } var directoryCleanupTask = DirectoryCleanup(); try { Stream versionZipStream; Stream downloadedStream = null; if (customVersionStream == null) { var bytes = await byondInstaller.DownloadVersion(version, cancellationToken).ConfigureAwait(false); downloadedStream = new MemoryStream(bytes); versionZipStream = downloadedStream; } else { versionZipStream = customVersionStream; } using (downloadedStream) { await directoryCleanupTask.ConfigureAwait(false); logger.LogTrace("Extracting downloaded BYOND zip to {0}...", extractPath); await ioManager.ZipToDirectory(extractPath, versionZipStream, cancellationToken).ConfigureAwait(false); } await byondInstaller.InstallByond(extractPath, version, cancellationToken).ConfigureAwait(false); // make sure to do this last because this is what tells us we have a valid version in the future await ioManager.WriteAllBytes( ioManager.ConcatPath(versionKey, VersionFileName), Encoding.UTF8.GetBytes(versionKey), cancellationToken) .ConfigureAwait(false); } catch (WebException e) { // since the user can easily provide non-existent version numbers, we'll turn this into a JobException throw new JobException(ErrorCode.ByondDownloadFail, e); } catch (OperationCanceledException) { throw; } catch { await ioManager.DeleteDirectory(versionKey, cancellationToken).ConfigureAwait(false); throw; } ourTcs.SetResult(null); } catch (Exception e) { if (!(e is OperationCanceledException)) { await 
eventConsumer.HandleEvent(EventType.ByondInstallFail, new List <string> { e.Message }, cancellationToken).ConfigureAwait(false); } lock (installedVersions) installedVersions.Remove(versionKey); ourTcs.SetException(e); throw; } return(versionKey); }
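// Illustrative sketch, not part of the original source: how InstallVersion picks version keys,
// assuming VersionKey(version, false) yields "512.1400" (hypothetical). Custom zip uploads probe
// ".1", ".2", ... until an unused key is found:
//
//   installedVersions contains "512.1400" and "512.1400.1"
//   custom zip for 512.1400 arrives -> "512.1400.1" is taken -> installs as "512.1400.2"
//
// Official installs use VersionKey(version, true) directly, and requesting a version with a
// non-zero Build number that was never custom-installed throws a JobException with
// ErrorCode.ByondNonExistentCustomVersion.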
/// <summary> /// Prompts the user to create a <see cref="FileLoggingConfiguration"/> /// </summary> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the operation</param> /// <returns>A <see cref="Task{TResult}"/> resulting in the new <see cref="FileLoggingConfiguration"/></returns> async Task <FileLoggingConfiguration> ConfigureLogging(CancellationToken cancellationToken) { var fileLoggingConfiguration = new FileLoggingConfiguration(); await console.WriteAsync(null, true, cancellationToken).ConfigureAwait(false); fileLoggingConfiguration.Disable = !await PromptYesNo("Enable file logging? (y/n): ", cancellationToken).ConfigureAwait(false); if (!fileLoggingConfiguration.Disable) { do { await console.WriteAsync("Log file directory path (leave blank for default): ", false, cancellationToken).ConfigureAwait(false); fileLoggingConfiguration.Directory = await console.ReadLineAsync(false, cancellationToken).ConfigureAwait(false); if (String.IsNullOrWhiteSpace(fileLoggingConfiguration.Directory)) { fileLoggingConfiguration.Directory = null; break; } //test a write of it await console.WriteAsync(null, true, cancellationToken).ConfigureAwait(false); await console.WriteAsync("Testing directory access...", true, cancellationToken).ConfigureAwait(false); try { await ioManager.CreateDirectory(fileLoggingConfiguration.Directory, cancellationToken).ConfigureAwait(false); var testFile = ioManager.ConcatPath(fileLoggingConfiguration.Directory, String.Format(CultureInfo.InvariantCulture, "WizardAccesTest.{0}.deleteme", Guid.NewGuid())); await ioManager.WriteAllBytes(testFile, Array.Empty <byte>(), cancellationToken).ConfigureAwait(false); try { await ioManager.DeleteFile(testFile, cancellationToken).ConfigureAwait(false); } catch (OperationCanceledException) { throw; } catch (Exception e) { await console.WriteAsync(String.Format(CultureInfo.InvariantCulture, "Error deleting test log file: {0}", testFile), true, cancellationToken).ConfigureAwait(false); await console.WriteAsync(e.Message, true, cancellationToken).ConfigureAwait(false); await console.WriteAsync(null, true, cancellationToken).ConfigureAwait(false); } break; } catch (OperationCanceledException) { throw; } catch (Exception e) { await console.WriteAsync(e.Message, true, cancellationToken).ConfigureAwait(false); await console.WriteAsync(null, true, cancellationToken).ConfigureAwait(false); await console.WriteAsync("Please verify the path is valid and you have access to it!", true, cancellationToken).ConfigureAwait(false); } } while (true); async Task <LogLevel?> PromptLogLevel(string question) { do { await console.WriteAsync(null, true, cancellationToken).ConfigureAwait(false); await console.WriteAsync(question, true, cancellationToken).ConfigureAwait(false); await console.WriteAsync(String.Format(CultureInfo.InvariantCulture, "Enter one of {0}/{1}/{2}/{3}/{4}/{5} (leave blank for default): ", nameof(LogLevel.Trace), nameof(LogLevel.Debug), nameof(LogLevel.Information), nameof(LogLevel.Warning), nameof(LogLevel.Error), nameof(LogLevel.Critical)), false, cancellationToken).ConfigureAwait(false); var responseString = await console.ReadLineAsync(false, cancellationToken).ConfigureAwait(false); if (String.IsNullOrWhiteSpace(responseString)) { return(null); } if (Enum.TryParse <LogLevel>(responseString, out var logLevel) && logLevel != LogLevel.None) { return(logLevel); } await console.WriteAsync("Invalid log level!", true, cancellationToken).ConfigureAwait(false); } while (true); } fileLoggingConfiguration.LogLevel = await 
PromptLogLevel(String.Format(CultureInfo.InvariantCulture, "Enter the level limit for normal logs (default {0}).", fileLoggingConfiguration.LogLevel)).ConfigureAwait(false) ?? fileLoggingConfiguration.LogLevel; fileLoggingConfiguration.MicrosoftLogLevel = await PromptLogLevel(String.Format(CultureInfo.InvariantCulture, "Enter the level limit for Microsoft logs (VERY verbose, default {0}).", fileLoggingConfiguration.MicrosoftLogLevel)).ConfigureAwait(false) ?? fileLoggingConfiguration.MicrosoftLogLevel; } return(fileLoggingConfiguration); }
public async Task <IActionResult> Create([FromBody] Api.Models.Instance model, CancellationToken cancellationToken) { if (model == null) { throw new ArgumentNullException(nameof(model)); } if (String.IsNullOrWhiteSpace(model.Name)) { return(BadRequest(new ErrorMessage { Message = "name must not be empty!" })); } if (model.Path == null) { return(BadRequest(new ErrorMessage { Message = "path must not be empty!" })); } NormalizeModelPath(model, out var rawPath); var localPath = ioManager.ResolvePath("."); NormalizeModelPath(new Api.Models.Instance { Path = localPath }, out var normalizedLocalPath); if (rawPath.StartsWith(normalizedLocalPath, StringComparison.Ordinal)) { // short-circuit on sameLength so we never index past the end when the paths are identical bool sameLength = rawPath.Length == normalizedLocalPath.Length; if (sameLength || rawPath[normalizedLocalPath.Length] == Path.DirectorySeparatorChar || rawPath[normalizedLocalPath.Length] == Path.AltDirectorySeparatorChar) { return(Conflict(new ErrorMessage { Message = "Instances cannot be created in the installation directory!" })); } } var dirExistsTask = ioManager.DirectoryExists(model.Path, cancellationToken); bool attached = false; if (await ioManager.FileExists(model.Path, cancellationToken).ConfigureAwait(false) || await dirExistsTask.ConfigureAwait(false)) { if (!await ioManager.FileExists(ioManager.ConcatPath(model.Path, InstanceAttachFileName), cancellationToken).ConfigureAwait(false)) { return(Conflict(new ErrorMessage { Message = "Path not empty!" })); } else { attached = true; } } var newInstance = new Models.Instance { ConfigurationType = model.ConfigurationType ?? ConfigurationType.Disallowed, DreamDaemonSettings = new DreamDaemonSettings { AllowWebClient = false, AutoStart = false, PrimaryPort = 1337, SecondaryPort = 1338, SecurityLevel = DreamDaemonSecurity.Safe, SoftRestart = false, SoftShutdown = false, StartupTimeout = 20 }, DreamMakerSettings = new DreamMakerSettings { ApiValidationPort = 1339, ApiValidationSecurityLevel = DreamDaemonSecurity.Safe }, Name = model.Name, Online = false, Path = model.Path, AutoUpdateInterval = model.AutoUpdateInterval ?? 0, RepositorySettings = new RepositorySettings { CommitterEmail = "*****@*****.**", CommitterName = application.VersionPrefix, PushTestMergeCommits = false, ShowTestMergeCommitters = false, AutoUpdatesKeepTestMerges = false, AutoUpdatesSynchronize = false, PostTestMergeComment = false }, InstanceUsers = new List <Models.InstanceUser> // give this user full privileges on the instance { InstanceAdminUser() } }; DatabaseContext.Instances.Add(newInstance); try { await DatabaseContext.Save(cancellationToken).ConfigureAwait(false); try { // actually reserve it now await ioManager.CreateDirectory(rawPath, cancellationToken).ConfigureAwait(false); await ioManager.DeleteFile(ioManager.ConcatPath(rawPath, InstanceAttachFileName), cancellationToken).ConfigureAwait(false); } catch { // oh shit delete the model DatabaseContext.Instances.Remove(newInstance); await DatabaseContext.Save(default).ConfigureAwait(false);
/// <inheritdoc /> #pragma warning disable CA1506 // TODO: Decomplexify public async Task <IDmbProvider> FromCompileJob(CompileJob compileJob, CancellationToken cancellationToken) { if (compileJob == null) { throw new ArgumentNullException(nameof(compileJob)); } // ensure we have the entire metadata tree logger.LogTrace("Loading compile job {0}...", compileJob.Id); await databaseContextFactory.UseContext( async db => compileJob = await db .CompileJobs .AsQueryable() .Where(x => x.Id == compileJob.Id) .Include(x => x.Job) .ThenInclude(x => x.StartedBy) .Include(x => x.RevisionInformation) .ThenInclude(x => x.PrimaryTestMerge) .ThenInclude(x => x.MergedBy) .Include(x => x.RevisionInformation) .ThenInclude(x => x.ActiveTestMerges) .ThenInclude(x => x.TestMerge) .ThenInclude(x => x.MergedBy) .FirstAsync(cancellationToken) .ConfigureAwait(false)) .ConfigureAwait(false); // can't wait to see that query if (!compileJob.Job.StoppedAt.HasValue) { // This happens when we're told to load the compile job that is currently finished up // It constitutes an API violation if it's returned by the DreamDaemonController so just set it here // Bit of a hack, but it works out to be nearly if not the same value that's put in the DB logger.LogTrace("Setting missing StoppedAt for CompileJob.Job #{0}...", compileJob.Job.Id); compileJob.Job.StoppedAt = DateTimeOffset.UtcNow; } var providerSubmitted = false; void CleanupAction() { if (providerSubmitted) { CleanJob(compileJob); } } var newProvider = new DmbProvider(compileJob, ioManager, CleanupAction); try { const string LegacyADirectoryName = "A"; const string LegacyBDirectoryName = "B"; var dmbExistsAtRoot = await ioManager.FileExists( ioManager.ConcatPath( newProvider.Directory, newProvider.DmbName), cancellationToken) .ConfigureAwait(false); if (!dmbExistsAtRoot) { logger.LogTrace("Didn't find .dmb at game directory root, checking A/B dirs..."); var primaryCheckTask = ioManager.FileExists( ioManager.ConcatPath( newProvider.Directory, LegacyADirectoryName, newProvider.DmbName), cancellationToken); var secondaryCheckTask = ioManager.FileExists( ioManager.ConcatPath( newProvider.Directory, LegacyBDirectoryName, newProvider.DmbName), cancellationToken); if (!(await primaryCheckTask.ConfigureAwait(false) && await secondaryCheckTask.ConfigureAwait(false))) { logger.LogWarning("Error loading compile job, .dmb missing!"); return(null); // omae wa mou shinderu } // rebuild the provider because it's using the legacy style directories // Don't dispose it logger.LogDebug("Creating legacy two folder .dmb provider targeting {0} directory...", LegacyADirectoryName); newProvider = new DmbProvider(compileJob, ioManager, CleanupAction, Path.DirectorySeparatorChar + LegacyADirectoryName); } lock (jobLockCounts) { if (!jobLockCounts.TryGetValue(compileJob.Id, out int value)) { value = 1; jobLockCounts.Add(compileJob.Id, 1); } else { jobLockCounts[compileJob.Id] = ++value; } providerSubmitted = true; logger.LogTrace("Compile job {0} lock count now: {1}", compileJob.Id, value); return(newProvider); } } finally { if (!providerSubmitted) { newProvider.Dispose(); } } }
/// <summary> /// Generate map diffs for a given <paramref name="pullRequest"/> /// </summary> /// <param name="pullRequest">The <see cref="PullRequest"/></param> /// <param name="checkRunId">The <see cref="CheckRun.Id"/></param> /// <param name="changedDmms">Paths to changed .dmm files</param> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the operation</param> /// <returns>A <see cref="Task"/> representing the running operation</returns> async Task GenerateDiffs(PullRequest pullRequest, long checkRunId, IReadOnlyList <string> changedDmms, CancellationToken cancellationToken) { using (logger.BeginScope("Generating {0} diffs for pull request #{1} in {2}/{3}", changedDmms.Count, pullRequest.Number, pullRequest.Base.Repository.Owner.Login, pullRequest.Base.Repository.Name)) { const string OldMapExtension = ".old_map_diff_bot"; var gitHubManager = serviceProvider.GetRequiredService <IGitHubManager>(); Task generatingCommentTask; List <Task <RenderResult> > afterRenderings, beforeRenderings; var workingDir = ioManager.ConcatPath(pullRequest.Base.Repository.Owner.Login, pullRequest.Base.Repository.Name, pullRequest.Number.ToString(CultureInfo.InvariantCulture)); logger.LogTrace("Setting workdir to {0}", workingDir); IIOManager currentIOManager = new ResolvingIOManager(ioManager, workingDir); string repoPath; int lastProgress = -1; Task lastProgressUpdate = Task.CompletedTask; async Task OnCloneProgress(int progress) { lock (gitHubManager) { if (lastProgress >= progress) { return; } if (lastProgress == -1) { logger.LogInformation("Waiting on repository to finish cloning..."); } lastProgress = progress; } await lastProgressUpdate.ConfigureAwait(false); await gitHubManager.UpdateCheckRun(pullRequest.Base.Repository.Id, checkRunId, new CheckRunUpdate { Status = CheckStatus.InProgress, Output = new CheckRunOutput(stringLocalizer["Cloning Repository"], stringLocalizer["Clone Progress: {0}%", progress], null, null, null), }, cancellationToken).ConfigureAwait(false); }; Task CreateBlockedComment() { logger.LogInformation("Waiting for another diff generation on {0}/{1} to complete...", pullRequest.Base.Repository.Owner.Login, pullRequest.Base.Repository.Name); return(gitHubManager.CreateSingletonComment(pullRequest, stringLocalizer["Waiting for another operation on this repository to complete..."], cancellationToken)); }; logger.LogTrace("Locking repository..."); using (var repo = await repositoryManager.GetRepository(pullRequest.Base.Repository, OnCloneProgress, CreateBlockedComment, cancellationToken).ConfigureAwait(false)) { logger.LogTrace("Repository ready"); generatingCommentTask = gitHubManager.UpdateCheckRun(pullRequest.Base.Repository.Id, checkRunId, new CheckRunUpdate { Status = CheckStatus.InProgress, Output = new CheckRunOutput(stringLocalizer["Generating Diffs"], stringLocalizer["Aww geez rick, I should eventually put some progress message here"], null, null, null), }, cancellationToken); //prep the outputDirectory async Task DirectoryPrep() { logger.LogTrace("Cleaning workdir..."); await currentIOManager.DeleteDirectory(".", cancellationToken).ConfigureAwait(false); await currentIOManager.CreateDirectory(".", cancellationToken).ConfigureAwait(false); logger.LogTrace("Workdir cleaned"); }; var dirPrepTask = DirectoryPrep(); //get the dme to use var dmeToUseTask = serviceProvider.GetRequiredService <IDatabaseContext>().InstallationRepositories.Where(x => x.Id == pullRequest.Base.Repository.Id).Select(x => 
x.TargetDme).ToAsyncEnumerable().FirstOrDefault(cancellationToken); var oldMapPaths = new List <string>() { Capacity = changedDmms.Count }; try { //fetch base commit if necessary and check it out, fetch pull request if (!await repo.ContainsCommit(pullRequest.Base.Sha, cancellationToken).ConfigureAwait(false)) { logger.LogTrace("Base commit not found, running fetch..."); await repo.Fetch(cancellationToken).ConfigureAwait(false); } logger.LogTrace("Moving HEAD to pull request base..."); await repo.Checkout(pullRequest.Base.Sha, cancellationToken).ConfigureAwait(false); //but since we don't need this right await don't await it yet var pullRequestFetchTask = repo.FetchPullRequest(pullRequest.Number, cancellationToken); try { //first copy all modified maps to the same location with the .old_map_diff_bot extension async Task <string> CacheMap(string mapPath) { var originalPath = currentIOManager.ConcatPath(repoPath, mapPath); if (await currentIOManager.FileExists(originalPath, cancellationToken).ConfigureAwait(false)) { logger.LogTrace("Creating old map cache of {0}", mapPath); var oldMapPath = String.Format(CultureInfo.InvariantCulture, "{0}{1}", originalPath, OldMapExtension); await currentIOManager.CopyFile(originalPath, oldMapPath, cancellationToken).ConfigureAwait(false); return(oldMapPath); } return(null); }; repoPath = repo.Path; var tasks = changedDmms.Select(x => CacheMap(x)).ToList(); await Task.WhenAll(tasks).ConfigureAwait(false); oldMapPaths.AddRange(tasks.Select(x => x.Result)); } finally { logger.LogTrace("Waiting for pull request commits to be available..."); await pullRequestFetchTask.ConfigureAwait(false); } logger.LogTrace("Creating and moving HEAD to pull request merge commit..."); //generate the merge commit ourselves since we can't get it from GitHub because itll return an outdated one await repo.Merge(pullRequest.Head.Sha, cancellationToken).ConfigureAwait(false); } finally { logger.LogTrace("Waiting for configured project dme..."); await dmeToUseTask.ConfigureAwait(false); } //create empty array of map regions var mapRegions = Enumerable.Repeat <MapRegion>(null, changedDmms.Count).ToList(); var dmeToUse = dmeToUseTask.Result; var generator = generatorFactory.CreateGenerator(dmeToUse, new ResolvingIOManager(ioManager, repoPath)); var outputDirectory = currentIOManager.ResolvePath("."); logger.LogTrace("Full workdir path: {0}", outputDirectory); //Generate MapRegions for modified maps and render all new maps async Task <RenderResult> DiffAndRenderNewMap(int I) { await dirPrepTask.ConfigureAwait(false); var originalPath = currentIOManager.ConcatPath(repoPath, changedDmms[I]); if (!await currentIOManager.FileExists(originalPath, cancellationToken).ConfigureAwait(false)) { logger.LogTrace("No new map for path {0} exists, skipping region detection and after render", changedDmms[I]); return(new RenderResult { InputPath = changedDmms[I], ToolOutput = stringLocalizer["Map missing!"] }); } ToolResult result = null; if (oldMapPaths[I] != null) { logger.LogTrace("Getting diff region for {0}...", changedDmms[I]); result = await generator.GetDifferences(oldMapPaths[I], originalPath, cancellationToken).ConfigureAwait(false); var region = result.MapRegion; logger.LogTrace("Diff region for {0}: {1}", changedDmms[I], region); if (region != null) { var xdiam = region.MaxX - region.MinX; var ydiam = region.MaxY - region.MinY; const int minDiffDimensions = 5 - 1; if (xdiam < minDiffDimensions || ydiam < minDiffDimensions) { //need to expand var fullResult = await 
generator.GetMapSize(originalPath, cancellationToken).ConfigureAwait(false); var fullRegion = fullResult.MapRegion; if (fullRegion == null) { //give up region = null; } else { bool increaseMax = true; if (xdiam < minDiffDimensions && ((fullRegion.MaxX - fullRegion.MinX) >= minDiffDimensions)) { while ((region.MaxX - region.MinX) < minDiffDimensions) { if (increaseMax) { region.MaxX = (short)Math.Min(region.MaxX + 1, fullRegion.MaxX); } else { region.MinX = (short)Math.Max(region.MinX - 1, 1); } increaseMax = !increaseMax; } } if (ydiam < minDiffDimensions && ((fullRegion.MaxY - fullRegion.MinY) >= minDiffDimensions)) { while ((region.MaxY - region.MinY) < minDiffDimensions) { if (increaseMax) { region.MaxY = (short)Math.Min(region.MaxY + 1, fullRegion.MaxY); } else { region.MinY = (short)Math.Max(region.MinY - 1, 1); } increaseMax = !increaseMax; } } } logger.LogTrace("Region for {0} expanded to {1}", changedDmms[I], region); } mapRegions[I] = region; } } else { logger.LogTrace("Skipping region detection for {0} due to old map not existing", changedDmms[I]); } logger.LogTrace("Performing after rendering for {0}...", changedDmms[I]); var renderResult = await generator.RenderMap(originalPath, mapRegions[I], outputDirectory, "after", cancellationToken).ConfigureAwait(false); logger.LogTrace("After rendering for {0} complete! Result path: {1}, Output: {2}", changedDmms[I], renderResult.OutputPath, renderResult.ToolOutput); if (result != null) { renderResult.ToolOutput = String.Format(CultureInfo.InvariantCulture, "Differences task:{0}{1}{0}Render task:{0}{2}", Environment.NewLine, result.ToolOutput, renderResult.ToolOutput); } return(renderResult); }; logger.LogTrace("Running iterations of DiffAndRenderNewMap..."); //finish up before we go back to the base branch afterRenderings = Enumerable.Range(0, changedDmms.Count).Select(I => DiffAndRenderNewMap(I)).ToList(); try { await Task.WhenAll(afterRenderings).ConfigureAwait(false); } catch (Exception e) { logger.LogDebug(e, "After renderings produced exception!"); //at this point everything is done but some have failed //we'll handle it later } logger.LogTrace("Moving HEAD back to pull request base..."); await repo.Checkout(pullRequest.Base.Sha, cancellationToken).ConfigureAwait(false); Task <RenderResult> RenderOldMap(int i) { var oldPath = oldMapPaths[i]; if (oldPath != null) { logger.LogTrace("Performing before rendering for {0}...", changedDmms[i]); return(generator.RenderMap(oldPath, mapRegions[i], outputDirectory, "before", cancellationToken)); } return(Task.FromResult(new RenderResult { InputPath = changedDmms[i], ToolOutput = stringLocalizer["Map missing!"] })); } logger.LogTrace("Running iterations of RenderOldMap..."); //finish up rendering beforeRenderings = Enumerable.Range(0, changedDmms.Count).Select(I => RenderOldMap(I)).ToList(); try { await Task.WhenAll(beforeRenderings).ConfigureAwait(false); } catch (Exception e) { logger.LogDebug(e, "Before renderings produced exception!"); //see above } //done with the repo at this point logger.LogTrace("Renderings complete. 
Releasing repository"); } //collect results and errors async Task <KeyValuePair <MapDiff, MapRegion> > GetResult(int i) { var beforeTask = beforeRenderings[i]; var afterTask = afterRenderings[i]; var result = new MapDiff { InstallationRepositoryId = pullRequest.Base.Repository.Id, CheckRunId = checkRunId, FileId = i, }; RenderResult GetRenderingResult(Task <RenderResult> task) { if (task.Exception != null) { result.LogMessage = result.LogMessage == null ? task.Exception.ToString() : String.Format(CultureInfo.InvariantCulture, "{0}{1}{2}", result.LogMessage, Environment.NewLine, task.Exception); return(null); } return(task.Result); }; var r1 = GetRenderingResult(beforeTask); var r2 = GetRenderingResult(afterTask); logger.LogTrace("Results for {0}: Before {1}, After {2}", changedDmms[i], r1?.OutputPath ?? "NONE", r2?.OutputPath ?? "NONE"); result.MapPath = changedDmms[i]; result.LogMessage = String.Format(CultureInfo.InvariantCulture, "Job {5}:{0}Path: {6}{0}Before:{0}Command Line: {1}{0}Output:{0}{2}{0}Logs:{0}{7}{0}After:{0}Command Line: {3}{0}Output:{0}{4}{0}Logs:{0}{8}{0}Exceptions:{0}{9}{0}", Environment.NewLine, r1?.CommandLine, r1?.OutputPath, r2?.CommandLine, r2?.OutputPath, i + 1, result.MapPath, r1?.ToolOutput, r2?.ToolOutput, result.LogMessage); async Task <byte[]> ReadMapImage(string path) { if (path != null && await currentIOManager.FileExists(path, cancellationToken).ConfigureAwait(false)) { var bytes = await currentIOManager.ReadAllBytes(path, cancellationToken).ConfigureAwait(false); await currentIOManager.DeleteFile(path, cancellationToken).ConfigureAwait(false); return(bytes); } return(null); } var readBeforeTask = ReadMapImage(r1?.OutputPath); result.AfterImage = await ReadMapImage(r2?.OutputPath).ConfigureAwait(false); result.BeforeImage = await readBeforeTask.ConfigureAwait(false); return(new KeyValuePair <MapDiff, MapRegion>(result, r2?.MapRegion)); } logger.LogTrace("Waiting for notification comment to POST..."); await generatingCommentTask.ConfigureAwait(false); logger.LogTrace("Collecting results..."); var results = Enumerable.Range(0, changedDmms.Count).Select(x => GetResult(x)).ToList(); await Task.WhenAll(results).ConfigureAwait(false); var dic = new Dictionary <MapDiff, MapRegion>(); foreach (var I in results.Select(x => x.Result)) { dic.Add(I.Key, I.Value); } await HandleResults(pullRequest, checkRunId, dic, cancellationToken).ConfigureAwait(false); } }
/// <summary> /// Construct a <see cref="Repository"/> /// </summary> /// <param name="gitHubConfigurationOptions">The <see cref="IOptions{TOptions}"/> containing the <see cref="GitHubConfiguration"/> to use for determining the <see cref="repositoryObject"/>'s path</param> /// <param name="_logger">The <see cref="ILogger"/> to use for setting up the <see cref="Repository"/></param> /// <param name="_ioManager">The value of <see cref="ioManager"/></param> public Repository(IOptions <GitHubConfiguration> gitHubConfigurationOptions, ILogger <Repository> _logger, IIOManager _ioManager) { logger = _logger ?? throw new ArgumentNullException(nameof(_logger)); gitHubConfiguration = gitHubConfigurationOptions?.Value ?? throw new ArgumentNullException(nameof(gitHubConfigurationOptions)); ioManager = new ResolvingIOManager(_ioManager ?? throw new ArgumentNullException(nameof(_ioManager)), _ioManager.ConcatPath(Application.DataDirectory, RepositoriesDirectory)); semaphore = new SemaphoreSlim(1); }
/// <inheritdoc /> #pragma warning disable CA1506 // TODO: Decomplexify public async Task <IDmbProvider> FromCompileJob(CompileJob compileJob, CancellationToken cancellationToken) { if (compileJob == null) { throw new ArgumentNullException(nameof(compileJob)); } // ensure we have the entire compile job tree logger.LogTrace("Loading compile job {0}...", compileJob.Id); await databaseContextFactory.UseContext( async db => compileJob = await db .CompileJobs .AsQueryable() .Where(x => x.Id == compileJob.Id) .Include(x => x.Job).ThenInclude(x => x.StartedBy) .Include(x => x.RevisionInformation).ThenInclude(x => x.PrimaryTestMerge).ThenInclude(x => x.MergedBy) .Include(x => x.RevisionInformation).ThenInclude(x => x.ActiveTestMerges).ThenInclude(x => x.TestMerge).ThenInclude(x => x.MergedBy) .FirstAsync(cancellationToken) .ConfigureAwait(false)) .ConfigureAwait(false); // can't wait to see that query if (!compileJob.Job.StoppedAt.HasValue) { // This happens if we're told to load the compile job that is currently finished up // It can constitute an API violation if it's returned by the DreamDaemonController so just set it here // Bit of a hack, but it should work out to be the same value logger.LogTrace("Setting missing StoppedAt for CompileJob job..."); compileJob.Job.StoppedAt = DateTimeOffset.Now; } var providerSubmitted = false; var newProvider = new DmbProvider(compileJob, ioManager, () => { if (providerSubmitted) { CleanJob(compileJob); } }); try { var primaryCheckTask = ioManager.FileExists(ioManager.ConcatPath(newProvider.PrimaryDirectory, newProvider.DmbName), cancellationToken); var secondaryCheckTask = ioManager.FileExists(ioManager.ConcatPath(newProvider.SecondaryDirectory, newProvider.DmbName), cancellationToken); if (!(await primaryCheckTask.ConfigureAwait(false) && await secondaryCheckTask.ConfigureAwait(false))) { logger.LogWarning("Error loading compile job, .dmb missing!"); return(null); // omae wa mou shinderu } lock (jobLockCounts) { if (!jobLockCounts.TryGetValue(compileJob.Id, out int value)) { value = 1; jobLockCounts.Add(compileJob.Id, 1); } else { jobLockCounts[compileJob.Id] = ++value; } logger.LogTrace("Compile job {0} lock count now: {1}", compileJob.Id, value); providerSubmitted = true; return(newProvider); } } finally { if (!providerSubmitted) { newProvider.Dispose(); } } }
/// <inheritdoc /> public Task Initialize(CancellationToken cancellationToken) { return(Task.Factory.StartNew(async() => { using (logger.BeginScope("Initializing repository...")) { var repoPath = ioManager.ResolvePath(ioManager.ConcatPath(gitHubConfiguration.RepoOwner, gitHubConfiguration.RepoName)); logger.LogTrace("Repo path evaluated to be: {0}", repoPath); try { logger.LogTrace("Creating repository object."); cancellationToken.ThrowIfCancellationRequested(); repositoryObject = new LibGit2Sharp.Repository(repoPath); repositoryObject.RemoveUntrackedFiles(); cancellationToken.ThrowIfCancellationRequested(); repositoryObject.RetrieveStatus(); } catch (OperationCanceledException e) { logger.LogDebug(e, "Repository setup cancelled!"); repositoryObject?.Dispose(); throw; } catch (Exception e) { cancellationToken.ThrowIfCancellationRequested(); using (logger.BeginScope("Repository fallback initializing...")) { repositoryObject?.Dispose(); try { logger.LogTrace("Checking repository directory exists."); if (await ioManager.DirectoryExists(repoPath, cancellationToken).ConfigureAwait(false)) { logger.LogWarning(e, "Failed to load repository! Deleting and cloning..."); await ioManager.DeleteDirectory(repoPath, cancellationToken).ConfigureAwait(false); } else { logger.LogInformation(e, "Cloning repository..."); } LibGit2Sharp.Repository.Clone(String.Format(CultureInfo.InvariantCulture, "https://github.com/{0}/{1}", gitHubConfiguration.RepoOwner, gitHubConfiguration.RepoName), repoPath, new CloneOptions { Checkout = false, RecurseSubmodules = true, OnProgress = (a) => !cancellationToken.IsCancellationRequested, OnUpdateTips = (a, b, c) => !cancellationToken.IsCancellationRequested, OnTransferProgress = (a) => !cancellationToken.IsCancellationRequested }); logger.LogInformation("Repo clone completed."); repositoryObject = new LibGit2Sharp.Repository(repoPath); } catch (UserCancelledException e2) { logger.LogDebug(e2, "Repository setup cancelled!"); cancellationToken.ThrowIfCancellationRequested(); } catch (Exception e2) { logger.LogCritical(e2, "Unable to clone repository!"); throw; } } } } }, cancellationToken, TaskCreationOptions.LongRunning, TaskScheduler.Current)); }
/// <summary> /// Configure dependency injected services /// </summary> /// <param name="services">The <see cref="IServiceCollection"/> to configure</param> public void ConfigureServices(IServiceCollection services) { if (services == null) { throw new ArgumentNullException(nameof(services)); } // needful services.AddSingleton <IApplication>(this); // configure configuration services.UseStandardConfig <UpdatesConfiguration>(configuration); services.UseStandardConfig <DatabaseConfiguration>(configuration); services.UseStandardConfig <GeneralConfiguration>(configuration); services.UseStandardConfig <FileLoggingConfiguration>(configuration); services.UseStandardConfig <ControlPanelConfiguration>(configuration); // enable options which give us config reloading services.AddOptions(); // this is needful for the setup wizard services.AddLogging(); // other stuff needed for the setup wizard and configuration services.AddSingleton <IConsole, IO.Console>(); services.AddSingleton <IDatabaseConnectionFactory, DatabaseConnectionFactory>(); services.AddSingleton <ISetupWizard, SetupWizard>(); services.AddSingleton <IPlatformIdentifier, PlatformIdentifier>(); services.AddSingleton <IAsyncDelayer, AsyncDelayer>(); GeneralConfiguration generalConfiguration; DatabaseConfiguration databaseConfiguration; FileLoggingConfiguration fileLoggingConfiguration; ControlPanelConfiguration controlPanelConfiguration; IPlatformIdentifier platformIdentifier; // temporarily build the service provider in its current state // do it here so we can run the setup wizard if necessary // also allows us to get some options and other services we need for continued configuration using (var provider = services.BuildServiceProvider()) { // run the wizard if necessary var setupWizard = provider.GetRequiredService <ISetupWizard>(); var applicationLifetime = provider.GetRequiredService <Microsoft.AspNetCore.Hosting.IApplicationLifetime>(); var setupWizardRan = setupWizard.CheckRunWizard(applicationLifetime.ApplicationStopping).GetAwaiter().GetResult(); // load the configuration options we need var generalOptions = provider.GetRequiredService <IOptions <GeneralConfiguration> >(); generalConfiguration = generalOptions.Value; // unless SetupWizardMode.Only is set, in which case we exit here if (setupWizardRan && generalConfiguration.SetupWizardMode == SetupWizardMode.Only) { throw new OperationCanceledException("Exiting due to SetupWizardMode configuration!"); // we don't inject a logger in the constructor to log this because it's not yet configured } var dbOptions = provider.GetRequiredService <IOptions <DatabaseConfiguration> >(); databaseConfiguration = dbOptions.Value; var loggingOptions = provider.GetRequiredService <IOptions <FileLoggingConfiguration> >(); fileLoggingConfiguration = loggingOptions.Value; var controlPanelOptions = provider.GetRequiredService <IOptions <ControlPanelConfiguration> >(); controlPanelConfiguration = controlPanelOptions.Value; platformIdentifier = provider.GetRequiredService <IPlatformIdentifier>(); } // set up file logging via serilog if (!fileLoggingConfiguration.Disable) { services.AddLogging(builder => { // common app data is C:/ProgramData on windows, else /usr/share var logPath = !String.IsNullOrEmpty(fileLoggingConfiguration.Directory) ?
fileLoggingConfiguration.Directory : ioManager.ConcatPath(Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), VersionPrefix, "Logs"); logPath = ioManager.ConcatPath(logPath, "tgs-{Date}.log"); LogEventLevel?ConvertLogLevel(LogLevel logLevel) { switch (logLevel) { case LogLevel.Critical: return(LogEventLevel.Fatal); case LogLevel.Debug: return(LogEventLevel.Debug); case LogLevel.Error: return(LogEventLevel.Error); case LogLevel.Information: return(LogEventLevel.Information); case LogLevel.Trace: return(LogEventLevel.Verbose); case LogLevel.Warning: return(LogEventLevel.Warning); case LogLevel.None: return(null); default: throw new InvalidOperationException(String.Format(CultureInfo.InvariantCulture, "Invalid log level {0}", logLevel)); } } var logEventLevel = ConvertLogLevel(fileLoggingConfiguration.LogLevel); var microsoftEventLevel = ConvertLogLevel(fileLoggingConfiguration.MicrosoftLogLevel); var formatter = new MessageTemplateTextFormatter("{Timestamp:o} {RequestId,13} [{Level:u3}] {SourceContext:l}: {Message} ({EventId:x8}){NewLine}{Exception}", null); var configuration = new LoggerConfiguration() .Enrich.FromLogContext() .WriteTo.Async(w => w.RollingFile(formatter, logPath, shared: true, flushToDiskInterval: TimeSpan.FromSeconds(2))); if (logEventLevel.HasValue) { configuration.MinimumLevel.Is(logEventLevel.Value); } if (microsoftEventLevel.HasValue) { configuration.MinimumLevel.Override("Microsoft", microsoftEventLevel.Value); } builder.AddSerilog(configuration.CreateLogger(), true); }); } // configure bearer token validation services.AddAuthentication(JwtBearerDefaults.AuthenticationScheme).AddJwtBearer(jwtBearerOptions => { // this line isn't actually run until the first request is made // at that point tokenFactory will be populated jwtBearerOptions.TokenValidationParameters = tokenFactory.ValidationParameters; jwtBearerOptions.Events = new JwtBearerEvents { // Application is our composition root so this monstrosity of a line is okay OnTokenValidated = ctx => ctx.HttpContext.RequestServices.GetRequiredService <IClaimsInjector>().InjectClaimsIntoContext(ctx, ctx.HttpContext.RequestAborted) }; }); // f*****g prevents converting 'sub' to M$ bs // can't be done in the above lambda, that's too late JwtSecurityTokenHandler.DefaultInboundClaimTypeMap.Clear(); // add mvc, configure the json serializer settings services .AddMvc(options => { var dataAnnotationValidator = options.ModelValidatorProviders.Single(validator => validator.GetType().Name == "DataAnnotationsModelValidatorProvider"); options.ModelValidatorProviders.Remove(dataAnnotationValidator); }) .SetCompatibilityVersion(CompatibilityVersion.Version_2_1) .AddJsonOptions(options => { options.AllowInputFormatterExceptionMessages = true; options.SerializerSettings.NullValueHandling = NullValueHandling.Ignore; options.SerializerSettings.CheckAdditionalContent = true; options.SerializerSettings.MissingMemberHandling = MissingMemberHandling.Error; options.SerializerSettings.ReferenceLoopHandling = ReferenceLoopHandling.Ignore; options.SerializerSettings.Converters = new[] { new VersionConverter() }; }); if (hostingEnvironment.IsDevelopment()) { string GetDocumentationFilePath(string assemblyLocation) => ioManager.ConcatPath(ioManager.GetDirectoryName(assemblyLocation), String.Concat(ioManager.GetFileNameWithoutExtension(assemblyLocation), ".xml")); var assemblyDocumentationPath = GetDocumentationFilePath(assemblyInformationProvider.Path); var apiDocumentationPath = 
GetDocumentationFilePath(typeof(ApiHeaders).Assembly.Location); services.AddSwaggerGen(genOptions => SwaggerConfiguration.Configure(genOptions, assemblyDocumentationPath, apiDocumentationPath)); } // enable browser detection services.AddDetectionCore().AddBrowser(); // CORS conditionally enabled later services.AddCors(); void AddTypedContext <TContext>() where TContext : DatabaseContext <TContext> { services.AddDbContext <TContext>(builder => { if (hostingEnvironment.IsDevelopment()) { builder.EnableSensitiveDataLogging(); } }); services.AddScoped <IDatabaseContext>(x => x.GetRequiredService <TContext>()); } // add the correct database context type var dbType = databaseConfiguration.DatabaseType; switch (dbType) { case DatabaseType.MySql: case DatabaseType.MariaDB: AddTypedContext <MySqlDatabaseContext>(); break; case DatabaseType.SqlServer: AddTypedContext <SqlServerDatabaseContext>(); break; default: throw new InvalidOperationException(String.Format(CultureInfo.InvariantCulture, "Invalid {0}: {1}!", nameof(DatabaseType), dbType)); } // configure other database services services.AddSingleton <IDatabaseContextFactory, DatabaseContextFactory>(); services.AddSingleton <IDatabaseSeeder, DatabaseSeeder>(); // configure security services services.AddScoped <IAuthenticationContextFactory, AuthenticationContextFactory>(); services.AddScoped <IClaimsInjector, ClaimsInjector>(); services.AddSingleton <IIdentityCache, IdentityCache>(); services.AddSingleton <ICryptographySuite, CryptographySuite>(); services.AddSingleton <ITokenFactory, TokenFactory>(); services.AddSingleton <IPasswordHasher <Models.User>, PasswordHasher <Models.User> >(); // configure platform specific services if (platformIdentifier.IsWindows) { if (generalConfiguration.UseBasicWatchdogOnWindows) { services.AddSingleton <IWatchdogFactory, WatchdogFactory>(); } else { services.AddSingleton <IWatchdogFactory, WindowsWatchdogFactory>(); } services.AddSingleton <ISystemIdentityFactory, WindowsSystemIdentityFactory>(); services.AddSingleton <ISymlinkFactory, WindowsSymlinkFactory>(); services.AddSingleton <IByondInstaller, WindowsByondInstaller>(); services.AddSingleton <IPostWriteHandler, WindowsPostWriteHandler>(); services.AddSingleton <WindowsNetworkPromptReaper>(); services.AddSingleton <INetworkPromptReaper>(x => x.GetRequiredService <WindowsNetworkPromptReaper>()); services.AddSingleton <IHostedService>(x => x.GetRequiredService <WindowsNetworkPromptReaper>()); } else { services.AddSingleton <IWatchdogFactory, WatchdogFactory>(); services.AddSingleton <ISystemIdentityFactory, PosixSystemIdentityFactory>(); services.AddSingleton <ISymlinkFactory, PosixSymlinkFactory>(); services.AddSingleton <IByondInstaller, PosixByondInstaller>(); services.AddSingleton <IPostWriteHandler, PosixPostWriteHandler>(); services.AddSingleton <INetworkPromptReaper, PosixNetworkPromptReaper>(); } // configure misc services services.AddSingleton <ISynchronousIOManager, SynchronousIOManager>(); services.AddSingleton <IGitHubClientFactory, GitHubClientFactory>(); services.AddSingleton <IProcessExecutor, ProcessExecutor>(); services.AddSingleton <IByondTopicSender>(new ByondTopicSender { ReceiveTimeout = generalConfiguration.ByondTopicTimeout, SendTimeout = generalConfiguration.ByondTopicTimeout }); // configure component services services.AddSingleton <ICredentialsProvider, CredentialsProvider>(); services.AddSingleton <IProviderFactory, ProviderFactory>(); services.AddSingleton <IChatFactory, ChatFactory>(); services.AddSingleton <IInstanceFactory, 
InstanceFactory>(); // configure root services services.AddSingleton <InstanceManager>(); services.AddSingleton <IInstanceManager>(x => x.GetRequiredService <InstanceManager>()); services.AddSingleton <IHostedService>(x => x.GetRequiredService <InstanceManager>()); services.AddSingleton <IJobManager, JobManager>(); }
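// NOTE: ConfigureServices registers InstanceManager (and WindowsNetworkPromptReaper) once as a concrete
// singleton and then aliases its service interfaces to that same registration, so IInstanceManager,
// IHostedService, and direct consumers all resolve to one shared instance. A self-contained sketch of that
// pattern using a hypothetical FooManager (not a real project type):
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;

interface IFooManager { }

sealed class FooManager : IFooManager, IHostedService
{
    public Task StartAsync(CancellationToken cancellationToken) => Task.CompletedTask;
    public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask;
}

static class SingleInstanceAliasing
{
    public static void Register(IServiceCollection services)
    {
        // One concrete registration...
        services.AddSingleton<FooManager>();

        // ...and every facet forwards to it, guaranteeing a single shared instance.
        services.AddSingleton<IFooManager>(x => x.GetRequiredService<FooManager>());
        services.AddSingleton<IHostedService>(x => x.GetRequiredService<FooManager>());
    }
}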
public async Task <IActionResult> Create([FromBody] Api.Models.Instance model, CancellationToken cancellationToken) { if (model == null) { throw new ArgumentNullException(nameof(model)); } if (String.IsNullOrWhiteSpace(model.Name)) { return(BadRequest(new ErrorMessage(ErrorCode.InstanceWhitespaceName))); } var unNormalizedPath = model.Path; var targetInstancePath = NormalizePath(unNormalizedPath); model.Path = targetInstancePath; var installationDirectoryPath = NormalizePath(DefaultIOManager.CurrentDirectory); bool InstanceIsChildOf(string otherPath) { if (!targetInstancePath.StartsWith(otherPath, StringComparison.Ordinal)) { return(false); } bool sameLength = targetInstancePath.Length == otherPath.Length; char dirSeparatorChar = targetInstancePath.ToCharArray()[Math.Min(otherPath.Length, targetInstancePath.Length - 1)]; return(sameLength || dirSeparatorChar == Path.DirectorySeparatorChar || dirSeparatorChar == Path.AltDirectorySeparatorChar); } if (InstanceIsChildOf(installationDirectoryPath)) { return(Conflict(new ErrorMessage(ErrorCode.InstanceAtConflictingPath))); } // Validate it's not a child of any other instance IActionResult earlyOut = null; ulong countOfOtherInstances = 0; using (var cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken)) { var newCancellationToken = cts.Token; try { await DatabaseContext .Instances .AsQueryable() .Select(x => new Models.Instance { Path = x.Path }) .ForEachAsync( otherInstance => { if (++countOfOtherInstances >= generalConfiguration.InstanceLimit) { earlyOut ??= Conflict(new ErrorMessage(ErrorCode.InstanceLimitReached)); } else if (InstanceIsChildOf(otherInstance.Path)) { earlyOut ??= Conflict(new ErrorMessage(ErrorCode.InstanceAtConflictingPath)); } if (earlyOut != null && !newCancellationToken.IsCancellationRequested) { cts.Cancel(); } }, newCancellationToken) .ConfigureAwait(false); } catch (OperationCanceledException) { cancellationToken.ThrowIfCancellationRequested(); } } if (earlyOut != null) { return(earlyOut); } // Last test, ensure it's in the list of valid paths if (!(generalConfiguration.ValidInstancePaths? .Select(path => NormalizePath(path)) .Any(path => InstanceIsChildOf(path)) ?? 
true)) { return(BadRequest(new ErrorMessage(ErrorCode.InstanceNotAtWhitelistedPath))); } async Task <bool> DirExistsAndIsNotEmpty() { if (!await ioManager.DirectoryExists(model.Path, cancellationToken).ConfigureAwait(false)) { return(false); } var filesTask = ioManager.GetFiles(model.Path, cancellationToken); var dirsTask = ioManager.GetDirectories(model.Path, cancellationToken); var files = await filesTask.ConfigureAwait(false); var dirs = await dirsTask.ConfigureAwait(false); return(files.Concat(dirs).Any()); } var dirExistsTask = DirExistsAndIsNotEmpty(); bool attached = false; if (await ioManager.FileExists(model.Path, cancellationToken).ConfigureAwait(false) || await dirExistsTask.ConfigureAwait(false)) { if (!await ioManager.FileExists(ioManager.ConcatPath(model.Path, InstanceAttachFileName), cancellationToken).ConfigureAwait(false)) { return(Conflict(new ErrorMessage(ErrorCode.InstanceAtExistingPath))); } else { attached = true; } } var newInstance = CreateDefaultInstance(model); DatabaseContext.Instances.Add(newInstance); try { await DatabaseContext.Save(cancellationToken).ConfigureAwait(false); try { // actually reserve it now await ioManager.CreateDirectory(unNormalizedPath, cancellationToken).ConfigureAwait(false); await ioManager.DeleteFile(ioManager.ConcatPath(targetInstancePath, InstanceAttachFileName), cancellationToken).ConfigureAwait(false); } catch { // oh shit delete the model DatabaseContext.Instances.Remove(newInstance); // DCT: Operation must always run await DatabaseContext.Save(default).ConfigureAwait(false);
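// NOTE: A standalone sketch of the InstanceIsChildOf check used by Create above: a candidate path counts as
// "inside" a root when it equals the root or extends it at a directory boundary. Both arguments are assumed
// to already be normalized absolute paths (as NormalizePath produces); the class and method names here are
// hypothetical, not the project's API.
using System;
using System.IO;

static class PathContainment
{
    public static bool IsSameOrChildOf(string candidate, string root)
    {
        if (!candidate.StartsWith(root, StringComparison.Ordinal))
            return false;

        // Exact match counts as contained.
        if (candidate.Length == root.Length)
            return true;

        // Otherwise the character right after the shared prefix must be a separator,
        // so "/instances2" is not treated as a child of "/instances".
        var boundary = candidate[Math.Min(root.Length, candidate.Length - 1)];
        return boundary == Path.DirectorySeparatorChar || boundary == Path.AltDirectorySeparatorChar;
    }
}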
/// <summary> /// Run a quick DD instance to test that the DMAPI is installed in the target code /// </summary> /// <param name="timeout">The timeout in seconds for validation</param> /// <param name="securityLevel">The <see cref="DreamDaemonSecurity"/> level to use to validate the API</param> /// <param name="job">The <see cref="Models.CompileJob"/> for the operation</param> /// <param name="byondLock">The current <see cref="IByondExecutableLock"/></param> /// <param name="portToUse">The port to use for API validation</param> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the operation</param> /// <returns>A <see cref="Task"/> representing the running operation</returns> async Task VerifyApi(uint timeout, DreamDaemonSecurity securityLevel, Models.CompileJob job, IByondExecutableLock byondLock, ushort portToUse, CancellationToken cancellationToken) { logger.LogTrace("Verifying DMAPI..."); var launchParameters = new DreamDaemonLaunchParameters { AllowWebClient = false, PrimaryPort = portToUse, SecurityLevel = securityLevel, StartupTimeout = timeout }; var dirA = ioManager.ConcatPath(job.DirectoryName.ToString(), ADirectoryName); job.MinimumSecurityLevel = securityLevel; // needed for the TempDmbProvider var timeoutAt = DateTimeOffset.Now.AddSeconds(timeout); using (var provider = new TemporaryDmbProvider(ioManager.ResolvePath(dirA), String.Concat(job.DmeName, DmbExtension), job)) using (var controller = await sessionControllerFactory.LaunchNew(launchParameters, provider, byondLock, true, true, true, cancellationToken).ConfigureAwait(false)) { var launchResult = await controller.LaunchResult.ConfigureAwait(false); var now = DateTimeOffset.Now; if (now < timeoutAt && launchResult.StartupTime.HasValue) { var timeoutTask = Task.Delay(timeoutAt - now, cancellationToken); await Task.WhenAny(controller.Lifetime, timeoutTask).ConfigureAwait(false); cancellationToken.ThrowIfCancellationRequested(); } if (controller.Lifetime.IsCompleted) { var validationStatus = controller.ApiValidationStatus; logger.LogTrace("API validation status: {0}", validationStatus); switch (validationStatus) { case ApiValidationStatus.RequiresUltrasafe: job.MinimumSecurityLevel = DreamDaemonSecurity.Ultrasafe; return; case ApiValidationStatus.RequiresSafe: if (securityLevel == DreamDaemonSecurity.Ultrasafe) { throw new JobException("This game must be run with at least the 'Safe' DreamDaemon security level!"); } job.MinimumSecurityLevel = DreamDaemonSecurity.Safe; return; case ApiValidationStatus.RequiresTrusted: if (securityLevel != DreamDaemonSecurity.Trusted) { throw new JobException("This game must be run with at least the 'Trusted' DreamDaemon security level!"); } job.MinimumSecurityLevel = DreamDaemonSecurity.Trusted; return; case ApiValidationStatus.NeverValidated: break; case ApiValidationStatus.BadValidationRequest: throw new JobException("Received an unrecognized API validation request from DreamDaemon!"); case ApiValidationStatus.UnaskedValidationRequest: default: throw new InvalidOperationException(String.Format(CultureInfo.InvariantCulture, "Session controller returned unexpected ApiValidationStatus: {0}", validationStatus)); } } throw new JobException("DMAPI validation timed out!"); } }
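// NOTE: VerifyApi gives the test session a hard deadline by racing controller.Lifetime against a Task.Delay
// and then checking whether the lifetime actually completed. A minimal sketch of that wait, assuming only a
// lifetime Task and a deadline; the names here are illustrative, not the project's API.
using System;
using System.Threading;
using System.Threading.Tasks;

static class DeadlineWait
{
    // Returns true if lifetime completed before the deadline, false if the deadline won the race.
    public static async Task<bool> CompletedBeforeDeadline(Task lifetime, DateTimeOffset deadline, CancellationToken cancellationToken)
    {
        var now = DateTimeOffset.Now;
        if (now < deadline)
        {
            var timeoutTask = Task.Delay(deadline - now, cancellationToken);
            await Task.WhenAny(lifetime, timeoutTask).ConfigureAwait(false);
            cancellationToken.ThrowIfCancellationRequested();
        }

        return lifetime.IsCompleted;
    }
}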
/// <summary> /// Installs a BYOND <paramref name="version"/> if it isn't already /// </summary> /// <param name="version">The BYOND <see cref="Version"/> to install</param> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the operation</param> /// <returns>A <see cref="Task"/> representing the running operation</returns> async Task InstallVersion(Version version, CancellationToken cancellationToken) { var ourTcs = new TaskCompletionSource <object>(); Task inProgressTask; var versionKey = VersionKey(version); bool installed; lock (installedVersions) { installed = installedVersions.TryGetValue(versionKey, out inProgressTask); if (!installed) { installedVersions.Add(versionKey, ourTcs.Task); } } if (installed) { using (cancellationToken.Register(() => ourTcs.SetCanceled())) { await Task.WhenAny(ourTcs.Task, inProgressTask).ConfigureAwait(false); cancellationToken.ThrowIfCancellationRequested(); return; } } else { logger.LogDebug("Requested BYOND version {0} not currently installed. Doing so now...", versionKey); } // okay up to us to install it then try { await eventConsumer.HandleEvent(EventType.ByondInstallStart, new List <string> { versionKey }, cancellationToken).ConfigureAwait(false); var downloadTask = byondInstaller.DownloadVersion(version, cancellationToken); await ioManager.DeleteDirectory(versionKey, cancellationToken).ConfigureAwait(false); try { var download = await downloadTask.ConfigureAwait(false); await ioManager.CreateDirectory(versionKey, cancellationToken).ConfigureAwait(false); var extractPath = ioManager.ResolvePath(versionKey); logger.LogTrace("Extracting downloaded BYOND zip to {0}...", extractPath); await ioManager.ZipToDirectory(extractPath, download, cancellationToken).ConfigureAwait(false); await byondInstaller.InstallByond(extractPath, version, cancellationToken).ConfigureAwait(false); // make sure to do this last because this is what tells us we have a valid version in the future await ioManager.WriteAllBytes(ioManager.ConcatPath(versionKey, VersionFileName), Encoding.UTF8.GetBytes(version.ToString()), cancellationToken).ConfigureAwait(false); } catch (WebException e) { // since the user can easily provide non-existent version numbers, we'll turn this into a JobException throw new JobException(ErrorCode.ByondDownloadFail, e); } catch (OperationCanceledException) { throw; } catch { await ioManager.DeleteDirectory(versionKey, cancellationToken).ConfigureAwait(false); throw; } ourTcs.SetResult(null); } catch (Exception e) { if (!(e is OperationCanceledException)) { await eventConsumer.HandleEvent(EventType.ByondInstallFail, new List <string> { e.Message }, cancellationToken).ConfigureAwait(false); } lock (installedVersions) installedVersions.Remove(versionKey); ourTcs.SetException(e); throw; } }
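// NOTE: InstallVersion deduplicates concurrent requests for the same BYOND version: the first caller
// publishes its TaskCompletionSource's task in the dictionary and performs the install, while later callers
// simply await that task (raced against their own cancellation). A standalone sketch of the pattern with a
// hypothetical doWork delegate standing in for the download/extract/install steps:
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

sealed class OncePerKeyRunner
{
    readonly Dictionary<string, Task> inProgress = new Dictionary<string, Task>();

    // Runs doWork for a given key at most once; concurrent callers await the first caller's task.
    public async Task Run(string key, Func<Task> doWork, CancellationToken cancellationToken)
    {
        var ourTcs = new TaskCompletionSource<object>();
        Task existing;
        lock (inProgress)
        {
            if (!inProgress.TryGetValue(key, out existing))
                inProgress.Add(key, ourTcs.Task);
        }

        if (existing != null)
        {
            // Someone else is (or already finished) doing the work: wait on them, but stay cancellable.
            using (cancellationToken.Register(() => ourTcs.TrySetCanceled()))
            {
                await Task.WhenAny(existing, ourTcs.Task).ConfigureAwait(false);
                cancellationToken.ThrowIfCancellationRequested();
            }

            return;
        }

        try
        {
            await doWork().ConfigureAwait(false);
            ourTcs.SetResult(null); // unblocks anyone awaiting our published task
        }
        catch (Exception e)
        {
            // Drop the failed entry so a later caller can retry, then propagate to waiters.
            lock (inProgress)
                inProgress.Remove(key);
            ourTcs.SetException(e);
            throw;
        }
    }
}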