/// <summary>
/// Sets a variable in a <see cref="PreprocessReader"/> such that the value will be safe
/// to be included in a Bash variable set statement.
/// </summary>
/// <param name="preprocessReader">The reader.</param>
/// <param name="name">The variable name.</param>
/// <param name="value">The variable value.</param>
private static void SetBashVariable(PreprocessReader preprocessReader, string name, object value)
{
    Covenant.Requires<ArgumentNullException>(preprocessReader != null, nameof(preprocessReader));
    Covenant.Requires<ArgumentNullException>(name != null, nameof(name));

    if (value == null)
    {
        preprocessReader.Set(name, value);
    }
    else
    {
        switch (value)
        {
            case bool booleanValue:

                // Render booleans as Bash-style lowercase literals.

                value = booleanValue ? "true" : "false";
                break;

            case int intValue:

                // Integers are safe to emit without quoting.

                value = intValue.ToString();
                break;

            default:

                // Quote the value as a Bash double-quoted string.  Within double
                // quotes Bash treats backslash, double quote, [$] and backtick
                // specially, so those are the characters that need escaping.
                // The backslash must be escaped first so we don't double-escape
                // the escapes we add afterwards.
                //
                // NOTE: The previous implementation escaped single quotes as [\'],
                //       which Bash renders literally inside double quotes, and it
                //       didn't escape backslash, [$] or backtick at all, allowing
                //       unintended expansion within the generated script.

                var stringValue = value.ToString();

                stringValue = stringValue.Replace("\\", "\\\\");
                stringValue = stringValue.Replace("\"", "\\\"");
                stringValue = stringValue.Replace("$", "\\$");
                stringValue = stringValue.Replace("`", "\\`");

                value = $"\"{stringValue}\"";
                break;
        }

        preprocessReader.Set(name, value);
    }
}
/// <summary>
/// Copies a set of name/value pairs into a <see cref="PreprocessReader"/>.
/// </summary>
/// <param name="reader">The target reader.</param>
/// <param name="variables">The variables to be set.</param>
private void SetVariables(PreprocessReader reader, KeyValuePair<string, string>[] variables)
{
    for (int i = 0; i < variables.Length; i++)
    {
        reader.Set(variables[i].Key, variables[i].Value);
    }
}
/// <summary>
/// Expands any environment variables of the form <b>${NAME}</b> in the input
/// string and returns the expanded result.
/// </summary>
/// <param name="input">The input string.</param>
/// <returns>The expanded output string.</returns>
private string ExpandEnvironmentVars(string input)
{
    Covenant.Requires<ArgumentNullException>(input != null);

    using (var reader = new PreprocessReader(input))
    {
        reader.VariableExpansionRegex = PreprocessReader.CurlyVariableExpansionRegex;

        // Load the environment variables into the reader.
        //
        // $hack(jeff.lill):
        //
        // Some common Windows environment variable names include characters
        // (like parens) that are not compatible with PreprocessReader, so we
        // simply skip any variable whose name doesn't validate.

        foreach (DictionaryEntry entry in Environment.GetEnvironmentVariables())
        {
            var variableName = (string)entry.Key;

            if (PreprocessReader.VariableValidationRegex.IsMatch(variableName))
            {
                reader.Set(variableName, (string)entry.Value);
            }
        }

        // Perform the substitutions.

        return reader.ReadToEnd();
    }
}
/// <summary>
/// Reads the script text from an embedded resource file, replacing any variable references with the
/// variable's value.
/// </summary>
/// <param name="scriptFile">The embedded resource script file.</param>
/// <returns>The processed script text.</returns>
private string LoadScript(IStaticFile scriptFile)
{
    using (var scriptReader = scriptFile.OpenReader())
    using (var preprocessor = new PreprocessReader(scriptReader, variables))
    {
        return preprocessor.ReadToEnd();
    }
}
/// <summary>
/// Reads the script text from a file path, replacing any variable references with the
/// variable's value.
/// </summary>
/// <param name="scriptPath">The script file path.</param>
/// <returns>The processed script text.</returns>
private string LoadScript(string scriptPath)
{
    using (var fileReader = new StreamReader(scriptPath))
    using (var preprocessor = new PreprocessReader(fileReader, variables))
    {
        return preprocessor.ReadToEnd();
    }
}
/// <summary>
/// Reads the script text from a file path, replacing any variable references with the
/// variable's value.
/// </summary>
/// <param name="scriptPath">The script file path.</param>
/// <returns>The processed script text.</returns>
private string LoadScript(string scriptPath)
{
    using (var fileReader = new StreamReader(scriptPath))
    using (var preprocessor = new PreprocessReader(fileReader, variables))
    {
        // Scripts reference variables using the ${NAME} style.

        preprocessor.VariableExpansionRegex = PreprocessReader.CurlyVariableExpansionRegex;

        return preprocessor.ReadToEnd();
    }
}
/// <summary>
/// Reads the script text from an embedded resource file, replacing any variable references with the
/// variable's value.
/// </summary>
/// <param name="scriptFile">The embedded resource script file.</param>
/// <returns>The processed script text.</returns>
private string LoadScript(IStaticFile scriptFile)
{
    using (var scriptReader = scriptFile.OpenReader())
    using (var preprocessor = new PreprocessReader(scriptReader, variables))
    {
        // Scripts reference variables using the ${NAME} style.

        preprocessor.VariableExpansionRegex = PreprocessReader.CurlyVariableExpansionRegex;

        return preprocessor.ReadToEnd();
    }
}
/// <summary>
/// Parses a hive definition from JSON text.
/// </summary>
/// <param name="json">The JSON text.</param>
/// <param name="strict">Optionally require that all input properties map to <see cref="HiveDefinition"/> properties.</param>
/// <returns>The parsed <see cref="HiveDefinition"/>.</returns>
/// <remarks>
/// <note>
/// The source is first preprocessed using <see cref="PreprocessReader"/>
/// and then is parsed as JSON.
/// </note>
/// </remarks>
public static HiveDefinition FromJson(string json, bool strict = false)
{
    Covenant.Requires<ArgumentNullException>(json != null);

    using (var stringReader = new StringReader(json))
    using (var preprocessReader = new PreprocessReader(stringReader))
    {
        var preprocessed = preprocessReader.ReadToEnd();

        return NeonHelper.JsonDeserialize<HiveDefinition>(preprocessed, strict: strict);
    }
}
public async Task NotImplemented()
{
    // PreprocessReader intentionally doesn't support the character-level
    // TextReader operations; verify that each of them throws.

    var reader = new PreprocessReader(string.Empty);
    var buffer = new char[100];

    Assert.Throws<NotImplementedException>(() => reader.Peek());
    Assert.Throws<NotImplementedException>(() => reader.Read());
    Assert.Throws<NotImplementedException>(() => reader.Read(buffer, 0, buffer.Length));
    await Assert.ThrowsAsync<NotImplementedException>(async () => await reader.ReadAsync(buffer, 0, buffer.Length));
    Assert.Throws<NotImplementedException>(() => reader.ReadBlock(buffer, 0, buffer.Length));
    await Assert.ThrowsAsync<NotImplementedException>(async () => await reader.ReadBlockAsync(buffer, 0, buffer.Length));
}
public void Comments_BadMarkers()
{
    // Verify that comment marker validation rejects unreasonable markers.

    using (var reader = new PreprocessReader(string.Empty))
    {
        // Null and empty markers are rejected with [ArgumentNullException].

        Assert.Throws<ArgumentNullException>(() => reader.AddCommentMarker(null));
        Assert.Throws<ArgumentNullException>(() => reader.AddCommentMarker(string.Empty));

        // Whitespace is not allowed anywhere in a marker.

        Assert.Throws<ArgumentException>(() => reader.AddCommentMarker(" "));
        Assert.Throws<ArgumentException>(() => reader.AddCommentMarker(" //"));
        Assert.Throws<ArgumentException>(() => reader.AddCommentMarker("\t"));

        // Markers must be composed of punctuation characters.

        Assert.Throws<ArgumentException>(() => reader.AddCommentMarker("1"));
        Assert.Throws<ArgumentException>(() => reader.AddCommentMarker("a"));
    }
}
/// <summary>
/// Parses a cluster definition from YAML text.
/// </summary>
/// <param name="yaml">The YAML text.</param>
/// <param name="strict">Optionally require that all input properties map to <see cref="ClusterDefinition"/> properties.</param>
/// <returns>The parsed <see cref="ClusterDefinition"/>.</returns>
/// <remarks>
/// <note>
/// The source is first preprocessed using <see cref="PreprocessReader"/>
/// and then is parsed as YAML.
/// </note>
/// </remarks>
public static ClusterDefinition FromYaml(string yaml, bool strict = false)
{
    Covenant.Requires<ArgumentNullException>(yaml != null, nameof(yaml));

    using (var stringReader = new StringReader(yaml))
    {
        using (var preprocessReader = new PreprocessReader(stringReader))
        {
            // Preprocess (variable expansion/statements) before handing the
            // text to the YAML deserializer.

            var clusterDefinition = NeonHelper.YamlDeserialize<ClusterDefinition>(preprocessReader.ReadToEnd(), strict: strict);

            // Ensure the definition is self-consistent before returning it.

            clusterDefinition.Validate();

            return(clusterDefinition);
        }
    }
}
/// <summary>
/// Uploads a resource file to the remote server after performing any necessary preprocessing.
/// </summary>
/// <typeparam name="TMetadata">The node metadata type.</typeparam>
/// <param name="node">The remote node.</param>
/// <param name="hiveDefinition">The hive definition or <c>null</c>.</param>
/// <param name="file">The resource file.</param>
/// <param name="targetPath">The target path on the remote server.</param>
private static void UploadFile<TMetadata>(this SshProxy<TMetadata> node, HiveDefinition hiveDefinition, ResourceFiles.File file, string targetPath)
    where TMetadata : class
{
    using (var input = file.ToStream())
    {
        if (file.HasVariables)
        {
            // We need to expand any variables.  Note that if we don't have a
            // hive definition or for undefined variables, we're going to
            // have the variables expand to the empty string.

            using (var msExpanded = new MemoryStream())
            {
                using (var writer = new StreamWriter(msExpanded))
                {
                    var preprocessReader =
                        new PreprocessReader(new StreamReader(input))
                        {
                            DefaultVariable = string.Empty,
                            ExpandVariables = true,
                            ProcessCommands = false,
                            StripComments   = false
                        };

                    // Hive-specific variables are only available when we
                    // actually have a hive definition.

                    if (hiveDefinition != null)
                    {
                        SetHiveVariables(preprocessReader, hiveDefinition, node.Metadata as NodeDefinition);
                    }

                    // Copy the preprocessed lines into the expanded stream.

                    foreach (var line in preprocessReader.Lines())
                    {
                        writer.WriteLine(line);
                    }

                    // Flush the writer and rewind the stream before uploading.
                    // Note that the upload happens INSIDE the writer's [using]
                    // block so the underlying memory stream is still open.

                    writer.Flush();
                    msExpanded.Position = 0;

                    node.UploadText(targetPath, msExpanded, tabStop: 4, outputEncoding: Encoding.UTF8);
                }
            }
        }
        else
        {
            // No variables: upload the resource text as-is.

            node.UploadText(targetPath, input, tabStop: 4, outputEncoding: Encoding.UTF8);
        }
    }
}
public void Defaults()
{
    // Verify the default property values exposed by a newly constructed reader.

    var reader = new PreprocessReader(new StreamReader(new MemoryStream()));

    Assert.Equal(PreprocessReader.AngleVariableExpansionRegex, reader.VariableExpansionRegex);
    Assert.True(reader.ProcessStatements);
    Assert.True(reader.StripComments);
    Assert.False(reader.RemoveComments);
    Assert.False(reader.RemoveBlank);

    // Use [Assert.Equal] rather than [Assert.True] on a comparison so a
    // failure reports the actual marker character.

    Assert.Equal('#', reader.StatementMarker);

    Assert.Equal(0, reader.TabStop);
    Assert.Equal(LineEnding.Platform, reader.LineEnding);
    Assert.True(reader.ExpandVariables);
    Assert.Equal(0, reader.Indent);
    Assert.Null(reader.DefaultVariable);
    Assert.Null(reader.DefaultEnvironmentVariable);
}
public void Comments_CustomMarkers()
{
    // Verify that we can handle multiple custom comment prefixes.
    //
    // NOTE(review): the exact line breaks (and any indentation) inside these
    // verbatim literals were reconstructed from a collapsed source listing;
    // confirm them against the original file before relying on the expected
    // output below.

    var input =
@"# This is a comment
# This is a comment
This is a test
// This is a comment
of the emergency # not a comment
broadcasting system
#
a
ab
abc
";

    var expected =
@"

This is a test

of the emergency # not a comment
broadcasting system

a
ab
abc
";

    using (var reader = new PreprocessReader(input))
    {
        // Replace the default marker set with [#] and [//].

        reader.ClearCommentMarkers();
        reader.AddCommentMarker("#");
        reader.AddCommentMarker("//");

        var output = reader.ReadToEnd();

        Assert.Equal(expected, output);
    }
}
public void DisableStatements()
{
    // Verify that statement lines (those starting with the [#] marker) pass
    // through unprocessed when [ProcessStatements] is disabled.

    const string input =
@"# line1
line2
# line3
";

    using (var reader = new PreprocessReader(input) { LineEnding = LineEnding.CRLF })
    {
        reader.ProcessStatements = false;

        Assert.Equal("# line1\r\nline2\r\n# line3\r\n", reader.ReadToEnd());
    }
}
/// <summary>
/// Parses a YAML cluster definition from a file.
/// </summary>
/// <param name="path">The file path.</param>
/// <param name="strict">Optionally require that all input properties map to <see cref="ClusterDefinition"/> properties.</param>
/// <returns>The parsed <see cref="ClusterDefinition"/>.</returns>
/// <exception cref="ArgumentException">Thrown if the definition is not valid.</exception>
/// <remarks>
/// <note>
/// The source is first preprocessed using <see cref="PreprocessReader"/>
/// and then is parsed as YAML.
/// </note>
/// </remarks>
public static ClusterDefinition FromFile(string path, bool strict = false)
{
    Covenant.Requires<ArgumentNullException>(path != null, nameof(path));

    using (var stream = new FileStream(path, FileMode.Open, FileAccess.Read))
    using (var reader = new StreamReader(stream))
    using (var preprocessor = new PreprocessReader(reader))
    {
        preprocessor.ProcessStatements = true;

        var clusterDefinition = NeonHelper.YamlDeserialize<ClusterDefinition>(preprocessor.ReadToEnd(), strict: strict);

        if (clusterDefinition == null)
        {
            throw new ArgumentException($"Invalid cluster definition in [{path}].", nameof(path));
        }

        // Populate the [node.Name] properties from the dictionary keys,
        // verifying that any explicitly set names agree with the keys.

        foreach (var item in clusterDefinition.NodeDefinitions)
        {
            var node = item.Value;

            if (string.IsNullOrEmpty(node.Name))
            {
                node.Name = item.Key;
            }
            else if (item.Key != node.Name)
            {
                throw new FormatException($"The node names don't match [\"{item.Key}\" != \"{node.Name}\"].");
            }
        }

        clusterDefinition.Validate();

        return clusterDefinition;
    }
}
public async Task LineEndings()
{
    // Verify that the [LineEnding] property controls the line endings
    // generated by both the sync and async [ReadToEnd*] methods.

    const string input =
@"line1
line2
line3
";

    using (var reader = new PreprocessReader(input) { LineEnding = LineEnding.CRLF })
    {
        Assert.Equal("line1\r\nline2\r\nline3\r\n", reader.ReadToEnd());
    }

    using (var reader = new PreprocessReader(input) { LineEnding = LineEnding.LF })
    {
        Assert.Equal("line1\nline2\nline3\n", reader.ReadToEnd());
    }

    using (var reader = new PreprocessReader(input) { LineEnding = LineEnding.CRLF })
    {
        Assert.Equal("line1\r\nline2\r\nline3\r\n", await reader.ReadToEndAsync());
    }

    using (var reader = new PreprocessReader(input) { LineEnding = LineEnding.LF })
    {
        Assert.Equal("line1\nline2\nline3\n", await reader.ReadToEndAsync());
    }
}
/// <summary>
/// Constructs a <see cref="PreprocessReader"/> over the input text, configured
/// from the test fixture's current settings fields.
/// </summary>
/// <param name="input">The source text.</param>
/// <returns>The configured reader.</returns>
private PreprocessReader CreateReader(string input)
{
    var reader = new PreprocessReader(input);

    reader.DefaultVariable            = defaultVariable;
    reader.DefaultEnvironmentVariable = defaultEnvironmentVariable;
    reader.TabStop                    = tabStop;
    reader.ExpandVariables            = expandVariables;
    reader.StripComments              = stripComments;
    reader.RemoveComments             = removeComments;
    reader.RemoveBlank                = removeBlank;
    reader.ProcessStatements          = processCommands;
    reader.StatementMarker            = statementMarker;
    reader.Indent                     = indent;
    reader.LineEnding                 = lineEnding;

    // A [null] regex means: keep the reader's default expansion regex.

    if (variableRegex != null)
    {
        reader.VariableExpansionRegex = variableRegex;
    }

    return reader;
}
/// <summary>
/// Sets cluster definition related variables for a <see cref="PreprocessReader"/>.
/// </summary>
/// <param name="preprocessReader">The reader.</param>
/// <param name="clusterDefinition">The cluster definition.</param>
/// <param name="kubeSetupInfo">The Kubernetes setup details.</param>
/// <param name="nodeDefinition">The target node definition.</param>
private static void SetClusterVariables(PreprocessReader preprocessReader, ClusterDefinition clusterDefinition, KubeSetupInfo kubeSetupInfo, NodeDefinition nodeDefinition)
{
    Covenant.Requires<ArgumentNullException>(preprocessReader != null, nameof(preprocessReader));
    Covenant.Requires<ArgumentNullException>(clusterDefinition != null, nameof(clusterDefinition));
    Covenant.Requires<ArgumentNullException>(kubeSetupInfo != null, nameof(kubeSetupInfo));

    // Generate the master node variables in sorted order.  The variable
    // names will be formatted as:
    //
    //      NEON_MASTER_#
    //
    // where [#] is the zero-based index of the node.  This is compatible
    // with the [getmaster] function included the script.
    //
    // Each variable defines an associative array with [name] and [address]
    // properties.
    //
    // Then generate the NEON_MASTER_NAMES and NEON_MASTER_ADDRESSES arrays.
    //
    // NOTE: We need to use Linux-style line endings.

    var sbMasters                  = new StringBuilder();
    var sbMasterNamesArray         = new StringBuilder();
    var sbMasterAddressesArray     = new StringBuilder();
    var sbPeerMasterAddressesArray = new StringBuilder();
    var sbMasterNodesSummary       = new StringBuilder();
    var index                      = 0;
    var masterNameWidth            = 0;

    sbMasterNamesArray.Append("(");
    sbMasterAddressesArray.Append("(");
    sbPeerMasterAddressesArray.Append("(");

    foreach (var master in clusterDefinition.SortedMasters)
    {
        sbMasters.Append($"declare -x -A NEON_MASTER_{index}\n");
        sbMasters.Append($"NEON_MASTER_{index}=( [\"name\"]=\"{master.Name}\" [\"address\"]=\"{master.PrivateAddress}\" )\n");
        sbMasters.Append("\n");
        index++;

        sbMasterNamesArray.Append($" \"{master.Name}\"");
        sbMasterAddressesArray.Append($" \"{master.PrivateAddress}\"");

        // Peers exclude the node the script is being generated for.

        if (master != nodeDefinition)
        {
            sbPeerMasterAddressesArray.Append($" \"{master.PrivateAddress}\"");
        }

        // Track the widest master name so the summary columns can be aligned.

        masterNameWidth = Math.Max(master.Name.Length, masterNameWidth);
    }

    sbMasterNamesArray.Append(" )");
    sbMasterAddressesArray.Append(" )");
    sbPeerMasterAddressesArray.Append(" )");

    foreach (var master in clusterDefinition.SortedMasters)
    {
        // Pad the name so the summary addresses line up in a column.

        var nameField = master.Name;

        if (nameField.Length < masterNameWidth)
        {
            nameField += new string(' ', masterNameWidth - nameField.Length);
        }

        // The blanks below are just enough so that the "=" sign lines up
        // with the summary output from [cluster.conf.sh].
        //
        // NOTE(review): the exact padding inside these literals may have been
        // lost when this listing was collapsed -- verify the alignment against
        // the original source.

        if (sbMasterNodesSummary.Length == 0)
        {
            sbMasterNodesSummary.Append($" echo \"NEON_MASTER_NODES = {nameField}: {master.PrivateAddress}\" 1>&2\n");
        }
        else
        {
            sbMasterNodesSummary.Append($" echo \" {nameField}: {master.PrivateAddress}\" 1>&2\n");
        }
    }

    // NOTE(review): this loop re-emits NEON_MASTER_# declarations with [index]
    // values continuing past those written by the first loop above, so every
    // master appears twice with the second copy using out-of-range indices.
    // This looks like an accidental copy/paste duplication -- confirm whether
    // this second loop should be removed.

    foreach (var master in clusterDefinition.SortedMasters)
    {
        sbMasters.Append($"declare -x -A NEON_MASTER_{index}\n");
        sbMasters.Append($"NEON_MASTER_{index}=( [\"name\"]=\"{master.Name}\" [\"address\"]=\"{master.PrivateAddress}\" )\n");
        index++;
    }

    sbMasters.Append("\n");
    sbMasters.Append($"declare -x NEON_MASTER_NAMES={sbMasterNamesArray}\n");
    sbMasters.Append($"declare -x NEON_MASTER_ADDRESSES={sbMasterAddressesArray}\n");
    sbMasters.Append("\n");

    // Generate the master and worker NTP time sources.

    var masterTimeSources = string.Empty;
    var workerTimeSources = string.Empty;

    if (clusterDefinition.TimeSources != null)
    {
        foreach (var source in clusterDefinition.TimeSources)
        {
            if (string.IsNullOrWhiteSpace(source))
            {
                continue;
            }

            if (masterTimeSources.Length > 0)
            {
                masterTimeSources += " ";
            }

            masterTimeSources += $"\"{source}\"";
        }
    }

    // Workers sync time from the masters.

    foreach (var master in clusterDefinition.SortedMasters)
    {
        if (workerTimeSources.Length > 0)
        {
            workerTimeSources += " ";
        }

        workerTimeSources += $"\"{master.PrivateAddress}\"";
    }

    if (string.IsNullOrWhiteSpace(masterTimeSources))
    {
        // Default to a reasonable public time source.

        masterTimeSources = "\"pool.ntp.org\"";
    }

    // Set the variables.

    preprocessReader.Set("load-cluster-conf", KubeHostFolders.Config + "/cluster.conf.sh --echo-summary");
    preprocessReader.Set("load-cluster-conf-quiet", KubeHostFolders.Config + "/cluster.conf.sh");

    SetBashVariable(preprocessReader, "cluster.provisioner", clusterDefinition.Provisioner);

    SetBashVariable(preprocessReader, "node.driveprefix", clusterDefinition.DrivePrefix);

    SetBashVariable(preprocessReader, "neon.folders.archive", KubeHostFolders.Archive(KubeConst.SysAdminUser));
    SetBashVariable(preprocessReader, "neon.folders.bin", KubeHostFolders.Bin);
    SetBashVariable(preprocessReader, "neon.folders.exec", KubeHostFolders.Exec(KubeConst.SysAdminUser));
    SetBashVariable(preprocessReader, "neon.folders.config", KubeHostFolders.Config);
    SetBashVariable(preprocessReader, "neon.folders.setup", KubeHostFolders.Setup);
    SetBashVariable(preprocessReader, "neon.folders.state", KubeHostFolders.State);
    SetBashVariable(preprocessReader, "neon.folders.tmpfs", KubeHostFolders.Tmpfs);
    SetBashVariable(preprocessReader, "neon.folders.tools", KubeHostFolders.Bin);

    SetBashVariable(preprocessReader, "nodes.master.count", clusterDefinition.Masters.Count());
    preprocessReader.Set("nodes.masters", sbMasters);
    preprocessReader.Set("nodes.masters.summary", sbMasterNodesSummary);

    SetBashVariable(preprocessReader, "ntp.master.sources", masterTimeSources);

    // NOTE(review): [NewMethod] is the Visual Studio default name for an
    // extracted method -- presumably this sets [ntp.worker.sources]; consider
    // renaming it to something meaningful.

    NewMethod(preprocessReader, workerTimeSources);

    SetBashVariable(preprocessReader, "docker.packageuri", kubeSetupInfo.DockerPackageUbuntuUri);

    SetBashVariable(preprocessReader, "neon.kube.kubeadm.package_version", kubeSetupInfo.KubeAdmPackageUbuntuVersion);
    SetBashVariable(preprocessReader, "neon.kube.kubectl.package_version", kubeSetupInfo.KubeCtlPackageUbuntuVersion);
    SetBashVariable(preprocessReader, "neon.kube.kubelet.package_version", kubeSetupInfo.KubeletPackageUbuntuVersion);

    //-----------------------------------------------------------------
    // Configure the variables for the [setup-disk.sh] script.

    switch (clusterDefinition.Hosting.Environment)
    {
        case HostingEnvironments.Aws:

            throw new NotImplementedException("$todo(jefflill)");

        case HostingEnvironments.Azure:

            // The primary Azure data drive is [/dev/sdb] so any mounted drive will be [/dev/sdc].

            if (nodeDefinition.Azure.HardDriveCount == 0)
            {
                SetBashVariable(preprocessReader, "data.disk", "PRIMARY");
            }
            else
            {
                SetBashVariable(preprocessReader, "data.disk", "/dev/sdc");
            }
            break;

        case HostingEnvironments.Google:

            throw new NotImplementedException("$todo(jefflill)");

        case HostingEnvironments.HyperV:
        case HostingEnvironments.HyperVLocal:
        case HostingEnvironments.Machine:
        case HostingEnvironments.Unknown:
        case HostingEnvironments.XenServer:

            // VMs for all of these environments simply host their data on the
            // primary OS disk only for now, the idea being that this disk
            // can be sized up as necessary.  There are valid scenarios where
            // folks would like the data on a different drive (e.g. for better
            // performance).  I'm putting support for that on the backlog.

            SetBashVariable(preprocessReader, "data.disk", "PRIMARY");
            break;

        default:

            throw new NotImplementedException($"The [{clusterDefinition.Hosting.Environment}] hosting environment is not implemented.");
    }
}
public void ProfileReferences()
{
    // Verify that [IProfileClient] integration works by starting a profile
    // server, injecting an [IProfileClient] implementation and then verifying
    // that secret passwords, secret values, and profile values can be
    // resolved.

    var pipeName = Guid.NewGuid().ToString("d");
    var server   = new ProfileServer(pipeName);

    // Handler: profile values resolve to "NAME-profile"; the special name
    // [missing] simulates a failed lookup.

    server.GetProfileValueHandler =
        (request, name) =>
        {
            if (name == "missing")
            {
                return(ProfileHandlerResult.CreateError(request, ProfileStatus.NotFound, $"[{name}] variable not found."));
            }

            return(ProfileHandlerResult.Create($"{name}-profile"));
        };

    // Handler: secret passwords resolve to "NAME-password", with the vault
    // name appended when a vault is specified.

    server.GetSecretPasswordHandler =
        (request, name, vault, masterPassword) =>
        {
            if (name == "missing")
            {
                return(ProfileHandlerResult.CreateError(request, ProfileStatus.NotFound, $"[{name}] variable not found."));
            }

            if (vault == null)
            {
                return(ProfileHandlerResult.Create($"{name}-password"));
            }
            else
            {
                return(ProfileHandlerResult.Create($"{name}-password-{vault}"));
            }
        };

    // Handler: secret values resolve to "NAME-secret", with the vault
    // name appended when a vault is specified.

    server.GetSecretValueHandler =
        (request, name, vault, masterPassword) =>
        {
            if (name == "missing")
            {
                return(ProfileHandlerResult.CreateError(request, ProfileStatus.NotFound, $"[{name}] variable not found."));
            }

            if (vault == null)
            {
                return(ProfileHandlerResult.Create($"{name}-secret"));
            }
            else
            {
                return(ProfileHandlerResult.Create($"{name}-secret-{vault}"));
            }
        };

    server.Start();

    try
    {
        // Inject the client so PreprocessReader can resolve $<<<...>>> references.

        var client = new ProfileClient(pipeName);

        NeonHelper.ServiceContainer.AddSingleton<IProfileClient>(client);

        //-------------------------------------------------------------
        // Verify secret passwords

        var source = "TEST = $<<<password:test>>>";
        var output = new PreprocessReader(source).ReadToEnd().Trim();

        Assert.Equal("TEST = test-password", output);

        source = "TEST = $<<<password:test:vault>>>";
        output = new PreprocessReader(source).ReadToEnd().Trim();

        Assert.Equal("TEST = test-password-vault", output);

        // Missing references surface as [ProfileException].

        Assert.Throws<ProfileException>(() => new PreprocessReader("TEST = $<<<password:missing>>>").ReadToEnd());

        //-------------------------------------------------------------
        // Verify secret values

        source = "TEST = $<<<secret:test>>>";
        output = new PreprocessReader(source).ReadToEnd().Trim();

        Assert.Equal("TEST = test-secret", output);

        source = "TEST = $<<<secret:test:vault>>>";
        output = new PreprocessReader(source).ReadToEnd().Trim();

        Assert.Equal("TEST = test-secret-vault", output);

        Assert.Throws<ProfileException>(() => new PreprocessReader("TEST = $<<<secret:missing>>>").ReadToEnd());

        //-------------------------------------------------------------
        // Verify secret values targeting a specific property.

        source = "TEST = $<<<secret:test[field]>>>";
        output = new PreprocessReader(source).ReadToEnd().Trim();

        Assert.Equal("TEST = test[field]-secret", output);

        source = "TEST = $<<<secret:test[field]:vault>>>";
        output = new PreprocessReader(source).ReadToEnd().Trim();

        Assert.Equal("TEST = test[field]-secret-vault", output);

        Assert.Throws<ProfileException>(() => new PreprocessReader("TEST = $<<<secret:missing>>>").ReadToEnd());

        //-------------------------------------------------------------
        // Verify profile values

        source = "TEST = $<<<profile:test>>>";
        output = new PreprocessReader(source).ReadToEnd().Trim();

        Assert.Equal("TEST = test-profile", output);

        Assert.Throws<ProfileException>(() => new PreprocessReader("TEST = $<<<profile:missing>>>").ReadToEnd());
    }
    finally
    {
        // Remove the injected client and stop the server.

        NeonHelper.ServiceContainer.Remove(NeonHelper.ServiceContainer.Single(service => service.ServiceType == typeof(IProfileClient)));
        server.Dispose();
    }
}
/// <inheritdoc/>
public override async Task RunAsync(CommandLine commandLine)
{
    if (commandLine.HasHelpOption)
    {
        Console.WriteLine(usage);
        Program.Exit(0);
    }

    var splitCommandLine = commandLine.Split("--");
    var leftCommandLine  = splitCommandLine.Left;
    var rightCommandLine = splitCommandLine.Right;

    if (rightCommandLine == null || rightCommandLine.Arguments.Length == 0)
    {
        Console.Error.WriteLine("*** ERROR: Expected a command after a [--] argument.");
        Program.Exit(1);
    }

    // All arguments on the left command line should be VARIABLES files.
    // We're going to open each of these and set any environment variables
    // like [NAME=VALUE] we find.
    //
    // Note that these files may be encrypted.  If any are, we'll decrypt
    // to a temporary file before we read them.

    foreach (var path in leftCommandLine.Arguments)
    {
        if (!File.Exists(path))
        {
            Console.Error.WriteLine($"*** ERROR: File [{path}] does not exist.");
            Program.Exit(1);
        }

        DecryptWithAction(path,
            decryptedPath =>
            {
                var lineNumber = 0;

                foreach (var line in File.ReadAllLines(decryptedPath))
                {
                    // Advance the 1-based line number up front so skipped
                    // (blank/comment) lines are still counted.  The original
                    // incremented only at the bottom of the loop, after the
                    // [continue], so error messages reported wrong lines.

                    lineNumber++;

                    var trimmed = line.Trim();

                    // Skip blank lines and comments.  BUGFIX: the original
                    // tested the untrimmed [line] here, leaving [trimmed]
                    // unused and failing to skip whitespace-only lines or
                    // indented comments.

                    if (trimmed == string.Empty || trimmed.StartsWith("#"))
                    {
                        continue;
                    }

                    var fields = trimmed.Split('=', 2);

                    if (fields.Length != 2 || fields[0] == string.Empty)
                    {
                        Console.Error.WriteLine($"*** ERROR: [{path}:{lineNumber}] is not formatted like: NAME=VALUE");
                        Program.Exit(1);
                    }

                    var name  = fields[0].Trim();
                    var value = fields[1].Trim();

                    Environment.SetEnvironmentVariable(name, value);
                }
            });
    }

    // Any left command line options with a "--" prefix also specify environment variables.

    foreach (var option in leftCommandLine.Options.Where(o => o.Key.StartsWith("--")))
    {
        Environment.SetEnvironmentVariable(option.Key.Substring(2), option.Value);
    }

    // We've read all of the variable files and left command line options
    // and initialized all environment variables.  Now we need to process
    // and then execute the right command line.

    var tempFiles = new List<TempFile>();

    try
    {
        var subcommand = rightCommandLine.Items;

        // Note that the first element of the subcommand specifies the
        // executable so we don't need to process that.

        for (int i = 1; i < subcommand.Length; i++)
        {
            var arg = subcommand[i];

            if (arg.StartsWith("_..."))
            {
                // Argument is a reference to a potentially encrypted
                // file that needs to be passed decrypted.

                var path = arg.Substring(4);

                if (!File.Exists(path))
                {
                    Console.Error.WriteLine($"*** ERROR: File [{path}] does not exist.");
                    Program.Exit(1);
                }

                if (NeonVault.IsEncrypted(path))
                {
                    var tempFile = new TempFile();

                    tempFiles.Add(tempFile);
                    vault.Decrypt(path, tempFile.Path);
                    path = tempFile.Path;
                }

                subcommand[i] = path;
            }
            else if (arg.StartsWith("_.."))
            {
                // Argument is a reference to a potentially encrypted text file
                // with environment variable references we'll need to update.

                var path = arg.Substring(3);

                if (!File.Exists(path))
                {
                    Console.Error.WriteLine($"*** ERROR: File [{path}] does not exist.");
                    Program.Exit(1);
                }

                if (NeonVault.IsEncrypted(path))
                {
                    var tempFile = new TempFile();

                    tempFiles.Add(tempFile);
                    vault.Decrypt(path, tempFile.Path);
                    path = tempFile.Path;
                }

                subcommand[i] = path;

                // Perform the substitutions, preserving the file's original
                // line ending style.

                var unprocessed      = File.ReadAllText(path);
                var processed        = string.Empty;
                var linuxLineEndings = !unprocessed.Contains("\r\n");

                using (var reader = new StreamReader(path))
                {
                    using (var preprocessor = new PreprocessReader(reader))
                    {
                        preprocessor.ExpandVariables        = true;
                        preprocessor.LineEnding             = linuxLineEndings ? LineEnding.LF : LineEnding.CRLF;
                        preprocessor.ProcessStatements      = false;
                        preprocessor.StripComments          = false;
                        preprocessor.VariableExpansionRegex = PreprocessReader.AngleVariableExpansionRegex;

                        processed = preprocessor.ReadToEnd();
                    }
                }

                File.WriteAllText(path, processed);
            }
            else if (arg.StartsWith("_."))
            {
                // Argument is a reference to an environment variable.

                var name = arg.Substring(2);

                if (name == string.Empty)
                {
                    Console.Error.WriteLine($"*** ERROR: Subcommand argument [{arg}] is not valid.");
                    Program.Exit(1);
                }

                var value = Environment.GetEnvironmentVariable(name);

                if (value == null)
                {
                    Console.Error.WriteLine($"*** ERROR: Subcommand argument [{arg}] references an undefined environment variable.");
                    Program.Exit(2);
                }

                subcommand[i] = value;
            }
            else if (arg.StartsWith("-"))
            {
                // Argument is a command line option.  We'll check to see if
                // it contains a reference to an environment variable.

                var valuePos = arg.IndexOf("=_.");

                if (valuePos != -1)
                {
                    var optionPart = arg.Substring(0, valuePos);
                    var name       = arg.Substring(valuePos + 3);

                    if (name == string.Empty)
                    {
                        Console.Error.WriteLine($"*** ERROR: Subcommand argument [{arg}] is not valid.");
                        Program.Exit(1);
                    }

                    var value = Environment.GetEnvironmentVariable(name);

                    if (value == null)
                    {
                        Console.Error.WriteLine($"*** ERROR: Subcommand argument [{arg}] references an undefined environment variable.");
                        Program.Exit(1);
                    }

                    subcommand[i] = $"{optionPart}={value}";
                }
            }
            else
            {
                // Otherwise, expand any environment variable references.

                subcommand[i] = Environment.ExpandEnvironmentVariables(subcommand[i]);
            }
        }

        // Execute the subcommand.

        var subcommandArgs = new List<object>();

        foreach (var subcommandArg in subcommand)
        {
            subcommandArgs.Add(subcommandArg);
        }

        var exitCode = NeonHelper.Execute(subcommand[0], subcommandArgs.Skip(1).ToArray());

        Program.Exit(exitCode);
    }
    finally
    {
        // Always clean up any temporary decrypted files.

        foreach (var tempFile in tempFiles)
        {
            tempFile.Dispose();
        }
    }

    Program.Exit(0);
    await Task.CompletedTask;
}
/// <summary> /// Sets hive definition related variables for a <see cref="PreprocessReader"/>. /// </summary> /// <param name="preprocessReader">The reader.</param> /// <param name="hiveDefinition">The hive definition.</param> /// <param name="nodeDefinition">The target node definition.</param> private static void SetHiveVariables(PreprocessReader preprocessReader, HiveDefinition hiveDefinition, NodeDefinition nodeDefinition) { Covenant.Requires <ArgumentNullException>(preprocessReader != null); Covenant.Requires <ArgumentNullException>(hiveDefinition != null); // Generate the manager node variables in sorted order. The variable // names will be formatted as: // // NEON_MANAGER_# // // where [#] is the zero-based index of the node. This is compatible // with the [getmanager] function included the script. // // Each variable defines an associative array with [name] and [address] // properties. // // Then generate the NEON_MANAGER_NAMES and NEON_MANAGER_ADDRESSES arrays. // // NOTE: We need to use Linux-style line endings. 
var sbManagers = new StringBuilder(); var sbManagerNamesArray = new StringBuilder(); var sbManagerAddressesArray = new StringBuilder(); var sbPeerManagerAddressesArray = new StringBuilder(); var sbManagerNodesSummary = new StringBuilder(); var index = 0; var managerNameWidth = 0; sbManagerNamesArray.Append("("); sbManagerAddressesArray.Append("("); sbPeerManagerAddressesArray.Append("("); foreach (var manager in hiveDefinition.SortedManagers) { sbManagers.Append($"declare -x -A NEON_MANAGER_{index}\n"); sbManagers.Append($"NEON_MANAGER_{index}=( [\"name\"]=\"{manager.Name}\" [\"address\"]=\"{manager.PrivateAddress}\" )\n"); sbManagers.Append("\n"); index++; sbManagerNamesArray.Append($" \"{manager.Name}\""); sbManagerAddressesArray.Append($" \"{manager.PrivateAddress}\""); if (manager != nodeDefinition) { sbPeerManagerAddressesArray.Append($" \"{manager.PrivateAddress}\""); } managerNameWidth = Math.Max(manager.Name.Length, managerNameWidth); } sbManagerNamesArray.Append(" )"); sbManagerAddressesArray.Append(" )"); sbPeerManagerAddressesArray.Append(" )"); foreach (var manager in hiveDefinition.SortedManagers) { var nameField = manager.Name; if (nameField.Length < managerNameWidth) { nameField += new string(' ', managerNameWidth - nameField.Length); } // The blanks below are just enough so that the "=" sign lines up // with the summary output from [hive.conf.sh]. 
if (sbManagerNodesSummary.Length == 0) { sbManagerNodesSummary.Append($" echo \"NEON_MANAGER_NODES = {nameField}: {manager.PrivateAddress}\" 1>&2\n"); } else { sbManagerNodesSummary.Append($" echo \" {nameField}: {manager.PrivateAddress}\" 1>&2\n"); } } foreach (var manager in hiveDefinition.SortedManagers) { sbManagers.Append($"declare -x -A NEON_MANAGER_{index}\n"); sbManagers.Append($"NEON_MANAGER_{index}=( [\"name\"]=\"{manager.Name}\" [\"address\"]=\"{manager.PrivateAddress}\" )\n"); index++; } sbManagers.Append("\n"); sbManagers.Append($"declare -x NEON_MANAGER_NAMES={sbManagerNamesArray}\n"); sbManagers.Append($"declare -x NEON_MANAGER_ADDRESSES={sbManagerAddressesArray}\n"); sbManagers.Append("\n"); if (hiveDefinition.Managers.Count() > 1) { sbManagers.Append($"declare -x NEON_MANAGER_PEERS={sbPeerManagerAddressesArray}\n"); } else { sbManagers.Append("export NEON_MANAGER_PEERS=\"\"\n"); } // Generate the manager and worker NTP time sources. var managerTimeSources = string.Empty; var workerTimeSources = string.Empty; if (hiveDefinition.TimeSources != null) { foreach (var source in hiveDefinition.TimeSources) { if (string.IsNullOrWhiteSpace(source)) { continue; } if (managerTimeSources.Length > 0) { managerTimeSources += " "; } managerTimeSources += $"\"{source}\""; } } foreach (var manager in hiveDefinition.SortedManagers) { if (workerTimeSources.Length > 0) { workerTimeSources += " "; } workerTimeSources += $"\"{manager.PrivateAddress}\""; } if (string.IsNullOrWhiteSpace(managerTimeSources)) { // Default to reasonable public time sources. managerTimeSources = "\"pool.ntp.org\""; } // Generate the Docker daemon command line options. 
var sbDockerOptions = new StringBuilder(); if (Program.ServiceManager == ServiceManager.Systemd) { sbDockerOptions.AppendWithSeparator($"-H unix:///var/run/docker.sock"); } else { throw new NotImplementedException(); } if (hiveDefinition.DebugMode) { // Expose the Docker Swarm REST API on the node's internal hive IP address so it // can be reached by apps like [neon-proxy-manager] running off the manager node // (potentially in the debugger). sbDockerOptions.AppendWithSeparator($"-H tcp://{nodeDefinition.PrivateAddress}:{NetworkPorts.Docker}"); } preprocessReader.Set("docker.options", sbDockerOptions); // Define the Consul command line options. var consulOptions = string.Empty; if (hiveDefinition.Dashboard.Consul) { if (consulOptions.Length > 0) { consulOptions += " "; } consulOptions += "-ui"; } // Format the network upstream nameservers as semicolon separated // to be compatible with the PowerDNS Recursor [forward-zones-recurse] // configuration setting. // // Note that manager nodes will recurse to upstream (external) DNS // servers and workers/pets will recurse to the managers so they can // dynamically pickup hive DNS changes. if (hiveDefinition.Network?.Nameservers == null) { // $hack(jeff.lill): // // [Network] will be null if we're just preparing servers, not doing full setup // so we'll set this to the defaults to avoid null references below. hiveDefinition.Network = new NetworkOptions(); } var nameservers = string.Empty; if (nodeDefinition.Role == NodeRole.Manager) { for (int i = 0; i < hiveDefinition.Network.Nameservers.Length; i++) { if (i > 0) { nameservers += ";"; } nameservers += hiveDefinition.Network.Nameservers[i].Trim(); } } else { foreach (var manager in hiveDefinition.SortedManagers) { if (nameservers.Length > 0) { nameservers += ";"; } nameservers += manager.PrivateAddress; } } // Set the variables. 
preprocessReader.Set("load-hive-conf", HiveHostFolders.Config + "/hive.conf.sh --echo-summary"); preprocessReader.Set("load-hive-conf-quiet", HiveHostFolders.Config + "/hive.conf.sh"); SetBashVariable(preprocessReader, "hive.provisioner", hiveDefinition.Provisioner); SetBashVariable(preprocessReader, "hive.rootuser", Program.MachineUsername); SetBashVariable(preprocessReader, "node.driveprefix", hiveDefinition.DrivePrefix); SetBashVariable(preprocessReader, "neon.folders.archive", HiveHostFolders.Archive); SetBashVariable(preprocessReader, "neon.folders.bin", HiveHostFolders.Bin); SetBashVariable(preprocessReader, "neon.folders.exec", HiveHostFolders.Exec); SetBashVariable(preprocessReader, "neon.folders.config", HiveHostFolders.Config); SetBashVariable(preprocessReader, "neon.folders.scripts", HiveHostFolders.Scripts); SetBashVariable(preprocessReader, "neon.folders.secrets", HiveHostFolders.Secrets); SetBashVariable(preprocessReader, "neon.folders.setup", HiveHostFolders.Setup); SetBashVariable(preprocessReader, "neon.folders.source", HiveHostFolders.Source); SetBashVariable(preprocessReader, "neon.folders.state", HiveHostFolders.State); SetBashVariable(preprocessReader, "neon.folders.tmpfs", HiveHostFolders.Tmpfs); SetBashVariable(preprocessReader, "neon.folders.tools", HiveHostFolders.Tools); preprocessReader.Set("neon.hosts.neon-log-es-data", hiveDefinition.Hostnames.LogEsData); SetBashVariable(preprocessReader, "nodes.manager.count", hiveDefinition.Managers.Count()); preprocessReader.Set("nodes.managers", sbManagers); preprocessReader.Set("nodes.manager.summary", sbManagerNodesSummary); SetBashVariable(preprocessReader, "ntp.manager.sources", managerTimeSources); SetBashVariable(preprocessReader, "ntp.worker.sources", workerTimeSources); if (!hiveDefinition.BareDocker) { // When we're not deploying bare Docker, the manager nodes will use the // configured name servers as the hive's upstream DNS and the worker // nodes will be configured to query the name 
servers. if (nodeDefinition.IsManager) { preprocessReader.Set("net.nameservers", nameservers); } else { var managerNameservers = string.Empty; foreach (var manager in hiveDefinition.Managers) { if (managerNameservers.Length > 0) { managerNameservers += ";"; } managerNameservers += manager.PrivateAddress.ToString(); } preprocessReader.Set("net.nameservers", managerNameservers); } } else { // All servers use the configured upstream nameservers when we're not // deploying the Local DNS. preprocessReader.Set("net.nameservers", nameservers); } SetBashVariable(preprocessReader, "net.powerdns.recursor.package.uri", hiveDefinition.Network.PdnsRecursorPackageUri); preprocessReader.Set("net.powerdns.recursor.hosts", GetPowerDnsHosts(hiveDefinition, nodeDefinition)); var dockerPackageUri = new HeadendClient().GetDockerPackageUri(hiveDefinition.Docker.Version, out var packageMessage); if (dockerPackageUri == null) { // $todo(jeff.lill: // // This should probably be replaced with hive definition validation code. Console.WriteLine($"*** ERROR: {packageMessage}"); Program.Exit(1); } SetBashVariable(preprocessReader, "docker.packageuri", dockerPackageUri); SetBashVariable(preprocessReader, "consul.version", hiveDefinition.Consul.Version); SetBashVariable(preprocessReader, "consul.options", consulOptions); SetBashVariable(preprocessReader, "consul.address", $"{hiveDefinition.Hostnames.Consul}:{hiveDefinition.Consul.Port}"); SetBashVariable(preprocessReader, "consul.fulladdress", $"https://{hiveDefinition.Hostnames.Consul}:{hiveDefinition.Consul.Port}"); SetBashVariable(preprocessReader, "consul.hostname", hiveDefinition.Hostnames.Consul); SetBashVariable(preprocessReader, "consul.port", hiveDefinition.Consul.Port); SetBashVariable(preprocessReader, "consul.tls", hiveDefinition.Consul.Tls ? 
"true" : "false"); SetBashVariable(preprocessReader, "vault.version", hiveDefinition.Vault.Version); SetBashVariable(preprocessReader, "vault.download", $"https://releases.hashicorp.com/vault/{hiveDefinition.Vault.Version}/vault_{hiveDefinition.Vault.Version}_linux_amd64.zip"); SetBashVariable(preprocessReader, "vault.hostname", hiveDefinition.Hostnames.Vault); SetBashVariable(preprocessReader, "vault.port", hiveDefinition.Vault.Port); SetBashVariable(preprocessReader, "vault.consulpath", "vault/"); SetBashVariable(preprocessReader, "vault.maximumlease", hiveDefinition.Vault.MaximimLease); SetBashVariable(preprocessReader, "vault.defaultlease", hiveDefinition.Vault.DefaultLease); SetBashVariable(preprocessReader, "vault.dashboard", hiveDefinition.Dashboard.Vault ? "true" : "false"); SetBashVariable(preprocessReader, "log.enabled", hiveDefinition.Log.Enabled); //----------------------------------------------------------------- // Configure the variables for the [setup-disk.sh] script. switch (hiveDefinition.Hosting.Environment) { case HostingEnvironments.Aws: throw new NotImplementedException("$todo(jeff.lill)"); case HostingEnvironments.Azure: switch (Program.OSProperties.TargetOS) { case TargetOS.Ubuntu_16_04: // The primary Azure data drive is [/dev/sdb] so any mounted drive will be [/dev/sdc]. 
if (nodeDefinition.Azure.HardDriveCount == 0) { SetBashVariable(preprocessReader, "data.disk", "PRIMARY"); } else { SetBashVariable(preprocessReader, "data.disk", "/dev/sdc"); } break; default: throw new NotImplementedException($"Support for [{Program.OSProperties.TargetOS}] is not implemented."); } break; case HostingEnvironments.Google: throw new NotImplementedException("$todo(jeff.lill)"); case HostingEnvironments.HyperV: case HostingEnvironments.HyperVDev: case HostingEnvironments.Machine: case HostingEnvironments.Unknown: case HostingEnvironments.XenServer: // VMs for all of these environments simply host their data on the // primary OS disk only for now, the idea being that this disk // can be sized up as necessary. There are valid scenarios where // folks would like the data on a different drive (e.g. for better // performance). I'm putting support for that on the backlog. SetBashVariable(preprocessReader, "data.disk", "PRIMARY"); break; default: throw new NotImplementedException($"The [{hiveDefinition.Hosting.Environment}] hosting environment is not implemented."); } }
/// <summary>
/// Sets the <b>ntp.worker.sources</b> variable on a <see cref="PreprocessReader"/>
/// via <see cref="SetBashVariable"/> so the value is safe for a Bash variable
/// set statement.
/// </summary>
/// <param name="preprocessReader">The reader.</param>
/// <param name="workerTimeSources">
/// The worker NTP time sources value (presumably the space separated, quoted
/// manager addresses built by the setup code — TODO confirm against caller).
/// </param>
/// <remarks>
/// NOTE(review): The name <b>NewMethod</b> looks like an unrenamed
/// "Extract Method" refactoring artifact.  The identical call to
/// <see cref="SetBashVariable"/> with <b>ntp.worker.sources</b> also appears
/// inline in the main variable-setup code above, so this helper may be
/// unreferenced.  Consider renaming it to something meaningful (e.g.
/// <b>SetWorkerTimeSources</b>) or deleting it after confirming there
/// are no callers.
/// </remarks>
private static void NewMethod(PreprocessReader preprocessReader, string workerTimeSources)
{
    SetBashVariable(preprocessReader, "ntp.worker.sources", workerTimeSources);
}