/// <summary>
/// Protected constructor (use the Init function in general use to configure a module) for a new module.
/// Defines all standard macros, settings, etc.
/// </summary>
/// <exception cref="Exception">
/// Thrown when no build environment exists, or when no package can be located for the module's namespace.
/// </exception>
// private so that the factory method must be used
protected Module()
{
    var graph = Graph.Instance;
    if (null == graph.BuildEnvironment)
    {
        throw new Exception("No build environment for module {0}", this.GetType().ToString());
    }
    graph.AddModule(this);
    this.Macros = new MacroList();
    // TODO: Can this be generalized to be a collection of files?
    this.GeneratedPaths = new System.Collections.Generic.Dictionary<PathKey, TokenizedString>();

    // add the package root: the module's namespace identifies the owning package
    var packageNameSpace = graph.CommonModuleType.Peek().Namespace;
    var packageDefinition = graph.Packages.FirstOrDefault(item => item.Name == packageNameSpace);
    if (null == packageDefinition)
    {
        // modules in a '<package>.tests' namespace belong to <package> when tests are enabled
        var includeTests = CommandLineProcessor.Evaluate(new Options.UseTests());
        if (includeTests && packageNameSpace.EndsWith(".tests"))
        {
            // strip ONLY the trailing '.tests' suffix; string.Replace would remove
            // every occurrence of '.tests' anywhere in the namespace
            packageNameSpace = packageNameSpace.Substring(0, packageNameSpace.Length - ".tests".Length);
            packageDefinition = graph.Packages.FirstOrDefault(item => item.Name == packageNameSpace);
        }
        if (null == packageDefinition)
        {
            throw new Exception("Unable to locate package for namespace '{0}'", packageNameSpace);
        }
    }
    this.PackageDefinition = packageDefinition;

    // standard macros available to all tokenized strings referencing this module
    this.Macros.AddVerbatim("packagedir", packageDefinition.GetPackageDirectory());
    this.Macros.AddVerbatim("packagename", packageDefinition.Name);
    this.Macros.AddVerbatim("packagebuilddir", packageDefinition.GetBuildDirectory());
    this.Macros.AddVerbatim("modulename", this.GetType().Name);
    this.Macros.Add("OutputName", this.Macros["modulename"]);

    this.OwningRank = null;
    this.Tool = null;
    this.MetaData = null;
    this.BuildEnvironment = graph.BuildEnvironment;
    this.Macros.AddVerbatim("config", this.BuildEnvironment.Configuration.ToString());
    this.ReasonToExecute = ExecuteReasoning.Undefined();
}
/// <summary>
/// Add a package dependency (name, and optional version, both read from the command line)
/// to the master package's definition file, writing the updated file to disk.
/// The addition is validated by re-running package identification; on failure the
/// dependency is removed again and the file re-written (rollback), then the error is rethrown wrapped.
/// </summary>
AddDependentPackage()
{
    var packageNameArgument = new Options.PackageName();
    var packageName = CommandLineProcessor.Evaluate(packageNameArgument);
    if (null == packageName)
    {
        throw new Exception("No name was defined. Use {0} on the command line to specify it.", (packageNameArgument as ICommandLineArgument).LongName);
    }
    // version is optional; null means 'any/unversioned'
    var packageVersion = CommandLineProcessor.Evaluate(new Options.PackageVersion());
    var masterPackage = GetMasterPackage();
    // duplicate check matches both name AND version (a null version only matches an existing null-version entry)
    if (null != masterPackage.Dependents.FirstOrDefault(item => item.Item1 == packageName && item.Item2 == packageVersion))
    {
        if (null != packageVersion)
        {
            throw new Exception("Package dependency {0}, version {1}, is already present", packageName, packageVersion);
        }
        else
        {
            throw new Exception("Package dependency {0} is already present", packageName);
        }
    }
    // Item3 (the 'default version' flag) is left unset for a newly added dependency
    var newDepTuple = new System.Tuple<string, string, bool?>(packageName, packageVersion, null);
    masterPackage.Dependents.Add(newDepTuple);
    // TODO: this is unfortunate having to write the file in order to use it with IdentifyAllPackages
    masterPackage.Write();
    // validate that the addition is ok
    try
    {
        PackageUtilities.IdentifyAllPackages();
    }
    catch (Exception exception)
    {
        // roll back: remove the just-added dependency and persist the original state
        masterPackage.Dependents.Remove(newDepTuple);
        masterPackage.Write();
        throw new Exception(exception, "Failed to add dependent. Are all necessary package repositories specified?");
    }
}
/// <summary>
/// Create a brand new Bam package in the process's working directory: a bam sub-folder
/// containing the definition file, plus a scripts folder seeded with a skeleton C# build script.
/// </summary>
MakePackage()
{
    var workingDir = Graph.Instance.ProcessState.WorkingDirectory;
    var newBamFolder = System.IO.Path.Combine(workingDir, BamSubFolder);

    // refuse to clobber an existing package
    if (System.IO.Directory.Exists(newBamFolder))
    {
        throw new Exception("Cannot create new package: A Bam package already exists at {0}", workingDir);
    }

    // the package name is mandatory on the command line
    var nameOption = new Options.PackageName();
    var newPackageName = CommandLineProcessor.Evaluate(nameOption);
    if (null == newPackageName)
    {
        throw new Exception("Cannot create new package: No name was defined. Use {0} on the command line to specify it.", (nameOption as ICommandLineArgument).LongName);
    }

    // version is optional
    var newPackageVersion = CommandLineProcessor.Evaluate(new Options.PackageVersion());

    // write the definition file into the freshly created bam folder
    var newDefinition = new PackageDefinition(newBamFolder, newPackageName, newPackageVersion);
    IOWrapper.CreateDirectory(newBamFolder);
    newDefinition.Write();

    // seed the scripts folder with an empty namespace for modules
    var newScriptsFolder = System.IO.Path.Combine(newBamFolder, ScriptsSubFolder);
    IOWrapper.CreateDirectory(newScriptsFolder);
    var skeletonScriptPath = System.IO.Path.Combine(newScriptsFolder, newPackageName) + ".cs";
    using (var scriptWriter = new System.IO.StreamWriter(skeletonScriptPath))
    {
        scriptWriter.NewLine = "\n"; // consistent line endings across platforms
        scriptWriter.WriteLine("using Bam.Core;");
        scriptWriter.WriteLine("namespace {0}", newPackageName);
        scriptWriter.WriteLine("{");
        scriptWriter.WriteLine(" // write modules here ...");
        scriptWriter.WriteLine("}");
    }

    Log.Info("Package {0} was successfully created at {1}", newDefinition.FullName, workingDir);
}
/// <summary>
/// Find all Module subclasses in the given namespace (and its '.tests' sub-namespace when
/// tests are enabled) within the assembly, then instantiate those marked 'sealed' as
/// top-level (buildable) modules for the given environment.
/// </summary>
/// <param name="assembly">The compiled package script assembly to reflect over.</param>
/// <param name="env">Build environment to instantiate modules for.</param>
/// <param name="ns">Namespace of the master package.</param>
/// <exception cref="Exception">Thrown when no modules, or no sealed (top-level) modules, exist, or creation fails.</exception>
CreateTopLevelModules(
    System.Reflection.Assembly assembly,
    Environment env,
    string ns)
{
    var includeTests = CommandLineProcessor.Evaluate(new Options.UseTests());
    var allTypes = assembly.GetTypes();
    // materialize the query once: it was previously a deferred LINQ query enumerated
    // multiple times (Count(), Where(), and the error-reporting loop below)
    var allModuleTypesInPackage = allTypes.Where(type => ((type.Namespace == ns) || (includeTests && (type.Namespace == ns + ".tests"))) && type.IsSubclassOf(typeof(Module))).ToList();
    if (!allModuleTypesInPackage.Any())
    {
        throw new Exception("No modules found in the namespace '{0}'. Please define some modules in the build scripts to use {0} as a master package.", ns);
    }
    // 'sealed' marks a module as top-level, i.e. directly buildable
    var allTopLevelModuleTypesInPackage = allModuleTypesInPackage.Where(type => type.IsSealed).ToList();
    if (!allTopLevelModuleTypesInPackage.Any())
    {
        var message = new System.Text.StringBuilder();
        message.AppendFormat("No top-level modules found in the namespace '{0}'. Please mark some of the modules below as 'sealed' to identify them as top-level, and thus buildable when {0} is the master package:", ns);
        message.AppendLine();
        foreach (var moduleType in allModuleTypesInPackage)
        {
            message.AppendFormat("\t{0}", moduleType.ToString());
            message.AppendLine();
        }
        throw new Exception(message.ToString());
    }
    try
    {
        this.CreateTopLevelModuleFromTypes(allTopLevelModuleTypesInPackage, env);
    }
    catch (Exception ex)
    {
        // wrap to record which namespace was being processed
        throw new Exception(ex, "An error occurred creating top-level modules in namespace '{0}':", ns);
    }
}
/// <summary>
/// Compile all package build scripts into a single assembly, written to the build root.
/// When not compiling with debug symbols, a hash of the source, definitions and Bam assembly
/// versions is used to re-use a previously cached assembly when nothing has changed.
/// </summary>
/// <param name="enforceBamAssemblyVersions">Whether Bam assembly version requirements are validated during package identification.</param>
/// <param name="enableClean">Whether the build root may be deleted first when the clean-first option is given.</param>
CompilePackageAssembly(
    bool enforceBamAssemblyVersions = true,
    bool enableClean = true)
{
    // validate build root
    if (null == Graph.Instance.BuildRoot)
    {
        throw new Exception("Build root has not been specified");
    }
    var gatherSourceProfile = new TimeProfile(ETimingProfiles.GatherSource);
    gatherSourceProfile.StartProfile();
    IdentifyAllPackages(enforceBamAssemblyVersions: enforceBamAssemblyVersions);
    var cleanFirst = CommandLineProcessor.Evaluate(new Options.CleanFirst());
    if (enableClean && cleanFirst && System.IO.Directory.Exists(Graph.Instance.BuildRoot))
    {
        Log.Info("Deleting build root '{0}'", Graph.Instance.BuildRoot);
        try
        {
            // make sure no files are read-only, which may have happened as part of collation preserving file attributes
            var dirInfo = new System.IO.DirectoryInfo(Graph.Instance.BuildRoot);
            foreach (var file in dirInfo.EnumerateFiles("*", System.IO.SearchOption.AllDirectories))
            {
                file.Attributes &= ~System.IO.FileAttributes.ReadOnly;
            }
            System.IO.Directory.Delete(Graph.Instance.BuildRoot, true);
        }
        catch (System.IO.IOException ex)
        {
            // best-effort: a failed clean is logged, not fatal
            Log.Info("Failed to delete build root, because {0}. Continuing", ex.Message);
        }
    }
    BuildModeUtilities.ValidateBuildModePackage();
    var definitions = new StringArray();
    // gather source files
    var sourceCode = new StringArray();
    int packageIndex = 0;
    foreach (var package in Graph.Instance.Packages)
    {
        Log.DebugMessage("{0}: '{1}' @ '{2}'", packageIndex, package.Version, (package.PackageRepositories.Count > 0) ? package.PackageRepositories[0] : "Not in a repository");
        // to compile with debug information, you must compile the files
        // to compile without, we need to file contents to hash the source
        if (Graph.Instance.CompileWithDebugSymbols)
        {
            // sourceCode holds file PATHS in this mode (consumed by CompileAssemblyFromFile below)
            var scripts = package.GetScriptFiles();
            sourceCode.AddRange(scripts);
            Log.DebugMessage(scripts.ToString("\n\t"));
        }
        else
        {
            // sourceCode holds file CONTENTS in this mode (consumed by CompileAssemblyFromSource below)
            foreach (var scriptFile in package.GetScriptFiles())
            {
                using (var reader = new System.IO.StreamReader(scriptFile))
                {
                    sourceCode.Add(reader.ReadToEnd());
                }
                Log.DebugMessage("\t'{0}'", scriptFile);
            }
        }
        // union of preprocessor definitions from all packages
        foreach (var define in package.Definitions)
        {
            if (!definitions.Contains(define))
            {
                definitions.Add(define);
            }
        }
        ++packageIndex;
    }
    // add/remove other definitions
    definitions.Add(VersionDefineForCompiler);
    definitions.Add(HostPlatformDefineForCompiler);
    definitions.Sort();
    gatherSourceProfile.StopProfile();
    var assemblyCompileProfile = new TimeProfile(ETimingProfiles.AssemblyCompilation);
    assemblyCompileProfile.StartProfile();
    // assembly is written to the build root
    var cachedAssemblyPathname = System.IO.Path.Combine(Graph.Instance.BuildRoot, ".CachedPackageAssembly");
    cachedAssemblyPathname = System.IO.Path.Combine(cachedAssemblyPathname, Graph.Instance.MasterPackage.Name) + ".dll";
    var hashPathName = System.IO.Path.ChangeExtension(cachedAssemblyPathname, "hash");
    string thisHashCode = null;
    var cacheAssembly = !CommandLineProcessor.Evaluate(new Options.DisableCacheAssembly());
    string compileReason = null;
    if (Graph.Instance.CompileWithDebugSymbols)
    {
        compileReason = "debug symbols were enabled";
    }
    else
    {
        // can an existing assembly be reused?
        thisHashCode = GetPackageHash(sourceCode, definitions, Graph.Instance.MasterPackage.BamAssemblies);
        if (cacheAssembly)
        {
            if (System.IO.File.Exists(hashPathName))
            {
                using (var reader = new System.IO.StreamReader(hashPathName))
                {
                    var diskHashCode = reader.ReadLine();
                    if (diskHashCode.Equals(thisHashCode))
                    {
                        // hash match: skip compilation entirely and point the graph at the cached assembly
                        Log.DebugMessage("Cached assembly used '{0}', with hash {1}", cachedAssemblyPathname, diskHashCode);
                        Log.Detail("Re-using existing package assembly");
                        Graph.Instance.ScriptAssemblyPathname = cachedAssemblyPathname;
                        assemblyCompileProfile.StopProfile();
                        return;
                    }
                    else
                    {
                        compileReason = "package source has changed since the last compile";
                    }
                }
            }
            else
            {
                compileReason = "no previously compiled package assembly exists";
            }
        }
        else
        {
            compileReason = "user has disabled package assembly caching";
        }
    }
    // use the compiler in the current runtime version to build the assembly of packages
    var clrVersion = System.Environment.Version;
    var compilerVersion = System.String.Format("v{0}.{1}", clrVersion.Major, clrVersion.Minor);
    Log.Detail("Compiling package assembly (C# compiler {0}{1}), because {2}.", compilerVersion, Graph.Instance.ProcessState.TargetFrameworkVersion != null ? (", targetting " + Graph.Instance.ProcessState.TargetFrameworkVersion) : string.Empty, compileReason);
    var providerOptions = new System.Collections.Generic.Dictionary<string, string>();
    providerOptions.Add("CompilerVersion", compilerVersion);
    if (Graph.Instance.ProcessState.RunningMono)
    {
        Log.DebugMessage("Compiling assembly for Mono");
    }
    using (var provider = new Microsoft.CSharp.CSharpCodeProvider(providerOptions))
    {
        var compilerParameters = new System.CodeDom.Compiler.CompilerParameters();
        compilerParameters.TreatWarningsAsErrors = true;
        compilerParameters.WarningLevel = 4;
        compilerParameters.GenerateExecutable = false;
        compilerParameters.GenerateInMemory = false;
        if (Graph.Instance.CompileWithDebugSymbols)
        {
            // debug build goes to the temp directory, not the cache location
            compilerParameters.OutputAssembly = System.IO.Path.Combine(System.IO.Path.GetTempPath(), Graph.Instance.MasterPackage.Name) + ".dll";
        }
        else
        {
            compilerParameters.OutputAssembly = cachedAssemblyPathname;
        }
        var compilerOptions = "/checked+ /unsafe-";
        if (Graph.Instance.CompileWithDebugSymbols)
        {
            compilerParameters.IncludeDebugInformation = true;
            compilerOptions += " /optimize-";
        }
        else
        {
            compilerOptions += " /optimize+";
        }
        compilerOptions += " /platform:anycpu";
        // define strings
        compilerOptions += " /define:" + definitions.ToString(';');
        compilerParameters.CompilerOptions = compilerOptions;
        if (provider.Supports(System.CodeDom.Compiler.GeneratorSupport.Resources))
        {
            // Bam assembly
            // TODO: Q: why is it only for the master package? Why not all of them, which may have additional dependencies?
            foreach (var assembly in Graph.Instance.MasterPackage.BamAssemblies)
            {
                var assemblyFileName = System.String.Format("{0}.dll", assembly.Name);
                var assemblyPathName = System.IO.Path.Combine(Graph.Instance.ProcessState.ExecutableDirectory, assemblyFileName);
                compilerParameters.ReferencedAssemblies.Add(assemblyPathName);
            }
            // DotNet assembly
            foreach (var desc in Graph.Instance.MasterPackage.DotNetAssemblies)
            {
                var assemblyFileName = System.String.Format("{0}.dll", desc.Name);
                compilerParameters.ReferencedAssemblies.Add(assemblyFileName);
            }
            if (Graph.Instance.ProcessState.RunningMono)
            {
                compilerParameters.ReferencedAssemblies.Add("Mono.Posix.dll");
            }
        }
        else
        {
            throw new Exception("C# compiler does not support Resources");
        }
        // this will create the build root directory as necessary
        IOWrapper.CreateDirectory(System.IO.Path.GetDirectoryName(compilerParameters.OutputAssembly));
        // file paths in debug mode, file contents otherwise (matches how sourceCode was populated above)
        var results = Graph.Instance.CompileWithDebugSymbols ?
            provider.CompileAssemblyFromFile(compilerParameters, sourceCode.ToArray()) :
            provider.CompileAssemblyFromSource(compilerParameters, sourceCode.ToArray());
        if (results.Errors.HasErrors || results.Errors.HasWarnings)
        {
            var message = new System.Text.StringBuilder();
            message.AppendFormat("Failed to compile package '{0}'. There are {1} errors.", Graph.Instance.MasterPackage.FullName, results.Errors.Count);
            message.AppendLine();
            foreach (System.CodeDom.Compiler.CompilerError error in results.Errors)
            {
                message.AppendFormat("\t{0}({1}): {2} {3}", error.FileName, error.Line, error.ErrorNumber, error.ErrorText);
                message.AppendLine();
            }
            if (!Graph.Instance.CompileWithDebugSymbols)
            {
                message.AppendLine();
                ICommandLineArgument debugOption = new Options.UseDebugSymbols();
                message.AppendFormat("Use the {0}/{1} command line option with bam for more accurate error messages.", debugOption.LongName, debugOption.ShortName);
                message.AppendLine();
            }
            message.AppendLine();
            ICommandLineArgument createDebugProjectOption = new Options.CreateDebugProject();
            message.AppendFormat("Use the {0}/{1} command line option with bam to create an editable IDE project containing the build scripts.", createDebugProjectOption.LongName, createDebugProjectOption.ShortName);
            message.AppendLine();
            throw new Exception(message.ToString());
        }
        if (!Graph.Instance.CompileWithDebugSymbols)
        {
            if (cacheAssembly)
            {
                // persist the hash so the next run can re-use this assembly
                using (var writer = new System.IO.StreamWriter(hashPathName))
                {
                    writer.WriteLine(thisHashCode);
                }
            }
            else
            {
                // will not throw if the file doesn't exist
                System.IO.File.Delete(hashPathName);
            }
        }
        Log.DebugMessage("Written assembly to '{0}'", compilerParameters.OutputAssembly);
        Graph.Instance.ScriptAssemblyPathname = compilerParameters.OutputAssembly;
    }
    assemblyCompileProfile.StopProfile();
}
/// <summary>
/// Discover all package definition files reachable from the known package repositories
/// (and repositories referenced by each discovered package), resolve the master package's
/// dependencies against them, and resolve any duplicate package names to a single version
/// using command line specifiers and definition-file defaults.
/// </summary>
/// <param name="allowDuplicates">When true, multiple versions of the same package may remain.</param>
/// <param name="enforceBamAssemblyVersions">When true, each package's Bam assembly version requirements are validated.</param>
IdentifyAllPackages(
    bool allowDuplicates = false,
    bool enforceBamAssemblyVersions = true)
{
    // worklist of repositories to scan; each entry remembers which definition file requested it (null for global repos)
    var packageRepos = new System.Collections.Generic.LinkedList<System.Tuple<string, PackageDefinition>>();
    foreach (var repo in Graph.Instance.PackageRepositories)
    {
        EnqueuePackageRepositoryToVisit(packageRepos, repo, null);
    }
    var masterDefinitionFile = GetMasterPackage();
    foreach (var repo in masterDefinitionFile.PackageRepositories)
    {
        EnqueuePackageRepositoryToVisit(packageRepos, repo, masterDefinitionFile);
    }
    // read the definition files of any package found in the package roots
    var candidatePackageDefinitions = new Array<PackageDefinition>();
    candidatePackageDefinitions.Add(masterDefinitionFile);
    while (packageRepos.Count > 0)
    {
        var repoTuple = packageRepos.First();
        packageRepos.RemoveFirst();
        var repo = repoTuple.Item1;
        if (!System.IO.Directory.Exists(repo))
        {
            var message = new System.Text.StringBuilder();
            message.AppendFormat("Package repository directory {0} does not exist.", repo);
            message.AppendLine();
            message.AppendFormat("Repository requested from {0}", repoTuple.Item2.XMLFilename);
            message.AppendLine();
            throw new Exception(message.ToString());
        }
        // every 'bam' sub-folder anywhere under the repository marks a package
        var candidatePackageDirs = System.IO.Directory.GetDirectories(repo, BamSubFolder, System.IO.SearchOption.AllDirectories);
        Graph.Instance.PackageRepositories.Add(repo);
        foreach (var bamDir in candidatePackageDirs)
        {
            var packageDir = System.IO.Path.GetDirectoryName(bamDir);
            var packageDefinitionPath = GetPackageDefinitionPathname(packageDir);
            // ignore any duplicates (can be found due to nested repositories)
            if (null != candidatePackageDefinitions.FirstOrDefault(item => item.XMLFilename == packageDefinitionPath))
            {
                continue;
            }
            var definitionFile = new PackageDefinition(packageDefinitionPath);
            definitionFile.Read();
            candidatePackageDefinitions.Add(definitionFile);
            // repositories referenced by this package are scanned too (transitive discovery)
            foreach (var newRepo in definitionFile.PackageRepositories)
            {
                EnqueuePackageRepositoryToVisit(packageRepos, newRepo, definitionFile);
            }
        }
    }
    // defaults come from
    // - the master definition file
    // - command line args (these trump the mdf)
    // and only requires resolving when referenced
    var packageDefinitions = new Array<PackageDefinition>();
    PackageDefinition.ResolveDependencies(masterDefinitionFile, packageDefinitions, candidatePackageDefinitions);
    // now resolve any duplicate names using defaults
    // unless duplicates are allowed
    var duplicatePackageNames = packageDefinitions.GroupBy(item => item.Name).Where(item => item.Count() > 1).Select(item => item.Key);
    var uniquePackageNames = packageDefinitions.GroupBy(item => item.Name).Where(item => item.Count() == 1).Select(item => item.Key);
    var versionSpeciferArgs = new Options.PackageDefaultVersion();
    var packageVersionSpecifiers = CommandLineProcessor.Evaluate(versionSpeciferArgs);
    if ((duplicatePackageNames.Count() > 0) && !allowDuplicates)
    {
        var toRemove = new Array<PackageDefinition>();
        foreach (var dupName in duplicatePackageNames)
        {
            // NOTE: 'duplicates' is a deferred query over packageDefinitions; RemoveAll calls
            // below intentionally change what it yields when re-enumerated
            var duplicates = packageDefinitions.Where(item => item.Name == dupName);
            var resolvedDuplicate = TryToResolveDuplicate(masterDefinitionFile, dupName, duplicates, packageDefinitions, packageVersionSpecifiers, toRemove);
            if (null != resolvedDuplicate)
            {
                continue;
            }
            // try removing any packages that have already been resolved
            // which can remove additional packages (recursive check) because they had been added solely by those we are just about to remove
            packageDefinitions.RemoveAll(PackagesToRemove(toRemove, packageDefinitions, masterDefinitionFile));
            packageDefinitions.RemoveAll(toRemove);
            // and if that has reduced the duplicates for this package down to a single version, we're good to carry on
            var numDuplicates = duplicates.Count();
            if (1 == numDuplicates)
            {
                toRemove.Clear();
                continue;
            }
            // otherwise, error
            var resolveErrorMessage = new System.Text.StringBuilder();
            if (numDuplicates > 0)
            {
                resolveErrorMessage.AppendFormat("Unable to resolve to a single version of package {0}. Use --{0}.version=<version> to resolve.", dupName);
                resolveErrorMessage.AppendLine();
                resolveErrorMessage.AppendLine("Available versions of the package are:");
                foreach (var dup in duplicates)
                {
                    resolveErrorMessage.AppendFormat("\t{0}", dup.Version);
                    resolveErrorMessage.AppendLine();
                }
            }
            else
            {
                resolveErrorMessage.AppendFormat("No version of package {0} has been determined to be available.", dupName);
                resolveErrorMessage.AppendLine();
                if (toRemove.Count() > 0)
                {
                    resolveErrorMessage.AppendFormat("If there were any references to {0}, they may have been removed from consideration by the following packages being discarded:", dupName);
                    resolveErrorMessage.AppendLine();
                    foreach (var removed in toRemove)
                    {
                        resolveErrorMessage.AppendFormat("\t{0}", removed.FullName);
                        resolveErrorMessage.AppendLine();
                    }
                }
                resolveErrorMessage.AppendFormat("Please add an explicit dependency to (a version of) the {0} package either in your master package or one of its dependencies.", dupName);
                resolveErrorMessage.AppendLine();
            }
            throw new Exception(resolveErrorMessage.ToString());
        }
        // finally, clean up the package definition list to use, with all those that need to be deleted
        packageDefinitions.RemoveAll(toRemove);
    }
    // ensure that all packages with a single version in the definition files, does not have a command line override
    // that refers to a completely different version
    foreach (var uniquePkgName in uniquePackageNames)
    {
        foreach (var versionSpecifier in packageVersionSpecifiers)
        {
            if (!versionSpecifier.Contains(uniquePkgName))
            {
                continue;
            }
            var versionFromDefinition = packageDefinitions.First(item => item.Name == uniquePkgName).Version;
            if (versionSpecifier[1] != versionFromDefinition)
            {
                var noMatchMessage = new System.Text.StringBuilder();
                noMatchMessage.AppendFormat("Command line version specified, {0}, could not resolve to one of the available versions of package {1}:", versionSpecifier[1], uniquePkgName);
                noMatchMessage.AppendLine();
                noMatchMessage.AppendFormat("\t{0}", versionFromDefinition);
                noMatchMessage.AppendLine();
                throw new Exception(noMatchMessage.ToString());
            }
        }
    }
    if (enforceBamAssemblyVersions)
    {
        // for all packages that make up this assembly, ensure that their requirements on the version of the Bam
        // assemblies are upheld, prior to compiling the code
        foreach (var pkgDefn in packageDefinitions)
        {
            pkgDefn.ValidateBamAssemblyRequirements();
        }
    }
    Graph.Instance.SetPackageDefinitions(packageDefinitions);
}
/// <summary>
/// Top-level driver for a build: compile (or adopt) the package script assembly, discover
/// site policy / product definition / build-mode and package metadata, then run the four
/// build phases: create modules, sort into a dependency graph (+ validate), apply settings
/// patches and parse tokenized strings, and finally execute the graph.
/// </summary>
/// <param name="environments">Build environments (configurations) to create modules for; must be non-empty.</param>
/// <param name="packageAssembly">Optional pre-compiled package assembly; when null the scripts are compiled and loaded.</param>
Execute(
    Array<Environment> environments,
    System.Reflection.Assembly packageAssembly = null)
{
    PrintVersion();
    if (0 == environments.Count)
    {
        throw new Exception("No build configurations were specified");
    }
    var graph = Graph.Instance;
    if (null != packageAssembly)
    {
        PackageUtilities.IdentifyAllPackages();
        graph.ScriptAssembly = packageAssembly;
        graph.ScriptAssemblyPathname = packageAssembly.Location;
    }
    else
    {
        PackageUtilities.CompilePackageAssembly();
        PackageUtilities.LoadPackageAssembly();
    }
    var packageMetaDataProfile = new TimeProfile(ETimingProfiles.PackageMetaData);
    packageMetaDataProfile.StartProfile();
    // validate that there is at most one local policy
    // if test mode is enabled, then the '.tests' sub-namespaces are also checked
    {
        var localPolicies = graph.ScriptAssembly.GetTypes().Where(t => typeof(ISitePolicy).IsAssignableFrom(t));
        var includeTests = CommandLineProcessor.Evaluate(new Options.UseTests());
        if (!includeTests)
        {
            localPolicies = localPolicies.Where(item => !item.Namespace.EndsWith(".tests"));
        }
        var numLocalPolicies = localPolicies.Count();
        if (numLocalPolicies > 0)
        {
            if (numLocalPolicies > 1)
            {
                var message = new System.Text.StringBuilder();
                message.AppendLine("Too many site policies exist in the package assembly:");
                foreach (var policy in localPolicies)
                {
                    message.AppendFormat("\t{0}", policy.ToString());
                    message.AppendLine();
                }
                throw new Exception(message.ToString());
            }
            Settings.LocalPolicy = System.Activator.CreateInstance(localPolicies.First()) as ISitePolicy;
        }
    }
    // find a product definition (at most one allowed)
    {
        var productDefinitions = graph.ScriptAssembly.GetTypes().Where(t => typeof(IProductDefinition).IsAssignableFrom(t));
        var numProductDefinitions = productDefinitions.Count();
        if (numProductDefinitions > 0)
        {
            if (numProductDefinitions > 1)
            {
                var message = new System.Text.StringBuilder();
                message.AppendLine("Too many product definitions exist in the package assembly:");
                foreach (var def in productDefinitions)
                {
                    message.AppendFormat("\t{0}", def.ToString());
                    message.AppendLine();
                }
                throw new Exception(message.ToString());
            }
            graph.ProductDefinition = System.Activator.CreateInstance(productDefinitions.First()) as IProductDefinition;
        }
    }
    // get the metadata from the build mode package
    var metaName = System.String.Format("{0}Builder.{0}Meta", graph.Mode);
    var metaDataType = graph.ScriptAssembly.GetType(metaName);
    if (null == metaDataType)
    {
        throw new Exception("No build mode {0} meta data type {1}", graph.Mode, metaName);
    }
    if (!typeof(IBuildModeMetaData).IsAssignableFrom(metaDataType))
    {
        throw new Exception("Build mode package meta data type {0} does not implement the interface {1}", metaDataType.ToString(), typeof(IBuildModeMetaData).ToString());
    }
    graph.BuildModeMetaData = System.Activator.CreateInstance(metaDataType) as IBuildModeMetaData;
    // packages can have meta data - instantiate where they exist
    foreach (var package in graph.Packages)
    {
        var ns = package.Name;
        var metaType = graph.ScriptAssembly.GetTypes().FirstOrDefault(item => item.Namespace == ns && typeof(PackageMetaData).IsAssignableFrom(item));
        if (null == metaType)
        {
            continue;
        }
        try
        {
            package.MetaData = System.Activator.CreateInstance(metaType) as PackageMetaData;
        }
        // NOTE: a previous 'catch (Exception exception) { throw exception; }' clause was removed:
        // it only rethrew via 'throw exception', truncating the stack trace. Bam exceptions now
        // propagate untouched with their original trace.
        catch (System.Reflection.TargetInvocationException exception)
        {
            // Activator.CreateInstance wraps constructor failures in TargetInvocationException
            throw new Exception(exception, "Failed to create package metadata");
        }
    }
    packageMetaDataProfile.StopProfile();
    var topLevelNamespace = graph.MasterPackage.Name;
    var findBuildableModulesProfile = new TimeProfile(ETimingProfiles.IdentifyBuildableModules);
    findBuildableModulesProfile.StartProfile();
    // Phase 1: Instantiate all modules in the namespace of the package in which the tool was invoked
    Log.Detail("Creating modules");
    foreach (var env in environments)
    {
        graph.CreateTopLevelModules(graph.ScriptAssembly, env, topLevelNamespace);
    }
    findBuildableModulesProfile.StopProfile();
    var populateGraphProfile = new TimeProfile(ETimingProfiles.PopulateGraph);
    populateGraphProfile.StartProfile();
    // Phase 2: Graph now has a linear list of modules; create a dependency graph
    // NB: all those modules with 0 dependees are the top-level modules
    // not only does this generate the dependency graph, but also creates the default settings for each module, and completes them
    graph.SortDependencies();
    populateGraphProfile.StopProfile();
    // TODO: make validation optional, if it starts showing on profiles
    var validateGraphProfile = new TimeProfile(ETimingProfiles.ValidateGraph);
    validateGraphProfile.StartProfile();
    graph.Validate();
    validateGraphProfile.StopProfile();
    // Phase 3: apply patches (build + shared) to each module
    // NB: some builders can use the patch directly for child objects, so this may be dependent upon the builder
    // Toolchains for modules need to be set here, as they might append macros into each module in order to evaluate paths
    var createPatchesProfile = new TimeProfile(ETimingProfiles.CreatePatches);
    createPatchesProfile.StartProfile();
    graph.ApplySettingsPatches();
    createPatchesProfile.StopProfile();
    // expand paths after patching settings, because some of the patches may contain tokenized strings
    var parseStringsProfile = new TimeProfile(ETimingProfiles.ParseTokenizedStrings);
    parseStringsProfile.StartProfile();
    TokenizedString.ParseAll();
    parseStringsProfile.StopProfile();
    if (CommandLineProcessor.Evaluate(new Options.ViewDependencyGraph()))
    {
        // must come after all strings are parsed, in order to display useful paths
        graph.Dump();
    }
    // Phase 4: Execute dependency graph
    // N.B. all paths (including those with macros) have been delayed expansion until now
    var graphExecutionProfile = new TimeProfile(ETimingProfiles.GraphExecution);
    graphExecutionProfile.StartProfile();
    var executor = new Executor();
    executor.Run();
    graphExecutionProfile.StopProfile();
}
/// <summary>
/// Execute the sorted dependency graph, rank by rank (in reverse, so dependencies build
/// before dependents), either multi-threaded via a concurrency-limited task scheduler
/// or serially when a single thread is requested. The first module failure aborts the
/// build and is rethrown (wrapped) after all in-flight work settles.
/// </summary>
Run()
{
    Log.Detail("Running build...");
    // TODO: should the rank collections be sorted, so that modules with fewest dependencies are first?
    var graph = Graph.Instance;
    var metaDataType = graph.BuildModeMetaData.GetType();
    var useEvaluation = CheckIfModulesNeedRebuilding(metaDataType);
    var explainRebuild = CommandLineProcessor.Evaluate(new Options.ExplainBuildReason());
    var immediateOutput = CommandLineProcessor.Evaluate(new Options.ImmediateOutput());
    ExecutePreBuild(metaDataType);
    // necessary if built with debug symbols
    IOWrapper.CreateDirectoryIfNotExists(graph.BuildRoot);
    // 0 means 'auto': one thread per processor
    var threadCount = CommandLineProcessor.Evaluate(new Options.MultiThreaded());
    if (0 == threadCount)
    {
        threadCount = System.Environment.ProcessorCount;
    }
    // first failure observed on any path; checked after execution completes
    System.Exception abortException = null;
    if (threadCount > 1)
    {
        using (var cancellationSource = new System.Threading.CancellationTokenSource())
        {
            var cancellationToken = cancellationSource.Token;
            // LongRunning is absolutely necessary in order to achieve parallelism
            var creationOpts = System.Threading.Tasks.TaskCreationOptions.LongRunning;
            var continuationOpts = System.Threading.Tasks.TaskContinuationOptions.LongRunning;
            var scheduler = new LimitedConcurrencyLevelTaskScheduler(threadCount);
            var factory = new System.Threading.Tasks.TaskFactory(
                cancellationToken,
                creationOpts,
                continuationOpts,
                scheduler);
            var tasks = new Array<System.Threading.Tasks.Task>();
            foreach (var rank in graph.Reverse())
            {
                foreach (var module in rank)
                {
                    var context = new ExecutionContext(useEvaluation, explainRebuild, immediateOutput);
                    var task = factory.StartNew(() =>
                        {
                            if (cancellationToken.IsCancellationRequested)
                            {
                                return;
                            }
                            // block until every dependency/requirement task this module relies on has finished;
                            // tasks for earlier ranks were assigned to ExecutionTask before this task can run
                            var depTasks = new Array<System.Threading.Tasks.Task>();
                            foreach (var dep in module.Dependents)
                            {
                                if (null == dep.ExecutionTask)
                                {
                                    continue;
                                }
                                depTasks.Add(dep.ExecutionTask);
                            }
                            foreach (var dep in module.Requirements)
                            {
                                if (null == dep.ExecutionTask)
                                {
                                    continue;
                                }
                                depTasks.Add(dep.ExecutionTask);
                            }
                            System.Threading.Tasks.Task.WaitAll(depTasks.ToArray());
                            if (cancellationToken.IsCancellationRequested)
                            {
                                return;
                            }
                            try
                            {
                                (module as IModuleExecution).Execute(context);
                            }
                            catch (Exception ex)
                            {
                                // remember the failure and stop scheduling further modules
                                abortException = ex;
                                cancellationSource.Cancel();
                            }
                            finally
                            {
                                // flush any captured stdout/stderr even when the module failed
                                if (context.OutputStringBuilder != null && context.OutputStringBuilder.Length > 0)
                                {
                                    Log.Info(context.OutputStringBuilder.ToString());
                                }
                                if (context.ErrorStringBuilder != null && context.ErrorStringBuilder.Length > 0)
                                {
                                    Log.Info(context.ErrorStringBuilder.ToString());
                                }
                            }
                        });
                    tasks.Add(task);
                    module.ExecutionTask = task;
                }
            }
            try
            {
                System.Threading.Tasks.Task.WaitAll(tasks.ToArray());
            }
            catch (System.AggregateException exception)
            {
                // cancellation is the expected abort path; anything else is a real error
                if (!(exception.InnerException is System.Threading.Tasks.TaskCanceledException))
                {
                    throw new Exception(exception, "Error during threaded build");
                }
            }
        }
    }
    else
    {
        // serial execution path
        foreach (var rank in graph.Reverse())
        {
            if (null != abortException)
            {
                break;
            }
            foreach (IModuleExecution module in rank)
            {
                var context = new ExecutionContext(useEvaluation, explainRebuild, immediateOutput);
                try
                {
                    module.Execute(context);
                }
                catch (Exception ex)
                {
                    abortException = ex;
                    break;
                }
                finally
                {
                    // flush any captured stdout/stderr even when the module failed
                    if (context.OutputStringBuilder != null && context.OutputStringBuilder.Length > 0)
                    {
                        Log.Info(context.OutputStringBuilder.ToString());
                    }
                    if (context.ErrorStringBuilder != null && context.ErrorStringBuilder.Length > 0)
                    {
                        Log.Info(context.ErrorStringBuilder.ToString());
                    }
                }
            }
        }
    }
    if (null != abortException)
    {
        throw new Exception(abortException, "Error during {0}threaded build", (threadCount > 1) ? string.Empty : "non-");
    }
    ExecutePostBuild(metaDataType);
}
IdentifyAllPackages(
    bool allowDuplicates = false,
    bool enforceBamAssemblyVersions = true)
{
    // Locates every package definition file reachable from the known package
    // repositories, resolves the dependency graph rooted at the master package,
    // disambiguates duplicate package versions (command line specifiers take
    // precedence over master-definition 'default' dependents), and publishes the
    // final set via Graph.Instance.SetPackageDefinitions.
    // Side effect: visited repositories are appended to Graph.Instance.PackageRepositories.
    // Throws Exception on a missing repository directory or an unresolvable duplicate.

    // seed the worklist of repositories to scan, de-duplicating as we go
    var packageRepos = new System.Collections.Generic.Queue <string>();
    foreach (var repo in Graph.Instance.PackageRepositories)
    {
        if (packageRepos.Contains(repo))
        {
            continue;
        }
        packageRepos.Enqueue(repo);
    }
    var masterDefinitionFile = GetMasterPackage(enforceBamAssemblyVersions: enforceBamAssemblyVersions);
    // the master package may name additional repositories of its own
    foreach (var repo in masterDefinitionFile.PackageRepositories)
    {
        if (packageRepos.Contains(repo))
        {
            continue;
        }
        packageRepos.Enqueue(repo);
    }
    // read the definition files of any package found in the package roots
    var candidatePackageDefinitions = new Array <PackageDefinition>();
    candidatePackageDefinitions.Add(masterDefinitionFile);
    while (packageRepos.Count > 0)
    {
        var repo = packageRepos.Dequeue();
        if (!System.IO.Directory.Exists(repo))
        {
            throw new Exception("Package repository directory {0} does not exist", repo);
        }
        // any directory containing a Bam subfolder (at any depth) is treated as a package
        var candidatePackageDirs = System.IO.Directory.GetDirectories(repo, BamSubFolder, System.IO.SearchOption.AllDirectories);
        Graph.Instance.PackageRepositories.Add(repo);
        foreach (var bamDir in candidatePackageDirs)
        {
            var packageDir = System.IO.Path.GetDirectoryName(bamDir);
            var packageDefinitionPath = GetPackageDefinitionPathname(packageDir);
            // ignore any duplicates (can be found due to nested repositories)
            if (null != candidatePackageDefinitions.Where(item => item.XMLFilename == packageDefinitionPath).FirstOrDefault())
            {
                continue;
            }
            var definitionFile = new PackageDefinition(packageDefinitionPath, !Graph.Instance.ForceDefinitionFileUpdate);
            definitionFile.Read(true, enforceBamAssemblyVersions);
            candidatePackageDefinitions.Add(definitionFile);
            // repositories named by a freshly read definition file are scanned too
            // (transitive discovery); skip ones already recorded as visited
            foreach (var newRepo in definitionFile.PackageRepositories)
            {
                if (Graph.Instance.PackageRepositories.Contains(newRepo))
                {
                    continue;
                }
                packageRepos.Enqueue(newRepo);
            }
        }
    }
    // defaults come from
    // - the master definition file
    // - command line args (these trump the mdf)
    // and only requires resolving when referenced
    var packageDefinitions = new Array <PackageDefinition>();
    PackageDefinition.ResolveDependencies(masterDefinitionFile, packageDefinitions, candidatePackageDefinitions);
    // now resolve any duplicate names using defaults
    // unless duplicates are allowed
    var duplicatePackageNames = packageDefinitions.GroupBy(item => item.Name).Where(item => item.Count() > 1).Select(item => item.Key);
    if ((duplicatePackageNames.Count() > 0) && !allowDuplicates)
    {
        var versionSpeciferArgs = new Options.PackageDefaultVersion();
        var packageVersionSpecifiers = CommandLineProcessor.Evaluate(versionSpeciferArgs);
        var toRemove = new Array <PackageDefinition>();
        foreach (var dupName in duplicatePackageNames)
        {
            // deferred LINQ query; stays valid because removals are deferred to after the loop
            var duplicates = packageDefinitions.Where(item => item.Name == dupName);
            PackageDefinition resolvedDuplicate = null;
            // command line specifications take precedence to resolve a duplicate
            foreach (var specifier in packageVersionSpecifiers)
            {
                if (!specifier.Contains(dupName))
                {
                    continue;
                }
                foreach (var dupPackage in duplicates)
                {
                    if (specifier[1] == dupPackage.Version)
                    {
                        resolvedDuplicate = dupPackage;
                        break;
                    }
                }
                if (resolvedDuplicate != null)
                {
                    break;
                }
                // a specifier named this package but matched none of its available versions: fatal
                var noMatchMessage = new System.Text.StringBuilder();
                noMatchMessage.AppendFormat("Command line version specified, {0}, could not resolve to one of the available versions of package {1}:", specifier[1], duplicates.First().Name);
                noMatchMessage.AppendLine();
                foreach (var dup in duplicates)
                {
                    noMatchMessage.AppendFormat("\t{0}", dup.Version);
                    noMatchMessage.AppendLine();
                }
                throw new Exception(noMatchMessage.ToString());
            }
            if (resolvedDuplicate != null)
            {
                // keep only the chosen version; every other same-named package is discarded
                toRemove.AddRange(packageDefinitions.Where(item => (item.Name == dupName) && (item != resolvedDuplicate)));
                continue;
            }
            // now look at the master dependency file, for any 'default' specifications
            // (Dependents tuples are (name, version, isDefault) — see AddDependentPackage)
            var masterDependency = masterDefinitionFile.Dependents.Where(item => item.Item1 == dupName && item.Item3.HasValue && item.Item3.Value).FirstOrDefault();
            if (null != masterDependency)
            {
                toRemove.AddRange(packageDefinitions.Where(item => (item.Name == dupName) && (item.Version != masterDependency.Item2)));
                continue;
            }
            // neither command line nor defaults could disambiguate: fatal
            var resolveErrorMessage = new System.Text.StringBuilder();
            resolveErrorMessage.AppendFormat("Unable to resolve to a single version of package {0}. Use --{0}.version=<version> to resolve. Available versions of the package are:", duplicates.First().Name);
            resolveErrorMessage.AppendLine();
            foreach (var dup in duplicates)
            {
                resolveErrorMessage.AppendFormat("\t{0}", dup.Version);
                resolveErrorMessage.AppendLine();
            }
            throw new Exception(resolveErrorMessage.ToString());
        }
        // apply all removals in one pass, after enumeration of the live queries has finished
        packageDefinitions.RemoveAll(toRemove);
    }
    Graph.Instance.SetPackageDefinitions(packageDefinitions);
}
IdentifyAllPackages(
    bool allowDuplicates = false,
    bool enforceBamAssemblyVersions = true)
{
    // Discovers all package definitions reachable from the package repositories
    // (breadth-first over a worklist, with progress reporting), resolves the
    // dependency graph rooted at the master package, removes duplicate package
    // versions (command line specifiers and master-package defaults, via
    // TryToResolveDuplicate/FindPackagesToRemove), validates single-version
    // packages against command line overrides, and publishes the survivors via
    // Graph.Instance.SetPackageDefinitions.
    // Side effect: visited repositories are appended to Graph.Instance.PackageRepositories.
    var packageRepos = new System.Collections.Generic.LinkedList <System.Tuple <string, PackageDefinition> >();
    // high-water mark of repositories ever enqueued; drives the percentage progress
    // display and the DEBUG-only consistency check below
    int reposHWM = 0;
    foreach (var repo in Graph.Instance.PackageRepositories)
    {
        EnqueuePackageRepositoryToVisit(packageRepos, ref reposHWM, repo, null);
    }
    var masterDefinitionFile = GetMasterPackage();
    // inject any packages from the command line into the master definition file
    // and these will be defaults
    var injectPackages = CommandLineProcessor.Evaluate(new Options.InjectDefaultPackage());
    if (null != injectPackages)
    {
        foreach (var injected in injectPackages)
        {
            var name = injected[0];
            string version = null;
            if (injected.Count > 1)
            {
                version = injected[1].TrimStart(new [] { '-' }); // see regex in InjectDefaultPackage
            }
            var is_default = true;
            var dependent = new System.Tuple <string, string, bool?>(name, version, is_default);
            masterDefinitionFile.Dependents.AddUnique(dependent);
        }
    }
    // the master package may name additional repositories of its own
    foreach (var repo in masterDefinitionFile.PackageRepositories)
    {
        EnqueuePackageRepositoryToVisit(packageRepos, ref reposHWM, repo, masterDefinitionFile);
    }
    // read the definition files of any package found in the package roots
    var candidatePackageDefinitions = new Array <PackageDefinition>();
    candidatePackageDefinitions.Add(masterDefinitionFile);
    var packageReposVisited = 0;
    Log.Detail("Querying package repositories...");
    while (packageRepos.Count > 0)
    {
        // Item1 = repository path, Item2 = definition file that requested it (null for
        // repositories seeded from Graph.Instance)
        var repoTuple = packageRepos.First();
        packageRepos.RemoveFirst();
        var repo = repoTuple.Item1;
        if (!System.IO.Directory.Exists(repo))
        {
            var message = new System.Text.StringBuilder();
            message.AppendFormat("Package repository directory {0} does not exist.", repo);
            message.AppendLine();
            message.AppendFormat("Repository requested from {0}", repoTuple.Item2.XMLFilename);
            message.AppendLine();
            throw new Exception(message.ToString());
        }
        // faster than System.IO.Directory.GetDirectories(repo, BamSubFolder, System.IO.SearchOption.AllDirectories);
        // when there are deep directories: only top-level directories containing a Bam
        // subfolder are considered packages
        StringArray candidatePackageDirs = new StringArray();
        var possiblePackages = System.IO.Directory.GetDirectories(repo, "*", System.IO.SearchOption.TopDirectoryOnly);
        foreach (var packageDir in possiblePackages)
        {
            var possibleBamFolder = System.IO.Path.Combine(packageDir, BamSubFolder);
            if (System.IO.Directory.Exists(possibleBamFolder))
            {
                candidatePackageDirs.Add(packageDir);
            }
        }
        Graph.Instance.PackageRepositories.Add(repo);
        foreach (var packageDir in candidatePackageDirs)
        {
            var packageDefinitionPath = GetPackageDefinitionPathname(packageDir);
            // ignore any duplicates (can be found due to nested repositories)
            if (null != candidatePackageDefinitions.FirstOrDefault(item => item.XMLFilename == packageDefinitionPath))
            {
                continue;
            }
            var definitionFile = new PackageDefinition(packageDefinitionPath);
            definitionFile.Read();
            candidatePackageDefinitions.Add(definitionFile);
            // repositories named by a freshly read definition file are scanned too
            foreach (var newRepo in definitionFile.PackageRepositories)
            {
                EnqueuePackageRepositoryToVisit(packageRepos, ref reposHWM, newRepo, definitionFile);
            }
        }
        ++packageReposVisited;
        Log.DetailProgress("{0,3}%", (int)(100 * ((float)packageReposVisited / reposHWM)));
    }
#if DEBUG
    if (packageReposVisited != reposHWM)
    {
        throw new Exception("Inconsistent package repository count: {0} added, {1} visited", reposHWM, packageReposVisited);
    }
#endif
    // defaults come from
    // - the master definition file
    // - command line args (these trump the mdf)
    // and only requires resolving when referenced
    var packageDefinitions = new Array <PackageDefinition>();
    PackageDefinition.ResolveDependencies(masterDefinitionFile, packageDefinitions, candidatePackageDefinitions);
    // now resolve any duplicate names using defaults
    // unless duplicates are allowed
    // NOTE(review): both queries below are deferred LINQ over packageDefinitions, which
    // is mutated (RemoveAll) while duplicatePackageNames is being enumerated — confirm
    // Array<T> enumeration tolerates mid-enumeration removal
    var duplicatePackageNames = packageDefinitions.GroupBy(item => item.Name).Where(item => item.Count() > 1).Select(item => item.Key);
    var uniquePackageNames = packageDefinitions.GroupBy(item => item.Name).Where(item => item.Count() == 1).Select(item => item.Key);
    var versionSpeciferArgs = new Options.PackageDefaultVersion();
    var packageVersionSpecifiers = CommandLineProcessor.Evaluate(versionSpeciferArgs);
    if ((duplicatePackageNames.Count() > 0) && !allowDuplicates)
    {
        foreach (var dupName in duplicatePackageNames)
        {
            Log.DebugMessage("Duplicate '{0}'; total packages {1}", dupName, packageDefinitions.Count);
            var duplicates = packageDefinitions.Where(item => item.Name == dupName);
            var toRemove = new Array <PackageDefinition>();
            var resolvedDuplicate = TryToResolveDuplicate(masterDefinitionFile, dupName, duplicates, packageDefinitions, packageVersionSpecifiers, toRemove);
            Log.DebugMessage("Attempting to remove:\n\t{0}", toRemove.ToString("\n\t"));
            // try removing any packages that have already been resolved
            // which, in turn, can remove additional packages that have become orphaned by other removals
            packageDefinitions.RemoveAll(FindPackagesToRemove(toRemove, packageDefinitions, masterDefinitionFile));
            if (null != resolvedDuplicate)
            {
                continue;
            }
            // and if that has reduced the duplicates for this package down to a single version, we're good to carry on
            var numDuplicates = duplicates.Count(); // this is LINQ, so it's 'live'
            if (1 == numDuplicates)
            {
                continue;
            }
            // otherwise, error
            var resolveErrorMessage = new System.Text.StringBuilder();
            if (numDuplicates > 0)
            {
                resolveErrorMessage.AppendFormat("Unable to resolve to a single version of package {0}. Use --{0}.version=<version> to resolve.", dupName);
                resolveErrorMessage.AppendLine();
                resolveErrorMessage.AppendLine("Available versions of the package are:");
                foreach (var dup in duplicates)
                {
                    resolveErrorMessage.AppendFormat("\t{0}", dup.Version);
                    resolveErrorMessage.AppendLine();
                }
            }
            else
            {
                // the removals above discarded every candidate version of this package
                resolveErrorMessage.AppendFormat("No version of package {0} has been determined to be available.", dupName);
                resolveErrorMessage.AppendLine();
                if (toRemove.Count() > 0)
                {
                    resolveErrorMessage.AppendFormat("If there were any references to {0}, they may have been removed from consideration by the following packages being discarded:", dupName);
                    resolveErrorMessage.AppendLine();
                    foreach (var removed in toRemove)
                    {
                        resolveErrorMessage.AppendFormat("\t{0}", removed.FullName);
                        resolveErrorMessage.AppendLine();
                    }
                }
                resolveErrorMessage.AppendFormat("Please add an explicit dependency to (a version of) the {0} package either in your master package or one of its dependencies.", dupName);
                resolveErrorMessage.AppendLine();
            }
            throw new Exception(resolveErrorMessage.ToString());
        }
    }
    // ensure that all packages with a single version in the definition files, does not have a command line override
    // that refers to a completely different version
    foreach (var uniquePkgName in uniquePackageNames)
    {
        foreach (var versionSpecifier in packageVersionSpecifiers)
        {
            if (!versionSpecifier.Contains(uniquePkgName))
            {
                continue;
            }
            var versionFromDefinition = packageDefinitions.First(item => item.Name == uniquePkgName).Version;
            if (versionSpecifier[1] != versionFromDefinition)
            {
                var noMatchMessage = new System.Text.StringBuilder();
                noMatchMessage.AppendFormat("Command line version specified, {0}, could not resolve to one of the available versions of package {1}:", versionSpecifier[1], uniquePkgName);
                noMatchMessage.AppendLine();
                noMatchMessage.AppendFormat("\t{0}", versionFromDefinition);
                noMatchMessage.AppendLine();
                throw new Exception(noMatchMessage.ToString());
            }
        }
    }
    if (enforceBamAssemblyVersions)
    {
        // for all packages that make up this assembly, ensure that their requirements on the version of the Bam
        // assemblies are upheld, prior to compiling the code
        foreach (var pkgDefn in packageDefinitions)
        {
            pkgDefn.ValidateBamAssemblyRequirements();
        }
    }
    Graph.Instance.SetPackageDefinitions(packageDefinitions);
}