private CompositeGraphFingerprint GenerateRandomTopLevelHash(string configPath, string index, bool flag)
{
    // Builds an isolated context/configuration whose layout paths vary with
    // 'index' and whose boolean engine options follow 'flag', then computes
    // the resulting top-level graph fingerprint.
    var context = BuildXLContext.CreateInstanceForTesting();
    var pathTable = context.PathTable;
    var configuration = ConfigurationHelpers.GetDefaultForTesting(pathTable, AbsolutePath.Create(pathTable, configPath));

    var evaluationFilter = new EvaluationFilter(
        context.SymbolTable,
        pathTable,
        new FullSymbol[0],
        new[]
        {
            AbsolutePath.Create(pathTable, Path.Combine(TemporaryDirectory, $"testFile{index}.txt")),
        },
        CollectionUtilities.EmptyArray<StringId>());

    // Every layout path carries the index so distinct indices produce distinct fingerprints.
    configuration.Layout.ObjectDirectory = AbsolutePath.Create(pathTable, Path.Combine(TemporaryDirectory, $"ObjectDirectory{index}"));
    configuration.Layout.TempDirectory = AbsolutePath.Create(pathTable, Path.Combine(TemporaryDirectory, $"TempDirectory{index}"));
    configuration.Layout.SourceDirectory = AbsolutePath.Create(pathTable, Path.Combine(TemporaryDirectory, $"SourceDirectory{index}"));
    configuration.Logging.SubstTarget = AbsolutePath.Create(pathTable, Path.Combine(TemporaryDirectory, $"SubstTarget{index}"));

    configuration.Engine.CompressGraphFiles = flag;
    configuration.Schedule.SkipHashSourceFile = flag;
    configuration.Schedule.ComputePipStaticFingerprints = flag;

    var loggingContext = CreateLoggingContextForTest();
    var fileContentTable = FileContentTable.CreateNew(loggingContext);

    return GraphFingerprinter.TryComputeFingerprint(
        loggingContext,
        configuration.Startup,
        configuration,
        pathTable,
        evaluationFilter,
        fileContentTable,
        "111aaa",
        null).ExactFingerprint;
}
/// <summary>
/// Filter a workspace definition.
/// </summary>
public List<ModuleDefinition> FilterWorkspaceDefinition(
    [NotNull] WorkspaceDefinition workspace,
    [NotNull] EvaluationFilter evaluationFilter,
    [NotNull] ISpecDependencyProvider provider)
{
    // Resulting list should always have a prelude.
    var result = new HashSet<ModuleDefinition> { workspace.PreludeModule };

    // Compute the set of spec files relevant to this build.
    var relevantFiles = GetFilesToInclude(result, workspace, provider, evaluationFilter);

    // Group the surviving spec paths by their owning module.
    var specsByModule = new Dictionary<ModuleDefinition, HashSet<AbsolutePath>>();
    foreach (var spec in workspace.Specs)
    {
        // File is not part of a 'must have' module but is a 'must have' spec.
        if (!relevantFiles.Contains(spec.Path))
        {
            continue;
        }

        specsByModule.GetOrAdd(spec.OwningModule, _ => new HashSet<AbsolutePath>()).Add(spec.Path);
    }

    // Rebuild each touched module so it carries only its relevant specs.
    foreach (var pair in specsByModule)
    {
        result.Add(pair.Key.WithSpecs(pair.Value.ToReadOnlySet()));
    }

    return result.ToList();
}
/// <summary>
/// Builds the relative URL for the user-statistics page of a sub-activity evaluation.
/// </summary>
/// <param name="ItemId">Id of the sub-activity item.</param>
/// <param name="CommunityID">Id of the community the item belongs to.</param>
/// <param name="filter">Evaluation filter to encode (as its integer value) in the query string.</param>
/// <param name="certifiedDate">Reference date; defaults to <see cref="DateTime.Now"/> when null.</param>
/// <param name="isMooc">When true, an "isMooc" flag is appended to the URL.</param>
/// <returns>A URL template containing a "{0}" placeholder for the page number.</returns>
public static string EvaluateSubAct(Int64 ItemId, int CommunityID, EvaluationFilter filter, DateTime? certifiedDate, Boolean isMooc)
{
    // Use the null-coalescing operator instead of mutating the parameter and
    // casting the nullable back to DateTime (behavior is identical).
    DateTime effectiveDate = certifiedDate ?? DateTime.Now;

    return "Modules/Moocs/UsersStat.aspx?ItemId=" + ItemId
        + "&ComId=" + CommunityID
        + "&It=" + (int)ItemType.SubActivity
        + "&Page={0}&Eval=1&Filter=" + (int)filter
        + "&ST=" + effectiveDate.Ticks.ToString()
        + (isMooc ? "&isMooc=" + isMooc.ToString() : "");
}
/// <summary>
/// Collects the files to analyze based on the evaluation filter.
/// </summary>
/// <remarks>
/// The workspace was built using the transitive closure and an over approximation of the specs.
/// For example: when passing a single spec file, the workspace loads with all specs in the module
/// it is a part of as well as all transitive modules.
/// This filter extracts the set of files that the filter selected so that we only analyze and
/// optionally fix the specified specs.
/// </remarks>
private static IReadOnlyDictionary<AbsolutePath, ISourceFile> CollectFilesToAnalyze(
    Workspace workspace,
    PathTable pathTable,
    AbsolutePath primaryConfigFile,
    EvaluationFilter evaluationFilter)
{
    // In this case we analyze all specs of all modules that the workspace contains
    if (!evaluationFilter.CanPerformPartialEvaluationScript(primaryConfigFile))
    {
        return(workspace.SpecSources.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.SourceFile));
    }

    var result = new Dictionary<AbsolutePath, ISourceFile>();

    // Include every spec of each module that the filter names explicitly.
    foreach (var moduleToResolve in evaluationFilter.ModulesToResolve)
    {
        // Skip out the prelude module since we don't support all its constructs yet.
        foreach (var module in workspace.SpecModules)
        {
            // NOTE(review): no StringComparison is passed, so this is a case-sensitive
            // ordinal comparison — confirm module names are meant to match case-sensitively.
            if (string.Equals(module.Descriptor.Name, moduleToResolve.ToString(pathTable.StringTable)))
            {
                foreach (var kv in module.Specs)
                {
                    result[kv.Key] = kv.Value;
                }
            }
        }
    }

    // Include every spec located at, or underneath, each value-definition root in the filter.
    foreach (var specRootToResolve in evaluationFilter.ValueDefinitionRootsToResolve)
    {
        foreach (var kv in workspace.SpecSources)
        {
            if (workspace.PreludeModule != null)
            {
                if (workspace.PreludeModule.Specs.ContainsKey(kv.Key))
                {
                    // Skip out the specs that are the prelude since we don't support all its constructs yet.
                    continue;
                }
            }

            if (kv.Key == specRootToResolve || kv.Key.IsWithin(pathTable, specRootToResolve))
            {
                result[kv.Key] = kv.Value.SourceFile;
            }
        }
    }

    return(result);
}
/// <summary>
/// Tries to build a workspace for the given configuration and evaluation filter.
/// Thin wrapper: forwards every argument to <c>TryBuildWorkspaceInternal</c> with
/// <c>forIDE: false</c> (i.e. the non-IDE code path).
/// </summary>
public static bool TryBuildWorkspace(
    ICommandLineConfiguration commandLineConfig,
    FrontEndContext frontEndContext,
    EngineContext engineContext,
    EvaluationFilter evaluationFilter,
    EventHandler<WorkspaceProgressEventArgs> progressHandler,
    out Workspace workspace,
    out FrontEndHostController frontEndHostController,
    out IMutablePipGraph pipGraph,
    WorkspaceBuilderConfiguration configuration,
    FrontEndEngineAbstraction frontEndEngineAbstraction = null,
    bool collectMemoryAsSoonAsPossible = true)
{
    return(TryBuildWorkspaceInternal(
        commandLineConfig,
        frontEndContext,
        engineContext,
        evaluationFilter,
        progressHandler,
        out workspace,
        out frontEndHostController,
        out pipGraph,
        configuration,
        forIDE: false,
        frontEndEngineAbstraction,
        collectMemoryAsSoonAsPossible));
}
public void GenerateHashWithDifferentEvaluationFilters()
{
    // Two fingerprints computed over identical configurations but different
    // evaluation filters must differ, and the comparison must report the
    // evaluation-filter change as the cache-miss reason.
    WriteFile("config.ds", "SampleConfig");

    var context = BuildXLContext.CreateInstanceForTesting();
    var pathTable = context.PathTable;
    var configuration = ConfigurationHelpers.GetDefaultForTesting(pathTable, AbsolutePath.Create(pathTable, Path.Combine(TemporaryDirectory, "config.ds")));

    // Local factory for a filter rooted at a single file in the temporary directory.
    EvaluationFilter MakeFilter(string fileName) =>
        new EvaluationFilter(
            context.SymbolTable,
            pathTable,
            new FullSymbol[0],
            new[] { AbsolutePath.Create(pathTable, Path.Combine(TemporaryDirectory, fileName)) },
            CollectionUtilities.EmptyArray<StringId>());

    var filterA = MakeFilter("testFile1.txt");
    var filterB = MakeFilter("testFile2.txt");

    configuration.Layout.ObjectDirectory = AbsolutePath.Create(pathTable, Path.Combine(TemporaryDirectory, "ObjectDirectory1"));
    configuration.Layout.TempDirectory = AbsolutePath.Create(pathTable, Path.Combine(TemporaryDirectory, "TempDirectory1"));
    configuration.Layout.SourceDirectory = AbsolutePath.Create(pathTable, Path.Combine(TemporaryDirectory, "SourceDirectory1"));
    configuration.Logging.SubstTarget = AbsolutePath.Create(pathTable, Path.Combine(TemporaryDirectory, "SubstTarget1"));
    configuration.Engine.CompressGraphFiles = false;
    configuration.Schedule.SkipHashSourceFile = false;
    configuration.Schedule.ComputePipStaticFingerprints = false;

    var loggingContext = CreateLoggingContextForTest();
    var fileContentTable = FileContentTable.CreateNew(loggingContext);

    var fingerprintA = GraphFingerprinter.TryComputeFingerprint(loggingContext, configuration.Startup, configuration, pathTable, filterA, fileContentTable, "111aaa", null).ExactFingerprint;
    var fingerprintB = GraphFingerprinter.TryComputeFingerprint(loggingContext, configuration.Startup, configuration, pathTable, filterB, fileContentTable, "111aaa", null).ExactFingerprint;

    var comparison = fingerprintA.CompareFingerprint(fingerprintB);
    Assert.Equal(GraphCacheMissReason.EvaluationFilterChanged, comparison);
}
/// <nodoc />
internal void FilterWorkspace(Workspace workspace, EvaluationFilter evaluationFilter)
{
    // Nothing to do when the filter cannot narrow the build down.
    if (!evaluationFilter.CanPerformPartialEvaluationScript(PrimaryConfigFile))
    {
        return;
    }

    using (var stopwatch = Watch.Start())
    {
        var specCountBeforeFiltering = workspace.SpecCount;

        // WorkspaceFilter updates the existing workspace instead of creating a brand new one.
        // This is crucial to avoid redundant type checking required for semantic workspace creation.
        var workspaceFilter = new WorkspaceFilter(FrontEndContext.PathTable);
        workspace.FilterWorkspace(workspaceFilter.FilterForConversion(workspace, evaluationFilter));

        Logger.WorkspaceFiltered(LoggingContext, workspace.SpecCount, specCountBeforeFiltering, stopwatch.ElapsedMilliseconds);
    }
}
private List<ModuleDefinition> GetModulesAndSpecsToEvaluate(EvaluationFilter evaluationFilter)
{
    // A module filter can be applied without /enableIncrementalFrontEnd+;
    // a spec-level filter additionally requires the incremental front end.
    bool canApplyFilter =
        evaluationFilter.ModulesToResolve.Count != 0 ||
        (evaluationFilter.CanPerformPartialEvaluationScript(PrimaryConfigFile) && FrontEndConfiguration.EnableIncrementalFrontEnd());

    if (canApplyFilter)
    {
        var workspaceFilter = new WorkspaceFilter(FrontEndContext.PathTable);
        return workspaceFilter.FilterForEvaluation(Workspace, evaluationFilter);
    }

    // The prelude is never part of the build extent
    var allModules = Workspace.SpecModules.ToList();

    // Under an Office build, the default source resolver defines the build extent. Otherwise, all modules are used
    var moduleExtent = FrontEndConfiguration.UseLegacyOfficeLogic()
        ? FindModulesConstitutingLegacyBuildExtent(allModules)
        : allModules;

    return moduleExtent.Select(m => m.Definition).ToList();
}
private static bool TryGetEvaluationFilter(
    LoggingContext loggingContext,
    EngineContext engineContext,
    string filter,
    out EvaluationFilter evaluationFilter)
{
    // Parse the user-supplied filter string into a root filter.
    var parser = new FilterParser(engineContext, DummyPathResolver, filter);

    if (!parser.TryParse(out RootFilter rootFilter, out FilterParserError error))
    {
        // Surface the parse error with a caret pointing at the offending position.
        Logger.Log.ErrorParsingFilter(loggingContext, filter, error.Position, error.Message, error.FormatFilterPointingToPosition(filter));
        evaluationFilter = null;
        return false;
    }

    evaluationFilter = rootFilter.GetEvaluationFilter(engineContext.SymbolTable, engineContext.PathTable);
    return true;
}
public List<ParsedModule> FilterForConversion([NotNull] Workspace workspace, [NotNull] EvaluationFilter evaluationFilter)
{
    // TODO: need to check that file2file map is available and skip filtering otherwise.
    var dependencyProvider = new WorkspaceBasedSpecDependencyProvider(workspace, m_pathTable);

    // First, getting all modules that satisfy the module filter.
    var result = GetModulesToInclude(workspace, dependencyProvider, evaluationFilter);

    // Second, getting all the specs that satisfy the spec filter.
    var requiredSpecs = GetFilesToInclude(
        result,
        workspace,
        dependencyProvider,
        evaluationFilter);

    // Third, constructing a new set of modules based on the filtered set of specs:
    // group every required spec whose owning module is NOT already fully included,
    // keyed by that module's definition.
    var specsByModule = new Dictionary<ModuleDefinition, Dictionary<AbsolutePath, ISourceFile>>();
    foreach (var specEntry in workspace.SpecSources)
    {
        var owningModule = specEntry.Value.OwningModule;
        if (result.Contains(owningModule) || !requiredSpecs.Contains(specEntry.Key))
        {
            continue;
        }

        var specs = specsByModule.GetOrAdd(owningModule.Definition, _ => new Dictionary<AbsolutePath, ISourceFile>());
        specs[specEntry.Key] = specEntry.Value.SourceFile;
    }

    foreach (var moduleEntry in specsByModule)
    {
        // Need to recreate both - module definition and parsed module -
        // because the set of specs is different.
        var trimmedDefinition = moduleEntry.Key.WithSpecs(moduleEntry.Value.Keys.ToReadOnlySet());
        var trimmedModule = new ParsedModule(
            trimmedDefinition,
            moduleEntry.Value,
            workspace.GetModuleByModuleDescriptor(trimmedDefinition.Descriptor).ReferencedModules);
        result.Add(trimmedModule);
    }

    return result.ToList();
}
/// <summary>
/// Run the test
/// </summary>
/// <param name="testFolder">Folder in which the test package is materialized on disk.</param>
/// <param name="specFile">Path to the spec file under test; it is copied into the test folder.</param>
/// <param name="fullIdentifier">Fully-qualified identifier of the test entry function to invoke.</param>
/// <param name="shortName">Short name passed through to pip validation.</param>
/// <param name="lkgFile">Last-known-good file passed through to pip validation.</param>
/// <param name="sdksToResolve">Additional SDK folders made available to the resolver.</param>
/// <returns>True when graph population and pip validation both succeed.</returns>
public bool Run(string testFolder, string specFile, string fullIdentifier, string shortName, string lkgFile, params string[] sdksToResolve)
{
    Contract.Requires(!string.IsNullOrEmpty(testFolder));
    Contract.Requires(!string.IsNullOrEmpty(specFile));
    Contract.Requires(sdksToResolve != null);

    // Sadly the frontend doesn't use the engine abstractions file api's so we have to materialize stuff on disk for now...
    // TODO: Fix this code once the frontend supports a proper virtual FileSystem.
    // TODO: Change the package semantics to implicit when we expose a way to evaluate a single value
    var testFileName = Path.GetFileName(specFile);
    var mainFileName = "testMain.bp";
    var testMainFile = Path.Combine(testFolder, mainFileName);
    Directory.CreateDirectory(testFolder);
    // Materialize a 'TestPackage' module containing the generated main file and the spec under test.
    File.WriteAllText(Path.Combine(testFolder, Names.ModuleConfigBm), I($@"module( {{ name: 'TestPackage', nameResolutionSemantics: NameResolutionSemantics.implicitProjectReferences, projects: [ f`{mainFileName}`, f`{testFileName}`, ], }});"));
    File.WriteAllText(testMainFile, I($@" export const testFolder = d`{Path.GetDirectoryName(specFile).Replace('\\', '/')}`; @@public export const main = {fullIdentifier}();"));
    File.Copy(specFile, Path.Combine(testFolder, testFileName));

    // Create a fake package for Sdk.TestRunner so that you can safely test packages that have the tests embedded in them.
    var testRunnerFolder = Path.Combine(testFolder, "Sdk.TestRunner");
    Directory.CreateDirectory(testRunnerFolder);
    File.WriteAllText(Path.Combine(testRunnerFolder, Names.ModuleConfigBm), I($"module({{\n\tname: 'Sdk.TestRunner',\n}});"));
    // The stub 'test' function fails loudly if a test tries to nest test runs.
    File.WriteAllText(Path.Combine(testRunnerFolder, "package" + Names.DotDscExtension), I($@" export interface TestArguments {{ testFiles: File[]; sdkFolders?: (Directory|StaticDirectory)[]; autoFixLkgs?: boolean; }} export interface TestResult {{ xmlResults: File; }} export function test(args: TestArguments): TestResult {{ Contract.fail(""Can't run a DScript UnitTest inside of a DScript UnitTest""); }}"));

    // Setup Context and configuration
    var frontEndContext = FrontEndContext.CreateInstanceForTesting();
    var pipContext = new SchedulerContext(CancellationToken.None, frontEndContext.StringTable, frontEndContext.PathTable, frontEndContext.SymbolTable, frontEndContext.QualifierTable);
    var pathTable = frontEndContext.PathTable;
    var testFolderPath = AbsolutePath.Create(pathTable, testFolder);

    var configuration = CreateConfiguration(sdksToResolve.Union(new[] { testRunnerFolder }), pathTable, testFolderPath);

    var engineAbstraction = new TestEngineAbstraction(pathTable, frontEndContext.StringTable, testFolderPath, new PassThroughFileSystem(pathTable));

    var frontEndStatistics = new FrontEndStatistics();

    if (!CreateFactories(
        frontEndContext,
        engineAbstraction,
        frontEndStatistics,
        configuration,
        out var ambientTesting,
        out var moduleRegistry,
        out var frontEndFactory))
    {
        return(false);
    }

    // Set the timeout to a large number to avoid useless performance collections in tests.
    using (var performanceCollector = new PerformanceCollector(TimeSpan.FromHours(1)))
    using (var frontEndHostController = new FrontEndHostController(
        frontEndFactory,
        new EvaluationScheduler(1),
        moduleRegistry,
        frontEndStatistics,
        m_tracingLogger,
        performanceCollector,
        collectMemoryAsSoonAsPossible: true))
    {
        var frontEndController = (IFrontEndController)frontEndHostController;
        frontEndController.InitializeHost(frontEndContext, configuration);
        frontEndController.ParseConfig(configuration);

        // Populate the graph
        using (var pipTable = new PipTable(
            pipContext.PathTable,
            pipContext.SymbolTable,
            initialBufferSize: 16384,
            maxDegreeOfParallelism: 1,
            debug: true))
        {
            // Register the mounts the test specs may refer to; 'noRead' is intentionally unreadable.
            var mountPathExpander = new MountPathExpander(pathTable);
            mountPathExpander.Add(pathTable, new SemanticPathInfo(PathAtom.Create(frontEndContext.StringTable, "testFolder"), testFolderPath, allowHashing: true, readable: true, writable: false));
            mountPathExpander.Add(pathTable, new SemanticPathInfo(PathAtom.Create(frontEndContext.StringTable, "src"), testFolderPath.Combine(pathTable, "src"), allowHashing: true, readable: true, writable: true));
            mountPathExpander.Add(pathTable, new SemanticPathInfo(PathAtom.Create(frontEndContext.StringTable, "out"), testFolderPath.Combine(pathTable, "out"), allowHashing: true, readable: true, writable: true));
            mountPathExpander.Add(pathTable, new SemanticPathInfo(PathAtom.Create(frontEndContext.StringTable, "noRead"), testFolderPath.Combine(pathTable, "noRead"), allowHashing: true, readable: false, writable: true));
            mountPathExpander.Add(pathTable, new SemanticPathInfo(PathAtom.Create(frontEndContext.StringTable, "temp"), engineAbstraction.Layout.TempDirectory, allowHashing: true, readable: true, writable: true));
            mountPathExpander.Add(pathTable, new SemanticPathInfo(PathAtom.Create(frontEndContext.StringTable, "obj"), engineAbstraction.Layout.ObjectDirectory, allowHashing: true, readable: true, writable: true));

            var graph = new PipGraph.Builder(
                pipTable,
                pipContext,
                m_pipLogger,
                frontEndContext.LoggingContext,
                configuration,
                mountPathExpander);

            using (var cacheLayer = new EngineCache(
                new InMemoryArtifactContentCache(),
                new InMemoryTwoPhaseFingerprintStore()))
            {
                var cache = Task.FromResult(Possible.Create(cacheLayer));
                try
                {
                    // Restrict evaluation to the generated main file only.
                    var evaluationFilter = new EvaluationFilter(
                        pipContext.SymbolTable,
                        pipContext.PathTable,
                        new FullSymbol[0],
                        new[]
                        {
                            AbsolutePath.Create(frontEndContext.PathTable, testMainFile),
                        },
                        CollectionUtilities.EmptyArray<StringId>());

                    if (!frontEndController.PopulateGraph(cache, graph, engineAbstraction, evaluationFilter, configuration, configuration.Startup))
                    {
                        HandleDiagnostics();
                        return(false);
                    }
                }
                catch (AggregateException e)
                {
                    var baseException = e.GetBaseException();
                    if (baseException is XunitException)
                    {
                        // If it is an XUnit assert, then unwrap the exception and throw that because XUnit other doesn't display the error nicely.
                        ExceptionDispatchInfo.Capture(baseException).Throw();
                    }

                    throw;
                }
            }

            if (!ValidatePips(frontEndContext, graph, testFolderPath, specFile, shortName, lkgFile, ambientTesting.DontValidatePipsEnabled))
            {
                return(false);
            }
        }
    }

    HandleDiagnostics();
    return(true);
}
private HashSet<AbsolutePath> GetFilesToInclude(HashSet<ModuleDefinition> modulesToInclude, WorkspaceDefinition workspace, ISpecDependencyProvider provider, EvaluationFilter evaluationFilter)
{
    var result = new HashSet<AbsolutePath>();
    foreach (var spec in workspace.Specs)
    {
        // If the spec belongs to a module that is required, the entire module
        // will be evaluated anyway; no need to keep the spec separately.
        if (modulesToInclude.Contains(spec.OwningModule))
        {
            continue;
        }

        var specPath = spec.Path;
        foreach (var root in evaluationFilter.ValueDefinitionRootsToResolve)
        {
            // A spec is relevant when it is a filter root or lives underneath one;
            // its upstream dependencies are pulled in as well.
            if (specPath == root || specPath.IsWithin(m_pathTable, root))
            {
                result.Add(specPath);
                AddUpStreamDependencies(result, specPath, provider);
            }
        }
    }

    return result;
}
/// <summary>
/// Tries to filter a given workspace definition by reusing information from the previous BuildXL invocation.
/// </summary>
/// <returns>
/// 1. Failure if the error occurred during parsing/binding one of the changed specs.
/// 2. Result(null) when the filtering failed due to symbols mismatch or due to another reason.
/// 3. Result(WorkspaceDefinition) when the filtering succeeded.
/// </returns>
/// <remarks>
/// If the previous binding information can be reused, then the set of specs that are safe to use as public facades + serialized AST
/// are identified as well
/// </remarks>
private async Task<FilteredWorkspaceDefinition> TryFilterWorkspaceDefinitionIncrementallyAsync(
    List<string> changedFiles,
    IWorkspaceProvider workspaceProvider,
    WorkspaceDefinition workspaceDefinition,
    EvaluationFilter evaluationFilter)
{
    Logger.TryingToReuseFrontEndSnapshot(LoggingContext);

    // TODO: potentially, we could check the number of changes compared to the workspace definition size.
    // If the number of changes is too big, maybe we should go into the full parse mode.
    // But we need to check the perf implications before making this decision.
    var changedSpecs = changedFiles.Select(
        p =>
        {
            var fullPath = AbsolutePath.Create(FrontEndContext.PathTable, p);
            var containingModule = workspaceDefinition.TryGetModuleDefinition(fullPath);
            return(new SpecWithOwningModule(fullPath, containingModule));
        }).ToArray();

    // Need to check if the spec does not belong to the current workspace
    // or the changed spec belongs to the prelude.
    foreach (var changedSpec in changedSpecs)
    {
        if (changedSpec.OwningModule == null)
        {
            Logger.FailToReuseFrontEndSnapshot(
                LoggingContext,
                I($"Changed spec file '{changedSpec.Path.ToString(FrontEndContext.PathTable)}' is not part of the computed workspace."));
            return(FilteredWorkspaceDefinition.CanNotFilter());
        }

        if (changedSpec.OwningModule.Descriptor == workspaceDefinition.PreludeModule.Descriptor)
        {
            Logger.FailToReuseFrontEndSnapshot(
                LoggingContext,
                I($"Changed spec file '{changedSpec.Path.ToString(FrontEndContext.PathTable)}' is part of the prelude."));
            return(FilteredWorkspaceDefinition.CanNotFilter());
        }
    }

    // Getting the snapshot from the previous run.
    // Binding snapshot contains all the specs as well as all the configuration files.
    // Need to adjust the count.
    var expectedNumberOfSpecs = workspaceDefinition.SpecCount + (workspaceProvider.GetConfigurationModule()?.Specs.Count ?? 0);
    var snapshot = FrontEndArtifactManager.TryLoadFrontEndSnapshot(expectedNumberOfSpecs);
    if (snapshot == null)
    {
        // The error message was already logged.
        return(FilteredWorkspaceDefinition.CanNotFilter());
    }

    // Parsing and binding all the changed specs.
    var possibleParseResult = await workspaceProvider.ParseAndBindSpecsAsync(changedSpecs);
    var firstFailure = LogParseOrBindingErrorsIfAny(possibleParseResult);
    if (firstFailure != null)
    {
        // This is an actual failure.
        // Instead of switching to the full mode, we can actually stop here.
        return(FilteredWorkspaceDefinition.Error(firstFailure));
    }

    // Snapshot is valid and parse/binding is completed successfully.
    var snapshotState = GetSnapshotReuseState(possibleParseResult, snapshot);

    if (snapshotState.State == SnapshotState.NoMatch)
    {
        // NoMatch is returned if the snapshot is unavailable.
        if (snapshotState.SpecsWithIncompatiblePublicSurface.Count != 0)
        {
            Logger.FailToReuseFrontEndSnapshot(
                LoggingContext,
                I($"Spec file '{snapshotState.SpecsWithIncompatiblePublicSurface.First().Path.AbsolutePath}' changed its binding symbols."));
        }

        return(FilteredWorkspaceDefinition.CanNotFilter());
    }

    // Changed file could get different symbols.
    // Need to re-save it within the front-end snapshot.
    UpdateAndSaveSnapshot(possibleParseResult, snapshot);

    var snapshotProvider = new SnapshotBasedSpecProvider(snapshot);

    // Now we know exactly which are all the files that need to go through parsing/type checking/AST conversion. So we
    // inform that to the artifact manager so the public surface and AST serialization
    // can be reused for the rest, if available.
    // Observe this set of files does not reflect a potential user filter, but that's fine. If there is a dirty spec
    // that is outside of the filter, that spec won't be requested by the workspace anyway
    NotifyDirtySpecsForPublicFacadeAndAstReuse(
        snapshotProvider,
        workspaceDefinition,
        changedSpecs.Select(f => f.Path).ToList());

    // The fingerprints for all changed specs are still the same,
    // so we can filter the workspace definition provided that the filter allows it.
    if (snapshotState.State == SnapshotState.FullMatch)
    {
        var filter = new WorkspaceFilter(FrontEndContext.PathTable);
        var filteredWorkspace = evaluationFilter.CanPerformPartialEvaluationScript(PrimaryConfigFile)
            ? filter.FilterWorkspaceDefinition(workspaceDefinition, evaluationFilter, snapshotProvider)
            : workspaceDefinition.Modules;
        return(FilteredWorkspaceDefinition.Filter(new WorkspaceDefinition(filteredWorkspace, workspaceDefinition.PreludeModule)));
    }

    // Specs are not the same, but we would be able to load public facades for all unaffected specs.
    var dirtySpecNames = string.Join(
        ", ",
        snapshotState.SpecsWithTheSamePublicSurface.Take(10).Select(p => Path.GetFileName(p.Path.AbsolutePath)));

    Logger.FailedToFilterWorkspaceDefinition(
        LoggingContext,
        I($"{dirtySpecNames} changed one or more declarations."));

    return(FilteredWorkspaceDefinition.CanNotFilter());
}
/// <summary>
/// Core implementation of workspace construction: parses the configuration, sets up mounts
/// and (optionally) a pip graph builder, then populates the graph/workspace via the controller.
/// </summary>
private static bool TryBuildWorkspaceInternal(
    ICommandLineConfiguration commandLineConfig,
    FrontEndContext frontEndContext,
    EngineContext engineContext,
    EvaluationFilter evaluationFilter,
    EventHandler<WorkspaceProgressEventArgs> progressHandler,
    out Workspace workspace,
    out FrontEndHostController frontEndHostController,
    out IMutablePipGraph pipGraph,
    WorkspaceBuilderConfiguration configuration,
    bool forIDE,
    FrontEndEngineAbstraction frontEndEngineAbstraction = null,
    bool collectMemoryAsSoonAsPossible = true)
{
    Contract.Requires((commandLineConfig.Engine.Phase & (EnginePhases.ParseWorkspace | EnginePhases.AnalyzeWorkspace)) != EnginePhases.None);
    Contract.Requires(frontEndContext != null);
    Contract.Requires(engineContext != null);
    Contract.Requires(commandLineConfig.Startup.ConfigFile.IsValid);
    Contract.Requires(evaluationFilter != null);

    workspace = null;
    frontEndHostController = null;
    pipGraph = null;

    var pathTable = engineContext.PathTable;
    var loggingContext = frontEndContext.LoggingContext;

    var mutableCommandlineConfig = GetCommandLineConfiguration(
        commandLineConfig,
        configuration);
    BuildXLEngine.ModifyConfigurationForCloudbuild(mutableCommandlineConfig, false, pathTable, loggingContext);
    BuildXLEngine.PopulateLoggingAndLayoutConfiguration(mutableCommandlineConfig, pathTable, bxlExeLocation: null);

    var statistics = new FrontEndStatistics(progressHandler);
    var frontEndControllerFactory = FrontEndControllerFactory.Create(
        mode: FrontEndMode.NormalMode,
        loggingContext: loggingContext,
        configuration: mutableCommandlineConfig,
        collector: null,
        statistics: statistics,
        collectMemoryAsSoonAsPossible: collectMemoryAsSoonAsPossible);

    var controller = frontEndControllerFactory.Create(engineContext.PathTable, engineContext.SymbolTable);
    controller.InitializeHost(frontEndContext, mutableCommandlineConfig);

    frontEndHostController = (FrontEndHostController)controller;

    // If there is an explicit engine abstraction, we set it. This is used by the IDE.
    if (frontEndEngineAbstraction != null)
    {
        // The IDE engine typically doesn't have mounts configured. We do it here if they haven't been configured yet.
        // Observe these are just the default mounts used for config evaluation.
        if (frontEndEngineAbstraction is BasicFrontEndEngineAbstraction basicEngine && !frontEndEngineAbstraction.GetMountNames("Script", BuildXL.Utilities.ModuleId.Invalid).Any())
        {
            // If this fails we just ignore the failure. Mounts not being properly configured doesn't prevent the IDE plugin from working.
            basicEngine.TryPopulateWithDefaultMountsTable(loggingContext, engineContext, mutableCommandlineConfig, mutableCommandlineConfig.Startup.Properties);
        }

        frontEndHostController.SetState(frontEndEngineAbstraction, pipGraph: null, configuration: mutableCommandlineConfig);
    }
    else
    {
        // Otherwise we construct one with all mounts populated for config evaluation
        var configurationEngine = new BasicFrontEndEngineAbstraction(engineContext.PathTable, engineContext.FileSystem, mutableCommandlineConfig);
        if (!configurationEngine.TryPopulateWithDefaultMountsTable(loggingContext, engineContext, mutableCommandlineConfig, mutableCommandlineConfig.Startup.Properties))
        {
            // Errors are logged already
            return(false);
        }

        frontEndEngineAbstraction = configurationEngine;
    }

    var config = controller.ParseConfig(frontEndEngineAbstraction, mutableCommandlineConfig);
    if (config == null)
    {
        return(false);
    }

    IMutablePipGraph pipGraphBuilder = null;

    using (var cache = Task.FromResult<Possible<EngineCache>>(
        new EngineCache(
            new InMemoryArtifactContentCache(),
            // Note that we have an 'empty' store (no hits ever) rather than a normal in memory one.
            new EmptyTwoPhaseFingerprintStore())))
    {
        var mountsTable = MountsTable.CreateAndRegister(loggingContext, engineContext, config, mutableCommandlineConfig.Startup.Properties);

        // For the IDE case, we want to make sure all config-specific mounts are properly populated
        if (forIDE && frontEndEngineAbstraction is BasicFrontEndEngineAbstraction languageServiceEngine)
        {
            Contract.AssertNotNull(frontEndEngineAbstraction);
            AddConfigurationMounts(config, mountsTable);
            languageServiceEngine.SetMountsTable(mountsTable);
        }

        // NOTE(review): frontEndEngineAbstraction is non-null on every path above (either passed in
        // or assigned in the else-branch), so this null check looks unreachable — confirm before
        // relying on the Schedule-phase construction inside it.
        if (frontEndEngineAbstraction == null)
        {
            if (mutableCommandlineConfig.Engine.Phase.HasFlag(EnginePhases.Schedule))
            {
                frontEndEngineAbstraction = new FrontEndEngineImplementation(
                    loggingContext,
                    frontEndContext.PathTable,
                    config,
                    mutableCommandlineConfig.Startup,
                    mountsTable,
                    InputTracker.CreateDisabledTracker(loggingContext),
                    null,
                    null,
                    () => FileContentTable.CreateStub(loggingContext),
                    5000,
                    false,
                    controller.RegisteredFrontEnds);

                var searchPathToolsHash = new DirectoryMembershipFingerprinterRuleSet(config, engineContext.StringTable).ComputeSearchPathToolsHash();
                pipGraphBuilder = new PipGraph.Builder(
                    EngineSchedule.CreateEmptyPipTable(engineContext),
                    engineContext,
                    Pips.Tracing.Logger.Log,
                    loggingContext,
                    config,
                    mountsTable.MountPathExpander,
                    fingerprintSalt: config.Cache.CacheSalt,
                    searchPathToolsHash: searchPathToolsHash);

                // Observe mount table is completed during workspace construction
                AddConfigurationMounts(config, mountsTable);

                IDictionary<ModuleId, MountsTable> moduleMountsTableMap;
                if (!mountsTable.PopulateModuleMounts(config.ModulePolicies.Values, out moduleMountsTableMap))
                {
                    Contract.Assume(loggingContext.ErrorWasLogged, "An error should have been logged after MountTable.PopulateModuleMounts()");
                    return(false);
                }
            }
            else
            {
                frontEndEngineAbstraction = new BasicFrontEndEngineAbstraction(frontEndContext.PathTable, frontEndContext.FileSystem, config);
            }
        }

        using (frontEndEngineAbstraction is IDisposable ? (IDisposable)frontEndEngineAbstraction : null)
        {
            // Attempt to build and/or analyze the workspace
            if (!controller.PopulateGraph(
                cache: cache,
                graph: pipGraphBuilder,
                engineAbstraction: frontEndEngineAbstraction,
                evaluationFilter: evaluationFilter,
                configuration: config,
                startupConfiguration: mutableCommandlineConfig.Startup))
            {
                workspace = frontEndHostController.GetWorkspace();

                // Error has been reported already
                return(false);
            }

            pipGraph = pipGraphBuilder;
        }
    }

    Contract.Assert(frontEndHostController != null);

    workspace = frontEndHostController.GetWorkspace();

    if (mutableCommandlineConfig.Engine.Phase == EnginePhases.AnalyzeWorkspace)
    {
        // If workspace construction is successful, we run the linter on all specs.
        // This makes sure the workspace will carry all the errors that will occur when running the same specs in the regular engine path
        workspace = CreateLintedWorkspace(
            workspace,
            frontEndContext.LoggingContext,
            config.FrontEnd,
            pathTable);
    }

    return(true);
}
/// <summary>
/// Returns the evaluation periods that are active within the date range (and optional
/// search text) of the supplied filter, as a JSON payload.
/// </summary>
/// <param name="evaluationFilter">Start/end dates plus an optional search text.</param>
public JsonResult GetCurrentActiveEvaluations(EvaluationFilter evaluationFilter)
{
    try
    {
        var token = _tokenValidator.Validate(HttpContext);
        if (!token.Success)
        {
            return Json(new ReturnData<string>
            {
                Success = false,
                NotAuthenticated = true,
                Message = $"Unauthorized:-{token.Message}",
            });
        }

        var currentActive = _context
            .EvaluationsCurrentActive
            .Join(
                _context.EvaluationsCurrents,
                evaluationsCurrentActive => evaluationsCurrentActive.EvaluationsCurrentId,
                evaluationCurrentEvaluation => evaluationCurrentEvaluation.Id,
                (evalCurrentAct, evalCurrent) => new
                {
                    evalCurrentAct.Status,
                    evalCurrentAct.StartDate,
                    evalCurrentAct.EndDate,
                    evalCurrentAct.EvaluationsId,
                    evalCurrentAct.EvaluationsCurrentId,
                    evalCurrentAct.Id,
                    evalCurrentAct.EvaluationTarget,
                    TotalUnits = _context
                        .EvaluationTakenUnitWiseByUsers
                        .Count(c => c.EvaluationCurrentActiveId == evalCurrentAct.Id),
                    evalCurrent.CurrentEvaluationName,
                })
            // BUG FIX: '&&' binds tighter than '||', so the original condition parsed as
            // (Status && Start >= filter.Start && filter.End == null) || End <= ...,
            // which ignored the Status and start-date checks whenever an end date was supplied.
            // Parenthesize so the end-date bound is only applied together with the other checks
            // (a null filter end date means "no upper bound").
            .Where(e => e.Status
                && e.StartDate.Date >= evaluationFilter.StartDate.Date
                && (evaluationFilter.EndDate == null || e.EndDate.Date <= evaluationFilter.EndDate.Value))
            .ToList();

        if (!string.IsNullOrEmpty(evaluationFilter.SearchText))
        {
            // BUG FIX: lower-case the search text too; only the name was lower-cased before,
            // so a search containing any upper-case letter could never match.
            // NOTE(review): EvaluationTarget appears to be numeric (see legend below), so
            // Equals(SearchText) only matches when the user typed the raw value — confirm intent.
            var searchText = evaluationFilter.SearchText.ToLower();
            currentActive = currentActive
                .Where(e => e.CurrentEvaluationName.ToLower().Contains(searchText)
                    || e.EvaluationTarget.Equals(evaluationFilter.SearchText))
                .ToList();
        }

        if (currentActive.Count > 0)
        {
            // EvaluationTarget legend:
            // { name: 'All', value: 0 }, { name: 'Year', value: 1 }, { name: 'faculty', value: 2 },
            // { name: 'department', value: 3 }, { name: 'programme', value: 4 },
            // { name: 'class', value: 5 }, { name: 'student', value: 6 }
            return Json(new ReturnData<dynamic>
            {
                Success = true,
                Message = "",
                Data = currentActive
            });
        }

        return Json(new ReturnData<dynamic>
        {
            Success = false,
            Message = "Oops,seems you dont have an evaluation periods create between this dates",
        });
    }
    catch (Exception ex)
    {
        return Json(new ReturnData<dynamic>
        {
            Success = false,
            Message = "An error occured ,please try again",
            Error = new Error(ex)
        });
    }
}
/// <summary>
/// Tries to create a filtered workspace based on a front-end snapshot from the previous BuildXL invocation.
/// </summary>
/// <returns>
/// * Possible&lt;ValidConstructedWorkspace&gt; when the workspace was successfully constructed.
/// * Possible&lt;null&gt; when the snapshot was unavailable.
/// * Failure when the snapshot was available but parsing failed.
/// </returns>
private async Task<Possible<Workspace>> TryCreateFilteredWorkspaceAsync(Possible<WorkspaceDefinition> workspaceDefinition, IWorkspaceProvider workspaceProvider, FrontEndEngineAbstraction engineAbstraction, EvaluationFilter evaluationFilter)
{
    // Snapshot reuse relies on binding fingerprints saved by the previous run; without them, bail out.
    if (!FrontEndConfiguration.ConstructAndSaveBindingFingerprint())
    {
        Logger.FailToReuseFrontEndSnapshot(
            LoggingContext,
            "Binding fingerprint is disabled. Please use 'constructAndSaveBindingFingerprint' option to turn it on");
        return default(Possible<Workspace>);
    }

    // If a filter cannot be performed and public facade + AST is not to be used, then there is no point in continuing and we can
    // go to full mode
    if (!evaluationFilter.CanPerformPartialEvaluationScript(PrimaryConfigFile) && !CanUseSpecPublicFacadeAndAst())
    {
        // NOTE(review): this branch is only entered when CanUseSpecPublicFacadeAndAst() is false,
        // so the ternary below always yields "Engine state was not reloaded"; the second message
        // looks unreachable. Confirm whether the condition or the message selection was intended differently.
        var message = !CanUseSpecPublicFacadeAndAst() ? "Engine state was not reloaded" : "User filter was not specified";
        Logger.FailToReuseFrontEndSnapshot(LoggingContext, message);
        return default(Possible<Workspace>);
    }

    // Incremental filtering needs the set of files changed since the last run; null means no change journal.
    var changedFiles = engineAbstraction.GetChangedFiles()?.ToList();
    if (changedFiles == null)
    {
        Logger.FailToReuseFrontEndSnapshot(LoggingContext, "Change journal is not available");
        return default(Possible<Workspace>);
    }

    using (var sw = Watch.Start())
    {
        // We're potentially in incremental mode.
        var filteredDefinitionResult = await TryFilterWorkspaceDefinitionIncrementallyAsync(
            changedFiles,
            workspaceProvider,
            workspaceDefinition.Result,
            evaluationFilter);

        if (filteredDefinitionResult.Failed)
        {
            return filteredDefinitionResult.Failure;
        }

        if (filteredDefinitionResult.Filtered)
        {
            var filteredDefinition = filteredDefinitionResult.FilteredDefinition;
            Logger.WorkspaceDefinitionFiltered(
                LoggingContext,
                filteredDefinition.SpecCount,
                workspaceDefinition.Result.SpecCount,
                sw.ElapsedMilliseconds);

            // TODO: with C# 7, use tuple instead of changing the workspace to carry the information about the filtering.
            return await workspaceProvider.CreateWorkspaceAsync(filteredDefinition, userFilterWasApplied: true);
        }
    }

    // Definition could not be filtered: returning Possible<null> signals the caller to fall back to full construction.
    return default(Possible<Workspace>);
}
/// <summary>
/// Runs the front end to evaluate specs (honoring <paramref name="evaluationFilter"/>) and, when the
/// Schedule phase is enabled, builds the resulting pip graph.
/// </summary>
/// <returns>
/// False when an error was logged (mounts, module mounts, or graph population failed);
/// true otherwise, with <paramref name="pipGraph"/> set when a graph builder was created.
/// </returns>
private bool ConstructAndEvaluateGraph(
    LoggingContext loggingContext,
    FrontEndEngineAbstraction frontEndEngineAbstration,
    CacheInitializationTask engineCacheTask,
    MountsTable mountsTable,
    EvaluationFilter evaluationFilter,
    [CanBeNull] GraphReuseResult reuseResult,
    out PipGraph pipGraph)
{
    Contract.Requires(frontEndEngineAbstration != null);
    Contract.Requires(engineCacheTask != null);
    Contract.Requires(mountsTable != null);

    pipGraph = null;
    IPipGraphBuilder pipGraphBuilder = null;

    // Configuration-level mounts must be registered before module mounts are populated.
    if (!AddConfigurationMountsAndCompleteInitialization(loggingContext, mountsTable))
    {
        return false;
    }

    IDictionary<ModuleId, MountsTable> moduleMountsTableMap;
    if (!mountsTable.PopulateModuleMounts(Configuration.ModulePolicies.Values, out moduleMountsTableMap))
    {
        Contract.Assume(loggingContext.ErrorWasLogged, "An error should have been logged after MountTable.PopulateModuleMounts()");
        return false;
    }

    m_visualization?.MountsTable.MakeAvailable(mountsTable);

    // A graph builder is only needed when the Schedule phase will run.
    if ((Configuration.Engine.Phase & EnginePhases.Schedule) != 0)
    {
        pipGraphBuilder = CreatePipGraphBuilder(loggingContext, mountsTable, reuseResult);
    }

    // Have to do some horrible magic here to get to a proper Task<T> with the BuildXL cache since
    // someone updated the engine cache to be an await style pattern, and there is no way to get to the EngineCache
    // If the cache was fast to startup, but perhaps blocked itself on first access we wouldn't have to do all these hoops.
    Func<Task<Possible<EngineCache>>> getBuildCacheTask = async () =>
    {
        return (await engineCacheTask).Then(engineCache => engineCache.CreateCacheForContext());
    };

    if (!FrontEndController.PopulateGraph(
        getBuildCacheTask(),
        pipGraphBuilder,
        frontEndEngineAbstration,
        evaluationFilter,
        Configuration,
        m_initialCommandLineConfiguration.Startup))
    {
        // Stats are logged on both the failure and success paths.
        LogFrontEndStats(loggingContext);
        Contract.Assume(loggingContext.ErrorWasLogged, "An error should have been logged after FrontEndController.PopulateGraph()");
        return false;
    }

    LogFrontEndStats(loggingContext);

    // Pip graph must become immutable now that evaluation is done (required to construct a scheduler).
    return pipGraphBuilder == null || (pipGraph = pipGraphBuilder.Build()) != null;
}
private async Task <Workspace> BuildAndFilterWorkspaceAsync(WorkspaceDefinition workspaceDefinition, IWorkspaceProvider workspaceProvider, FrontEndEngineAbstraction engineAbstraction, EvaluationFilter evaluationFilter) { // First, trying to filter workspace based on information from the previous run var possibleFilteredWorkspace = await TryCreateFilteredWorkspaceAsync(workspaceDefinition, workspaceProvider, engineAbstraction, evaluationFilter); if (!possibleFilteredWorkspace.Succeeded) { // Error was already logged return(Workspace.Failure(workspaceProvider, workspaceProvider.Configuration, possibleFilteredWorkspace.Failure)); } // If the filtered workspace is not null, just return it. // Otherwise falling back to the full parse mode. if (possibleFilteredWorkspace.Result != null) { return(possibleFilteredWorkspace.Result); } // "Incremental" workspace construction has failed, but we still can try to use module filter to build a smaller workspace. if (evaluationFilter.ModulesToResolve.Count != 0) { var filteredDefinition = this.ApplyModuleFilter(workspaceDefinition, evaluationFilter.ModulesToResolve); return(await workspaceProvider.CreateWorkspaceAsync(filteredDefinition, userFilterWasApplied : true)); } Logger.BuildingFullWorkspace(LoggingContext); return(await workspaceProvider.CreateWorkspaceAsync(workspaceDefinition, userFilterWasApplied : false)); }
/// <summary>
/// Verifies that filtering keeps the whole module named by the filter, trims other modules down to
/// just the filtered specs, and retains upstream-dependency modules in full.
/// </summary>
public void TargetWorkspaceHasFullModulesAndPartialModulesBasedOnAFilter()
{
    // Arrange

    // Base module. Root spec
    var baseModule = ModuleDescriptor.CreateForTesting("MyBaseModule");
    var baseModuleSourceFile = SourceFile(baseSpec);
    var baseModuleSourceFile2 = SourceFile(baseSpec2);

    // MyModule: depends on a spec from the base module
    var moduleDescriptor = ModuleDescriptor.CreateForTesting("MyModule");
    var mySpecPath = myModule;
    var moduleSourceFile = SourceFile(mySpecPath);
    var moduleSourceFile2 = SourceFile(myModule2);

    // MyDerivedModule: depends on MyModule spec
    var derivedDescriptor = ModuleDescriptor.CreateForTesting("MyDerivedModule");
    var myDerivedSourceFile = SourceFile(myDerivedModule);

    var workspace = CreateWorkspace(
        CreateEmptyParsedModule(moduleDescriptor, moduleSourceFile, moduleSourceFile2),
        CreateEmptyParsedModule(baseModule, baseModuleSourceFile, baseModuleSourceFile2),
        CreateEmptyParsedModule(derivedDescriptor, myDerivedSourceFile));

    AddUpStreamDependency(workspace, moduleSourceFile, baseSpec);
    AddUpStreamDependency(workspace, myDerivedSourceFile, mySpecPath);

    // Filter takes base module and one spec from the derived one
    var filter = new EvaluationFilter(
        m_symbolTable,
        m_pathTable,
        valueNamesToResolve: CollectionUtilities.EmptyArray<FullSymbol>(),
        valueDefinitionRootsToResolve: new List<AbsolutePath>() { AbsolutePath.Create(m_pathTable, mySpecPath) },
        modulesToResolver: new List<StringId>() { StringId.Create(StringTable, "MyBaseModule") });

    // Act
    FilterWorkspace(workspace, filter);

    // Assert
    var moduleFromFilteredWorkspace = workspace.SpecModules.FirstOrDefault(m => m.Descriptor.Name == "MyModule");
    Assert.NotNull(moduleFromFilteredWorkspace);

    // BUGFIX: xUnit's Assert.Equal signature is (expected, actual); the original had the
    // arguments swapped in the three Equal asserts below, which produces misleading failure
    // messages. MyModule in the filtered workspace has just one spec.
    Assert.Equal(1, moduleFromFilteredWorkspace.Specs.Count);
    Assert.Equal(moduleSourceFile, moduleFromFilteredWorkspace.Specs.First().Value);

    // Filtered workspace has the base module as well, because there is a dependency between MyModule and BaseModule
    var baseModuleFromFilteredWorkspace = workspace.SpecModules.FirstOrDefault(m => m.Descriptor.Name == "MyBaseModule");
    Assert.NotNull(baseModuleFromFilteredWorkspace);

    // Both specs from the base module should be present.
    Assert.Equal(2, baseModuleFromFilteredWorkspace.Specs.Count);
}
/// <summary>
/// Filter workspace and returns a list of module definitions required for evaluation.
/// </summary>
/// <remarks>
/// Unlike <see cref="FilterForConversion"/> this function returns a minimal set of modules that directly
/// satisfy a given filter.
/// This means that the list returned from this function is a subset of the result returned from <see cref="FilterForConversion"/>
/// because the latter returns a transitive closure of files and modules.
/// </remarks>
public List<ModuleDefinition> FilterForEvaluation([NotNull] Workspace workspace, [NotNull] EvaluationFilter evaluationFilter)
{
    // First, need to get all modules that satisfy a given filter.
    var modulesToInclude = new HashSet<ModuleDefinition>();

    // Module names requested by the filter, resolved to plain strings for set membership tests.
    var modulesToResolve = new HashSet<string>(evaluationFilter.ModulesToResolve.Select(m => m.ToString(m_pathTable.StringTable)));
    foreach (var module in workspace.SpecModules)
    {
        if (modulesToResolve.Contains(module.Descriptor.Name))
        {
            modulesToInclude.Add(module.Definition);
        }
    }

    // Then need to find all specs. But, instead of keeping them in a list, let's store them in a map
    // to simplify construction of final modules.
    var partiallyFilteredModules = new Dictionary<ModuleDefinition, Dictionary<AbsolutePath, ISourceFile>>();
    foreach (var kvp in workspace.SpecSources)
    {
        var parsedSpec = kvp.Key;

        // If the spec belonged to a module that is required, then the entire module will be evaluated.
        // No need to keep the spec separately
        if (!modulesToInclude.Contains(kvp.Value.OwningModule.Definition))
        {
            foreach (var specRootToResolve in evaluationFilter.ValueDefinitionRootsToResolve)
            {
                // A spec matches when it is the filter root itself or lies underneath it.
                if (parsedSpec == specRootToResolve || parsedSpec.IsWithin(m_pathTable, specRootToResolve))
                {
                    // File is not part of 'must have' module and is part of 'must have' spec.
                    var map = partiallyFilteredModules.GetOrAdd(
                        kvp.Value.OwningModule.Definition,
                        k => new Dictionary<AbsolutePath, ISourceFile>());
                    map[kvp.Key] = kvp.Value.SourceFile;
                }
            }
        }
    }

    foreach (var kvp in partiallyFilteredModules)
    {
        // Need to recreate both - module definition and parsed module,
        // because the set of specs is different.
        var moduleDefinition = kvp.Key.WithSpecs(kvp.Value.Keys.ToReadOnlySet());
        modulesToInclude.Add(moduleDefinition);
    }

    return modulesToInclude.ToList();
}
private HashSet <AbsolutePath> GetFilesToInclude(HashSet <ParsedModule> modulesToInclude, Workspace workspace, ISpecDependencyProvider provider, EvaluationFilter evaluationFilter) { // TODO: merge two 'GetFilesToInclude' methods into one. var filesToInclude = new HashSet <AbsolutePath>(); foreach (var kvp in workspace.SpecSources) { var parsedSpec = kvp.Key; // If the spec belonged to a module that is required, then the entire module will be evaluated. // No need to keep the spec separately if (!modulesToInclude.Contains(kvp.Value.OwningModule)) { foreach (var specRootToResolve in evaluationFilter.ValueDefinitionRootsToResolve) { if (parsedSpec == specRootToResolve || parsedSpec.IsWithin(m_pathTable, specRootToResolve)) { filesToInclude.Add(parsedSpec); AddUpStreamDependencies(filesToInclude, kvp.Key, provider); } } } } return(filesToInclude); }
/// <summary>
/// Parses (and optionally analyzes) the workspace rooted at <paramref name="configFile"/> without
/// creating any pips, then runs the linter over the result.
/// </summary>
/// <returns>True when the workspace was built and linted; false when an error was already reported.</returns>
public static bool TryBuildWorkspace(
    EnginePhases phase,
    FrontEndContext frontEndContext,
    PipExecutionContext engineContext,
    AbsolutePath configFile,
    EvaluationFilter evaluationFilter,
    EventHandler<WorkspaceProgressEventArgs> progressHandler,
    out Workspace workspace,
    out FrontEndHostController frontEndHostController,
    WorkspaceBuilderConfiguration configuration,
    FrontEndEngineAbstraction frontEndEngineAbstraction = null,
    bool collectMemoryAsSoonAsPossible = true)
{
    Contract.Requires((phase & (EnginePhases.ParseWorkspace | EnginePhases.AnalyzeWorkspace)) != EnginePhases.None);
    Contract.Requires(frontEndContext != null);
    Contract.Requires(engineContext != null);
    Contract.Requires(configFile.IsValid);
    Contract.Requires(evaluationFilter != null);

    workspace = null;

    var pathTable = engineContext.PathTable;
    var loggingContext = frontEndContext.LoggingContext;

    var commandlineConfig = GetCommandLineConfiguration(configuration, phase, configFile);
    BuildXLEngine.PopulateLoggingAndLayoutConfiguration(commandlineConfig, pathTable, bxlExeLocation: null);

    var statistics = new FrontEndStatistics(progressHandler);

    var frontEndControllerFactory = FrontEndControllerFactory.Create(
        mode: FrontEndMode.NormalMode,
        loggingContext: loggingContext,
        configuration: commandlineConfig,
        collector: null,
        statistics: statistics,
        collectMemoryAsSoonAsPossible: collectMemoryAsSoonAsPossible);

    var controller = frontEndControllerFactory.Create(engineContext.PathTable, engineContext.SymbolTable);
    controller.InitializeHost(frontEndContext, commandlineConfig);

    // NOTE(review): 'as' cast may yield null here; later code asserts non-null only after
    // PopulateGraph — confirm the controller is always a FrontEndHostController in this path.
    frontEndHostController = controller as FrontEndHostController;

    // If there is an explicit engine abstraction, we set it
    if (frontEndEngineAbstraction != null)
    {
        frontEndHostController.SetState(frontEndEngineAbstraction, pipGraph: null, configuration: commandlineConfig);
    }

    var config = controller.ParseConfig(commandlineConfig);
    if (config == null)
    {
        frontEndHostController = null;
        return false;
    }

    // In-memory cache with an 'empty' fingerprint store: parsing/analysis needs a cache object, not real caching.
    using (var cache = Task.FromResult<Possible<EngineCache>>(
        new EngineCache(
            new InMemoryArtifactContentCache(
                new SchedulerContext(
                    CancellationToken.None,
                    frontEndContext.StringTable,
                    frontEndContext.PathTable,
                    frontEndContext.SymbolTable,
                    frontEndContext.QualifierTable)),
            // Note that we have an 'empty' store (no hits ever) rather than a normal in memory one.
            new EmptyTwoPhaseFingerprintStore())))
    {
        // Attempt to build and/or analyze the workspace
        if (!controller.PopulateGraph(
            cache: cache,
            graph: null /* No need to create pips */,
            engineAbstraction: frontEndEngineAbstraction ?? new BasicFrontEndEngineAbstraction(frontEndContext.PathTable, frontEndContext.FileSystem, config),
            evaluationFilter: evaluationFilter,
            configuration: config,
            startupConfiguration: commandlineConfig.Startup))
        {
            Contract.Assert(frontEndHostController != null);
            workspace = frontEndHostController.GetWorkspace();

            // Error has been reported already
            return false;
        }
    }

    Contract.Assert(frontEndHostController != null);

    // If workspace construction is successfull, we run the linter on all specs.
    // This makes sure the workspace will carry all the errors that will occur when running the same specs in the regular engine path
    workspace = CreateLintedWorkspace(
        frontEndHostController.GetWorkspace(),
        frontEndContext.LoggingContext,
        config.FrontEnd,
        pathTable);

    return true;
}
/// <summary>
/// Builds (and optionally analyzes/schedules) the workspace described by <paramref name="commandLineConfig"/>,
/// producing the workspace, the front-end host controller, and — when the Schedule phase runs — a pip graph.
/// </summary>
/// <returns>True on success; false when an error was already reported.</returns>
public static bool TryBuildWorkspace(
    ICommandLineConfiguration commandLineConfig,
    FrontEndContext frontEndContext,
    EngineContext engineContext,
    EvaluationFilter evaluationFilter,
    EventHandler<WorkspaceProgressEventArgs> progressHandler,
    out Workspace workspace,
    out FrontEndHostController frontEndHostController,
    out IPipGraph pipGraph,
    WorkspaceBuilderConfiguration configuration,
    FrontEndEngineAbstraction frontEndEngineAbstraction = null,
    bool collectMemoryAsSoonAsPossible = true)
{
    Contract.Requires((commandLineConfig.Engine.Phase & (EnginePhases.ParseWorkspace | EnginePhases.AnalyzeWorkspace)) != EnginePhases.None);
    Contract.Requires(frontEndContext != null);
    Contract.Requires(engineContext != null);
    Contract.Requires(commandLineConfig.Startup.ConfigFile.IsValid);
    Contract.Requires(evaluationFilter != null);

    workspace = null;
    frontEndHostController = null;
    pipGraph = null;

    var pathTable = engineContext.PathTable;
    var loggingContext = frontEndContext.LoggingContext;

    var mutableCommandlineConfig = GetCommandLineConfiguration(
        commandLineConfig,
        configuration);
    BuildXLEngine.ModifyConfigurationForCloudbuild(mutableCommandlineConfig, false, pathTable, loggingContext);
    BuildXLEngine.PopulateLoggingAndLayoutConfiguration(mutableCommandlineConfig, pathTable, bxlExeLocation: null);

    var statistics = new FrontEndStatistics(progressHandler);

    var frontEndControllerFactory = FrontEndControllerFactory.Create(
        mode: FrontEndMode.NormalMode,
        loggingContext: loggingContext,
        configuration: mutableCommandlineConfig,
        collector: null,
        statistics: statistics,
        collectMemoryAsSoonAsPossible: collectMemoryAsSoonAsPossible);

    var controller = frontEndControllerFactory.Create(engineContext.PathTable, engineContext.SymbolTable);
    controller.InitializeHost(frontEndContext, mutableCommandlineConfig);

    frontEndHostController = (FrontEndHostController)controller;

    // If there is an explicit engine abstraction, we set it. This is used by IDE test.
    if (frontEndEngineAbstraction != null)
    {
        frontEndHostController.SetState(frontEndEngineAbstraction, pipGraph: null, configuration: mutableCommandlineConfig);
    }

    var config = controller.ParseConfig(mutableCommandlineConfig);
    if (config == null)
    {
        return false;
    }

    IPipGraph pipGraphBuilder = null;

    // In-memory cache with an 'empty' fingerprint store: a cache object is required, but no real caching is wanted here.
    using (var cache = Task.FromResult<Possible<EngineCache>>(
        new EngineCache(
            new InMemoryArtifactContentCache(),
            // Note that we have an 'empty' store (no hits ever) rather than a normal in memory one.
            new EmptyTwoPhaseFingerprintStore())))
    {
        if (frontEndEngineAbstraction == null)
        {
            if (mutableCommandlineConfig.Engine.Phase.HasFlag(EnginePhases.Schedule))
            {
                // Scheduling requires a real engine abstraction plus a pip graph builder wired to the mounts table.
                var mountsTable = MountsTable.CreateAndRegister(loggingContext, engineContext, config, mutableCommandlineConfig.Startup.Properties);
                frontEndEngineAbstraction = new FrontEndEngineImplementation(
                    loggingContext,
                    frontEndContext.PathTable,
                    config,
                    mutableCommandlineConfig.Startup,
                    mountsTable,
                    InputTracker.CreateDisabledTracker(loggingContext),
                    null,
                    null,
                    () => FileContentTable.CreateStub(),
                    5000,
                    false);

                pipGraphBuilder = new PipGraph.Builder(
                    EngineSchedule.CreateEmptyPipTable(engineContext),
                    engineContext,
                    Scheduler.Tracing.Logger.Log,
                    loggingContext,
                    config,
                    mountsTable.MountPathExpander,
                    fingerprintSalt: config.Cache.CacheSalt,
                    directoryMembershipFingerprinterRules: new DirectoryMembershipFingerprinterRuleSet(config, engineContext.StringTable));

                if (!AddConfigurationMountsAndCompleteInitialization(config, loggingContext, mountsTable))
                {
                    return false;
                }

                IDictionary<ModuleId, MountsTable> moduleMountsTableMap;
                if (!mountsTable.PopulateModuleMounts(config.ModulePolicies.Values, out moduleMountsTableMap))
                {
                    Contract.Assume(loggingContext.ErrorWasLogged, "An error should have been logged after MountTable.PopulateModuleMounts()");
                    return false;
                }
            }
            else
            {
                // No scheduling: a basic abstraction over the file system is enough.
                frontEndEngineAbstraction = new BasicFrontEndEngineAbstraction(frontEndContext.PathTable, frontEndContext.FileSystem, config);
            }
        }

        // Dispose the engine abstraction at the end of graph population, if it happens to be disposable.
        using (frontEndEngineAbstraction is IDisposable ? (IDisposable)frontEndEngineAbstraction : null)
        {
            // Attempt to build and/or analyze the workspace
            if (!controller.PopulateGraph(
                cache: cache,
                graph: pipGraphBuilder,
                engineAbstraction: frontEndEngineAbstraction,
                evaluationFilter: evaluationFilter,
                configuration: config,
                startupConfiguration: mutableCommandlineConfig.Startup))
            {
                workspace = frontEndHostController.GetWorkspace();

                // Error has been reported already
                return false;
            }

            pipGraph = pipGraphBuilder;
        }
    }

    Contract.Assert(frontEndHostController != null);

    workspace = frontEndHostController.GetWorkspace();
    if (mutableCommandlineConfig.Engine.Phase == EnginePhases.AnalyzeWorkspace)
    {
        // If workspace construction is successful, we run the linter on all specs.
        // This makes sure the workspace will carry all the errors that will occur when running the same specs in the regular engine path
        workspace = CreateLintedWorkspace(
            workspace,
            frontEndContext.LoggingContext,
            config.FrontEnd,
            pathTable);
    }

    return true;
}
private HashSet <ParsedModule> GetModulesToInclude(Workspace workspace, ISpecDependencyProvider provider, EvaluationFilter evaluationFilter) { var modulesToResolve = new HashSet <string>(evaluationFilter.ModulesToResolve.Select(s => s.ToString(m_pathTable.StringTable))); // Getting all the modules specified via filter var modulesToInclude = workspace.SpecModules.Where(m => modulesToResolve.Contains(m.Descriptor.Name)).ToHashSet(); // Getting all upstream dependencis for all modules. modulesToInclude = TransitiveClosureOfAllRelevantModules(modulesToInclude, workspace, provider); return(modulesToInclude); }
/// <summary>
/// Evaluates the build described by <paramref name="commandLineConfig"/> into a pip graph fragment
/// builder and serializes the fragment if the generator configuration requests it.
/// </summary>
/// <returns>True on success; false when an error was already reported.</returns>
private static bool TryBuildPipGraphFragment(
    ICommandLineConfiguration commandLineConfig,
    PipGraphFragmentGeneratorConfiguration pipGraphFragmentGeneratorConfig,
    FrontEndContext frontEndContext,
    EngineContext engineContext,
    EvaluationFilter evaluationFilter)
{
    Contract.Requires(frontEndContext != null);
    Contract.Requires(engineContext != null);
    Contract.Requires(commandLineConfig.Startup.ConfigFile.IsValid);
    Contract.Requires(evaluationFilter != null);

    var pathTable = engineContext.PathTable;
    var loggingContext = frontEndContext.LoggingContext;

    var mutableCommandlineConfig = CompleteCommandLineConfiguration(commandLineConfig);
    BuildXLEngine.ModifyConfigurationForCloudbuild(mutableCommandlineConfig, false, pathTable, loggingContext);
    BuildXLEngine.PopulateLoggingAndLayoutConfiguration(mutableCommandlineConfig, pathTable, bxlExeLocation: null);

    var statistics = new FrontEndStatistics();

    var frontEndControllerFactory = FrontEndControllerFactory.Create(
        mode: FrontEndMode.NormalMode,
        loggingContext: loggingContext,
        configuration: mutableCommandlineConfig,
        collector: null,
        statistics: statistics);

    var controller = frontEndControllerFactory.Create(engineContext.PathTable, engineContext.SymbolTable);
    controller.InitializeHost(frontEndContext, mutableCommandlineConfig);

    FrontEndHostController frontEndHostController = (FrontEndHostController)controller;

    var config = controller.ParseConfig(mutableCommandlineConfig);
    if (config == null)
    {
        return false;
    }

    // In-memory cache with an 'empty' fingerprint store: a cache object is required, but no real caching is wanted here.
    using (var cache = Task.FromResult<Possible<EngineCache>>(
        new EngineCache(
            new InMemoryArtifactContentCache(),
            new EmptyTwoPhaseFingerprintStore())))
    {
        var mountsTable = MountsTable.CreateAndRegister(loggingContext, engineContext, config, mutableCommandlineConfig.Startup.Properties);
        FrontEndEngineAbstraction frontEndEngineAbstraction = new FrontEndEngineImplementation(
            loggingContext,
            frontEndContext.PathTable,
            config,
            mutableCommandlineConfig.Startup,
            mountsTable,
            InputTracker.CreateDisabledTracker(loggingContext),
            null,
            null,
            () => FileContentTable.CreateStub(loggingContext),
            5000,
            false,
            controller.RegisteredFrontEnds);

    // Choose the fragment builder flavor based on whether a topologically sorted fragment was requested.
    var pipGraphBuilder = pipGraphFragmentGeneratorConfig.TopSort
        ? new PipGraphFragmentBuilderTopSort(engineContext, config, mountsTable.MountPathExpander)
        : new PipGraphFragmentBuilder(engineContext, config, mountsTable.MountPathExpander);

        if (!AddConfigurationMountsAndCompleteInitialization(config, loggingContext, mountsTable))
        {
            return false;
        }

        if (!mountsTable.PopulateModuleMounts(config.ModulePolicies.Values, out var moduleMountsTableMap))
        {
            Contract.Assume(loggingContext.ErrorWasLogged, "An error should have been logged after MountTable.PopulateModuleMounts()");
            return false;
        }

        // Dispose the engine abstraction at the end of graph population, if it happens to be disposable.
        using (frontEndEngineAbstraction is IDisposable ? (IDisposable)frontEndEngineAbstraction : null)
        {
            if (!controller.PopulateGraph(
                cache: cache,
                graph: pipGraphBuilder,
                engineAbstraction: frontEndEngineAbstraction,
                evaluationFilter: evaluationFilter,
                configuration: config,
                startupConfiguration: mutableCommandlineConfig.Startup))
            {
                // Error should have been reported already
                return false;
            }

            if (!SerializeFragmentIfRequested(pipGraphFragmentGeneratorConfig, frontEndContext, pipGraphBuilder))
            {
                // Error should have been reported already
                return false;
            }
        }
    }

    return true;
}
internal Workspace DoPhaseBuildWorkspace(IConfiguration configuration, FrontEndEngineAbstraction engineAbstraction, EvaluationFilter evaluationFilter) { if (!TryGetWorkspaceProvider(configuration, out var workspaceProvider, out var failures)) { var workspaceConfiguration = GetWorkspaceConfiguration(configuration); return(Workspace.Failure(workspaceConfiguration: workspaceConfiguration, failures: failures.ToArray())); } var result = TaskUtilities.WithCancellationHandlingAsync( FrontEndContext.LoggingContext, BuildAndFilterWorkspaceAsync(workspaceProvider, engineAbstraction, evaluationFilter), m_logger.FrontEndBuildWorkspacePhaseCanceled, GetOrCreateComputationCancelledWorkspace(workspaceProvider), FrontEndContext.CancellationToken).GetAwaiter().GetResult(); ReportWorkspaceParsingAndBindingErrorsIfNeeded(result); return(result); }
/// <summary> /// Builds and filters the worksapce. /// </summary> /// <remarks> /// This method not just builds the workspace from scratch, but it also tries to compute it in an efficient way. /// If there is a front end snapshot from the previous BuildXL run and the engine gives us a set of changed files, /// then we can build a filtered workspace based on the old spec-2-spec map without parsing the entire world. /// </remarks> private async Task <Workspace> BuildAndFilterWorkspaceAsync(IWorkspaceProvider workspaceProvider, FrontEndEngineAbstraction engineAbstraction, EvaluationFilter evaluationFilter) { // this step downloads nugets too, and that's why we want to do it outside of the progress reporting block below Possible <WorkspaceDefinition> workspaceDefinition = await TryGetWorkspaceDefinitionAsync(workspaceProvider); if (!workspaceDefinition.Succeeded) { return(Workspace.Failure(workspaceProvider, workspaceProvider.Configuration, workspaceDefinition.Failure)); } return(await WithWorkspaceProgressReportingAsync( numSpecs : workspaceDefinition.Result.SpecCount, task : BuildAndFilterWorkspaceAsync(workspaceDefinition.Result, workspaceProvider, engineAbstraction, evaluationFilter))); }
private void FilterWorkspace(Workspace workspace, EvaluationFilter filter) { var workspaceFilter = new WorkspaceFilter(m_pathTable); workspace.FilterWorkspace(workspaceFilter.FilterForConversion(workspace, filter)); }
/// <summary>
/// Builds and filters the workspace.
/// </summary>
/// <remarks>
/// This method not just builds the workspace from scratch, but it also tries to compute it in an efficient way.
/// If there is a front end snapshot from the previous BuildXL run and the engine gives us a set of changed files,
/// then we can build a filtered workspace based on the old spec-2-spec map without parsing the entire world.
/// </remarks>
private async Task<Workspace> BuildAndFilterWorkspaceAsync(IWorkspaceProvider workspaceProvider, FrontEndEngineAbstraction engineAbstraction, EvaluationFilter evaluationFilter)
{
    // this step downloads nugets too, and that's why we want to do it outside of the progress reporting block below
    Possible<WorkspaceDefinition> workspaceDefinition = await TryGetWorkspaceDefinitionAsync(workspaceProvider);
    if (!workspaceDefinition.Succeeded)
    {
        // In some cases even if the workspace failed to build some of the pipeline still tries to continue
        // Complete mount initialization to enable mount related queries downstream
        Engine.CompleteMountInitialization();
        return Workspace.Failure(workspaceProvider, workspaceProvider.Configuration, workspaceDefinition.Failure);
    }

    // As soon as we get the workspace definition, we can configure the mount table with the additional mounts modules may have defined and seal it
    foreach (var module in workspaceDefinition.Result.Modules.Where(module => module.Mounts != null))
    {
        foreach (var mount in module.Mounts)
        {
            Engine.AddResolvedModuleDefinedMount(mount, LocationData.Create(module.ModuleConfigFile));
        }
    }

    // At this point the mount table can be completed
    if (!Engine.CompleteMountInitialization())
    {
        return Workspace.Failure(workspaceProvider, workspaceProvider.Configuration, new GenericWorkspaceFailure("Mount points not properly defined. Detailed errors should have been logged."));
    }

    // Build (and possibly filter) the workspace while reporting progress against the total spec count.
    return await WithWorkspaceProgressReportingAsync(
        numSpecs: workspaceDefinition.Result.SpecCount,
        task: BuildAndFilterWorkspaceAsync(workspaceDefinition.Result, workspaceProvider, engineAbstraction, evaluationFilter));
}