/// <summary>
/// Builds a cmd.exe <see cref="Process"/> pip that runs the given command line and is flagged
/// to run in a container (<see cref="Process.Options.NeedsToRunInContainer"/>).
/// </summary>
/// <param name="context">Context providing the path/string tables used for all artifact paths.</param>
/// <param name="tempFiles">Source of unique working/output/temp directories for the pip.</param>
/// <param name="pt">Path table used only for the pip's unique temp directory — NOTE(review): presumably the same table as <paramref name="context"/>.PathTable at every call site; confirm.</param>
/// <param name="arguments">Raw command line; split on spaces and passed to cmd.exe after "/d /c".</param>
/// <param name="outputFiles">Declared file outputs of the pip.</param>
/// <param name="directoryOutputs">Declared directory outputs of the pip.</param>
/// <param name="containerIsolationLevel">Isolation level for the container; defaults to isolating all outputs.</param>
private static Process CreateConsoleProcessInContainer(
    BuildXLContext context,
    TempFileStorage tempFiles,
    PathTable pt,
    string arguments,
    ReadOnlyArray<FileArtifactWithAttributes> outputFiles,
    ReadOnlyArray<DirectoryArtifact> directoryOutputs,
    ContainerIsolationLevel containerIsolationLevel = ContainerIsolationLevel.IsolateAllOutputs)
{
    var executableFileArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(context.PathTable, CmdHelper.CmdX64));

    // cmd.exe /d /c <args>: "/d" skips AutoRun commands, "/c" runs the command line then exits.
    var argumentBuilder = new PipDataBuilder(context.PathTable.StringTable);
    argumentBuilder.Add("/d");
    argumentBuilder.Add("/c");
    using (argumentBuilder.StartFragment(PipDataFragmentEscaping.CRuntimeArgumentRules, " "))
    {
        // Each space-separated piece becomes its own fragment, escaped per C-runtime rules.
        foreach (var arg in arguments.Split(new[] { ' ' }, System.StringSplitOptions.RemoveEmptyEntries))
        {
            argumentBuilder.Add(arg);
        }
    }

    string workingDirectory = tempFiles.GetUniqueDirectory();
    var workingDirectoryAbsolutePath = AbsolutePath.Create(context.PathTable, workingDirectory);

    string uniqueOutputDirectory = tempFiles.GetUniqueDirectory();
    var uniqueOutputDirectoryPath = AbsolutePath.Create(context.PathTable, uniqueOutputDirectory);

    // Root under which the container redirects the pip's outputs.
    string uniqueRedirectedOutputDirectory = tempFiles.GetUniqueDirectory("redirected");
    var uniqueRedirectedOutputDirectoryPath = AbsolutePath.Create(context.PathTable, uniqueRedirectedOutputDirectory);

    var pip = new Process(
        executableFileArtifact,
        workingDirectoryAbsolutePath,
        argumentBuilder.ToPipData(" ", PipDataFragmentEscaping.NoEscaping),
        // Positional 'Invalid'/empty placeholders below: no response file, no response-file
        // content, no environment variables, and no std stream redirection —
        // NOTE(review): verify ordering against the Process constructor signature.
        FileArtifact.Invalid,
        PipData.Invalid,
        ReadOnlyArray<EnvironmentVariable>.FromWithoutCopy(),
        FileArtifact.Invalid,
        FileArtifact.Invalid,
        FileArtifact.Invalid,
        tempFiles.GetUniqueDirectory(pt), // unique temp directory; note this uses 'pt', not context.PathTable
        null,
        null,
        dependencies: ReadOnlyArray<FileArtifact>.FromWithoutCopy(new[] { executableFileArtifact }),
        outputs: outputFiles,
        directoryDependencies: ReadOnlyArray<DirectoryArtifact>.Empty,
        directoryOutputs: directoryOutputs,
        orderDependencies: ReadOnlyArray<PipId>.Empty,
        untrackedPaths: ReadOnlyArray<AbsolutePath>.Empty,
        untrackedScopes: ReadOnlyArray<AbsolutePath>.Empty,
        tags: ReadOnlyArray<StringId>.Empty,
        successExitCodes: ReadOnlyArray<int>.Empty,
        semaphores: ReadOnlyArray<ProcessSemaphoreInfo>.Empty,
        provenance: PipProvenance.CreateDummy(context),
        toolDescription: StringId.Invalid,
        additionalTempDirectories: ReadOnlyArray<AbsolutePath>.Empty,
        options: Process.Options.NeedsToRunInContainer,
        uniqueOutputDirectory: uniqueOutputDirectoryPath,
        uniqueRedirectedDirectoryRoot: uniqueRedirectedOutputDirectoryPath,
        containerIsolationLevel: containerIsolationLevel);

    return (pip);
}
/// <summary>
/// Interns <paramref name="path"/> in the test's <c>PathTable</c> and returns the resulting path.
/// </summary>
private AbsolutePath AbsPath(string path)
{
    return AbsolutePath.Create(PathTable, path);
}
/// <summary>
/// Creates an output <see cref="FileArtifact"/> for <paramref name="path"/> via the test's <c>PathTable</c>.
/// </summary>
private FileArtifact OutputFile(string path)
{
    var absolutePath = AbsolutePath.Create(PathTable, path);
    return FileArtifact.CreateOutputFile(absolutePath);
}
/// <summary>
/// Creates a non-default instance of <paramref name="type"/> for round-trip/equality testing.
/// </summary>
/// <remarks>
/// Primitives get fixed non-default values; enums get their second declared value (so the result
/// differs from the enum default); lists/dictionaries get one element built recursively;
/// interfaces are resolved to their mutable implementation class and populated reflectively.
/// Fails the test for any type it does not know how to construct.
/// </remarks>
/// <param name="context">Context supplying path/string tables for path-like values.</param>
/// <param name="type">Type to instantiate; nullable wrappers are unwrapped first.</param>
/// <param name="booleanDefault">Value used for bool members (callers flip this to probe both polarities).</param>
private static object CreateInstance(BuildXLContext context, Type type, bool booleanDefault)
{
    string path = A("x", "path");
    type = GetNonNullableType(type);
    if (type == typeof(bool))
    {
        return (booleanDefault);
    }
    if (type == typeof(double))
    {
        return ((double)0.23423);
    }
    if (type == typeof(byte))
    {
        return ((byte)123);
    }
    if (type == typeof(sbyte))
    {
        return ((sbyte)123);
    }
    if (type == typeof(short))
    {
        return ((short)123);
    }
    if (type == typeof(ushort))
    {
        return ((ushort)123);
    }
    if (type == typeof(int))
    {
        return (123);
    }
    if (type == typeof(uint))
    {
        return ((uint)123);
    }
    if (type == typeof(long))
    {
        return ((long)123);
    }
    if (type == typeof(ulong))
    {
        return ((ulong)123);
    }
    if (type == typeof(string))
    {
        return ("nonDefaultString");
    }
    if (type == typeof(ModuleId))
    {
        return (new ModuleId(123));
    }
    if (type == typeof(LocationData))
    {
        return (new LocationData(AbsolutePath.Create(context.PathTable, path), 12, 23));
    }
    if (type == typeof(AbsolutePath))
    {
        return (AbsolutePath.Create(context.PathTable, path));
    }
    if (type == typeof(RelativePath))
    {
        string relativePath = R("rel1", "dir1", "path");
        return (RelativePath.Create(context.StringTable, relativePath));
    }
    if (type == typeof(FileArtifact))
    {
        return (FileArtifact.CreateSourceFile(AbsolutePath.Create(context.PathTable, path)));
    }
    if (type == typeof(PathAtom))
    {
        return (PathAtom.Create(context.StringTable, "atom"));
    }
    if (type == typeof(global::BuildXL.Utilities.LineInfo))
    {
        return (new global::BuildXL.Utilities.LineInfo(1, 1));
    }
    if (type.GetTypeInfo().IsEnum)
    {
        // Pick the SECOND declared value so the result is distinguishable from the enum default.
        bool first = true;
        foreach (var value in Enum.GetValues(type))
        {
            if (!first)
            {
                return (value);
            }
            first = false;
        }
        XAssert.Fail($"Enum {type.FullName} doesn't have more than one value, so can't pick the second one.");
    }
    if (type.GetTypeInfo().IsGenericType)
    {
        var generic = type.GetGenericTypeDefinition();
        if (generic == typeof(IReadOnlyList<>))
        {
            // Treat IReadOnlyList as if it was List
            type = typeof(List<>).MakeGenericType(type.GenericTypeArguments[0]);
            generic = type.GetGenericTypeDefinition();
        }
        if (generic == typeof(List<>))
        {
            // Single-element list built recursively from the element type.
            var newList = (IList)Activator.CreateInstance(type);
            newList.Add(CreateInstance(context, type.GenericTypeArguments[0], booleanDefault));
            return (newList);
        }
        if (generic == typeof(IReadOnlyDictionary<,>))
        {
            // Treat IReadOnlyDictionary as if it was Dictionary
            type = typeof(Dictionary<,>).MakeGenericType(type.GenericTypeArguments[0], type.GenericTypeArguments[1]);
            generic = type.GetGenericTypeDefinition();
        }
        if (generic == typeof(Dictionary<,>))
        {
            // Single-entry dictionary with recursively-built key and value.
            var newDictionary = (IDictionary)Activator.CreateInstance(type);
            newDictionary.Add(
                CreateInstance(context, type.GenericTypeArguments[0], booleanDefault),
                CreateInstance(context, type.GenericTypeArguments[1], booleanDefault));
            return (newDictionary);
        }
    }
    if (type.GetTypeInfo().IsInterface)
    {
        // Treat interfaces as if it was the mutable class
        type = ConfigurationConverter.FindImplementationType(
            type,
            ObjectLiteral.Create(new List<Binding>(), default(LineInfo), AbsolutePath.Invalid),
            // Return a SourceResolver to instantiate
            () => "SourceResolver");
    }
    if (type.GetTypeInfo().IsClass)
    {
        // Default-construct and reflectively fill every settable member.
        var instance = Activator.CreateInstance(type);
        PopulateObject(context, type, instance, booleanDefault);
        return (instance);
    }
    XAssert.Fail($"Don't know how to create objects for this type: {type.FullName}.");
    return (null);
}
/// <summary>
/// Runs a single DScript unit test: materializes a test module on disk, parses and evaluates it
/// through the front end, populates a pip graph, and validates the resulting pips against the LKG file.
/// </summary>
/// <param name="testFolder">Scratch folder where the test module is written; must be non-empty.</param>
/// <param name="specFile">Spec file under test; copied into the test module.</param>
/// <param name="fullIdentifier">Fully-qualified identifier of the test function invoked from the generated main file.</param>
/// <param name="shortName">Short test name passed through to pip validation.</param>
/// <param name="lkgFile">Last-known-good file to validate pips against.</param>
/// <param name="sdksToResolve">SDK folders to make resolvable; the fake Sdk.TestRunner folder is appended.</param>
/// <returns>True when graph population and pip validation both succeed.</returns>
public bool Run(string testFolder, string specFile, string fullIdentifier, string shortName, string lkgFile, params string[] sdksToResolve)
{
    Contract.Requires(!string.IsNullOrEmpty(testFolder));
    Contract.Requires(!string.IsNullOrEmpty(specFile));
    Contract.Requires(sdksToResolve != null);

    // Sadly the frontend doesn't use the engine abstractions file api's so we have to materialize stuff on disk for now...
    // TODO: Fix this code once the frontend supports a proper virtual FileSystem.
    // TODO: Change the package semantics to implicit when we expose a way to evaluate a single value
    var testFileName = Path.GetFileName(specFile);
    var mainFileName = "testMain.bp";
    var testMainFile = Path.Combine(testFolder, mainFileName);
    Directory.CreateDirectory(testFolder);
    // Module config listing the generated main file and the spec under test as its projects.
    File.WriteAllText(Path.Combine(testFolder, Names.ModuleConfigBm), I($@"module( {{ name: 'TestPackage', nameResolutionSemantics: NameResolutionSemantics.implicitProjectReferences, projects: [ f`{mainFileName}`, f`{testFileName}`, ], }});"));
    // Generated main: exposes the spec's folder and invokes the test entry point.
    File.WriteAllText(testMainFile, I($@" export const testFolder = d`{Path.GetDirectoryName(specFile).Replace('\\', '/')}`; @@public export const main = {fullIdentifier}();"));
    File.Copy(specFile, Path.Combine(testFolder, testFileName));

    // Create a fake package for Sdk.TestRunner so that you can safely test packages that have the tests embedded in them.
    var testRunnerFolder = Path.Combine(testFolder, "Sdk.TestRunner");
    Directory.CreateDirectory(testRunnerFolder);
    File.WriteAllText(Path.Combine(testRunnerFolder, Names.ModuleConfigBm), I($"module({{\n\tname: 'Sdk.TestRunner',\n}});"));
    // Stub test() implementation that fails hard if a nested DScript unit test is attempted.
    File.WriteAllText(Path.Combine(testRunnerFolder, "package" + Names.DotDscExtension), I($@" export interface TestArguments {{ testFiles: File[]; sdkFolders?: (Directory|StaticDirectory)[]; autoFixLkgs?: boolean; }} export interface TestResult {{ xmlResults: File; }} export function test(args: TestArguments): TestResult {{ Contract.fail(""Can't run a DScript UnitTest inside of a DScript UnitTest""); }}"));

    // Setup Context and configuration
    var frontEndContext = FrontEndContext.CreateInstanceForTesting();
    var pipContext = new SchedulerContext(CancellationToken.None, frontEndContext.StringTable, frontEndContext.PathTable, frontEndContext.SymbolTable, frontEndContext.QualifierTable);
    var pathTable = frontEndContext.PathTable;
    var testFolderPath = AbsolutePath.Create(pathTable, testFolder);
    var configuration = CreateConfiguration(sdksToResolve.Union(new[] { testRunnerFolder }), pathTable, testFolderPath);
    var engineAbstraction = new TestEngineAbstraction(pathTable, frontEndContext.StringTable, testFolderPath, new PassThroughFileSystem(pathTable));
    var frontEndStatistics = new FrontEndStatistics();
    if (!CreateFactories(
        frontEndContext,
        engineAbstraction,
        frontEndStatistics,
        configuration,
        out var ambientTesting,
        out var moduleRegistry,
        out var frontEndFactory))
    {
        return (false);
    }

    // Set the timeout to a large number to avoid useless performance collections in tests.
    using (var performanceCollector = new PerformanceCollector(TimeSpan.FromHours(1)))
    using (var frontEndHostController = new FrontEndHostController(
        frontEndFactory,
        new EvaluationScheduler(1),
        moduleRegistry,
        frontEndStatistics,
        m_tracingLogger,
        performanceCollector,
        collectMemoryAsSoonAsPossible: true))
    {
        var frontEndController = (IFrontEndController)frontEndHostController;
        frontEndController.InitializeHost(frontEndContext, configuration);
        frontEndController.ParseConfig(configuration);

        // Populate the graph
        using (var pipTable = new PipTable(
            pipContext.PathTable,
            pipContext.SymbolTable,
            initialBufferSize: 16384,
            maxDegreeOfParallelism: 1,
            debug: true))
        {
            // Mounts the evaluated specs may reference: testFolder (read-only), src/out (writable),
            // noRead (write-only), temp, and obj.
            var mountPathExpander = new MountPathExpander(pathTable);
            mountPathExpander.Add(pathTable, new SemanticPathInfo(PathAtom.Create(frontEndContext.StringTable, "testFolder"), testFolderPath, allowHashing: true, readable: true, writable: false));
            mountPathExpander.Add(pathTable, new SemanticPathInfo(PathAtom.Create(frontEndContext.StringTable, "src"), testFolderPath.Combine(pathTable, "src"), allowHashing: true, readable: true, writable: true));
            mountPathExpander.Add(pathTable, new SemanticPathInfo(PathAtom.Create(frontEndContext.StringTable, "out"), testFolderPath.Combine(pathTable, "out"), allowHashing: true, readable: true, writable: true));
            mountPathExpander.Add(pathTable, new SemanticPathInfo(PathAtom.Create(frontEndContext.StringTable, "noRead"), testFolderPath.Combine(pathTable, "noRead"), allowHashing: true, readable: false, writable: true));
            mountPathExpander.Add(pathTable, new SemanticPathInfo(PathAtom.Create(frontEndContext.StringTable, "temp"), engineAbstraction.Layout.TempDirectory, allowHashing: true, readable: true, writable: true));
            mountPathExpander.Add(pathTable, new SemanticPathInfo(PathAtom.Create(frontEndContext.StringTable, "obj"), engineAbstraction.Layout.ObjectDirectory, allowHashing: true, readable: true, writable: true));
            var graph = new PipGraph.Builder(
                pipTable,
                pipContext,
                m_pipLogger,
                frontEndContext.LoggingContext,
                configuration,
                mountPathExpander);
            using (var cacheLayer = new EngineCache(
                new InMemoryArtifactContentCache(),
                new InMemoryTwoPhaseFingerprintStore()))
            {
                var cache = Task.FromResult(Possible.Create(cacheLayer));
                try
                {
                    // Only evaluate the generated main file; no symbol or module filtering.
                    var evaluationFilter = new EvaluationFilter(
                        pipContext.SymbolTable,
                        pipContext.PathTable,
                        new FullSymbol[0],
                        new[]
                        {
                            AbsolutePath.Create(frontEndContext.PathTable, testMainFile),
                        },
                        CollectionUtilities.EmptyArray<StringId>());
                    if (!frontEndController.PopulateGraph(cache, graph, engineAbstraction, evaluationFilter, configuration, configuration.Startup))
                    {
                        HandleDiagnostics();
                        return (false);
                    }
                }
                catch (AggregateException e)
                {
                    var baseException = e.GetBaseException();
                    if (baseException is XunitException)
                    {
                        // If it is an XUnit assert, unwrap the exception and rethrow it with its
                        // original stack trace, because otherwise XUnit doesn't display the error nicely.
                        ExceptionDispatchInfo.Capture(baseException).Throw();
                    }
                    throw;
                }
            }
            if (!ValidatePips(frontEndContext, graph, testFolderPath, specFile, shortName, lkgFile, ambientTesting.DontValidatePipsEnabled))
            {
                return (false);
            }
        }
    }
    HandleDiagnostics();
    return (true);
}
/// <summary>
/// Creates a source-file artifact for <paramref name="path"/>, implicitly converted to
/// <see cref="FileOrDirectoryArtifact"/>.
/// </summary>
private FileOrDirectoryArtifact File(string path) =>
    FileArtifact.CreateSourceFile(AbsolutePath.Create(Context.PathTable, path));
/// <summary>
/// Interns <paramref name="path"/> in this instance's path table.
/// </summary>
public AbsolutePath GetPath(string path) => AbsolutePath.Create(m_pathTable, path);
public void RemoveExtension()
{
    var pt = new PathTable();

    // Asserts that removing the extension of 'original' yields 'expected'.
    void ExpectRemoved(string original, string expected)
    {
        var before = AbsolutePath.Create(pt, original);
        var after = before.RemoveExtension(pt);
        XAssert.AreEqual(expected, after.ToString(pt));
    }

    // Asserts that RemoveExtension leaves 'original' untouched.
    void ExpectUnchanged(string original)
    {
        var before = AbsolutePath.Create(pt, original);
        var after = before.RemoveExtension(pt);
        XAssert.AreEqual(before, after);
    }

    // Single-character file name.
    ExpectRemoved(@"/a.c", @"/a");       // single char extension
    ExpectRemoved(@"/a.cpp", @"/a");     // multi char extension
    ExpectUnchanged(@"/a");              // nothing to remove

    // Multi-character file name.
    ExpectRemoved(@"/ab.c", @"/ab");
    ExpectRemoved(@"/ab.cpp", @"/ab");
    ExpectUnchanged(@"/ab");

    // Nested directory.
    ExpectRemoved(@"/xyz/ab.c", @"/xyz/ab");
    ExpectRemoved(@"/xyz/ab.cpp", @"/xyz/ab");
    ExpectUnchanged(@"/xyz/ab");

    // Multiple dots: only the last extension is removed.
    ExpectRemoved(@"/xyz/ab.xyz.c", @"/xyz/ab.xyz");
    ExpectRemoved(@"/xyz/ab.xyz.cpp", @"/xyz/ab.xyz");

    // A leading dot is part of the name, not an extension.
    ExpectUnchanged(@"/xyz/.cpp");
}
public void ChangeExtension()
{
    var pt = new PathTable();

    // Asserts that changing the extension of 'original' to ".d" yields 'expected'.
    void ExpectChanged(string original, string expected)
    {
        var before = AbsolutePath.Create(pt, original);
        var after = before.ChangeExtension(pt, PathAtom.Create(pt.StringTable, ".d"));
        XAssert.AreEqual(expected, after.ToString(pt));
    }

    // Single-character file name: replace or append the extension.
    ExpectChanged(@"/a.c", @"/a.d");     // single char extension
    ExpectChanged(@"/a.cpp", @"/a.d");   // multi char extension
    ExpectChanged(@"/a", @"/a.d");       // no extension: appended

    // Multi-character file name.
    ExpectChanged(@"/ab.c", @"/ab.d");
    ExpectChanged(@"/ab.cpp", @"/ab.d");
    ExpectChanged(@"/ab", @"/ab.d");

    // Nested directory.
    ExpectChanged(@"/xyz/ab.c", @"/xyz/ab.d");
    ExpectChanged(@"/xyz/ab.cpp", @"/xyz/ab.d");
    ExpectChanged(@"/xyz/ab", @"/xyz/ab.d");

    // Multiple dots: only the last extension changes.
    ExpectChanged(@"/xyz/ab.xyz.c", @"/xyz/ab.xyz.d");
    ExpectChanged(@"/xyz/ab.xyz.cpp", @"/xyz/ab.xyz.d");

    // Dot-leading name: the whole ".cpp" counts as the extension here.
    ExpectChanged(@"/xyz/.cpp", @"/xyz/.d");

    // Passing PathAtom.Invalid removes the extension instead of changing it.
    var ap1 = AbsolutePath.Create(pt, @"/xyz/a.cpp");
    var ap2 = ap1.ChangeExtension(pt, PathAtom.Invalid);
    XAssert.AreEqual(@"/xyz/a", ap2.ToString(pt));
}
/// <summary>
/// String-path convenience overload of <see cref="RecordFileWrite(PathTable, AbsolutePath, bool)"/>.
/// </summary>
public bool RecordFileWrite(PathTable pathTable, string absolutePath, bool flushImmediately)
{
    var path = AbsolutePath.Create(pathTable, absolutePath);
    return RecordFileWrite(pathTable, path, flushImmediately);
}
/// <summary>
/// Interns <paramref name="path"/> in the test's <c>PathTable</c>.
/// </summary>
private AbsolutePath CreateAbsolutePath(string path) => AbsolutePath.Create(PathTable, path);
/// <summary>
/// Builds a raw input translation from the path described by <paramref name="source"/> segments
/// to the path described by <paramref name="target"/> segments (both expanded via <c>A(...)</c>).
/// </summary>
private static DirectoryTranslator.RawInputTranslation CreateInputTranslation(PathTable pathTable, string[] source, string[] target)
{
    var sourcePath = AbsolutePath.Create(pathTable, A(source));
    var targetPath = AbsolutePath.Create(pathTable, A(target));
    return DirectoryTranslator.RawInputTranslation.Create(sourcePath, targetPath);
}
/// <summary>
/// Asserts that translating the path described by <paramref name="path"/> segments yields the
/// path described by <paramref name="expected"/> segments.
/// </summary>
private static void AssertEqualTranslatedPath(DirectoryTranslator translator, PathTable pathTable, string[] expected, string[] path)
{
    var expectedPath = AbsolutePath.Create(pathTable, A(expected));
    var inputPath = AbsolutePath.Create(pathTable, A(path));
    XAssert.AreEqual(expectedPath, translator.Translate(inputPath, pathTable));
}
/// <summary>
/// Verifies that path sets, metadata, and cache entries stored in the historic metadata cache
/// round-trip across a close/reload cycle, and that retrieval operations block until the cache's
/// load task completes. Repeated three times so each iteration reloads state persisted by the last.
/// </summary>
/// <remarks>
/// Fixes applied in review: the reload call now uses the <c>hmcFolderName</c> local instead of a
/// duplicated "hmc" literal (both must name the same folder for the reload to see persisted state),
/// the unused <c>calledLoad</c> BoxRef local was removed, and the misspelled
/// <c>retrievdMetadata1Task</c> local was renamed.
/// </remarks>
public async Task TestHistoricMetadataPathStringRoundtrip()
{
    LoggingContext loggingContext = CreateLoggingContextForTest();
    PipExecutionContext context;
    HistoricMetadataCache cache = null;
    var hmcFolderName = "hmc";

    for (int i = 0; i < 3; i++)
    {
        CreateHistoricCache(loggingContext, hmcFolderName, out context, out cache, out var memoryArtifactCache);

        var process1 = SchedulerTest.CreateDummyProcess(context, new PipId(1));
        var process2 = SchedulerTest.CreateDummyProcess(context, new PipId(2));
        var pathTable = context.PathTable;

        // Add some random paths to ensure path table indices are different after loading
        AbsolutePath.Create(pathTable, X("/H/aslj/sfas/832.stxt"));
        AbsolutePath.Create(pathTable, X("/R/f/s/Historic"));
        AbsolutePath.Create(pathTable, X("/M/hgf/sf4as/83afsd"));
        AbsolutePath.Create(pathTable, X("/Z/bd/sfas/Cache"));

        var abPath1 = AbsolutePath.Create(pathTable, X("/H/aslj/sfas/p1OUT.bin"));
        var abPath2 = AbsolutePath.Create(pathTable, X("/H/aslj/sfas/P2.txt"));

        var pathSet1 = ObservedPathSetTestUtilities.CreatePathSet(
            pathTable,
            X("/X/a/b/c"),
            X("/X/d/e"),
            X("/X/a/b/c/d"));

        PipCacheDescriptorV2Metadata metadata1 = new PipCacheDescriptorV2Metadata
        {
            StaticOutputHashes = new List<AbsolutePathFileMaterializationInfo>
            {
                new AbsolutePathFileMaterializationInfo
                {
                    AbsolutePath = abPath1.GetName(pathTable).ToString(context.StringTable),
                    Info = new BondFileMaterializationInfo { FileName = "p1OUT.bin" }
                }
            }
        };

        // Store and publish the first path set / metadata / cache entry.
        var storedPathSet1 = await cache.TryStorePathSetAsync(pathSet1, preservePathCasing: false);
        var storedMetadata1 = await cache.TryStoreMetadataAsync(metadata1);
        var weakFingerprint1 = new WeakContentFingerprint(FingerprintUtilities.CreateRandom());
        var strongFingerprint1 = new StrongContentFingerprint(FingerprintUtilities.CreateRandom());
        var cacheEntry = new CacheEntry(storedMetadata1.Result, nameof(HistoricMetadataCacheTests), ArrayView<ContentHash>.Empty);
        var publishedCacheEntry = await cache.TryPublishCacheEntryAsync(process1, weakFingerprint1, storedPathSet1.Result, strongFingerprint1, cacheEntry);

        var pathSet2 = ObservedPathSetTestUtilities.CreatePathSet(
            pathTable,
            X("/F/a/y/c"),
            X("/B/d/e"),
            X("/G/a/z/c/d"),
            X("/B/a/b/c"));

        PipCacheDescriptorV2Metadata metadata2 = new PipCacheDescriptorV2Metadata
        {
            StaticOutputHashes = new List<AbsolutePathFileMaterializationInfo>
            {
                new AbsolutePathFileMaterializationInfo
                {
                    AbsolutePath = abPath2.ToString(pathTable),
                    Info = new BondFileMaterializationInfo { FileName = abPath2.GetName(pathTable).ToString(context.StringTable) }
                }
            },
            DynamicOutputs = new List<List<RelativePathFileMaterializationInfo>>
            {
                new List<RelativePathFileMaterializationInfo>
                {
                    new RelativePathFileMaterializationInfo
                    {
                        RelativePath = @"dir\P2Dynamic.txt",
                        Info = new BondFileMaterializationInfo { FileName = "p2dynamic.txt" }
                    },
                    new RelativePathFileMaterializationInfo
                    {
                        RelativePath = @"dir\P2dynout2.txt",
                        // Null FileName exercises the no-filename serialization path.
                        Info = new BondFileMaterializationInfo { FileName = null }
                    }
                }
            }
        };

        // Store and publish the second path set / metadata / cache entry under the same weak fingerprint.
        var storedPathSet2 = await cache.TryStorePathSetAsync(pathSet2, preservePathCasing: false);
        var storedMetadata2 = await cache.TryStoreMetadataAsync(metadata2);
        var cacheEntry2 = new CacheEntry(storedMetadata2.Result, nameof(HistoricMetadataCacheTests), ArrayView<ContentHash>.Empty);
        var strongFingerprint2 = new StrongContentFingerprint(FingerprintUtilities.CreateRandom());
        var publishedCacheEntry2 = await cache.TryPublishCacheEntryAsync(process1, weakFingerprint1, storedPathSet2.Result, strongFingerprint2, cacheEntry2);

        // Persist, then drop the in-memory artifact cache so the reload must come from storage.
        await cache.CloseAsync();
        memoryArtifactCache.Clear();

        PipExecutionContext loadedContext;
        HistoricMetadataCache loadedCache;
        TaskSourceSlim<bool> loadCompletionSource = TaskSourceSlim.Create<bool>();
        TaskSourceSlim<bool> loadCalled = TaskSourceSlim.Create<bool>();
        // Reload from the SAME folder; the load task is gated on loadCompletionSource so we can
        // verify that retrieval operations block until loading finishes.
        CreateHistoricCache(loggingContext, hmcFolderName, out loadedContext, out loadedCache, out memoryArtifactCache, loadTask: async hmc =>
        {
            loadCalled.SetResult(true);
            await loadCompletionSource.Task;
        });

        var operationContext = OperationContext.CreateUntracked(loggingContext);
        var retrievePathSet1Task = loadedCache.TryRetrievePathSetAsync(operationContext, WeakContentFingerprint.Zero, storedPathSet1.Result);
        var retrievedMetadata1Task = loadedCache.TryRetrieveMetadataAsync(
            process1,
            WeakContentFingerprint.Zero,
            StrongContentFingerprint.Zero,
            storedMetadata1.Result,
            storedPathSet1.Result);
        var getCacheEntry1Task = loadedCache.TryGetCacheEntryAsync(
            process1,
            weakFingerprint1,
            storedPathSet1.Result,
            strongFingerprint1);

        Assert.False(retrievePathSet1Task.IsCompleted, "Before load task completes. TryRetrievePathSetAsync operations should block");
        Assert.False(retrievedMetadata1Task.IsCompleted, "Before load task completes. TryRetrieveMetadataAsync operations should block");
        Assert.False(getCacheEntry1Task.IsCompleted, "Before load task completes. TryGetCacheEntryAsync operations should block");

        Assert.True(loadCalled.Task.Wait(TimeSpan.FromSeconds(10)) && loadCalled.Task.Result, "Load should have been called in as a result of querying");

        // Unblock the load task; the pending retrievals can now complete.
        loadCompletionSource.SetResult(true);

        var maybeLoadedPathSet1 = await retrievePathSet1Task;
        var maybeLoadedMetadata1 = await retrievedMetadata1Task;
        var maybeLoadedCacheEntry1 = await getCacheEntry1Task;
        Assert.Equal(storedMetadata1.Result, maybeLoadedCacheEntry1.Result.Value.MetadataHash);

        var maybeLoadedPathSet2 = await loadedCache.TryRetrievePathSetAsync(operationContext, WeakContentFingerprint.Zero, storedPathSet2.Result);
        var maybeLoadedMetadata2 = await loadedCache.TryRetrieveMetadataAsync(
            process2,
            WeakContentFingerprint.Zero,
            StrongContentFingerprint.Zero,
            storedMetadata2.Result,
            storedPathSet2.Result);

        // Round-tripped path sets and metadata must match what was stored, even though the
        // loaded context has a different path table.
        AssertPathSetEquals(pathTable, pathSet1, loadedContext.PathTable, maybeLoadedPathSet1.Result);
        AssertPathSetEquals(pathTable, pathSet2, loadedContext.PathTable, maybeLoadedPathSet2.Result);
        AssertMetadataEquals(metadata1, maybeLoadedMetadata1.Result);
        AssertMetadataEquals(metadata2, maybeLoadedMetadata2.Result);

        await loadedCache.CloseAsync();
    }
}
/// <summary>
/// Remaps a path produced by a fragment to the corresponding path in the resulting graph's path table.
/// </summary>
/// <param name="fragment">Fragment where the path originates.</param>
/// <param name="path">A path interned in the fragment's path table.</param>
private AbsolutePath RemapFragmentPath(TestPipGraphFragment fragment, AbsolutePath path)
{
    string expanded = path.ToString(fragment.Context.PathTable);
    return AbsolutePath.Create(Context.PathTable, expanded);
}
/// <summary>
/// Dumps every Process pip (optionally filtered to <c>TargetSemiStableHash</c>) as an XML report,
/// grouped by spec file: command line, environment, dependencies (with hashes), outputs,
/// directory outputs, and temp directories. Returns 0 on completion.
/// </summary>
/// <remarks>
/// Output goes to <c>DumpFilePath</c>, optionally wrapped in a zip archive when <c>CompressFile</c>
/// is set. A timer prints progress every 5 seconds while the dump runs.
/// NOTE(review): 'hashes', 'files', and 'hashWriter' are created but never written to in this
/// method; hashWriter still creates the ".hashes.txt" file as a side effect — confirm whether
/// these are vestigial before removing.
/// </remarks>
public override int Analyze()
{
    var rootExpander = new RootExpander(PathTable);
    HashSet<ContentHash> hashes = new HashSet<ContentHash>();
    hashes.Add(ContentHashingUtilities.ZeroHash);
    HashSet<FileArtifact> files = new HashSet<FileArtifact>();
    foreach (var root in Roots)
    {
        rootExpander.Add(AbsolutePath.Create(PathTable, root.Key), root.Value);
    }

    Func<AbsolutePath, string> expandRoot = absPath => absPath.ToString(PathTable, rootExpander);

    // All Process pips (or only the targeted one), grouped by originating spec file and
    // ordered by expanded spec path for a deterministic dump.
    var orderedPips = CachedGraph.PipGraph.RetrievePipReferencesOfType(PipType.Process)
        .Where(lazyPip => TargetSemiStableHash == null || TargetSemiStableHash == lazyPip.SemiStableHash)
        .Select(lazyPip => (Process)lazyPip.HydratePip())
        .ToLookup(process => process.Provenance.Token.Path)
        .OrderBy(grouping => grouping.Key.ToString(PathTable, rootExpander))
        .ToList();

    using (var fingerprintStream = File.Create(DumpFilePath, bufferSize: 64 << 10 /* 64 KB */))
    using (var hashWriter = new StreamWriter(DumpFilePath + ".hashes.txt"))
    {
        using (
            var fingerprintArchive = CompressFile
                ? new ZipArchive(fingerprintStream, ZipArchiveMode.Create)
                : null)
        {
            using (
                var writer = XmlWriter.Create(
                    CompressFile ? fingerprintArchive.CreateEntry("dump.xml", CompressionLevel.Fastest).Open() : fingerprintStream,
                    new XmlWriterSettings() { Indent = true }))
            {
                int doneProcesses = 0;
                // Progress reporter: prints the running count every 5 seconds.
                var t = new Timer(
                    o =>
                    {
                        var done = doneProcesses;
                        Console.WriteLine("Processes Done: {0} of {1}", done, orderedPips.Count);
                    },
                    null,
                    5000,
                    5000);
                try
                {
                    writer.WriteStartElement("ProcessDump");
                    writer.WriteAttributeString("Count", orderedPips.Count.ToString(CultureInfo.InvariantCulture));
                    foreach (var specPipGroup in orderedPips)
                    {
                        writer.WriteStartElement("SpecFile");
                        writer.WriteAttributeString("Path", specPipGroup.Key.ToString(PathTable, rootExpander));
                        foreach (var pip in specPipGroup)
                        {
                            doneProcesses++;
                            writer.WriteStartElement("Process");
                            writer.WriteAttributeString("Name", pip.Executable.Path.ToString(PathTable, rootExpander));
                            writer.WriteAttributeString("CMD", RenderProcessArguments(pip));
                            writer.WriteElementString("Description", pip.GetDescription(PipGraph.Context));

                            // Environment: value "Unset" marks variables declared without a value.
                            writer.WriteStartElement("EnvironmentVariables");
                            foreach (var environmentVariable in pip.EnvironmentVariables)
                            {
                                writer.WriteStartElement("Environment");
                                writer.WriteAttributeString("Name", environmentVariable.Name.ToString(PathTable.StringTable));
                                if (environmentVariable.Value.IsValid)
                                {
                                    writer.WriteAttributeString("Value", environmentVariable.Value.ToString(expandRoot, PathTable.StringTable, PipData.MaxMonikerRenderer));
                                }
                                else
                                {
                                    writer.WriteAttributeString("Value", "Unset");
                                }
                                writer.WriteEndElement();
                            }
                            writer.WriteEndElement();

                            // File dependencies with content hash and rewrite count.
                            writer.WriteStartElement("Dependencies");
                            foreach (var input in pip.Dependencies)
                            {
                                writer.WriteStartElement("Item");
                                writer.WriteAttributeString("Path", input.Path.ToString(PathTable, rootExpander));
                                writer.WriteAttributeString("Hash", m_fileHashes.GetOrAdd(input, ContentHashingUtilities.ZeroHash).Item.Value.ToString());
                                writer.WriteAttributeString("RewriteCount", input.RewriteCount.ToString());
                                writer.WriteEndElement();
                            }
                            writer.WriteEndElement();

                            writer.WriteStartElement("DirectoryDependencies");
                            foreach (var input in pip.DirectoryDependencies)
                            {
                                writer.WriteStartElement("Item");
                                writer.WriteAttributeString("Path", input.Path.ToString(PathTable, rootExpander));
                                var kind = PipTable.GetSealDirectoryKind(PipGraph.GetSealedDirectoryNode(input).ToPipId());
                                writer.WriteAttributeString("Kind", kind.ToString());
                                // Print directory dependency file details when dumping a specific process
                                if (TargetSemiStableHash != null && (kind == SealDirectoryKind.Full || kind == SealDirectoryKind.Partial))
                                {
                                    foreach (var file in PipGraph.ListSealedDirectoryContents(input))
                                    {
                                        writer.WriteStartElement("Item");
                                        writer.WriteAttributeString("Path", file.Path.ToString(PathTable, rootExpander));
                                        writer.WriteAttributeString("Hash", m_fileHashes.GetOrAdd(file, ContentHashingUtilities.ZeroHash).Item.Value.ToString());
                                        writer.WriteAttributeString("RewriteCount", file.RewriteCount.ToString());
                                        writer.WriteEndElement();
                                    }
                                }
                                else if (m_contents.TryGetValue(input, out var contents))
                                {
                                    m_observedInputs.TryGetValue(pip.PipId.Value, out var observedInputs);
                                    foreach (var file in contents)
                                    {
                                        // skip the files that were not accessed
                                        if (observedInputs != null && !observedInputs.Contains(file.Path))
                                        {
                                            continue;
                                        }
                                        writer.WriteStartElement("Item");
                                        writer.WriteAttributeString("Path", file.Path.ToString(PathTable, rootExpander));
                                        writer.WriteAttributeString("Hash", m_fileHashes.GetOrAdd(file, ContentHashingUtilities.ZeroHash).Item.Value.ToString());
                                        writer.WriteAttributeString("RewriteCount", file.RewriteCount.ToString());
                                        writer.WriteEndElement();
                                    }
                                }
                                writer.WriteEndElement();
                            }
                            writer.WriteEndElement();

                            // File outputs; RewriteCount attribute only emitted beyond the first write.
                            writer.WriteStartElement("Outputs");
                            foreach (var input in pip.FileOutputs)
                            {
                                writer.WriteStartElement("Item");
                                if (input.RewriteCount > 1)
                                {
                                    writer.WriteAttributeString("RewriteCount", input.RewriteCount.ToString());
                                }
                                writer.WriteString(input.Path.ToString(PathTable, rootExpander));
                                writer.WriteEndElement();
                            }
                            writer.WriteEndElement();

                            // Directory outputs, with their known contents where recorded.
                            writer.WriteStartElement("DirectoryOutputs");
                            foreach (var output in pip.DirectoryOutputs)
                            {
                                writer.WriteStartElement("Directory");
                                {
                                    writer.WriteAttributeString("Path", output.Path.ToString(PathTable, rootExpander));
                                    if (m_contents.TryGetValue(output, out var contents))
                                    {
                                        writer.WriteStartElement("Contents");
                                        {
                                            foreach (var file in contents)
                                            {
                                                writer.WriteStartElement("Item");
                                                if (file.RewriteCount > 1)
                                                {
                                                    writer.WriteAttributeString("RewriteCount", file.RewriteCount.ToString());
                                                }
                                                writer.WriteString(file.Path.ToString(PathTable, rootExpander));
                                                writer.WriteEndElement();
                                            }
                                        }
                                        writer.WriteEndElement();
                                    }
                                }
                                writer.WriteEndElement();
                            }
                            writer.WriteEndElement();

                            if (pip.TempDirectory.IsValid)
                            {
                                writer.WriteElementString("TempDirectory", pip.TempDirectory.ToString(PathTable, rootExpander));
                            }
                            writer.WriteStartElement("AdditionalTempDirectories");
                            foreach (var item in pip.AdditionalTempDirectories)
                            {
                                writer.WriteElementString("Item", item.ToString(PathTable, rootExpander));
                            }
                            writer.WriteEndElement();
                            writer.WriteEndElement(); // Process
                        }
                        writer.WriteEndElement(); // SpecFile
                    }
                    writer.WriteEndElement(); // ProcessDump
                }
                finally
                {
                    // kill and wait for the status timer to die...
                    using (var e = new AutoResetEvent(false))
                    {
                        t.Dispose(e);
                        e.WaitOne();
                    }
                }
            }
        }
    }
    return (0);
}
public void TestSerialization(FileExistence fileExistence)
{
    var pathTable = new PathTable();
    var sourceFile = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, A("c", "foo.txt")));
    var fileArtifact = FileArtifactWithAttributes.Create(sourceFile, fileExistence);

    // Round-tripping preserves both the path and the existence attribute.
    HasTheSamePathAndExistence(fileArtifact, CloneViaSerialization(fileArtifact));

    // Write count is not affected by serialization/deserialization
    var rewritten = fileArtifact.CreateNextWrittenVersion();
    HasTheSamePathAndExistence(fileArtifact, CloneViaSerialization(rewritten));
}
/// <summary>
/// Simulates two consecutive builds where the second build fully seals a directory with
/// fewer contents than the first. When <paramref name="shouldScrub"/> is true, stray files
/// from the first build must be scrubbed out of the fully sealed directory; when false,
/// the stray file survives and reading it is flagged as a file-monitoring violation.
/// </summary>
public void ScrubFilesNotInContentsWhenFullySealDir(bool shouldScrub)
{
    // simulating two builds here,
    // the second build should scrub the stray files
    // if they are in a fully sealed directory
    var sourceDir = DirectoryArtifact.CreateWithZeroPartialSealId(CreateUniqueDirectory());
    var sourceDirStr = ArtifactToString(sourceDir);
    var file1 = CreateOutputFileArtifact(sourceDirStr);
    var file1Str = ArtifactToString(file1);
    var file2 = CreateOutputFileArtifact(sourceDirStr);
    var file2Str = ArtifactToString(file2);

    // Sealed directory with two nested subdirectories, one output file in each.
    var sealDir = DirectoryArtifact.CreateWithZeroPartialSealId(CreateUniqueDirectory());
    var sealDirStr = ArtifactToString(sealDir);
    var nestedDir1 = AbsolutePath.Create(Context.PathTable, Path.Combine(sealDirStr, "nested1"));
    var nestedDir1Str = nestedDir1.ToString(Context.PathTable);
    var nestedDir2 = AbsolutePath.Create(Context.PathTable, Path.Combine(sealDirStr, "nested2"));
    var nestedDir2Str = nestedDir2.ToString(Context.PathTable);
    var sealFile1 = CreateOutputFileArtifact(nestedDir1Str);
    var sealFile1Str = ArtifactToString(sealFile1);
    var sealFile2 = CreateOutputFileArtifact(nestedDir2Str);
    var sealFile2Str = ArtifactToString(sealFile2);

    // the first build depends on file1 and file2
    var builderA = CreatePipBuilder(new Operation[]
    {
        Operation.WriteFile(file1, "First build write file1"),
        Operation.WriteFile(file2, "First build write file2"),
    });
    SchedulePipBuilder(builderA);

    var builderB = CreatePipBuilder(new Operation[]
    {
        Operation.CopyFile(file1, sealFile1),
        Operation.CopyFile(file2, sealFile2),
    });
    SchedulePipBuilder(builderB);

    // First build fully seals both files into the directory.
    SealDirectory(sealDir, SealDirectoryKind.Full, true, sealFile1, sealFile2);
    RunScheduler().AssertSuccess();
    XAssert.IsTrue(File.Exists(sealFile1Str), $"File in the content list when seal was supposed to exist: {sealFile1Str}");
    XAssert.IsTrue(File.Exists(sealFile2Str), $"File in the content list when seal was supposed to exist: {sealFile2Str}");

    ResetPipGraphBuilder();

    // the second build only depends on file1
    var builderC = CreatePipBuilder(new Operation[]
    {
        Operation.WriteFile(file1, "Second build write file1"),
    });
    SchedulePipBuilder(builderC);

    var builderD = CreatePipBuilder(new Operation[]
    {
        Operation.CopyFile(file1, sealFile1),
    });
    SchedulePipBuilder(builderD);

    // Second seal contains only sealFile1; scrubbing of leftovers is controlled by the parameter.
    SealDirectory(sealDir, SealDirectoryKind.Full, shouldScrub, sealFile1);

    var opslist = new List<Operation>();
    opslist.Add(Operation.ReadFile(sealFile1));
    opslist.Add(Operation.WriteFile(CreateOutputFileArtifact()));
    if (!shouldScrub)
    {
        // Read the leftover file so the non-scrubbing run trips a disallowed-access violation.
        // NOTE(review): the boolean argument to ReadFile presumably suppresses dependency
        // inference for this read — confirm against the Operation helper.
        opslist.Add(Operation.ReadFile(sealFile2, true));
    }

    var builderE = CreatePipBuilder(opslist);
    SchedulePipBuilder(builderE);

    XAssert.IsTrue(File.Exists(sealFile1Str), $"File in the content list when seal was supposed to exist: {sealFile1Str}");

    if (shouldScrub)
    {
        // Scrubbing removes the stray file and its now-empty nested directory.
        RunScheduler().AssertSuccess();
        XAssert.IsFalse(Directory.Exists(nestedDir2Str), $"unseal directory wasn't supposed to exist: {nestedDir2Str}");
        XAssert.IsFalse(File.Exists(sealFile2Str), $"File not in the content list when seal wasn't supposed to exist: {sealFile2Str}");
    }
    else
    {
        // Without scrubbing, the stray file remains and the undeclared read fails the build.
        RunScheduler().AssertFailure();
        AssertErrorEventLogged(EventId.FileMonitoringError);
        AssertVerboseEventLogged(EventId.PipProcessDisallowedFileAccess);
        AssertWarningEventLogged(EventId.ProcessNotStoredToCacheDueToFileMonitoringViolations);
        XAssert.IsTrue(Directory.Exists(nestedDir2Str), $"unseal directory was supposed to exist: {nestedDir2Str}");
        XAssert.IsTrue(File.Exists(sealFile2Str), $"File not in the content list when seal was supposed to exist: {sealFile2Str}");
    }
}
/// <summary>
/// Wraps a path string as a directory artifact (zero partial seal id), implicitly
/// converted to a <see cref="FileOrDirectoryArtifact"/>.
/// </summary>
private FileOrDirectoryArtifact Dir(string path)
{
    var absolutePath = AbsolutePath.Create(Context.PathTable, path);
    return DirectoryArtifact.CreateWithZeroPartialSealId(absolutePath);
}
/// <summary>
/// Builds a pip graph declaring every kind of output (regular, temporary, optional,
/// exclusive output directories, and a shared opaque directory), pre-creates "junk"
/// files that the graph does not declare, runs the scrubber over the output and target
/// roots, and asserts exactly which paths survive.
/// </summary>
public void ScrubFileDirectoriesWithPipGraph()
{
    string rootDirectory = Path.Combine(TemporaryDirectory, nameof(ScrubFileDirectoriesWithPipGraph));
    string sourceRoot = Path.Combine(rootDirectory, "Src");
    string outputRoot = Path.Combine(rootDirectory, "Out");
    string targetRoot = Path.Combine(rootDirectory, "Target");

    var pathTable = new PathTable();

    // Single writable, readable, scrubbable mount over the whole temporary directory.
    using (TestEnv env = TestEnv.CreateTestEnvWithPausedScheduler(
        new List<IMount>
        {
            new Mount()
            {
                Name = PathAtom.Create(pathTable.StringTable, "testRoot"),
                Path = AbsolutePath.Create(pathTable, TemporaryDirectory),
                IsWritable = true,
                IsReadable = true,
                IsScrubbable = true,
                AllowCreateDirectory = true,
            }
        },
        pathTable)
    )
    {
        // Declared input/output files and directories that scrubbing must preserve.
        string inputFilePath = Path.Combine(sourceRoot, "input.txt");
        WriteFile(inputFilePath);
        string outputFilePath = Path.Combine(outputRoot, "output.txt");
        WriteFile(outputFilePath);

        string tempOutputDirectoryPath = Path.Combine(outputRoot, "TempOutDir");
        string tempOutputPath = Path.Combine(tempOutputDirectoryPath, "tempOutputInDir.txt");
        Directory.CreateDirectory(tempOutputDirectoryPath);

        string optionalOutputDirectoryPath = Path.Combine(outputRoot, "OptionalOutDir");
        string optionalOutputPath = Path.Combine(optionalOutputDirectoryPath, "optionalOutputInDir.txt");
        Directory.CreateDirectory(optionalOutputDirectoryPath);

        string targetFileInOutputDirectoryPath = Path.Combine(targetRoot, "targetInDir.txt");
        WriteFile(targetFileInOutputDirectoryPath);

        string outputDirectoryPath = Path.Combine(outputRoot, "OutDir");
        string outputFileInOutputDirectoryPath = Path.Combine(outputDirectoryPath, "outputInDir.txt");
        WriteFile(outputFileInOutputDirectoryPath);

        string sharedOutputDirectoryPath = Path.Combine(outputRoot, "SharedOutDir");
        string outputFileInOutputSharedDirectoryPath = Path.Combine(sharedOutputDirectoryPath, "outputInSharedDir.txt");
        WriteFile(outputFileInOutputSharedDirectoryPath);

        // Pre-existing junk that is NOT declared by any pip; the scrubber should remove it.
        string junkOutputPath = Path.Combine(outputRoot, "junk.txt");
        WriteFile(junkOutputPath);
        string junkOutputInOutputDirectoryPath = Path.Combine(outputDirectoryPath, "junkInDir.txt");
        WriteFile(junkOutputInOutputDirectoryPath);
        string junkTempOutputPath = Path.Combine(tempOutputDirectoryPath, "junkTempOutput.txt");
        WriteFile(junkTempOutputPath);
        string junkOptionalOutputPath = Path.Combine(optionalOutputDirectoryPath, "junkOptionalOutput.txt");
        WriteFile(junkOptionalOutputPath);
        string junkDirectoryPath = Path.Combine(outputRoot, "JunkDir");
        string junkFileInJunkDirectoryPath = Path.Combine(junkDirectoryPath, "junkInJunkDir.txt");
        WriteFile(junkFileInJunkDirectoryPath);

        // One pip declaring each output kind.
        var pipBuilder = CreatePipBuilderWithTag(env, nameof(ScrubFileDirectoriesWithPipGraph));
        FileArtifact input = env.Paths.CreateSourceFile(env.Paths.CreateAbsolutePath(inputFilePath));
        pipBuilder.AddInputFile(input);
        AbsolutePath output = env.Paths.CreateAbsolutePath(outputFilePath);
        pipBuilder.AddOutputFile(output);
        AbsolutePath tempOutput = env.Paths.CreateAbsolutePath(tempOutputPath);
        pipBuilder.AddOutputFile(tempOutput, FileExistence.Temporary);
        AbsolutePath optionalOutput = env.Paths.CreateAbsolutePath(optionalOutputPath);
        pipBuilder.AddOutputFile(optionalOutput, FileExistence.Optional);
        AbsolutePath outputDirectory = env.Paths.CreateAbsolutePath(outputDirectoryPath);
        pipBuilder.AddOutputDirectory(outputDirectory);
        AbsolutePath targetRootAbsolutePath = env.Paths.CreateAbsolutePath(targetRoot);
        pipBuilder.AddOutputDirectory(targetRootAbsolutePath);
        AbsolutePath sharedOutputDirectory = env.Paths.CreateAbsolutePath(sharedOutputDirectoryPath);
        pipBuilder.AddOutputDirectory(sharedOutputDirectory, SealDirectoryKind.SharedOpaque);
        env.PipConstructionHelper.AddProcess(pipBuilder);

        PipGraph pipGraph = AssertSuccessGraphBuilding(env);
        RunScrubberWithPipGraph(env, pipGraph, pathsToScrub: new[] { outputRoot, targetRoot });

        // All non-junk files/directories should be preserved, except ... (see below)
        XAssert.IsTrue(File.Exists(inputFilePath));
        XAssert.IsTrue(File.Exists(outputFilePath));
        XAssert.IsTrue(Directory.Exists(tempOutputDirectoryPath));
        XAssert.IsTrue(Directory.Exists(optionalOutputDirectoryPath));
        XAssert.IsTrue(Directory.Exists(outputDirectoryPath));
        XAssert.IsTrue(Directory.Exists(sharedOutputDirectoryPath));

        // Shared output directory is always scrubbed, and thus its contents should be removed.
        XAssert.IsFalse(File.Exists(outputFileInOutputSharedDirectoryPath));

        // All junk files/directories should be removed, except ... (see below).
        XAssert.IsFalse(File.Exists(junkOutputPath));
        XAssert.IsFalse(File.Exists(junkTempOutputPath));
        XAssert.IsFalse(File.Exists(junkOptionalOutputPath));
        XAssert.IsFalse(Directory.Exists(junkDirectoryPath));

        // Junk output in an output directory is not removed because
        // when we run again the pip (can be from cache), the whole output directory will be removed.
        XAssert.IsTrue(File.Exists(junkOutputInOutputDirectoryPath));
    }
}
/// <summary>
/// Stress-tests the pip queue. Phase 1 schedules N WriteFile pips whose content is their
/// index. Phase 2 runs two rounds concurrently; each round schedules M = N*N pips that copy
/// a randomly chosen phase-1 file to a fresh destination — via either a CopyFile pip (IO
/// dispatcher) or a shell Process pip (CPU dispatcher) — and checks every destination ends
/// up with the expected content.
/// </summary>
public async Task Stress()
{
    const int N = 5;
    const int M = N * N;
    var context = BuildXLContext.CreateInstanceForTesting();
    var loggingContext = CreateLoggingContextForTest();
    var pathTable = context.PathTable;

    using (var tempFiles = new TempFileStorage(canGetFileNames: true))
    {
        var config = ConfigHelpers.CreateDefault(pathTable, tempFiles.GetUniqueFileName(), tempFiles);

        using (var pipTable = new PipTable(
            context.PathTable,
            context.SymbolTable,
            initialBufferSize: 1024,
            maxDegreeOfParallelism: (Environment.ProcessorCount + 2) / 3,
            debug: false))
        {
            var executionEnvironment = new PipQueueTestExecutionEnvironment(
                context,
                config,
                pipTable,
                Path.Combine(TestOutputDirectory, "temp"),
                TryGetSubstSourceAndTarget(out string substSource, out string substTarget) ? (substSource, substTarget) : default((string, string)?),
                GetSandboxConnection());

            // Executes one pip under an operation-tracking scope and marks it executed.
            Func<RunnablePip, Task<PipResult>> taskFactory = async (runnablePip) =>
            {
                PipResult result;
                var operationTracker = new OperationTracker(runnablePip.LoggingContext);
                var pip = runnablePip.Pip;
                using (var operationContext = operationTracker.StartOperation(PipExecutorCounter.PipRunningStateDuration, pip.PipId, pip.PipType, runnablePip.LoggingContext))
                {
                    result = await TestPipExecutor.ExecuteAsync(operationContext, executionEnvironment, pip);
                }

                executionEnvironment.MarkExecuted(pip);
                return (result);
            };

            string executable = CmdHelper.OsShellExe;
            FileArtifact executableArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, executable));

            // This is the only file artifact we reference without a producer.
            // Rather than scheduling a hashing pip, let's just invent one (so fingerprinting can succeed).
            executionEnvironment.AddWellKnownFile(executableArtifact, WellKnownContentHashes.UntrackedFile);

            using (var phase1PipQueue = new PipQueue(executionEnvironment.Configuration.Schedule))
            {
                // phase 1: create some files
                var baseFileArtifacts = new List<FileArtifact>();
                for (int i = 0; i < N; i++)
                {
                    string destination = tempFiles.GetUniqueFileName();
                    AbsolutePath destinationAbsolutePath = AbsolutePath.Create(pathTable, destination);
                    FileArtifact destinationArtifact = FileArtifact.CreateSourceFile(destinationAbsolutePath).CreateNextWrittenVersion();
                    baseFileArtifacts.Add(destinationArtifact);

                    // The file content is simply the pip's index; phase 2 verifies against it.
                    PipData contents = PipDataBuilder.CreatePipData(
                        context.StringTable,
                        " ",
                        PipDataFragmentEscaping.CRuntimeArgumentRules,
                        i.ToString(CultureInfo.InvariantCulture));

                    var writeFile = new WriteFile(destinationArtifact, contents, WriteFileEncoding.Utf8, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(context));
                    var pipId = pipTable.Add((uint)(i + 1), writeFile);

                    var contentHash = ContentHashingUtilities.HashString(contents.ToString(pathTable));
                    executionEnvironment.AddExpectedWrite(writeFile, destinationArtifact, contentHash);

                    var runnable = RunnablePip.Create(loggingContext, executionEnvironment, pipId, pipTable.GetPipType(pipId), 0, taskFactory, 0);
                    runnable.Start(new OperationTracker(loggingContext), loggingContext);
                    runnable.SetDispatcherKind(DispatcherKind.IO);
                    phase1PipQueue.Enqueue(runnable);
                }

                phase1PipQueue.SetAsFinalized();
                phase1PipQueue.DrainQueues();

                // Two independent phase-2 rounds run concurrently against the same phase-1 outputs.
                await Task.WhenAll(
                    Enumerable.Range(0, 2).Select(
                        async range =>
                        {
                            using (var phase2PipQueue = new PipQueue(executionEnvironment.Configuration.Schedule))
                            {
                                // phase 2: do some more with those files
                                var pips = new ConcurrentDictionary<PipId, Tuple<string, int>>();
                                var checkerTasks = new ConcurrentQueue<Task>();

                                // On pip completion: assert success and queue a check that the
                                // destination file holds the source index written in phase 1.
                                Action<PipId, Task<PipResult>> callback = (id, task) =>
                                {
                                    XAssert.IsTrue(task.Status == TaskStatus.RanToCompletion);
                                    XAssert.IsFalse(task.Result.Status.IndicatesFailure());
                                    Tuple<string, int> t;
                                    if (!pips.TryRemove(id, out t))
                                    {
                                        XAssert.Fail();
                                    }

                                    checkerTasks.Enqueue(
                                        Task.Run(
                                            () =>
                                            {
                                                string actual = File.ReadAllText(t.Item1).Trim();

                                                // TODO: Make this async
                                                XAssert.AreEqual(actual, t.Item2.ToString());
                                            }));
                                };

                                // Seeded RNG keeps the pip mix deterministic across runs.
                                var r = new Random(0);
                                for (int i = 0; i < M; i++)
                                {
                                    int sourceIndex = r.Next(baseFileArtifacts.Count);
                                    FileArtifact sourceArtifact = baseFileArtifacts[sourceIndex];
                                    string destination = tempFiles.GetUniqueFileName();
                                    AbsolutePath destinationAbsolutePath = AbsolutePath.Create(pathTable, destination);
                                    FileArtifact destinationArtifact = FileArtifact.CreateSourceFile(destinationAbsolutePath).CreateNextWrittenVersion();
                                    Pip pip;
                                    DispatcherKind queueKind;
                                    switch (r.Next(2))
                                    {
                                        case 0:
                                            // Copy via a CopyFile pip on the IO dispatcher.
                                            pip = new CopyFile(sourceArtifact, destinationArtifact, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(context));
                                            queueKind = DispatcherKind.IO;
                                            executionEnvironment.AddExpectedWrite(pip, destinationArtifact, executionEnvironment.GetExpectedContent(sourceArtifact));
                                            break;
                                        case 1:
                                            // Copy via a shell Process pip ("cp" on Unix, "copy" under cmd on Windows)
                                            // on the CPU dispatcher.
                                            string workingDirectory = OperatingSystemHelper.IsUnixOS ? "/tmp" : Environment.GetFolderPath(Environment.SpecialFolder.Windows);
                                            AbsolutePath workingDirectoryAbsolutePath = AbsolutePath.Create(pathTable, workingDirectory);
                                            var pipData = OperatingSystemHelper.IsUnixOS
                                                ? PipDataBuilder.CreatePipData(pathTable.StringTable, " ", PipDataFragmentEscaping.CRuntimeArgumentRules, "-c", "'", "cp", sourceArtifact, destinationArtifact, "'")
                                                : PipDataBuilder.CreatePipData(pathTable.StringTable, " ", PipDataFragmentEscaping.CRuntimeArgumentRules, "/d", "/c", "copy", "/B", sourceArtifact, destinationArtifact);
                                            queueKind = DispatcherKind.CPU;
                                            pip = new Process(
                                                executableArtifact,
                                                workingDirectoryAbsolutePath,
                                                pipData,
                                                FileArtifact.Invalid,
                                                PipData.Invalid,
                                                ReadOnlyArray<EnvironmentVariable>.Empty,
                                                FileArtifact.Invalid,
                                                FileArtifact.Invalid,
                                                FileArtifact.Invalid,
                                                tempFiles.GetUniqueDirectory(pathTable),
                                                null,
                                                null,
                                                ReadOnlyArray<FileArtifact>.FromWithoutCopy(executableArtifact, sourceArtifact),
                                                ReadOnlyArray<FileArtifactWithAttributes>.FromWithoutCopy(destinationArtifact.WithAttributes()),
                                                ReadOnlyArray<DirectoryArtifact>.Empty,
                                                ReadOnlyArray<DirectoryArtifact>.Empty,
                                                ReadOnlyArray<PipId>.Empty,
                                                ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencies(pathTable)),
                                                ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencyScopes(pathTable)),
                                                ReadOnlyArray<StringId>.Empty,
                                                ReadOnlyArray<int>.Empty,
                                                ReadOnlyArray<ProcessSemaphoreInfo>.Empty,
                                                provenance: PipProvenance.CreateDummy(context),
                                                toolDescription: StringId.Invalid,
                                                additionalTempDirectories: ReadOnlyArray<AbsolutePath>.Empty);
                                            executionEnvironment.AddExpectedWrite(pip, destinationArtifact, executionEnvironment.GetExpectedContent(sourceArtifact));
                                            break;
                                        default:
                                            Contract.Assert(false);
                                            continue;
                                    }

                                    // Pip ids are offset per round so the two concurrent rounds never collide.
                                    var pipId = pipTable.Add((uint)((range * M) + N + i + 1), pip);

                                    Func<RunnablePip, Task> taskFactoryWithCallback = async (runnablePip) =>
                                    {
                                        var task = taskFactory(runnablePip);
                                        var pipResult = await task;
                                        callback(pipId, task);
                                    };

                                    var runnable = RunnablePip.Create(loggingContext, executionEnvironment, pipId, pipTable.GetPipType(pipId), 0, taskFactoryWithCallback, 0);
                                    runnable.Start(new OperationTracker(loggingContext), loggingContext);
                                    runnable.SetDispatcherKind(queueKind);
                                    phase2PipQueue.Enqueue(runnable);

                                    if (!pips.TryAdd(pipId, Tuple.Create(destination, sourceIndex)))
                                    {
                                        Contract.Assert(false);
                                    }
                                }

                                phase2PipQueue.SetAsFinalized();
                                phase2PipQueue.DrainQueues();
                                XAssert.AreEqual(0, pips.Count);
                                await Task.WhenAll(checkerTasks);
                            }
                        }));
            }
        }
    }
}
/// <summary>
/// Helper to create a token whose text is the location of the assembly that
/// declares <see cref="MountsTable"/>.
/// </summary>
private static LocationData CreateToken(BuildXLContext context)
{
    var assemblyLocation = AssemblyHelper.GetAssemblyLocation(typeof(MountsTable).GetTypeInfo().Assembly);
    var assemblyPath = AbsolutePath.Create(context.PathTable, assemblyLocation);
    return LocationData.Create(assemblyPath);
}
/// <summary>
/// Builds a command-line configuration for a test: the test folder is the source
/// directory, the given SDK folders are scanned recursively for module/package
/// configuration files, and front-end evaluation is pinned to a single thread for
/// deterministic results.
/// </summary>
private static ICommandLineConfiguration CreateConfiguration(
    IEnumerable<string> sdksToResolve,
    PathTable pathTable,
    AbsolutePath testFolderPath)
{
    var configFilePath = testFolderPath.Combine(pathTable, Names.ConfigDsc);
    var packageFilePath = testFolderPath.Combine(pathTable, Names.ModuleConfigBm);
    var outDirectory = testFolderPath.Combine(pathTable, "Out");

    // Gather every module/package configuration file underneath the SDK folders,
    // per folder in the order: package config, module config (bm), module config (dsc).
    var sdkPackages = new List<AbsolutePath>();
    foreach (var sdkFolder in sdksToResolve)
    {
        foreach (var configFileName in new[] { Names.PackageConfigDsc, Names.ModuleConfigBm, Names.ModuleConfigDsc })
        {
            foreach (var packageFile in Directory.EnumerateFiles(sdkFolder, configFileName, SearchOption.AllDirectories))
            {
                sdkPackages.Add(AbsolutePath.Create(pathTable, packageFile));
            }
        }
    }

    return new CommandLineConfiguration
    {
        // Have to new up the list because we have some bad semantics dealing with the list being null or not.
        Packages = new List<AbsolutePath>
        {
            packageFilePath,
        },
        Resolvers =
        {
            new SourceResolverSettings
            {
                Kind = "SourceResolver",
                Modules = sdkPackages,
            },
        },
        FrontEnd =
        {
            MaxFrontEndConcurrency = 1, // Single threaded for deterministic evaluation
            NameResolutionSemantics = NameResolutionSemantics.ImplicitProjectReferences,

            // PreserveFullNames = true, Some comment in code as not to turn on, check with folks....
            UsePartialEvaluation = true,
            UseSpecPublicFacadeAndAstWhenAvailable = false,
            ConstructAndSaveBindingFingerprint = false,

            // Some of the DS tests fail when incremental frontend is not used
            EnableIncrementalFrontEnd = true,
        },
        Engine =
        {
            TrackBuildsInUserFolder = false,
        },
        Layout =
        {
            OutputDirectory = outDirectory,
            ObjectDirectory = outDirectory.Combine(pathTable, "Objects"),
            PrimaryConfigFile = configFilePath,
            SourceDirectory = testFolderPath,
            TempDirectory = outDirectory.Combine(pathTable, "Temp"),
        },
        Mounts = { },
        Startup =
        {
            ConfigFile = configFilePath,
        },
        DisableDefaultSourceResolver = false,
    };
}
/// <summary>
/// Tries to filter a given workspace definition by reusing information from the previous BuildXL invocation.
/// </summary>
/// <param name="changedFiles">Absolute path strings of the spec files that changed since the last invocation.</param>
/// <param name="workspaceProvider">Provider used to parse/bind the changed specs and expose the configuration module.</param>
/// <param name="workspaceDefinition">The full workspace definition computed for this build.</param>
/// <param name="evaluationFilter">User evaluation filter that may further restrict the filtered workspace.</param>
/// <returns>
/// 1. Failure if the error occurred during parsing/binding one of the changed specs.
/// 2. Result(null) when the filtering failed due to symbols mismatch or due to another reason.
/// 3. Result(WorkspaceDefinition) when the filtering succeeded.
/// </returns>
/// <remarks>
/// If the previous binding information can be reused, then the set of specs that are safe to use as public facades + serialized AST
/// are identified as well
/// </remarks>
private async Task<FilteredWorkspaceDefinition> TryFilterWorkspaceDefinitionIncrementallyAsync(
    List<string> changedFiles,
    IWorkspaceProvider workspaceProvider,
    WorkspaceDefinition workspaceDefinition,
    EvaluationFilter evaluationFilter)
{
    Logger.TryingToReuseFrontEndSnapshot(LoggingContext);

    // TODO: potentially, we could check the number of changes compared to the workspace definition size.
    // If the number of changes is too big, maybe we should go into the full parse mode.
    // But we need to check the perf implications before making this decision.
    var changedSpecs = changedFiles.Select(
        p =>
        {
            var fullPath = AbsolutePath.Create(FrontEndContext.PathTable, p);
            var containingModule = workspaceDefinition.TryGetModuleDefinition(fullPath);
            return (new SpecWithOwningModule(fullPath, containingModule));
        }).ToArray();

    // Need to check if the spec does not belong to the current workspace
    // or the changed spec belongs to the prelude.
    foreach (var changedSpec in changedSpecs)
    {
        if (changedSpec.OwningModule == null)
        {
            Logger.FailToReuseFrontEndSnapshot(
                LoggingContext,
                I($"Changed spec file '{changedSpec.Path.ToString(FrontEndContext.PathTable)}' is not part of the computed workspace."));
            return (FilteredWorkspaceDefinition.CanNotFilter());
        }

        if (changedSpec.OwningModule.Descriptor == workspaceDefinition.PreludeModule.Descriptor)
        {
            Logger.FailToReuseFrontEndSnapshot(
                LoggingContext,
                I($"Changed spec file '{changedSpec.Path.ToString(FrontEndContext.PathTable)}' is part of the prelude."));
            return (FilteredWorkspaceDefinition.CanNotFilter());
        }
    }

    // Getting the snapshot from the previous run.
    // Binding snapshot contains all the specs as well as all the configuration files.
    // Need to adjust the count.
    var expectedNumberOfSpecs = workspaceDefinition.SpecCount + (workspaceProvider.GetConfigurationModule()?.Specs.Count ?? 0);
    var snapshot = FrontEndArtifactManager.TryLoadFrontEndSnapshot(expectedNumberOfSpecs);
    if (snapshot == null)
    {
        // The error message was already logged.
        return (FilteredWorkspaceDefinition.CanNotFilter());
    }

    // Parsing and binding all the changed specs.
    var possibleParseResult = await workspaceProvider.ParseAndBindSpecsAsync(changedSpecs);
    var firstFailure = LogParseOrBindingErrorsIfAny(possibleParseResult);
    if (firstFailure != null)
    {
        // This is actual failure.
        // Instead of switching to the full mode, we can actually stop here.
        return (FilteredWorkspaceDefinition.Error(firstFailure));
    }

    // Snapshot is valid and parse/binding is completed successfully.
    var snapshotState = GetSnapshotReuseState(possibleParseResult, snapshot);
    if (snapshotState.State == SnapshotState.NoMatch)
    {
        // NoMatch is returned if the snapshot is unavailable.
        if (snapshotState.SpecsWithIncompatiblePublicSurface.Count != 0)
        {
            Logger.FailToReuseFrontEndSnapshot(
                LoggingContext,
                I($"Spec file '{snapshotState.SpecsWithIncompatiblePublicSurface.First().Path.AbsolutePath}' changed its binding symbols."));
        }

        return (FilteredWorkspaceDefinition.CanNotFilter());
    }

    // Changed file could get different symbols.
    // Need to re-save it within the front-end snapshot.
    UpdateAndSaveSnapshot(possibleParseResult, snapshot);
    var snapshotProvider = new SnapshotBasedSpecProvider(snapshot);

    // Now we know exactly which are all the files that need to go through parsing/type checking/AST conversion. So we
    // inform that to the artifact manager so the public surface and AST serialization
    // can be resued for the rest, if available.
    // Observe these set of files are not reflecting a potential user filter, but that's fine. If there is a dirty spec
    // that is outside of the filter, that spec won't be requested by the workspace anyway
    NotifyDirtySpecsForPublicFacadeAndAstReuse(
        snapshotProvider,
        workspaceDefinition,
        changedSpecs.Select(f => f.Path).ToList());

    // The fingerprints for all changed specs are still the same,
    // so we can filter the workspace definition provided that the filter allows it.
    if (snapshotState.State == SnapshotState.FullMatch)
    {
        var filter = new WorkspaceFilter(FrontEndContext.PathTable);
        var filteredWorkspace = evaluationFilter.CanPerformPartialEvaluationScript(PrimaryConfigFile)
            ? filter.FilterWorkspaceDefinition(workspaceDefinition, evaluationFilter, snapshotProvider)
            : workspaceDefinition.Modules;
        return (FilteredWorkspaceDefinition.Filter(new WorkspaceDefinition(filteredWorkspace, workspaceDefinition.PreludeModule)));
    }

    // Specs are not the same, but we would be able to load public facades for all unaffected specs.
    var dirtySpecNames = string.Join(
        ", ",
        snapshotState.SpecsWithTheSamePublicSurface.Take(10).Select(p => Path.GetFileName(p.Path.AbsolutePath)));

    Logger.FailedToFilterWorkspaceDefinition(
        LoggingContext,
        I($"{dirtySpecNames} changed one or more declarations."));

    return (FilteredWorkspaceDefinition.CanNotFilter());
}
/// <summary>
/// Gets AbsolutePath representation of URI.
/// </summary>
/// <remarks>
/// NOTE(review): this relies on a parameterless <c>Uri.ToAbsolutePath()</c> extension,
/// presumably one that returns the URI's local file-system path as a string. If no such
/// overload exists, this call would bind back to this method and recurse — confirm
/// against the extension class that declares the string-returning overload.
/// </remarks>
public static AbsolutePath ToAbsolutePath(this Uri uri, PathTable pathTable)
{
    return (AbsolutePath.Create(pathTable, uri.ToAbsolutePath()));
}
/// <summary>
/// Hardlinks the contents of opaque output directories from their container-redirected
/// locations back to their declared destinations, honoring the process' container
/// isolation level for shared vs. exclusive opaques.
/// </summary>
/// <returns>
/// <see cref="MergeResult.Success"/>, or the first non-success result returned by
/// <c>TryCreateHardlinkForOutput</c>.
/// </returns>
private MergeResult HardlinkOpaqueDirectories(
    Process process,
    ContainerConfiguration containerConfiguration,
    PipExecutionContext pipExecutionContext,
    IReadOnlyDictionary<AbsolutePath, IReadOnlyCollection<AbsolutePath>> sharedDynamicWrites,
    HashSet<AbsolutePath> createdDirectories)
{
    bool isolateSharedOpaques = process.ContainerIsolationLevel.IsolateSharedOpaqueOutputDirectories();
    bool isolateExclusiveOpaques = process.ContainerIsolationLevel.IsolateExclusiveOpaqueOutputDirectories();

    // Shortcut the iteration if output directories are not isolated at all
    if (!isolateExclusiveOpaques && !isolateSharedOpaques)
    {
        return (MergeResult.Success);
    }

    foreach (DirectoryArtifact directoryOutput in process.DirectoryOutputs)
    {
        if (directoryOutput.IsSharedOpaque && isolateSharedOpaques)
        {
            AbsolutePath redirectedDirectory = GetRedirectedDirectoryForOutputContainer(containerConfiguration, directoryOutput.Path).Path;

            // Here we don't need to check for WCI reparse points. We know those outputs are there based on what detours is saying.
            var sharedOpaqueContent = sharedDynamicWrites[directoryOutput.Path];
            foreach (AbsolutePath sharedOpaqueFile in sharedOpaqueContent)
            {
                // Map the declared output path into the redirected (container) location.
                string sourcePath = sharedOpaqueFile.Relocate(m_pathTable, directoryOutput.Path, redirectedDirectory).ToString(m_pathTable);

                // The file may not exist because the pip could have created it but later deleted it
                if (!FileUtilities.Exists(sourcePath))
                {
                    continue;
                }

                ExpandedAbsolutePath destinationPath = sharedOpaqueFile.Expand(m_pathTable);

                // Files in an opaque always have rewrite count 1
                var result = TryCreateHardlinkForOutput(destinationPath, rewriteCount: 1, sourcePath, process, pipExecutionContext, createdDirectories);
                if (result != MergeResult.Success)
                {
                    return (result);
                }
            }
        }
        else if (!directoryOutput.IsSharedOpaque && isolateExclusiveOpaques)
        {
            // We need to enumerate to discover the content of an exclusive opaque, and also skip the potential reparse points
            // TODO: Enumeration will happen again when the file content manager tries to discover the content of the exclusive opaque.
            // Consider doing this only once instead.

            // An output directory should only have one redirected path
            ExpandedAbsolutePath redirectedDirectory = containerConfiguration.OriginalDirectories[directoryOutput.Path].Single();
            foreach (string exclusiveOpaqueFile in Directory.EnumerateFiles(redirectedDirectory.ExpandedPath, "*", SearchOption.AllDirectories))
            {
                if (FileUtilities.IsWciReparsePoint(exclusiveOpaqueFile))
                {
                    continue;
                }

                AbsolutePath exclusiveOpaqueFilePath = AbsolutePath.Create(m_pathTable, exclusiveOpaqueFile);
                AbsolutePath outputFile = exclusiveOpaqueFilePath.Relocate(m_pathTable, redirectedDirectory.Path, directoryOutput.Path);

                // Files in an opaque always have rewrite count 1
                var result = TryCreateHardlinkForOutput(outputFile.Expand(m_pathTable), rewriteCount: 1, exclusiveOpaqueFile, process, pipExecutionContext, createdDirectories);
                if (result != MergeResult.Success)
                {
                    return (result);
                }
            }
        }
    }

    return (MergeResult.Success);
}
/// <summary>
/// Creates a source-file artifact for the given path string.
/// </summary>
private FileArtifact SourceFile(string path)
{
    var absolutePath = AbsolutePath.Create(PathTable, path);
    return FileArtifact.CreateSourceFile(absolutePath);
}
/// <summary>
/// Copies this file's data into a newly created file at <paramref name="path"/>,
/// disposing the destination stream when done.
/// </summary>
/// <remarks>
/// NOTE(review): only the copy is visible here — this method does not delete the
/// source; confirm whether "move" semantics are completed by the caller.
/// </remarks>
public async Task MoveTo(AbsolutePath path)
{
    var destination = await path.Create();
    await using (destination)
    {
        await _file.CopyDataTo(destination);
    }
}
/// <summary>
/// Returns an absent-file artifact whose path is the upper-cased string form of the
/// given file's path.
/// </summary>
private FileArtifact ToUpper(FileArtifact file)
{
    var pathTable = Context.PathTable;
    string upperCasedPath = file.Path.ToString(pathTable).ToUpperInvariant();
    return AbsentFile(AbsolutePath.Create(pathTable, upperCasedPath));
}
/// <summary>
/// Creates a raw input translation from a source to a target directory under the test
/// output directory, optionally materializing the directories and a junction between them.
/// </summary>
private DirectoryTranslator.RawInputTranslation CreateInputTranslationWithJunction(
    PathTable pathTable,
    string[] relativeSource,
    string[] relativeTarget,
    bool createSourceDirectory = true,
    bool createTargetDirectory = true,
    bool createJunction = true)
{
    string sourceDirectory = Path.Combine(TestOutputDirectory, R(relativeSource));
    string targetDirectory = Path.Combine(TestOutputDirectory, R(relativeTarget));

    if (createJunction)
    {
        // A junction can only be created when both endpoints exist on disk,
        // so force the directory creation flags on.
        createSourceDirectory = true;
        createTargetDirectory = true;
    }

    if (createSourceDirectory)
    {
        FileUtilities.CreateDirectory(sourceDirectory);
    }

    if (createTargetDirectory)
    {
        FileUtilities.CreateDirectory(targetDirectory);
    }

    if (createJunction)
    {
        FileUtilities.CreateJunction(sourceDirectory, targetDirectory);
    }

    return DirectoryTranslator.RawInputTranslation.Create(
        AbsolutePath.Create(pathTable, sourceDirectory),
        AbsolutePath.Create(pathTable, targetDirectory));
}