public static CustomResult Create(CustomConfig config, string inputFileName, CustomResult result, IS3Client s3Client)
{
    string inputBaseName   = inputFileName.TrimEndFromFirst(".tsv");
    string ngaFileName     = inputBaseName + SaCommon.GeneFileSuffix;
    string localNgaPath    = Path.Combine(Path.GetTempPath(), ngaFileName);
    string localSchemaPath = localNgaPath + SaCommon.JsonSchemaSuffix;
    string localLogPath    = Path.Combine(Path.GetTempPath(), LogFileName);

    HttpUtilities.ValidateUrl(LambdaUrlHelper.GetUgaUrl());
    var outputFiles = new List<string>();

    using (var aes = new AesCryptoServiceProvider())
    {
        FileMetadata ngaMetadata, schemaMetadata, logMetaData;

        using (var logStream       = FileUtilities.GetCreateStream(localLogPath))
        using (var logCryptoStream = new CryptoStream(logStream, aes.CreateEncryptor(), CryptoStreamMode.Write))
        using (var logMd5Stream    = new MD5Stream(logCryptoStream))
        //
        using (var customTsvStream = (PersistentStream)PersistentStreamUtils.GetReadStream(config.tsvUrl))
        using (var parser          = GetGeneAnnotationsParserFromCustomTsvStream(customTsvStream))
        //
        using (var ngaStream       = FileUtilities.GetCreateStream(localNgaPath))
        using (var ngaCryptoStream = new CryptoStream(ngaStream, aes.CreateEncryptor(), CryptoStreamMode.Write))
        using (var ngaMd5Stream    = new MD5Stream(ngaCryptoStream))
        //
        using (var schemaStream       = FileUtilities.GetCreateStream(localSchemaPath))
        using (var schemaCryptoStream = new CryptoStream(schemaStream, aes.CreateEncryptor(), CryptoStreamMode.Write))
        using (var schemaMd5Stream    = new MD5Stream(schemaCryptoStream))
        {
            using (var ngaWriter    = CaUtilities.GetNgaWriter(ngaMd5Stream, parser, inputFileName))
            using (var schemaWriter = new StreamWriter(schemaMd5Stream))
            using (var logWriter    = new StreamWriter(logMd5Stream))
            {
                ngaWriter.Write(parser.GetItems(config.skipGeneIdValidation, logWriter));

                var unknownGenes = parser.GetUnknownGenes();
                if (!config.skipGeneIdValidation && unknownGenes.Count > 0)
                {
                    throw new UserErrorException($"The following gene IDs were not recognized in Nirvana: {string.Join(',', unknownGenes)}");
                }

                schemaWriter.Write(parser.JsonSchema);
            }

            // all the writers have to be disposed before GetFileMetadata is called
            ngaMetadata    = ngaMd5Stream.GetFileMetadata();
            schemaMetadata = schemaMd5Stream.GetFileMetadata();
            logMetaData    = logMd5Stream.GetFileMetadata();
        }

        if (config.skipGeneIdValidation)
        {
            string logS3Key = string.Join('/', config.outputDir.path.Trim('/'), LogFileName);
            Logger.WriteLine("uploading log file to " + logS3Key);
            s3Client.DecryptUpload(config.outputDir.bucketName, logS3Key, localLogPath, aes, logMetaData);
        }

        string nsaS3Path    = string.Join('/', config.outputDir.path.Trim('/'), ngaFileName);
        string schemaS3Path = nsaS3Path + SaCommon.JsonSchemaSuffix;

        s3Client.DecryptUpload(config.outputDir.bucketName, nsaS3Path,    localNgaPath,    aes, ngaMetadata);
        s3Client.DecryptUpload(config.outputDir.bucketName, schemaS3Path, localSchemaPath, aes, schemaMetadata);

        outputFiles.Add(ngaFileName);
        outputFiles.Add(ngaFileName + SaCommon.JsonSchemaSuffix);

        LambdaUtilities.DeleteTempOutput();
        return CustomAnnotationLambda.GetSuccessResult(config, result, outputFiles);
    }
}
/// <summary>
/// Writes null to the file as one 0x00 byte
/// </summary>
/// <returns>Number of bytes used to store the null</returns>
internal int WriteNull()
{
    byteBuffer[0] = 0x00;
    return FileUtilities.WriteWithLength(fileStream, byteBuffer, 1);
}
/// <summary>
/// Writes a byte to the file
/// </summary>
/// <returns>Number of bytes used to store the byte</returns>
internal int WriteByte(byte val)
{
    byteBuffer[0] = 0x01; // length
    byteBuffer[1] = val;
    return FileUtilities.WriteWithLength(fileStream, byteBuffer, 2);
}
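// Illustrative sketch (not part of the original source): the writers above use a
// one-byte length prefix (0x00 = stored null, 0x01 = one payload byte follows), so a
// matching reader can be written against the same layout. The helper name
// "ReadNullableByte" and its stream parameter are hypothetical.
internal static byte? ReadNullableByte(FileStream stream)
{
    int length = stream.ReadByte();   // length prefix; FileStream.ReadByte returns -1 at end of stream
    if (length <= 0)
    {
        return null;                  // 0x00 encodes a stored null (or we hit end of stream)
    }
    return (byte)stream.ReadByte();   // single payload byte
}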
private static void ExecuteRequest(object state)
{
    NetworkStream stream = null;
    byte[] buffer;
    int id = counter++;
    string tmpfile = null;
    TcpClient client = null;
    BinaryReader reader = null;
    BinaryWriter writer = null;
    string remote_ip;
    WebServiceLogin login = new WebServiceLogin();

    try
    {
        buffer = new byte[1024];
        client = (TcpClient)state;
        stream = client.GetStream();
        stream.ReadTimeout = (int)TimeSpan.FromMinutes(5).TotalMilliseconds;
        stream.WriteTimeout = stream.ReadTimeout;
        reader = new BinaryReader(stream);
        writer = new BinaryWriter(stream);
        remote_ip = ((IPEndPoint)client.Client.RemoteEndPoint).Address.ToString();

        /* Format:
         * Field              Length in bytes     Description
         * version            1                   1
         * name_length        1
         * name               <name_length>
         * password_length    1
         * password           <password_length>
         * work_id            4                   The Work.id field in the database
         * file_count         2
         * reserved           8
         *
         * <file #1>
         * marker             12                  'MonkeyWrench'
         * md5                16
         * flags              1                   1 = compressed, 2 = hidden
         * filename_length    1
         * filename           <filename_length>
         * <client waits for answer, either type 2 or 4; type 2: server already has data, type 4: server does not have data>
         * [type: 4] compressed_mime_length   1
         * [type: 4] compressed_mime          <compressed_mime_length>
         * [type: 4] content_length           4
         * [type: 4] content                  <content_length>
         * [type: 4] <client waits for response, type 2>
         *
         * <file #2>
         * ...
         *
         * The response data has this format:
         * version            1
         * type               1                   1 = everything OK, 2 = file received OK, 3 = error, 4 = send file
         * depending on type, here are the subsequent fields:
         * [type: 3] message_length   2
         * [type: 3] message          <message_length>
         */

        byte version = reader.ReadByte();
        byte name_length = reader.ReadByte();
        login.User = ReadString(reader, buffer, name_length);
        byte password_length = reader.ReadByte();
        login.Password = ReadString(reader, buffer, password_length);
        login.Ip4 = remote_ip;
        int work_id = reader.ReadInt32();
        ushort file_count = reader.ReadUInt16();
        reader.ReadInt64(); // reserved

        log.DebugFormat("Upload.ExecuteRequest (): {0} version: {1} work_id: {2} file count: {3} remote ip: {4}",
            id, version, work_id, file_count, client.Client.RemoteEndPoint.ToString());

        using (DB db = new DB())
        {
            Authentication.VerifyUserInRole(remote_ip, db, login, Roles.BuildBot, true);

            for (ushort i = 0; i < file_count; i++)
            {
                byte[] md5 = new byte[16];
                byte flags;
                bool hidden;
                byte filename_length;
                byte compressed_mime_length;
                int content_length;
                string filename;
                string compressed_mime;
                string marker;

                marker = ReadString(reader, buffer, 12);
                if (marker != "MonkeyWrench")
                {
                    throw new Exception(string.Format("Didn't get marker 'MonkeyWrench' at start of file, got '{0}'", marker));
                }

                reader.Read(md5, 0, 16);
                flags = reader.ReadByte();
                filename_length = reader.ReadByte();
                filename = ReadString(reader, buffer, filename_length);
                hidden = (flags & 0x2) == 0x2;
                // compressed = (flags & 0x1) == 0x1;

                log.DebugFormat("Upload.ExecuteRequest (): {0} file #{1}: filename: '{2}' ", id, i + 1, filename);

                DBFile file = DBFile_Extensions.Find(db, FileUtilities.MD5BytesToString(md5));
                if (file == null)
                {
                    log.DebugFormat("Upload.ExecuteRequest (): {0} file #{1} must be sent, sending 'send file' response", id, i + 1);

                    // Write 'send file'
                    writer.Write((byte)1); // version
                    writer.Write((byte)4); // type (4 = send file)
                    writer.Flush();

                    compressed_mime_length = reader.ReadByte();
                    compressed_mime = ReadString(reader, buffer, compressed_mime_length);
                    content_length = reader.ReadInt32();

                    log.DebugFormat("Upload.ExecuteRequest (): {0} file #{1} content_length: {2} compressed_mime: '{3}' reading...",
                        id, i + 1, content_length, compressed_mime);

                    int bytes_left = content_length;
                    tmpfile = Path.GetTempFileName();
                    using (FileStream fs = new FileStream(tmpfile, FileMode.Open, FileAccess.Write, FileShare.Read))
                    {
                        while (bytes_left > 0)
                        {
                            int to_read = Math.Min(bytes_left, buffer.Length);
                            int read = reader.Read(buffer, 0, to_read);
                            if (read == 0)
                            {
                                throw new Exception(string.Format("Failed to read {0} bytes, {1} bytes left", content_length, bytes_left));
                            }
                            fs.Write(buffer, 0, read);
                            bytes_left -= read;
                        }
                    }

                    log.DebugFormat("Upload.ExecuteRequest (): {0} file #{1} received, uploading '{2}' to database", id, i + 1, tmpfile);
                    file = db.Upload(FileUtilities.MD5BytesToString(md5), tmpfile, filename, Path.GetExtension(filename), hidden, compressed_mime);
                }
                else
                {
                    log.DebugFormat("Upload.ExecuteRequest (): {0} file #{1} already in database, not uploading", id, i + 1);
                }

                DBWork work = DBWork_Extensions.Create(db, work_id);
                work.AddFile(db, file, filename, hidden);

                // Write 'file received OK'
                writer.Write((byte)1); // version
                writer.Write((byte)2); // type (2 = file received OK)
                writer.Flush();

                log.DebugFormat("Upload.ExecuteRequest (): {0} {1} uploaded successfully", id, filename);
            }
        }

        // Write 'everything OK'
        writer.Write((byte)1); // version
        writer.Write((byte)1); // type (1 = everything OK)
        writer.Flush();

        log.DebugFormat("Upload.ExecuteRequest (): {0} completed", id);
    }
    catch (Exception ex)
    {
        log.ErrorFormat("Upload.ExecuteRequest (): {0} {1}", id, ex);
        try
        {
            string msg = ex.ToString();
            byte[] msg_buffer = Encoding.UTF8.GetBytes(msg);
            writer.Write((byte)1); // version
            writer.Write((byte)3); // type (3 = error)
            writer.Write((ushort)Math.Min(msg_buffer.Length, ushort.MaxValue)); // message_length
            writer.Write(msg_buffer, 0, Math.Min(msg_buffer.Length, ushort.MaxValue)); // message
            stream.Flush();
        }
        catch (Exception ex2)
        {
            log.ErrorFormat("Upload.ExecuteRequest (): {0} Failed to send exception to client: {1}", id, ex2);
        }
    }
    finally
    {
        if (tmpfile != null)
        {
            FileUtilities.TryDeleteFile(tmpfile);
        }

        try
        {
            client.Close();
        }
        catch (Exception ex)
        {
            log.ErrorFormat("Error closing connection: {0}", ex); // Ignore
        }
    }
}
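// Illustrative sketch (not part of the original source): what the client side of the
// request header described in the protocol comment above could look like. The helper
// name "SendHeader" and its parameters are hypothetical; only the wire layout comes
// from the comment (version, length-prefixed name and password, work_id, file_count, reserved).
static void SendHeader(BinaryWriter writer, string name, string password, int workId, ushort fileCount)
{
    byte[] nameBytes     = Encoding.UTF8.GetBytes(name);
    byte[] passwordBytes = Encoding.UTF8.GetBytes(password);

    writer.Write((byte)1);                     // version
    writer.Write((byte)nameBytes.Length);      // name_length
    writer.Write(nameBytes);                   // name
    writer.Write((byte)passwordBytes.Length);  // password_length
    writer.Write(passwordBytes);               // password
    writer.Write(workId);                      // work_id (4 bytes)
    writer.Write(fileCount);                   // file_count (2 bytes)
    writer.Write(0L);                          // reserved (8 bytes)
    writer.Flush();
}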
public void AddMetadataReference(string referencePath, MetadataReferenceProperties properties)
{
    referencePath = FileUtilities.NormalizeAbsolutePath(referencePath);
    AddMetadataReferenceAndTryConvertingToProjectReferenceIfPossible(referencePath, properties);
}
/// <summary>
/// Actually do the work specified by the ZippedETLWriter constructors and other methods.
/// </summary>
public bool WriteArchive(CompressionLevel compressionLevel = CompressionLevel.Optimal)
{
    List<string> pdbFileList = PrepForWrite();

    if (!Zip)
    {
        return true;
    }

    bool success = false;
    var sw = Stopwatch.StartNew();
    if (ZipArchivePath == null)
    {
        ZipArchivePath = m_etlFilePath + ".zip";
    }

    var newFileName = ZipArchivePath + ".new";
    FileUtilities.ForceDelete(newFileName);
    try
    {
        Log.WriteLine("[Zipping ETL file {0}]", m_etlFilePath);
        using (var zipArchive = ZipFile.Open(newFileName, ZipArchiveMode.Create))
        {
            zipArchive.CreateEntryFromFile(m_etlFilePath, Path.GetFileName(m_etlFilePath), compressionLevel);
            if (pdbFileList != null)
            {
                Log.WriteLine("[Writing {0} PDBS to Zip file]", pdbFileList.Count);

                // Add the Pdbs to the archive
                foreach (var pdb in pdbFileList)
                {
                    // If the path looks like a sym server cache path, grab that chunk; otherwise just copy the file name part of the path.
                    string relativePath;
                    var m = Regex.Match(pdb, @"\\([^\\]+.pdb\\\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\w\d+\\[^\\]+)$");
                    if (m.Success)
                    {
                        relativePath = m.Groups[1].Value;
                    }
                    else
                    {
                        relativePath = Path.GetFileName(pdb);
                    }

                    var archivePath = Path.Combine("symbols", relativePath);

                    // log.WriteLine("Writing PDB {0} to archive.", archivePath);
                    zipArchive.CreateEntryFromFile(pdb, archivePath, compressionLevel);
                }
                Log.Flush();

                if (m_additionalFiles != null)
                {
                    foreach (Tuple<string, string> additionalFile in m_additionalFiles)
                    {
                        // We don't use CreateEntryFromFile because it will not open files that are open for writing.
                        // Since a typical use of this is to write the log file, which will be open for writing, we
                        // use File.Open and allow this case explicitly.
                        using (Stream fs = File.Open(additionalFile.Item1, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
                        {
                            // Item2 tells you the path in the archive.
                            var entry = zipArchive.CreateEntry(additionalFile.Item2, compressionLevel);
                            using (Stream es = entry.Open())
                                fs.CopyTo(es);
                        }
                    }
                }
            }
        }

        FileUtilities.ForceMove(newFileName, ZipArchivePath);
        if (DeleteInputFile)
        {
            Log.WriteLine("Deleting original ETL file {0}", m_etlFilePath);
            FileUtilities.ForceDelete(m_etlFilePath);
        }

        // We make the ZIP the same time as the original file.
        File.SetLastWriteTimeUtc(ZipArchivePath, File.GetLastWriteTimeUtc(m_etlFilePath));
        success = true;
    }
    finally
    {
        FileUtilities.ForceDelete(newFileName);
    }

    sw.Stop();
    Log.WriteLine("ZIP generation took {0:f3} sec", sw.Elapsed.TotalSeconds);
    Log.WriteLine("ZIP output file {0}", ZipArchivePath);
    return success;
}
/// <summary>
/// Internal constructor
/// </summary>
/// <param name="ownerTask">The task that is using file tracker</param>
/// <param name="tlogFilesLocal">The .write. tlog files to interpret</param>
/// <param name="tlogFilesToIgnore">The set of tlog files to exclude from interpretation</param>
/// <param name="skipMissingFiles">Ignore files that do not exist on disk</param>
/// <param name="missingFileTimeUtc">The time (UTC) to record for files that do not exist on disk</param>
/// <param name="excludedInputPaths">The set of paths that contain files that are to be ignored during up to date check</param>
private void InternalConstruct(ITask ownerTask, ITaskItem[] tlogFilesLocal, ITaskItem[] tlogFilesToIgnore, bool skipMissingFiles, DateTime missingFileTimeUtc, string[] excludedInputPaths)
{
    if (ownerTask != null)
    {
        _log = new TaskLoggingHelper(ownerTask);
        _log.TaskResources = AssemblyResources.PrimaryResources;
        _log.HelpKeywordPrefix = "MSBuild.";
    }

    ITaskItem[] expandedTlogFiles = TrackedDependencies.ExpandWildcards(tlogFilesLocal);

    if (tlogFilesToIgnore != null)
    {
        ITaskItem[] expandedTlogFilesToIgnore = TrackedDependencies.ExpandWildcards(tlogFilesToIgnore);

        if (expandedTlogFilesToIgnore.Length > 0)
        {
            HashSet<string> ignore = new HashSet<string>();
            List<ITaskItem> remainingTlogFiles = new List<ITaskItem>();

            foreach (ITaskItem tlogFileToIgnore in expandedTlogFilesToIgnore)
            {
                ignore.Add(tlogFileToIgnore.ItemSpec);
            }

            foreach (ITaskItem tlogFile in expandedTlogFiles)
            {
                if (!ignore.Contains(tlogFile.ItemSpec))
                {
                    remainingTlogFiles.Add(tlogFile);
                }
            }

            _tlogFiles = remainingTlogFiles.ToArray();
        }
        else
        {
            _tlogFiles = expandedTlogFiles;
        }
    }
    else
    {
        _tlogFiles = expandedTlogFiles;
    }

    // We have no TLog files on disk, create a TLog marker from the
    // TLogFiles ItemSpec so we can fabricate one if we need to.
    // This becomes our "first" tlog, since on the very first run, no tlogs
    // will exist, and if a compaction has been run (as part of the initial up-to-date check) then this
    // marker tlog will be created as empty.
    if (_tlogFiles == null || _tlogFiles.Length == 0)
    {
        _tlogMarker = tlogFilesLocal[0].ItemSpec.Replace("*", "1");
        _tlogMarker = _tlogMarker.Replace("?", "2");
    }

    if (excludedInputPaths != null)
    {
        // Assign our exclude paths to our lookup - and make sure that all recorded paths end in a slash so that
        // our "starts with" comparison doesn't pick up incomplete matches, such as C:\Foo matching C:\FooFile.txt
        foreach (string excludePath in excludedInputPaths)
        {
            string fullexcludePath = FileUtilities.EnsureTrailingSlash(FileUtilities.NormalizePath(excludePath)).ToUpperInvariant();
            _excludedInputPaths.Add(fullexcludePath);
        }
    }

    _tlogsAvailable = TrackedDependencies.ItemsExist(_tlogFiles);
    _skipMissingFiles = skipMissingFiles;
    _missingFileTimeUtc = missingFileTimeUtc.ToUniversalTime();

    if (_tlogFiles != null)
    {
        // Read the TLogs into our internal structures
        ConstructFileTable();
    }
}
public void ItShouldNotCopyAnyFiles()
{
    FileUtilities.AssertWasNotCalled(
        f => f.FileCopy(null, null),
        o => o.IgnoreArguments());
}
/// <summary>
/// Given the absolute location of a file, and a disc location, returns relative file path to that disk location.
/// Throws UriFormatException.
/// </summary>
/// <param name="basePath">
/// The base path we want to relativize to. Must be absolute.
/// Should <i>not</i> include a filename as the last segment will be interpreted as a directory.
/// </param>
/// <param name="path">
/// The path we need to make relative to basePath. The path can be either an absolute path or a relative path, in which case it is relative to the base path.
/// If the path cannot be made relative to the base path (for example, it is on another drive), it is returned verbatim.
/// </param>
/// <returns>relative path (can be the full path)</returns>
internal static string MakeRelative(string basePath, string path)
{
    string result = FileUtilities.MakeRelative(basePath, path);
    return result;
}
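// Illustrative usage (not part of the original source), based on the documented behavior above:
string relative = MakeRelative(@"C:\abc\def", @"C:\abc\ghi\file.txt"); // @"..\ghi\file.txt"
string verbatim = MakeRelative(@"C:\abc\def", @"D:\other\file.txt");   // different drive: returned verbatim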
protected void DecorateItemsWithMetadata(IEnumerable<ItemBatchingContext> itemBatchingContexts, ImmutableList<ProjectMetadataElement> metadata, bool? needToExpandMetadata = null)
{
    if (metadata.Count > 0)
    {
        ////////////////////////////////////////////////////
        // UNDONE: Implement batching here.
        //
        // We want to allow built-in metadata in metadata values here.
        // For example, so that an Idl file can specify that its Tlb output should be named %(Filename).tlb.
        //
        // In other words, we want batching. However, we won't need to go to the trouble of using the regular batching code!
        // That's because that code is all about grouping into buckets of similar items. In this context, we're not
        // invoking a task, and it's fine to process each item individually, which will always give the correct results.
        //
        // For the CTP, to make the minimal change, we will not do this quite correctly.
        //
        // We will do this:
        // -- check whether any metadata values or their conditions contain any bare built-in metadata expressions,
        //    or whether they contain any custom metadata && the Include involved an @(itemlist) expression.
        // -- if either case is found, we go ahead and evaluate all the metadata separately for each item.
        // -- otherwise we can do the old thing (evaluating all metadata once then applying to all items)
        //
        // This algorithm gives the correct results except when:
        // -- batchable expressions exist on the include, exclude, or condition on the item element itself
        //
        // It means that 99% of cases still go through the old code, which is best for the CTP.
        // When we ultimately implement this correctly, we should make sure we optimize for the case of very many items
        // and little metadata, none of which varies between items.

        // Do not expand properties as they have been already expanded by the lazy evaluator upon item operation construction.
        // Prior to lazy evaluation ExpanderOptions.ExpandAll was used.
        const ExpanderOptions metadataExpansionOptions = ExpanderOptions.ExpandAll;

        needToExpandMetadata ??= NeedToExpandMetadataForEachItem(metadata, out _);

        if (needToExpandMetadata.Value)
        {
            foreach (var itemContext in itemBatchingContexts)
            {
                _expander.Metadata = itemContext.GetMetadataTable();

                foreach (var metadataElement in metadata)
                {
                    if (!EvaluateCondition(metadataElement.Condition, metadataElement, metadataExpansionOptions, ParserOptions.AllowAll, _expander, _lazyEvaluator))
                    {
                        continue;
                    }

                    string evaluatedValue = _expander.ExpandIntoStringLeaveEscaped(metadataElement.Value, metadataExpansionOptions, metadataElement.Location);

                    itemContext.OperationItem.SetMetadata(metadataElement, FileUtilities.MaybeAdjustFilePath(evaluatedValue, metadataElement.ContainingProject.DirectoryPath));
                }
            }

            // End of legal area for metadata expressions.
            _expander.Metadata = null;
        }

        // End of pseudo batching
        ////////////////////////////////////////////////////
        // Start of old code
        else
        {
            // Metadata expressions are allowed here.
            // Temporarily gather and expand these in a table so they can reference other metadata elements above.
            EvaluatorMetadataTable metadataTable = new EvaluatorMetadataTable(_itemType);
            _expander.Metadata = metadataTable;

            // Also keep a list of everything so we can get the predecessor objects correct.
            List<Pair<ProjectMetadataElement, string>> metadataList = new List<Pair<ProjectMetadataElement, string>>(metadata.Count);

            foreach (var metadataElement in metadata)
            {
                // Because of the checking above, it should be safe to expand metadata in conditions; the condition
                // will be true for either all the items or none
                if (!EvaluateCondition(
                        metadataElement.Condition,
                        metadataElement,
                        metadataExpansionOptions,
                        ParserOptions.AllowAll,
                        _expander,
                        _lazyEvaluator))
                {
                    continue;
                }

                string evaluatedValue = _expander.ExpandIntoStringLeaveEscaped(metadataElement.Value, metadataExpansionOptions, metadataElement.Location);
                evaluatedValue = FileUtilities.MaybeAdjustFilePath(evaluatedValue, metadataElement.ContainingProject.DirectoryPath);

                metadataTable.SetValue(metadataElement, evaluatedValue);
                metadataList.Add(new Pair<ProjectMetadataElement, string>(metadataElement, evaluatedValue));
            }

            // Apply those metadata to each item
            // Note that several items could share the same metadata objects

            // Set all the items at once to make a potential copy-on-write optimization possible.
            // This is valuable in the case where one item element evaluates to
            // many items (either by semicolon or wildcards)
            // and that item also has the same piece/s of metadata for each item.
            _itemFactory.SetMetadata(metadataList, itemBatchingContexts.Select(i => i.OperationItem));

            // End of legal area for metadata expressions.
            _expander.Metadata = null;
        }
    }
}
/// <inheritdoc />
public Task<FileExistenceResult> CheckFileExistsAsync(AbsolutePath path, TimeSpan timeout, CancellationToken cancellationToken)
{
    var resultCode = FileUtilities.Exists(path.Path)
        ? FileExistenceResult.ResultCode.FileExists
        : FileExistenceResult.ResultCode.FileNotFound;

    return Task.FromResult(new FileExistenceResult(resultCode));
}
[PlatformSpecific(TestPlatforms.Windows)] // "Under Unix all filenames are valid and this test is not useful"
public void SomeInputsFailToCreate()
{
    string temp = Path.GetTempPath();
    string file = Path.Combine(temp, "2A333ED756AF4dc392E728D0F864A38e");
    string dir = Path.Combine(temp, "2A333ED756AF4dc392E728D0F864A38f");
    string invalid = "!@#$%^&*()|";
    string dir2 = Path.Combine(temp, "2A333ED756AF4dc392E728D0F864A390");

    try
    {
        FileStream fs = File.Create(file);
        fs.Dispose(); // we're gonna try to delete it

        MakeDir t = new MakeDir();
        MockEngine engine = new MockEngine();
        t.BuildEngine = engine;

        t.Directories = new ITaskItem[]
        {
            new TaskItem(dir),
            new TaskItem(file),
            new TaskItem(invalid),
            new TaskItem(dir2)
        };

        bool success = t.Execute();

        if (NativeMethodsShared.IsWindows)
        {
            Assert.False(success);
            Assert.Equal(2, t.DirectoriesCreated.Length);
            Assert.Equal(dir2, t.DirectoriesCreated[1].ItemSpec);
        }
        else
        {
            // Since Unix pretty much does not have invalid characters,
            // the invalid name is not really invalid
            Assert.True(success);
            Assert.Equal(3, t.DirectoriesCreated.Length);
            Assert.Equal(dir2, t.DirectoriesCreated[2].ItemSpec);
        }

        Assert.Equal(dir, t.DirectoriesCreated[0].ItemSpec);
        Assert.True(
            engine.Log.Contains(
                String.Format(AssemblyResources.GetString("MakeDir.Comment"), dir)));
    }
    finally
    {
        FileUtilities.DeleteWithoutTrailingBackslash(dir);
        File.Delete(file);
        if (!NativeMethodsShared.IsWindows)
        {
            File.Delete(invalid);
        }
        FileUtilities.DeleteWithoutTrailingBackslash(dir2);
    }
}
/// <summary>
/// Builds a patch using given paths for the target and upgrade packages.
/// </summary>
/// <param name="targetPath">The path to the target MSI.</param>
/// <param name="upgradePath">The path to the upgrade MSI.</param>
/// <param name="name">The name of the patch to build.</param>
/// <param name="version">Optional version for the patch.</param>
/// <returns>The path to the patch.</returns>
private string BuildPatch(string targetPath, string upgradePath, string name, string version)
{
    // Get the name of the calling method.
    StackTrace stack = new StackTrace();
    string caller = stack.GetFrame(1).GetMethod().Name;

    // Create paths.
    string source = Path.Combine(this.TestContext.TestDataDirectory, String.Concat(name, ".wxs"));
    string rootDirectory = FileUtilities.GetUniqueFileName();
    string objDirectory = Path.Combine(rootDirectory, Settings.WixobjFolder);
    string msiDirectory = Path.Combine(rootDirectory, Settings.MsiFolder);
    string wixmst = Path.Combine(objDirectory, String.Concat(name, ".wixmst"));
    string wixmsp = Path.Combine(objDirectory, String.Concat(name, ".wixmsp"));
    string package = Path.Combine(msiDirectory, String.Concat(name, ".msp"));

    // Add the root directory to be cleaned up.
    this.TestArtifacts.Add(new DirectoryInfo(rootDirectory));

    // Compile.
    Candle candle = new Candle();
    candle.Extensions.AddRange(DependencyExtensionTests.Extensions);
    candle.OtherArguments = String.Concat("-dTestName=", caller);
    if (!String.IsNullOrEmpty(version))
    {
        candle.OtherArguments = String.Concat(candle.OtherArguments, " -dVersion=", version);
    }
    candle.OutputFile = String.Concat(objDirectory, @"\");
    candle.SourceFiles.Add(source);
    candle.WorkingDirectory = this.TestContext.TestDataDirectory;
    candle.Run();

    // Make sure the output directory is cleaned up.
    this.TestArtifacts.Add(new DirectoryInfo(objDirectory));

    // Link.
    Light light = new Light();
    light.Extensions.AddRange(DependencyExtensionTests.Extensions);
    light.ObjectFiles = candle.ExpectedOutputFiles;
    light.OutputFile = wixmsp;
    light.SuppressMSIAndMSMValidation = true;
    light.WorkingDirectory = this.TestContext.TestDataDirectory;
    light.Run();

    // Make sure the output directory is cleaned up.
    this.TestArtifacts.Add(new DirectoryInfo(msiDirectory));

    // Torch.
    Torch torch = new Torch();
    torch.TargetInput = Path.ChangeExtension(targetPath, "wixpdb");
    torch.UpdatedInput = Path.ChangeExtension(upgradePath, "wixpdb");
    torch.PreserveUnmodified = true;
    torch.XmlInput = true;
    torch.OutputFile = wixmst;
    torch.WorkingDirectory = this.TestContext.TestDataDirectory;
    torch.Run();

    // Pyro.
    Pyro pyro = new Pyro();
    pyro.Baselines.Add(torch.OutputFile, name);
    pyro.InputFile = light.OutputFile;
    pyro.OutputFile = package;
    pyro.WorkingDirectory = this.TestContext.TestDataDirectory;
    pyro.SuppressWarnings.Add("1079");
    pyro.Run();

    return pyro.OutputFile;
}
// internal for testing
internal ImmutableArray<CompletionItem> GetItems(string directoryPath, CancellationToken cancellationToken)
{
    if (!PathUtilities.IsUnixLikePlatform && directoryPath.Length == 1 && directoryPath[0] == '\\')
    {
        // The user has typed only "\". In this case, we want to add "\\" to the list.
        return ImmutableArray.Create(CreateNetworkRoot());
    }

    var result = ArrayBuilder<CompletionItem>.GetInstance();

    var pathKind = PathUtilities.GetPathKind(directoryPath);
    switch (pathKind)
    {
        case PathKind.Empty:
            // base directory
            if (_baseDirectoryOpt != null)
            {
                result.AddRange(GetItemsInDirectory(_baseDirectoryOpt, cancellationToken));
            }

            // roots
            if (PathUtilities.IsUnixLikePlatform)
            {
                result.AddRange(CreateUnixRoot());
            }
            else
            {
                foreach (var drive in GetLogicalDrives())
                {
                    result.Add(CreateLogicalDriveItem(drive.TrimEnd(s_windowsDirectorySeparator)));
                }

                result.Add(CreateNetworkRoot());
            }

            // entries on search paths
            foreach (var searchPath in _searchPaths)
            {
                result.AddRange(GetItemsInDirectory(searchPath, cancellationToken));
            }

            break;

        case PathKind.Absolute:
        case PathKind.RelativeToCurrentDirectory:
        case PathKind.RelativeToCurrentParent:
        case PathKind.RelativeToCurrentRoot:
            var fullDirectoryPath = FileUtilities.ResolveRelativePath(directoryPath, basePath: null, baseDirectory: _baseDirectoryOpt);
            if (fullDirectoryPath != null)
            {
                result.AddRange(GetItemsInDirectory(fullDirectoryPath, cancellationToken));
            }
            else
            {
                // invalid path
                result.Clear();
            }

            break;

        case PathKind.Relative:
            // base directory:
            if (_baseDirectoryOpt != null)
            {
                result.AddRange(GetItemsInDirectory(PathUtilities.CombineAbsoluteAndRelativePaths(_baseDirectoryOpt, directoryPath), cancellationToken));
            }

            // search paths:
            foreach (var searchPath in _searchPaths)
            {
                result.AddRange(GetItemsInDirectory(PathUtilities.CombineAbsoluteAndRelativePaths(searchPath, directoryPath), cancellationToken));
            }

            break;

        case PathKind.RelativeToDriveDirectory:
            // Paths "C:dir" are not supported, but when the path doesn't include any directory, i.e. "C:",
            // we return the drive itself.
            if (directoryPath.Length == 2)
            {
                result.Add(CreateLogicalDriveItem(directoryPath));
            }

            break;

        default:
            throw ExceptionUtilities.UnexpectedValue(pathKind);
    }

    return result.ToImmutableAndFree();
}
private List<string> PrepForWrite()
{
    // If the user did not specify a place to put log messages, make one for them.
    if (Log == null)
    {
        Log = new StringWriter();
    }

    Stopwatch sw = Stopwatch.StartNew();

    // Compute input & temp files.
    var dir = Path.GetDirectoryName(m_etlFilePath);
    if (dir.Length == 0)
    {
        dir = ".";
    }

    var baseName = Path.GetFileNameWithoutExtension(m_etlFilePath);
    List<string> mergeInputs = new List<string>();
    mergeInputs.Add(m_etlFilePath);
    mergeInputs.AddRange(Directory.GetFiles(dir, baseName + ".kernel*.etl"));
    mergeInputs.AddRange(Directory.GetFiles(dir, baseName + ".clr*.etl"));
    mergeInputs.AddRange(Directory.GetFiles(dir, baseName + ".user*.etl"));

    string tempName = Path.ChangeExtension(m_etlFilePath, ".etl.new");
    List<string> pdbFileList = null;
    try
    {
        // Do the merge and NGEN pdb lookup in parallel
        Task mergeWorker = Task.Factory.StartNew(delegate
        {
            if (Merge)
            {
                var startTime = DateTime.UtcNow;
                Log.WriteLine("Starting Merging of {0}", m_etlFilePath);

                TraceEventMergeOptions options = TraceEventMergeOptions.None;
                if (CompressETL)
                {
                    options |= TraceEventMergeOptions.Compress;
                }

                // Do the merge
                TraceEventSession.Merge(mergeInputs.ToArray(), tempName, options);
                Log.WriteLine("Merging took {0:f1} sec", (DateTime.UtcNow - startTime).TotalSeconds);
            }
            else
            {
                Log.WriteLine("Merge == false, skipping Merge operation.");
            }
        });

        Task pdbWorker = Task.Factory.StartNew(delegate
        {
            if (NGenSymbolFiles)
            {
                var startTime = DateTime.UtcNow;
                Log.WriteLine("Starting Generating NGEN pdbs for {0}", m_etlFilePath);

                var symbolReader = SymbolReader;
                if (symbolReader == null)
                {
                    symbolReader = new SymbolReader(Log);
                }

                pdbFileList = GetNGenPdbs(m_etlFilePath, symbolReader, Log);
                Log.WriteLine("Generating NGEN Pdbs took {0:f1} sec", (DateTime.UtcNow - startTime).TotalSeconds);
            }
            else
            {
                Log.WriteLine("NGenSymbolFiles == false, skipping NGEN pdb generation");
            }
        });

        Task.WaitAll(mergeWorker, pdbWorker);

        if (File.Exists(tempName))
        {
            // Delete/move the original files after the two worker threads finished execution to avoid races.
            foreach (var mergeInput in mergeInputs)
            {
                FileUtilities.ForceDelete(mergeInput);
            }

            Log.WriteLine("Moving {0} to {1}", tempName, m_etlFilePath);

            // Place the output in its final resting place.
            FileUtilities.ForceMove(tempName, m_etlFilePath);
        }
    }
    finally
    {
        Log.WriteLine("Deleting temp file");
        if (File.Exists(tempName))
        {
            File.Delete(tempName);
        }
    }

    sw.Stop();
    Log.WriteLine("Merge took {0:f3} sec.", sw.Elapsed.TotalSeconds);
    Log.WriteLine("Merge output file {0}", m_etlFilePath);
    return pdbFileList;
}
/// <summary>
/// Searches upward for a directory containing the specified file, beginning in the specified directory.
/// </summary>
/// <param name="startingDirectory">The directory to start the search in.</param>
/// <param name="fileName">The name of the file to search for.</param>
/// <param name="fileSystem">The file system abstraction to use that implements file system operations</param>
/// <returns>The full path of the directory containing the file if it is found, otherwise an empty string.</returns>
internal static string GetDirectoryNameOfFileAbove(string startingDirectory, string fileName, IFileSystem fileSystem)
{
    return FileUtilities.GetDirectoryNameOfFileAbove(startingDirectory, fileName, fileSystem);
}
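// Illustrative usage (not part of the original source), based on the documented behavior
// above: walk upward from a starting directory until one containing the named file is
// found. The starting path is a hypothetical example; "fileSystem" is any IFileSystem.
string propsDir = GetDirectoryNameOfFileAbove(@"C:\repo\src\app", "Directory.Build.props", fileSystem);
if (propsDir.Length == 0)
{
    // the file was not found anywhere up the directory tree
}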
/// <summary>
/// After setting any properties to override default behavior, calling this method
/// will actually do the unpacking.
/// </summary>
public void UnpackArchive()
{
    if (Log == null)
    {
        Log = new StringWriter();
    }

    if (EtlFileName == null)
    {
        if (m_zipFilePath.EndsWith(".etl.zip", StringComparison.OrdinalIgnoreCase))
        {
            EtlFileName = m_zipFilePath.Substring(0, m_zipFilePath.Length - 4);
        }
        else
        {
            EtlFileName = Path.ChangeExtension(m_zipFilePath, ".etl");
        }
    }

    if (SymbolDirectory == null)
    {
        SymbolDirectory = new SymbolPath(SymbolPath.SymbolPathFromEnvironment).DefaultSymbolCache();
    }

    Stopwatch sw = Stopwatch.StartNew();
    Log.WriteLine("[Decompressing {0}]", m_zipFilePath);
    Log.WriteLine("Generating output file {0}", EtlFileName);

    using (var zipArchive = ZipFile.OpenRead(m_zipFilePath))
    {
        bool seenEtlFile = false;
        foreach (var entry in zipArchive.Entries)
        {
            if (entry.Length == 0) // Skip directories.
            {
                continue;
            }

            var archivePath = entry.FullName;
            if (archivePath.EndsWith(".pdb", StringComparison.OrdinalIgnoreCase))
            {
                archivePath = archivePath.Replace('/', '\\'); // normalize separator convention

                string pdbRelativePath = null;
                if (archivePath.StartsWith(@"symbols\", StringComparison.OrdinalIgnoreCase))
                {
                    pdbRelativePath = archivePath.Substring(8);
                }
                else if (archivePath.StartsWith(@"ngenpdbs\", StringComparison.OrdinalIgnoreCase))
                {
                    pdbRelativePath = archivePath.Substring(9);
                }
                else
                {
                    var m = Regex.Match(archivePath, @"^[^\\]+\.ngenpdbs?\\(.*)", RegexOptions.IgnoreCase);
                    if (m.Success)
                    {
                        pdbRelativePath = m.Groups[1].Value;
                    }
                    else
                    {
                        // .diagsession files (created by the Visual Studio Diagnostic Hub) put PDBs in a path like
                        // 194BAE98-C4ED-470E-9204-1F9389FC9DC1\symcache\xyz.pdb
                        m = Regex.Match(archivePath, @"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\\symcache\\(.*)", RegexOptions.IgnoreCase);
                        if (m.Success)
                        {
                            pdbRelativePath = m.Groups[1].Value;
                        }
                        else
                        {
                            Log.WriteLine("WARNING: found PDB file that was not in a symbol server style directory, skipping extraction");
                            Log.WriteLine("         Unzip this ETL and PDB by hand to use this PDB.");
                            continue;
                        }
                    }
                }

                var pdbTargetPath = Path.Combine(SymbolDirectory, pdbRelativePath);
                var pdbTargetName = Path.GetFileName(pdbTargetPath);
                if (File.Exists(pdbTargetPath) && (new System.IO.FileInfo(pdbTargetPath).Length == entry.Length))
                {
                    Log.WriteLine("PDB {0} exists, skipping", pdbRelativePath);
                    continue;
                }

                // There is a possibility that you want to put the symbol file using symbol server conventions
                // (in which case it is X.pdb\NNNN\X.pdb), but you already have a file named X.pdb. Detect
                // this and delete the file if necessary.
                var firstNameInRelativePath = pdbRelativePath;
                var sepIdx = firstNameInRelativePath.IndexOf('\\');
                if (sepIdx >= 0)
                {
                    firstNameInRelativePath = firstNameInRelativePath.Substring(0, sepIdx);
                }

                var firstNamePath = Path.Combine(SymbolDirectory, firstNameInRelativePath);
                if (File.Exists(firstNamePath))
                {
                    Log.WriteLine("Deleting pdb file that is in the way {0}", firstNamePath);
                    FileUtilities.ForceDelete(firstNamePath);
                }

                Log.WriteLine("Extracting PDB {0}", pdbRelativePath);
                AtomicExtract(entry, pdbTargetPath);
            }
            else if (archivePath.EndsWith(".etl", StringComparison.OrdinalIgnoreCase))
            {
                if (seenEtlFile)
                {
                    throw new ApplicationException("The ZIP file does not have exactly 1 ETL file in it, can't auto-extract.");
                }

                seenEtlFile = true;
                AtomicExtract(entry, EtlFileName);
                Log.WriteLine("Extracting {0} Zipped size = {1:f3} MB Unzipped = {2:f3} MB",
                    EtlFileName, entry.CompressedLength / 1000000.0, entry.Length / 1000000.0);
            }
            else if (archivePath == "LogFile.txt")
            {
                string logFilePath = Path.ChangeExtension(EtlFileName, ".LogFile.txt");
                Log.WriteLine("Extracting LogFile.txt to {0}", logFilePath);
                AtomicExtract(entry, logFilePath);
            }
            else
            {
                Log.WriteLine("Skipping unknown file {0}", archivePath); // TODO do something with these?
            }
        }

        if (!seenEtlFile)
        {
            throw new ApplicationException("The ZIP file does not have any ETL files in it!");
        }

        Log.WriteLine("Finished decompression, took {0:f0} sec", sw.Elapsed.TotalSeconds);
    }
}
/// <summary>
/// Searches upward for the specified file, beginning in the specified directory.
/// </summary>
/// <param name="file">The name of the file to search for.</param>
/// <param name="startingDirectory">An optional directory to start the search in. The default location is the directory
/// of the file containing the property function.</param>
/// <param name="fileSystem">The file system abstraction to use that implements file system operations</param>
/// <returns>The full path of the file if it is found, otherwise an empty string.</returns>
internal static string GetPathOfFileAbove(string file, string startingDirectory, IFileSystem fileSystem)
{
    return FileUtilities.GetPathOfFileAbove(file, startingDirectory, fileSystem);
}
/// <summary>
/// Execute the task.
/// </summary>
public override bool Execute()
{
    if (SuggestedRedirects == null || SuggestedRedirects.Length == 0)
    {
        Log.LogMessageFromResources("GenerateBindingRedirects.NoSuggestedRedirects");
        OutputAppConfigFile = null;
        return true;
    }

    var redirects = ParseSuggestedRedirects();
    var doc = LoadAppConfig(AppConfigFile);

    if (doc == null)
    {
        return false;
    }

    XElement runtimeNode = doc.Root
        .Nodes()
        .OfType<XElement>()
        .FirstOrDefault(e => e.Name.LocalName == "runtime");

    if (runtimeNode == null)
    {
        runtimeNode = new XElement("runtime");
        doc.Root.Add(runtimeNode);
    }
    else
    {
        UpdateExistingBindingRedirects(runtimeNode, redirects);
    }

    var ns = XNamespace.Get("urn:schemas-microsoft-com:asm.v1");

    var redirectNodes = from redirect in redirects
                        select new XElement(
                            ns + "assemblyBinding",
                            new XElement(
                                ns + "dependentAssembly",
                                new XElement(
                                    ns + "assemblyIdentity",
                                    new XAttribute("name", redirect.Key.Name),
                                    new XAttribute("publicKeyToken", ResolveAssemblyReference.ByteArrayToString(redirect.Key.GetPublicKeyToken())),
                                    new XAttribute("culture", String.IsNullOrEmpty(redirect.Key.CultureName) ? "neutral" : redirect.Key.CultureName)),
                                new XElement(
                                    ns + "bindingRedirect",
                                    new XAttribute("oldVersion", "0.0.0.0-" + redirect.Value),
                                    new XAttribute("newVersion", redirect.Value))));

    runtimeNode.Add(redirectNodes);

    var writeOutput = true;

    if (FileSystems.Default.FileExists(OutputAppConfigFile.ItemSpec))
    {
        try
        {
            var outputDoc = LoadAppConfig(OutputAppConfigFile);
            if (outputDoc.ToString() == doc.ToString())
            {
                writeOutput = false;
            }
        }
        catch (System.Xml.XmlException)
        {
            writeOutput = true;
        }
    }

    if (AppConfigFile != null)
    {
        AppConfigFile.CopyMetadataTo(OutputAppConfigFile);
    }
    else
    {
        OutputAppConfigFile.SetMetadata(ItemMetadataNames.targetPath, TargetName);
    }

    if (writeOutput)
    {
        using (var stream = FileUtilities.OpenWrite(OutputAppConfigFile.ItemSpec, false))
        {
            doc.Save(stream);
        }
    }

    return !Log.HasLoggedErrors;
}
/// <summary>
/// If the given path doesn't have a trailing slash then add one.
/// If the path is an empty string, does not modify it.
/// </summary>
/// <param name="path">The path to check.</param>
/// <returns>The specified path with a trailing slash.</returns>
internal static string EnsureTrailingSlash(string path)
{
    return FileUtilities.EnsureTrailingSlash(path);
}
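// Illustrative usage (not part of the original source), based on the documented behavior above:
string a = EnsureTrailingSlash(@"C:\Foo");  // @"C:\Foo\"
string b = EnsureTrailingSlash(@"C:\Foo\"); // unchanged: @"C:\Foo\"
string c = EnsureTrailingSlash("");         // unchanged: ""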
public static void FinalizeTLogs(bool trackedOperationsSucceeded, ITaskItem[] readTLogNames, ITaskItem[] writeTLogNames, ITaskItem[] trackedFilesToRemoveFromTLogs)
{
    // Read the input table, skipping missing files
    FlatTrackingData inputs = new FlatTrackingData(readTLogNames, true);

    // Read the output table, skipping missing files
    FlatTrackingData outputs = new FlatTrackingData(writeTLogNames, true);

    // If we failed we need to clean the Tlogs
    if (!trackedOperationsSucceeded)
    {
        // If the tool errors in some way, we assume that any and all inputs and outputs it wrote during
        // execution are wrong. So we compact the read and write tlogs to remove the entries for the
        // set of sources being compiled - the next incremental build will find no entries
        // and correctly cause the sources to be compiled

        // Remove all from inputs tlog
        inputs.DependencyTable.Clear();
        inputs.SaveTlog();

        // Remove all from outputs tlog
        outputs.DependencyTable.Clear();
        outputs.SaveTlog();
    }
    else
    {
        // If all went well with the tool execution, then compact the tlogs
        // to remove any files that are no longer on disk.
        // This removes any temporary files from the dependency graph

        // In addition to temporary file removal, an optional set of files to remove may have been supplied
        if (trackedFilesToRemoveFromTLogs != null && trackedFilesToRemoveFromTLogs.Length > 0)
        {
            IDictionary<string, ITaskItem> trackedFilesToRemove = new Dictionary<string, ITaskItem>(StringComparer.OrdinalIgnoreCase);

            foreach (ITaskItem removeFile in trackedFilesToRemoveFromTLogs)
            {
                trackedFilesToRemove.Add(FileUtilities.NormalizePath(removeFile.ItemSpec), removeFile);
            }

            // UNDONE: If necessary we could have two independent sets of "ignore" files, one for inputs and one for outputs

            // Use an anonymous method to encapsulate the contains check for the output tlogs
            outputs.SaveTlog(delegate(string fullTrackedPath)
            {
                // We need to answer the question "should fullTrackedPath be included in the TLog?"
                return !trackedFilesToRemove.ContainsKey(fullTrackedPath);
            });

            // Use an anonymous method to encapsulate the contains check for the input tlogs
            inputs.SaveTlog(delegate(string fullTrackedPath)
            {
                // We need to answer the question "should fullTrackedPath be included in the TLog?"
                return !trackedFilesToRemove.ContainsKey(fullTrackedPath);
            });
        }
        else
        {
            // Compact the write tlog
            outputs.SaveTlog();

            // Compact the read tlog
            inputs.SaveTlog();
        }
    }
}
/// <summary>
/// Gets the canonicalized full path of the provided path and ensures it contains the correct directory separator characters for the current operating system.
/// </summary>
/// <param name="path">One or more paths to combine and normalize.</param>
/// <returns>A canonicalized full path with the correct directory separators.</returns>
internal static string NormalizePath(params string[] path)
{
    return FileUtilities.NormalizePath(path);
}
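// Illustrative usage (not part of the original source), based on the documented behavior
// above; the result shown assumes Windows, since separators differ per operating system:
string p = NormalizePath("C:/repo", "src", "../docs"); // combined and canonicalized: @"C:\repo\docs"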
public new void RemoveMetadataReference(string referencePath)
{
    referencePath = FileUtilities.NormalizeAbsolutePath(referencePath);
    base.RemoveMetadataReference(referencePath);
}
// Resolves any conflicts for the given output path.
// Returns a non-conflicting output path.
// May return the same value if there are no conflicts.
// null means cancel.
public string ResolveOutputPathConflicts(string initialOutputPath, HashSet<string> excludedPaths, bool isBatch)
{
    HashSet<string> queuedFiles = excludedPaths;
    bool? conflict = Utilities.FileExists(initialOutputPath, queuedFiles);

    if (conflict == null)
    {
        return initialOutputPath;
    }

    WhenFileExists preference;
    if (isBatch)
    {
        preference = CustomConfig.WhenFileExistsBatch;
    }
    else
    {
        preference = CustomConfig.WhenFileExists;
    }

    switch (preference)
    {
        case WhenFileExists.Prompt:
            break;
        case WhenFileExists.Overwrite:
            return initialOutputPath;
        case WhenFileExists.AutoRename:
            return FileUtilities.CreateUniqueFileName(initialOutputPath, queuedFiles);
        default:
            throw new ArgumentOutOfRangeException();
    }

    // Continue and prompt user for resolution
    string dialogMessageTemplate;
    if ((bool)conflict)
    {
        dialogMessageTemplate = MiscRes.FileConflictWarning;
    }
    else
    {
        dialogMessageTemplate = MiscRes.QueueFileConflictWarning;
    }

    string dialogMessage = string.Format(dialogMessageTemplate, initialOutputPath);
    var conflictDialog = new CustomMessageDialogViewModel<FileConflictResolution>(
        MiscRes.FileConflictDialogTitle,
        dialogMessage,
        new List<CustomDialogButton<FileConflictResolution>>
        {
            new CustomDialogButton<FileConflictResolution>(FileConflictResolution.Overwrite, MiscRes.OverwriteButton, ButtonType.Default),
            new CustomDialogButton<FileConflictResolution>(FileConflictResolution.AutoRename, MiscRes.AutoRenameButton),
            new CustomDialogButton<FileConflictResolution>(FileConflictResolution.Cancel, CommonRes.Cancel, ButtonType.Cancel),
        });

    Ioc.Get<IWindowManager>().OpenDialog(conflictDialog);

    switch (conflictDialog.Result)
    {
        case FileConflictResolution.Cancel:
            return null;
        case FileConflictResolution.Overwrite:
            return initialOutputPath;
        case FileConflictResolution.AutoRename:
            return FileUtilities.CreateUniqueFileName(initialOutputPath, queuedFiles);
        default:
            throw new ArgumentOutOfRangeException();
    }
}
private static void LoadWorldDirect(bool loadFromCloud)
{
    WorldFile.IsWorldOnCloud = loadFromCloud;
    Main.checkXMas();
    Main.checkHalloween();

    bool flag = loadFromCloud && SocialAPI.Cloud != null;
    if (!FileUtilities.Exists(Main.worldPathName, flag) && Main.autoGen)
    {
        if (!flag)
        {
            for (int i = Main.worldPathName.Length - 1; i >= 0; i--)
            {
                if (Main.worldPathName.Substring(i, 1) == (Path.DirectorySeparatorChar.ToString() ?? ""))
                {
                    Directory.CreateDirectory(Main.worldPathName.Substring(0, i));
                    break;
                }
            }
        }

        WorldGen.clearWorld();
        Main.ActiveWorldFileData = WorldFile.CreateMetadata((Main.worldName == "") ? "World" : Main.worldName, flag, Main.expertMode);

        string text = (Main.AutogenSeedName ?? "").Trim();
        if (text.Length == 0)
        {
            Main.ActiveWorldFileData.SetSeedToRandom();
        }
        else
        {
            Main.ActiveWorldFileData.SetSeed(text);
        }

        WorldGen.generateWorld(Main.ActiveWorldFileData.Seed, Main.AutogenProgress);
        WorldFile.saveWorld();
    }

    using (MemoryStream memoryStream = new MemoryStream(FileUtilities.ReadAllBytes(Main.worldPathName, flag)))
    {
        using (BinaryReader binaryReader = new BinaryReader(memoryStream))
        {
            try
            {
                WorldGen.loadFailed = false;
                WorldGen.loadSuccess = false;

                int num = WorldFile.versionNumber = binaryReader.ReadInt32();
                int num2;
                if (num <= 87)
                {
                    // Not supported
                    num2 = WorldFile.LoadWorld_Version1(binaryReader);
                }
                else
                {
                    num2 = LoadWorld_Version2(binaryReader);
                }

                if (num < 141)
                {
                    if (!loadFromCloud)
                    {
                        Main.ActiveWorldFileData.CreationTime = File.GetCreationTime(Main.worldPathName);
                    }
                    else
                    {
                        Main.ActiveWorldFileData.CreationTime = DateTime.Now;
                    }
                }

                binaryReader.Close();
                memoryStream.Close();

                if (num2 != 0)
                {
                    WorldGen.loadFailed = true;
                }
                else
                {
                    WorldGen.loadSuccess = true;
                }

                if (WorldGen.loadFailed || !WorldGen.loadSuccess)
                {
                    return;
                }

                WorldGen.gen = true;
                WorldGen.waterLine = Main.maxTilesY;
                Liquid.QuickWater(2, -1, -1);
                WorldGen.WaterCheck();

                int num3 = 0;
                Liquid.quickSettle = true;
                int num4 = Liquid.numLiquid + LiquidBuffer.numLiquidBuffer;
                float num5 = 0f;
                while (Liquid.numLiquid > 0 && num3 < 100000)
                {
                    num3++;
                    float num6 = (float)(num4 - (Liquid.numLiquid + LiquidBuffer.numLiquidBuffer)) / (float)num4;
                    if (Liquid.numLiquid + LiquidBuffer.numLiquidBuffer > num4)
                    {
                        num4 = Liquid.numLiquid + LiquidBuffer.numLiquidBuffer;
                    }

                    if (num6 > num5)
                    {
                        num5 = num6;
                    }
                    else
                    {
                        num6 = num5;
                    }

                    SetStatusText(string.Concat(new object[]
                    {
                        Lang.gen[27].Value,
                        " ",
                        (int)(num6 * 100f / 2f + 50f),
                        "%"
                    }));
                    Liquid.UpdateLiquid();
                }

                Liquid.quickSettle = false;
                Main.weatherCounter = WorldGen.genRand.Next(3600, 18000);
                Cloud.resetClouds();
                WorldGen.WaterCheck();
                WorldGen.gen = false;
                NPC.setFireFlyChance();
                Main.InitLifeBytes();
                if (Main.slimeRainTime > 0.0)
                {
                    Main.StartSlimeRain(false);
                }
                NPC.setWorldMonsters();
            }
            catch (Exception value)
            {
                WorldGen.loadFailed = true;
                WorldGen.loadSuccess = false;
                System.Console.WriteLine(value);
                try
                {
                    binaryReader.Close();
                    memoryStream.Close();
                }
                catch
                {
                }
                return;
            }
        }
    }

    EventInfo eventOnWorldLoad = typeof(WorldFile).GetEvent("OnWorldLoad", BindingFlags.Public | BindingFlags.Static);
    eventOnWorldLoad.GetRaiseMethod()?.Invoke(null, new object[] { });
    //if (WorldFile.OnWorldLoad != null)
    //    WorldFile.OnWorldLoad();
}
private bool RemoveExtraneousFilesAndDirectories(
    Func<string, bool> isPathInBuild,
    List<string> pathsToScrub,
    HashSet<string> blockedPaths,
    HashSet<string> nonDeletableRootDirectories,
    MountPathExpander mountPathExpander,
    bool logRemovedFiles)
{
    int directoriesEncountered = 0;
    int filesEncountered = 0;
    int filesRemoved = 0;
    int directoriesRemovedRecursively = 0;

    using (var pm = PerformanceMeasurement.Start(
        m_loggingContext,
        Category,
        // The start of the scrubbing is logged before calling this function, since there are two sources of scrubbing (regular scrubbing and shared opaque scrubbing)
        // with particular messages
        (_ => { }),
        loggingContext =>
        {
            Tracing.Logger.Log.ScrubbingFinished(loggingContext, directoriesEncountered, filesEncountered, filesRemoved, directoriesRemovedRecursively);
            Logger.Log.BulkStatistic(
                loggingContext,
                new Dictionary<string, long>
                {
                    [I($"{Category}.DirectoriesEncountered")] = directoriesEncountered,
                    [I($"{Category}.FilesEncountered")] = filesEncountered,
                    [I($"{Category}.FilesRemoved")] = filesRemoved,
                    [I($"{Category}.DirectoriesRemovedRecursively")] = directoriesRemovedRecursively,
                });
        }))
    using (var timer = new Timer(
        o =>
        {
            // We don't have a good proxy for how much scrubbing is left. Instead we use the file counters to at least show progress
            Tracing.Logger.Log.ScrubbingStatus(m_loggingContext, filesEncountered);
        },
        null,
        dueTime: BuildXLEngine.GetTimerUpdatePeriodInMs(m_loggingConfiguration),
        period: BuildXLEngine.GetTimerUpdatePeriodInMs(m_loggingConfiguration)))
    {
        var deletableDirectoryCandidates = new ConcurrentDictionary<string, bool>(StringComparer.OrdinalIgnoreCase);
        var nondeletableDirectories = new ConcurrentDictionary<string, bool>(StringComparer.OrdinalIgnoreCase);
        var directoriesToEnumerate = new BlockingCollection<string>();

        foreach (var path in pathsToScrub)
        {
            SemanticPathInfo foundSemanticPathInfo;

            if (blockedPaths.Contains(path))
            {
                continue;
            }

            if (ValidateDirectory(mountPathExpander, path, out foundSemanticPathInfo))
            {
                if (!isPathInBuild(path))
                {
                    directoriesToEnumerate.Add(path);
                }
                else
                {
                    nondeletableDirectories.TryAdd(path, true);
                }
            }
            else
            {
                string mountName = "Invalid";
                string mountPath = "Invalid";

                if (mountPathExpander != null && foundSemanticPathInfo.IsValid)
                {
                    mountName = foundSemanticPathInfo.RootName.ToString(mountPathExpander.PathTable.StringTable);
                    mountPath = foundSemanticPathInfo.Root.ToString(mountPathExpander.PathTable);
                }

                Tracing.Logger.Log.ScrubbingFailedBecauseDirectoryIsNotScrubbable(pm.LoggingContext, path, mountName, mountPath);
            }
        }

        var cleaningThreads = new Thread[m_maxDegreeParallelism];
        int pending = directoriesToEnumerate.Count;

        if (directoriesToEnumerate.Count == 0)
        {
            directoriesToEnumerate.CompleteAdding();
        }

        for (int i = 0; i < m_maxDegreeParallelism; i++)
        {
            var t = new Thread(() =>
            {
                while (!directoriesToEnumerate.IsCompleted && !m_cancellationToken.IsCancellationRequested)
                {
                    string currentDirectory;
                    if (directoriesToEnumerate.TryTake(out currentDirectory, Timeout.Infinite))
                    {
                        Interlocked.Increment(ref directoriesEncountered);
                        bool shouldDeleteCurrentDirectory = true;

                        var result = FileUtilities.EnumerateDirectoryEntries(
                            currentDirectory,
                            false,
                            (dir, fileName, attributes) =>
                            {
                                string fullPath = Path.Combine(dir, fileName);

                                // Skip specifically blocked paths.
                                if (blockedPaths.Contains(fullPath))
                                {
                                    shouldDeleteCurrentDirectory = false;
                                    return;
                                }

                                // important to not follow directory symlinks because that can cause
                                // re-enumerating and scrubbing the same physical folder multiple times
                                if (FileUtilities.IsDirectoryNoFollow(attributes))
                                {
                                    if (nondeletableDirectories.ContainsKey(fullPath))
                                    {
                                        shouldDeleteCurrentDirectory = false;
                                    }

                                    if (!isPathInBuild(fullPath))
                                    {
                                        // Current directory is not in the build, then recurse to its members.
                                        Interlocked.Increment(ref pending);
                                        directoriesToEnumerate.Add(fullPath);

                                        if (!nonDeletableRootDirectories.Contains(fullPath))
                                        {
                                            // Current directory can be deleted, then it is a candidate to be deleted.
                                            deletableDirectoryCandidates.TryAdd(fullPath, true);
                                        }
                                        else
                                        {
                                            // Current directory can't be deleted (e.g., the root of a mount), then don't delete it.
                                            // However, note that we recurse to its members to find all extraneous directories and files.
                                            shouldDeleteCurrentDirectory = false;
                                        }
                                    }
                                    else
                                    {
                                        // Current directory is in the build, i.e., directory is an output directory.
                                        // Stop recursive directory traversal because none of its members should be deleted.
                                        shouldDeleteCurrentDirectory = false;
                                    }
                                }
                                else
                                {
                                    Interlocked.Increment(ref filesEncountered);

                                    if (!isPathInBuild(fullPath))
                                    {
                                        // File is not in the build, delete it.
                                        if (TryDeleteFile(pm.LoggingContext, fullPath, logRemovedFiles))
                                        {
                                            Interlocked.Increment(ref filesRemoved);
                                        }
                                    }
                                    else
                                    {
                                        // File is in the build, then don't delete it, but mark the current directory that
                                        // it should not be deleted.
                                        shouldDeleteCurrentDirectory = false;
                                    }
                                }
                            });

                        if (!result.Succeeded)
                        {
                            // Different trace levels based on result.
                            if (result.Status != EnumerateDirectoryStatus.SearchDirectoryNotFound)
                            {
                                Tracing.Logger.Log.ScrubbingFailedToEnumerateDirectory(
                                    pm.LoggingContext,
                                    currentDirectory,
                                    result.Status.ToString());
                            }
                        }

                        if (!shouldDeleteCurrentDirectory)
                        {
                            // If directory should not be deleted, then all of its parents should not be deleted.
                            int index;
                            string preservedDirectory = currentDirectory;
                            bool added;

                            do
                            {
                                added = nondeletableDirectories.TryAdd(preservedDirectory, true);
                            }
                            while (added
                                   && (index = preservedDirectory.LastIndexOf(Path.DirectorySeparatorChar)) != -1
                                   && !string.IsNullOrEmpty(preservedDirectory = preservedDirectory.Substring(0, index)));
                        }

                        Interlocked.Decrement(ref pending);
                    }

                    if (Volatile.Read(ref pending) == 0)
                    {
                        directoriesToEnumerate.CompleteAdding();
                    }
                }
            });
            t.Start();
            cleaningThreads[i] = t;
        }

        foreach (var t in cleaningThreads)
        {
            t.Join();
        }

        // Collect all directories that need to be deleted.
        var deleteableDirectories = new HashSet<string>(deletableDirectoryCandidates.Keys, StringComparer.OrdinalIgnoreCase);
        deleteableDirectories.ExceptWith(nondeletableDirectories.Keys);

        // Delete directories by considering only the top-most ones.
        try
        {
            Parallel.ForEach(
                CollapsePaths(deleteableDirectories).ToList(),
                new ParallelOptions
                {
                    MaxDegreeOfParallelism = m_maxDegreeParallelism,
                    CancellationToken = m_cancellationToken,
                },
                directory =>
                {
                    try
                    {
                        FileUtilities.DeleteDirectoryContents(directory, deleteRootDirectory: true, tempDirectoryCleaner: m_tempDirectoryCleaner);
                        Interlocked.Increment(ref directoriesRemovedRecursively);
                    }
                    catch (BuildXLException ex)
                    {
                        Tracing.Logger.Log.ScrubbingExternalFileOrDirectoryFailed(
                            pm.LoggingContext,
                            directory,
                            ex.LogEventMessage);
                    }
                });
        }
        catch (OperationCanceledException)
        {
        }

        return true;
    }
}
/// <summary>
/// Writes a bool to the file
/// </summary>
/// <returns>Number of bytes used to store the bool</returns>
internal int WriteBoolean(bool val)
{
    byteBuffer[0] = 0x01; // length
    byteBuffer[1] = (byte)(val ? 0x01 : 0x00);
    return FileUtilities.WriteWithLength(fileStream, byteBuffer, 2);
}
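// Illustrative byte layout (not part of the original source) for the length-prefixed
// writers above (WriteNull, WriteByte, WriteBoolean):
//   WriteNull()         -> 0x00         (1 byte:  length 0, no payload)
//   WriteByte(0x2A)     -> 0x01 0x2A    (2 bytes: length 1, payload)
//   WriteBoolean(true)  -> 0x01 0x01    (2 bytes: length 1, payload)
//   WriteBoolean(false) -> 0x01 0x00    (2 bytes: length 1, payload)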
/// <summary>
/// Generates a random directory name for the profile.
/// </summary>
/// <returns>A random directory name for the profile.</returns>
private static string GenerateProfileDirectoryName()
{
    return FileUtilities.GenerateRandomTempDirectoryName("anonymous.{0}.webdriver-profile");
}
public void Save()
{
    FileUtilities.WriteAllBytes(
        this.Path,
        Encoding.ASCII.GetBytes(JsonConvert.SerializeObject(this._data, Formatting.Indented)),
        this.IsCloudSave);
}
public TransientTestFolder()
{
    FolderPath = FileUtilities.GetTemporaryDirectory();
}