/// <summary>
/// Checks if we actually need to download or if the file on disk is already up to date.
/// </summary>
/// <returns>
/// Returns EvaluationResult.Continue if we still need to download; otherwise the result is what should be
/// returned to the caller (the downloaded file artifact on a hit, or Error on failure).
/// </returns>
private async Task<EvaluationResult> CheckIfDownloadIsNeededAsync(DownloadData downloadData)
{
    try
    {
        var downloadFilePath = downloadData.DownloadedFilePath.ToString(m_context.PathTable);

        // Check if the file already exists and matches the expected hash.
        if (File.Exists(downloadFilePath))
        {
            // Hash the file on disk so we can decide whether a re-download is required.
            var actualHash = await GetContentHashAsync(downloadData.DownloadedFilePath);

            // First compare against the hash statically declared in the download settings (if any).
            if (downloadData.ContentHash.HasValue && actualHash == downloadData.ContentHash.Value)
            {
                return (new EvaluationResult(FileArtifact.CreateSourceFile(downloadData.DownloadedFilePath)));
            }

            // Otherwise fall back to the incremental manifest written by a previous build.
            var incrementalState = await DownloadIncrementalState.TryLoadAsync(m_logger, m_context, downloadData);
            if (incrementalState != null && incrementalState.ContentHash == actualHash)
            {
                return (new EvaluationResult(FileArtifact.CreateSourceFile(downloadData.DownloadedFilePath)));
            }
        }
    }
    catch (IOException e)
    {
        m_logger.ErrorCheckingIncrementality(m_context.LoggingContext, downloadData.Settings.ModuleName, e.Message);
        return (EvaluationResult.Error);
    }
    catch (UnauthorizedAccessException e)
    {
        m_logger.ErrorCheckingIncrementality(m_context.LoggingContext, downloadData.Settings.ModuleName, e.Message);
        return (EvaluationResult.Error);
    }

    // Download is needed
    return (EvaluationResult.Continue);
}
/// <summary>
/// Registers the module backing <paramref name="downloadData"/> (descriptor and definition) in the
/// resolver's lookup tables so it can be resolved by name and by spec path.
/// </summary>
internal void UpdateDataForDownloadData(DownloadData downloadData, FrontEndContext context = null)
{
    // Fall back to the resolver's own context when the caller did not supply one.
    context = context ?? m_context;
    Contract.Assert(context != null);

    var name = downloadData.Settings.ModuleName;

    var moduleId = ModuleId.Create(context.StringTable, name);
    var descriptor = new ModuleDescriptor(moduleId, name, name, string.Empty, Kind, Name);

    var definition = ModuleDefinition.CreateModuleDefinitionWithImplicitReferences(
        descriptor,
        downloadData.ModuleRoot,
        downloadData.ModuleConfigFile,
        new[] { downloadData.ModuleSpecFile },
        allowedModuleDependencies: null,
        cyclicalFriendModules: null); // A Download package does not have any module dependency restrictions nor whitelists cycles

    m_descriptors.Add(descriptor);
    m_descriptorsByName.Add(name, descriptor);
    m_descriptorsBySpecPath.Add(downloadData.ModuleSpecFile, descriptor);
    m_definitions.Add(descriptor, definition);
}
/// <summary>
/// Creates the DScript expression text for the pip that performs the actual download,
/// passing url, target directory and the optional hash/fileName arguments to the download tool.
/// </summary>
/// <returns>A spec fragment that invokes the download tool transformer.</returns>
private string CreateDownloadPip(DownloadData data)
{
    string downloadDirectory = data.DownloadedFilePath.GetParent(m_context.PathTable).ToString(m_context.PathTable);

    // The download pip is flagged with isLight, since it is mostly network intensive
    // We disable reparse point resolving for this pip (and the extract one below) since the frontend directory
    // sometimes is placed under a junction (typically when CB junction outputs) and with full reparse point resolution enabled this
    // would generate a DFA. We know these pips do not interact with reparse points, so this is safe.
    return ($@"<TransformerExecuteResult> _PreludeAmbientHack_Transformer.execute({{ tool: downloadTool, tags: ['download'], description: 'Downloading \""{data.DownloadUri}\""', workingDirectory: d`{downloadDirectory}`, arguments: [ {{name: '/url:', value: '{data.DownloadUri}'}}, {{name: '/downloadDirectory:', value: p`{downloadDirectory}`}}, {(data.ContentHash.HasValue ? $"{{name: '/hash:', value: '{data.ContentHash.Value}'}}," : string.Empty)} {(!string.IsNullOrEmpty(data.Settings.FileName) ? $"{{name: '/fileName:', value: '{data.Settings.FileName}'}}," : string.Empty)} ], outputs: [f`{data.DownloadedFilePath.ToString(m_context.PathTable)}`], isLight: true, unsafe: {{disableFullReparsePointResolving: true}}, }});");
}
/// <summary>
/// Validates user-specified download settings and, when valid, packages them into a DownloadData instance.
/// </summary>
/// <returns>
/// True when the settings are valid and <paramref name="downloadData"/> is populated;
/// false otherwise (a targeted error has already been logged).
/// </returns>
private bool ValidateAndExtractDownloadData(
    FrontEndContext context,
    IDownloadFileSettings downloadSettings,
    Dictionary<string, DownloadData> downloads,
    AbsolutePath resolverFolder,
    out DownloadData downloadData)
{
    downloadData = null;

    // A module name is mandatory and must be unique across all download entries.
    if (string.IsNullOrEmpty(downloadSettings.ModuleName))
    {
        Logger.Log.DownloadFrontendMissingModuleId(m_context.LoggingContext, downloadSettings.Url);
        return (false);
    }

    if (downloads.ContainsKey(downloadSettings.ModuleName))
    {
        Logger.Log.DownloadFrontendDuplicateModuleId(m_context.LoggingContext, downloadSettings.ModuleName, Kind, Name);
        return (false);
    }

    // The url must be present and must parse as an absolute URI.
    if (string.IsNullOrEmpty(downloadSettings.Url))
    {
        Logger.Log.DownloadFrontendMissingUrl(m_context.LoggingContext, downloadSettings.ModuleName);
        return (false);
    }

    if (!Uri.TryCreate(downloadSettings.Url, UriKind.Absolute, out var downloadLocation))
    {
        Logger.Log.DownloadFrontendInvalidUrl(m_context.LoggingContext, downloadSettings.ModuleName, downloadSettings.Url);
        return (false);
    }

    // The hash is optional; when present it must parse as a valid content hash.
    ContentHash? contentHash;
    if (string.IsNullOrEmpty(downloadSettings.Hash))
    {
        contentHash = null;
    }
    else
    {
        if (!ContentHash.TryParse(downloadSettings.Hash, out var hash))
        {
            Logger.Log.DownloadFrontendHashValueNotValidContentHash(m_context.LoggingContext, downloadSettings.ModuleName, downloadSettings.Url, downloadSettings.Hash);
            return (false);
        }

        contentHash = hash;
    }

    // For these last two validations, the TS parser would also complain, but we just give a more targeted error before that happens.
    if (!string.IsNullOrEmpty(downloadSettings.DownloadedValueName) && !SymbolAtom.TryCreate(context.StringTable, downloadSettings.DownloadedValueName, out _))
    {
        Logger.Log.NameContainsInvalidCharacters(m_context.LoggingContext, "downloadedValueName", downloadSettings.DownloadedValueName);
        return (false);
    }

    if (!string.IsNullOrEmpty(downloadSettings.ExtractedValueName) && !SymbolAtom.TryCreate(context.StringTable, downloadSettings.ExtractedValueName, out _))
    {
        Logger.Log.NameContainsInvalidCharacters(m_context.LoggingContext, "extractedValueName", downloadSettings.ExtractedValueName);
        return (false);
    }

    downloadData = new DownloadData(context, downloadSettings, downloadLocation, resolverFolder, contentHash, downloadSettings.DownloadedValueName, downloadSettings.ExtractedValueName);
    return (true);
}
/// <summary>
/// Tries to load the download manifest written by a previous build. Returns null when the manifest is
/// absent, has a different version, or does not match the current url/fileName settings.
/// </summary>
public static async Task<DownloadIncrementalState> TryLoadAsync(Logger logger, FrontEndContext context, DownloadData downloadData)
{
    var manifestFilePath = downloadData.DownloadManifestFile.ToString(context.PathTable);
    DownloadIncrementalState result = null;
    if (!FileUtilities.Exists(manifestFilePath))
    {
        return (null);
    }

    using (var reader = new StreamReader(manifestFilePath))
    {
        // The manifest is a simple line-oriented format: version, url, fileName, hash.
        var versionLine = await reader.ReadLineAsync();
        if (!string.Equals(versionLine, ManifestVersion, StringComparison.Ordinal))
        {
            logger.DownloadManifestDoesNotMatch(context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, "versionLine", ManifestVersion, versionLine);
            return (null);
        }

        var urlLine = await reader.ReadLineAsync();
        if (!string.Equals(urlLine, downloadData.Settings.Url, StringComparison.Ordinal))
        {
            logger.DownloadManifestDoesNotMatch(context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, "url", downloadData.Settings.Url, urlLine);
            return (null);
        }

        var fileNameLine = await reader.ReadLineAsync();
        if (!string.Equals(fileNameLine, downloadData.Settings.FileName, StringComparison.Ordinal))
        {
            logger.DownloadManifestDoesNotMatch(context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, "fileName", downloadData.Settings.FileName, fileNameLine);
            return (null);
        }

        // A missing or unparsable hash silently invalidates the manifest (no log message here).
        var hashLine = await reader.ReadLineAsync();
        if (hashLine == null || !ContentHash.TryParse(hashLine, out var expectedHash))
        {
            return (null);
        }

        result = new DownloadIncrementalState(downloadData, expectedHash);
    }

    return (result);
}
/// <summary>
/// Captures the download entry together with the content hash recorded for it.
/// </summary>
public DownloadIncrementalState(DownloadData downloadData, ContentHash contentHash)
{
    ContentHash = contentHash;
    m_downloadData = downloadData;
}
/// <summary>
/// Seals <paramref name="directory"/> as a partial seal over <paramref name="files"/> and wraps the
/// resulting directory artifact in a StaticDirectory evaluation result.
/// </summary>
private EvaluationResult SealDirectory(PipConstructionHelper pipConstructionHelper, DownloadData downloadData, DirectoryArtifact directory, SortedReadOnlyArray<FileArtifact, OrdinalFileArtifactComparer> files)
{
    // No output directories participate in this seal, hence the empty sorted array.
    var sealSucceeded = pipConstructionHelper.TrySealDirectory(
        directory,
        files,
        CollectionUtilities.EmptySortedReadOnlyArray<DirectoryArtifact, OrdinalDirectoryArtifactComparer>(OrdinalDirectoryArtifactComparer.Instance),
        Pips.Operations.SealDirectoryKind.Partial,
        null,
        null,
        null,
        out var directoryArtifact);

    if (!sealSucceeded)
    {
        return EvaluationResult.Error;
    }

    var contents = files.WithCompatibleComparer(OrdinalPathOnlyFileArtifactComparer.Instance);
    var staticDirectory = new StaticDirectory(directoryArtifact, Pips.Operations.SealDirectoryKind.Partial, contents);
    return new EvaluationResult(staticDirectory);
}
/// <summary>
/// Enumerates the extracted contents, records their hashes in the extract manifest, and seals the
/// contents folder as the extraction result.
/// </summary>
private async Task<EvaluationResult> ValidateAndStoreIncrementalExtractState(PipConstructionHelper pipConstructionHelper, DownloadData downloadData)
{
    var archive = downloadData.DownloadedFilePath.ToString(m_context.PathTable);
    var target = downloadData.ContentsFolder.Path.ToString(m_context.PathTable);

    try
    {
        var allFiles = new List<FileArtifact>();

        // Recursively collect every file under the extraction target; directory entries are skipped.
        var enumeratResult = FileUtilities.EnumerateDirectoryEntries(target, true, "*", (dir, fileName, attributes) =>
        {
            if ((attributes & FileAttributes.Directory) == 0)
            {
                var filePath = Path.Combine(dir, fileName);
                allFiles.Add(FileArtifact.CreateSourceFile(AbsolutePath.Create(m_context.PathTable, filePath)));
            }
        });

        if (!enumeratResult.Succeeded)
        {
            var error = new Win32Exception(enumeratResult.NativeErrorCode);
            m_logger.ErrorListingPackageContents(m_context.LoggingContext, downloadData.Settings.ModuleName, archive, target, error.Message);
            return (EvaluationResult.Error);
        }

        // An archive that extracted to nothing is treated as an error.
        if (allFiles.Count == 0)
        {
            m_logger.ErrorListingPackageContents(m_context.LoggingContext, downloadData.Settings.ModuleName, archive, target, "file list is empty");
            return (EvaluationResult.Error);
        }

        var sortedFiles = SortedReadOnlyArray<FileArtifact, OrdinalFileArtifactComparer>.CloneAndSort(
            allFiles,
            OrdinalFileArtifactComparer.Instance);

        // Hash every extracted file so the next build can verify up-to-dateness without re-extracting.
        var hashes = new Dictionary<AbsolutePath, ContentHash>();
        foreach (var file in allFiles)
        {
            var hash = await GetContentHashAsync(file);
            hashes.Add(file.Path, hash);
        }

        var incrementalState = new ExtractIncrementalState(downloadData, sortedFiles, hashes);
        await incrementalState.SaveAsync(m_context);

        return (SealDirectory(pipConstructionHelper, downloadData, downloadData.ContentsFolder, sortedFiles));
    }
    catch (Exception e) when (e is BuildXLException || e is IOException || e is UnauthorizedAccessException)
    {
        m_logger.ErrorExtractingArchive(m_context.LoggingContext, downloadData.Settings.ModuleName, archive, target, e.Message);
        return (EvaluationResult.Error);
    }
}
/// <summary>
/// Extract files to disk
/// </summary>
/// <remarks>
/// At the point of authoring (Jan 2019) the BCL does not implement tar-files or bz2.
/// https://github.com/dotnet/corefx/issues/3253 has been discussed since Sept 2015
/// Therefore we rely here on 3rd party library: https://github.com/icsharpcode/SharpZipLib
/// </remarks>
private bool TryExtractToDisk(DownloadData downloadData)
{
    var archive = downloadData.DownloadedFilePath.ToString(m_context.PathTable);
    var target = downloadData.ContentsFolder.Path.ToString(m_context.PathTable);

    try
    {
        // Start from a clean target folder so stale files from a previous extraction cannot survive.
        FileUtilities.DeleteDirectoryContents(target, false);
        FileUtilities.CreateDirectory(target);
    }
    catch (BuildXLException e)
    {
        m_logger.ErrorExtractingArchive(m_context.LoggingContext, downloadData.Settings.ModuleName, archive, target, e.Message);
        return (false);
    }

    switch (downloadData.Settings.ArchiveType)
    {
        case DownloadArchiveType.Zip:
            try
            {
                new FastZip().ExtractZip(archive, target, null);
            }
            catch (ZipException e)
            {
                m_logger.ErrorExtractingArchive(m_context.LoggingContext, downloadData.Settings.ModuleName, archive, target, e.Message);
                return (false);
            }

            break;
        case DownloadArchiveType.Gzip:
            try
            {
                // Gzip wraps a single stream; the output file name is the downloaded name minus its extension.
                var targetFile = Path.Combine(
                    target,
                    downloadData.DownloadedFilePath.GetName(m_context.PathTable).RemoveExtension(m_context.StringTable)
                        .ToString(m_context.StringTable));

                using (var reader = m_context.FileSystem.OpenText(downloadData.DownloadedFilePath))
                using (var gzipStream = new GZipInputStream(reader.BaseStream))
                using (var output = FileUtilities.CreateFileStream(targetFile, FileMode.Create, FileAccess.Write, FileShare.Read))
                {
                    byte[] buffer = new byte[4096];
                    StreamUtils.Copy(gzipStream, output, buffer);
                }
            }
            catch (GZipException e)
            {
                m_logger.ErrorExtractingArchive(m_context.LoggingContext, downloadData.Settings.ModuleName, archive, target, e.Message);
                return (false);
            }

            break;
        case DownloadArchiveType.Tar:
            try
            {
                using (var reader = m_context.FileSystem.OpenText(downloadData.DownloadedFilePath))
                using (var tar = TarArchive.CreateInputTarArchive(reader.BaseStream))
                {
                    tar.ExtractContents(target);
                }
            }
            catch (TarException e)
            {
                m_logger.ErrorExtractingArchive(m_context.LoggingContext, downloadData.Settings.ModuleName, archive, target, e.Message);
                return (false);
            }

            break;
        case DownloadArchiveType.Tgz:
            try
            {
                // Tgz is a tar stream wrapped in gzip, so both exception types can surface.
                using (var reader = m_context.FileSystem.OpenText(downloadData.DownloadedFilePath))
                using (var gzipStream = new GZipInputStream(reader.BaseStream))
                using (var tar = TarArchive.CreateInputTarArchive(gzipStream))
                {
                    tar.ExtractContents(target);
                }
            }
            catch (GZipException e)
            {
                m_logger.ErrorExtractingArchive(m_context.LoggingContext, downloadData.Settings.ModuleName, archive, target, e.Message);
                return (false);
            }
            catch (TarException e)
            {
                m_logger.ErrorExtractingArchive(m_context.LoggingContext, downloadData.Settings.ModuleName, archive, target, e.Message);
                return (false);
            }

            break;
        default:
            throw Contract.AssertFailure("Unexpected archive type");
    }

    try
    {
        // Sanity check: the extraction must have produced the target directory.
        if (!FileUtilities.DirectoryExistsNoFollow(target))
        {
            m_logger.ErrorNothingExtracted(m_context.LoggingContext, downloadData.Settings.ModuleName, archive, target);
            return (false);
        }
    }
    catch (BuildXLException e)
    {
        m_logger.ErrorExtractingArchive(m_context.LoggingContext, downloadData.Settings.ModuleName, archive, target, e.Message);
        return (false);
    }

    return (true);
}
/// <summary>
/// Validates the downloaded file against the declared hash (when present) or stores the observed hash
/// in the incremental manifest; records the file with the engine and returns its artifact on success.
/// </summary>
private async Task<EvaluationResult> ValidateAndStoreIncrementalDownloadStateAsync(DownloadData downloadData)
{
    // If the hash is given in the download setting, use the corresponding hashType (hash algorithm) to get the content hash of the downloaded file.
    // We don't record the file until we know it is the correct one and will be used in this build.
    var recordFileAccess = false;
    var downloadedHash = await GetContentHashAsync(downloadData.DownloadedFilePath, HashTypeParser(downloadData.Settings.Hash), recordFileAccess);

    if (downloadData.ContentHash.HasValue)
    {
        // Validate downloaded hash if specified
        if (downloadData.ContentHash != downloadedHash)
        {
            m_logger.DownloadMismatchedHash(
                m_context.LoggingContext,
                downloadData.Settings.ModuleName,
                downloadData.Settings.Url,
                downloadData.Settings.Hash,
                downloadedHash.ToString());
            return (EvaluationResult.Error);
        }
    }
    else
    {
        try
        {
            // No declared hash: persist the observed hash so future builds can skip the download.
            var incrementalState = new DownloadIncrementalState(downloadData, downloadedHash);
            await incrementalState.SaveAsync(m_context);
        }
        catch (BuildXLException e)
        {
            m_logger.ErrorStoringIncrementality(m_context.LoggingContext, downloadData.Settings.ModuleName, e.Message);
            return (EvaluationResult.Error);
        }
    }

    // Record the file with the build's default hasher.
    m_frontEndHost.Engine.RecordFrontEndFile(downloadData.DownloadedFilePath, Name);

    return (new EvaluationResult(FileArtifact.CreateSourceFile(downloadData.DownloadedFilePath)));
}
/// <summary>
/// Attempts to download the file to disk.
/// </summary>
/// <returns>Returns EvaluationResult.Continue if we successfully downloaded and need to continue to store the incremental information, else the result will be what should be returned</returns>
private async Task<EvaluationResult> TryDownloadFileToDiskAsync(DownloadData downloadData)
{
    var downloadFilePathAsString = downloadData.DownloadedFilePath.ToString(m_context.PathTable);

    try
    {
        // Make sure the target folder exists and any stale file from a previous attempt is removed.
        FileUtilities.CreateDirectory(Path.GetDirectoryName(downloadFilePathAsString));
        FileUtilities.DeleteFile(downloadFilePathAsString, retryOnFailure: true);
    }
    catch (BuildXLException e)
    {
        m_logger.ErrorPreppingForDownload(m_context.LoggingContext, downloadData.Settings.ModuleName, e.Message);
        return EvaluationResult.Error;
    }

    // We have to download the file.
    m_logger.StartDownload(m_context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url);
    var stopwatch = Stopwatch.StartNew();

    try
    {
        using (var httpClient = new HttpClient())
        using (var httpRequest = new HttpRequestMessage(HttpMethod.Get, downloadData.DownloadUri))
        {
            httpClient.Timeout = TimeSpan.FromMinutes(10);

            // If the download URI is pointing to a VSTS feed and we get a valid auth token, make it part of the request
            // We only want to send the token over HTTPS and to a VSTS domain to avoid security issues
            if (IsVSTSPackageSecureURI(downloadData.DownloadUri) &&
                await TryGetAuthenticationHeaderAsync(downloadData.DownloadUri) is var authHeader &&
                authHeader != null)
            {
                httpRequest.Headers.Accept.Clear();
                httpRequest.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
                httpRequest.Headers.Authorization = authHeader;
            }

            // Dispose the response (and the content stream it owns) deterministically instead of
            // leaving it to the garbage collector. The original code never disposed either.
            using (var response = await httpClient.SendAsync(httpRequest, m_context.CancellationToken))
            {
                response.EnsureSuccessStatusCode();

                using (var stream = await response.Content.ReadAsStreamAsync())
                using (var targetStream = new FileStream(downloadFilePathAsString, FileMode.Create, FileAccess.Write, FileShare.None))
                {
                    await stream.CopyToAsync(targetStream);
                }
            }

            m_logger.Downloaded(
                m_context.LoggingContext,
                downloadData.Settings.ModuleName,
                downloadData.Settings.Url,
                stopwatch.ElapsedMilliseconds,
                new FileInfo(downloadFilePathAsString).Length);
        }
    }
    catch (TaskCanceledException e)
    {
        // Distinguish a user-initiated cancel from the HttpClient timeout (also TaskCanceledException).
        string message = m_context.CancellationToken.IsCancellationRequested ? "Download manually canceled." : e.Message;
        m_logger.DownloadFailed(m_context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, message);
        return EvaluationResult.Canceled;
    }
    catch (HttpRequestException e)
    {
        // Include the inner exception message since HttpRequestException is often just a thin wrapper.
        var message = e.InnerException == null ? e.Message : e.Message + " " + e.InnerException.Message;
        m_logger.DownloadFailed(m_context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, message);
        return EvaluationResult.Error;
    }
    catch (IOException e)
    {
        m_logger.DownloadFailed(m_context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, e.Message);
        return EvaluationResult.Error;
    }
    catch (UnauthorizedAccessException e)
    {
        m_logger.DownloadFailed(m_context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, e.Message);
        return EvaluationResult.Error;
    }

    // Indicate we should continue to store the incremental information
    return EvaluationResult.Continue;
}
/// <summary>
/// Validates the downloaded file against the declared hash (when present) or stores the observed hash
/// in the incremental manifest; returns the downloaded file artifact on success.
/// </summary>
private async Task<EvaluationResult> ValidateAndStoreIncrementalDownloadStateAsync(DownloadData downloadData)
{
    // If the hash is given in the download setting, use the corresponding hashType (hash algorithm) to get the content hash of the downloaded file.
    var downloadedHash = await GetContentHashAsync(downloadData.DownloadedFilePath, HashTypeParser(downloadData.Settings.Hash));

    if (downloadData.ContentHash.HasValue)
    {
        // Validate downloaded hash if specified
        if (downloadData.ContentHash != downloadedHash)
        {
            m_logger.DownloadMismatchedHash(
                m_context.LoggingContext,
                downloadData.Settings.ModuleName,
                downloadData.Settings.Url,
                downloadData.Settings.Hash,
                downloadedHash.ToString());
            return (EvaluationResult.Error);
        }
    }
    else
    {
        try
        {
            // No declared hash: persist the observed hash so future builds can skip the download.
            var incrementalState = new DownloadIncrementalState(downloadData, downloadedHash);
            await incrementalState.SaveAsync(m_context);
        }
        catch (BuildXLException e)
        {
            m_logger.ErrorStoringIncrementality(m_context.LoggingContext, downloadData.Settings.ModuleName, e.Message);
            return (EvaluationResult.Error);
        }
    }

    return (new EvaluationResult(FileArtifact.CreateSourceFile(downloadData.DownloadedFilePath)));
}
/// <summary>
/// Attempts to download the file to disk.
/// </summary>
/// <returns>Returns EvaluationResult.Continue if we successfully downloaded and need to continue to store the incremental information, else the result will be what should be returned</returns>
private async Task<EvaluationResult> TryDownloadFileToDiskAsync(DownloadData downloadData)
{
    var downloadFilePathAsString = downloadData.DownloadedFilePath.ToString(m_context.PathTable);

    try
    {
        // Make sure the target folder exists and any stale file from a previous attempt is removed.
        FileUtilities.CreateDirectory(Path.GetDirectoryName(downloadFilePathAsString));
        FileUtilities.DeleteFile(downloadFilePathAsString, waitUntilDeletionFinished: true);
    }
    catch (BuildXLException e)
    {
        m_logger.ErrorPreppingForDownload(m_context.LoggingContext, downloadData.Settings.ModuleName, e.Message);
        return EvaluationResult.Error;
    }

    // We have to download the file.
    m_logger.StartDownload(m_context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url);
    var stopwatch = Stopwatch.StartNew();

    try
    {
        using (var httpClient = new HttpClient())
        {
            httpClient.Timeout = TimeSpan.FromMinutes(10);

            // Dispose the response (and the content stream it owns) deterministically instead of
            // leaving it to the garbage collector. The original code never disposed either.
            using (var response = await httpClient.GetAsync((Uri)downloadData.DownloadUri, m_context.CancellationToken))
            {
                response.EnsureSuccessStatusCode();

                using (var stream = await response.Content.ReadAsStreamAsync())
                using (var targetStream = new FileStream(downloadFilePathAsString, FileMode.Create, FileAccess.Write, FileShare.None))
                {
                    await stream.CopyToAsync(targetStream);
                }
            }

            m_logger.Downloaded(
                m_context.LoggingContext,
                downloadData.Settings.ModuleName,
                downloadData.Settings.Url,
                stopwatch.ElapsedMilliseconds,
                new FileInfo(downloadFilePathAsString).Length);
        }
    }
    catch (TaskCanceledException e)
    {
        // Distinguish a user-initiated cancel from the HttpClient timeout (also TaskCanceledException).
        string message = m_context.CancellationToken.IsCancellationRequested ? "Download manually canceled." : e.Message;
        m_logger.DownloadFailed(m_context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, message);
        return EvaluationResult.Canceled;
    }
    catch (HttpRequestException e)
    {
        // Include the inner exception message since HttpRequestException is often just a thin wrapper.
        var message = e.InnerException == null ? e.Message : e.Message + " " + e.InnerException.Message;
        m_logger.DownloadFailed(m_context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, message);
        return EvaluationResult.Error;
    }
    catch (IOException e)
    {
        m_logger.DownloadFailed(m_context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, e.Message);
        return EvaluationResult.Error;
    }
    catch (UnauthorizedAccessException e)
    {
        m_logger.DownloadFailed(m_context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, e.Message);
        return EvaluationResult.Error;
    }

    // Indicate we should continue to store the incremental information
    return EvaluationResult.Continue;
}
/// <summary>
/// Downloads a file with file backed manifest incremental check
/// </summary>
internal async Task<EvaluationResult> PerformDownloadOrIncrementalCheckAsync(DownloadData downloadData)
{
    if (m_context.CancellationToken.IsCancellationRequested)
    {
        return (EvaluationResult.Canceled);
    }

    Statistics.Downloads.Total.Increment();

    // Phase 1: see whether the file on disk already matches; if so we are done.
    using (Statistics.Downloads.UpToDateCheckDuration.Start(downloadData.Settings.Url))
    {
        var result = await CheckIfDownloadIsNeededAsync(downloadData);
        if (result.IsErrorValue)
        {
            Statistics.Downloads.Failures.Increment();
        }

        // NOTE(review): an error result also increments SkippedDueToManifest here, since any
        // non-Continue result (including Error) returns through this branch — confirm intended.
        if (result != EvaluationResult.Continue)
        {
            Statistics.Downloads.SkippedDueToManifest.Increment();
            return (result);
        }
    }

    // Phase 2: perform the actual download.
    using (Statistics.Downloads.Duration.Start(downloadData.Settings.Url))
    {
        var result = await TryDownloadFileToDiskAsync(downloadData);
        if (result.IsErrorValue)
        {
            Statistics.Downloads.Failures.Increment();
        }

        if (result != EvaluationResult.Continue)
        {
            return (result);
        }
    }

    // Phase 3: validate the downloaded content and persist the incremental manifest.
    using (Statistics.Downloads.UpToDateCheckDuration.Start(downloadData.Settings.Url))
    {
        var result = await ValidateAndStoreIncrementalDownloadStateAsync(downloadData);
        if (result.IsErrorValue)
        {
            Statistics.Downloads.Failures.Increment();
        }

        return (result);
    }
}
/// <summary>
/// Tries to load the extract manifest written by a previous build. Returns null when the manifest is
/// absent, malformed, or does not match the current settings (version, url, archive type).
/// </summary>
public static async Task<ExtractIncrementalState> TryLoadAsync(Logger logger, FrontEndContext context, DownloadData downloadData)
{
    var manifestFilePath = downloadData.ExtractManifestFile.ToString(context.PathTable);
    ExtractIncrementalState result = null;
    if (!FileUtilities.Exists(manifestFilePath))
    {
        return (null);
    }

    using (var reader = new StreamReader(manifestFilePath))
    {
        // Header lines: version, url, archive type, file count.
        var versionLine = await reader.ReadLineAsync();
        if (versionLine == null || !string.Equals(versionLine, ManifestVersion, StringComparison.Ordinal))
        {
            logger.ExtractManifestDoesNotMatch(context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, "version", ManifestVersion, versionLine);
            return (null);
        }

        var urlLine = await reader.ReadLineAsync();
        if (!string.Equals(urlLine, downloadData.Settings.Url, StringComparison.Ordinal))
        {
            logger.ExtractManifestDoesNotMatch(context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, "url", downloadData.Settings.Url, urlLine);
            return (null);
        }

        var archiveTypeLine = await reader.ReadLineAsync();
        if (archiveTypeLine == null || !Enum.TryParse<DownloadArchiveType>(archiveTypeLine, out var archiveType) || archiveType != downloadData.Settings.ArchiveType)
        {
            logger.ExtractManifestDoesNotMatch(context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, "archiveType", downloadData.Settings.ArchiveType.ToString(), archiveTypeLine);
            return (null);
        }

        var fileCountLine = await reader.ReadLineAsync();
        if (fileCountLine == null || !uint.TryParse(fileCountLine, out var fileCount))
        {
            return (null);
        }

        // Body: one (relative path, hash) pair of lines per extracted file.
        var hashes = new Dictionary<AbsolutePath, ContentHash>();
        var files = new FileArtifact[fileCount];
        for (int i = 0; i < fileCount; i++)
        {
            var filePathLine = await reader.ReadLineAsync();
            if (filePathLine == null || !RelativePath.TryCreate(context.StringTable, filePathLine, out var relativeFilePath))
            {
                return (null);
            }

            var hashLine = await reader.ReadLineAsync();
            if (hashLine == null || !ContentHash.TryParse(hashLine, out var contentHash))
            {
                return (null);
            }

            var filePath = downloadData.ContentsFolder.Path.Combine(context.PathTable, relativeFilePath);
            files[i] = FileArtifact.CreateSourceFile(filePath);
            hashes[filePath] = contentHash;
        }

        // NOTE(review): SortUnsafe presumably relies on the writer emitting entries in sorted order —
        // confirm against ExtractIncrementalState.SaveAsync.
        var sortedFiles = SortedReadOnlyArray<FileArtifact, OrdinalFileArtifactComparer>.SortUnsafe(
            files,
            OrdinalFileArtifactComparer.Instance);

        result = new ExtractIncrementalState(downloadData, sortedFiles, hashes);
    }

    return (result);
}
/// <summary>
/// Captures the download entry together with the extracted file list and their recorded hashes.
/// </summary>
public ExtractIncrementalState(DownloadData downloadData, SortedReadOnlyArray<FileArtifact, OrdinalFileArtifactComparer> files, Dictionary<AbsolutePath, ContentHash> hashes)
{
    Hashes = hashes;
    Files = files;
    m_downloadData = downloadData;
}
/// <summary>
/// Returns the module descriptor for the download data, looked up by its module spec path.
/// </summary>
internal ModuleDescriptor GetModuleDescriptor(DownloadData downloadData)
    => m_descriptorsBySpecPath[downloadData.ModuleSpecFile];
/// <summary>
/// Extracts a file into a folder with in manifest based incrementality.
/// </summary>
internal async Task<EvaluationResult> PerformExtractOrIncrementalCheckAsync(DownloadData downloadData)
{
    if (m_context.CancellationToken.IsCancellationRequested)
    {
        return (EvaluationResult.Canceled);
    }

    // Ensure file is downloaded
    var extractedFileResult = await DownloadFile(downloadData);
    if (extractedFileResult.IsErrorValue)
    {
        return (extractedFileResult);
    }

    var extractedFile = (FileArtifact)extractedFileResult.Value;

    var moduleDescriptor = m_workspaceResolver.GetModuleDescriptor(downloadData);

    // Helper used to construct the seal-directory pips for the extracted contents.
    var pipConstructionHelper = PipConstructionHelper.Create(
        m_context,
        m_frontEndHost.Engine.Layout.ObjectDirectory,
        m_frontEndHost.Engine.Layout.RedirectedDirectory,
        m_frontEndHost.Engine.Layout.TempDirectory,
        m_frontEndHost.PipGraph,
        moduleDescriptor.Id,
        moduleDescriptor.Name,
        RelativePath.Create(downloadData.ModuleSpecFile.GetName(m_context.PathTable)),
        FullSymbol.Create(m_context.SymbolTable, "extracted"),
        new LocationData(downloadData.ModuleSpecFile, 0, 0),
        m_context.QualifierTable.EmptyQualifierId);

    // When we don't have to extract we'll expose the downloaded file in the contents.
    if (downloadData.Settings.ArchiveType == DownloadArchiveType.File)
    {
        return (SealDirectory(
            pipConstructionHelper,
            downloadData,
            DirectoryArtifact.CreateWithZeroPartialSealId(downloadData.DownloadedFilePath.GetParent(m_context.PathTable)),
            SortedReadOnlyArray<FileArtifact, OrdinalFileArtifactComparer>.FromSortedArrayUnsafe(
                ReadOnlyArray<FileArtifact>.FromWithoutCopy(new[] { extractedFile }),
                OrdinalFileArtifactComparer.Instance)));
    }

    Statistics.Extractions.Total.Increment();

    // Phase 1: skip extraction entirely when the previous extraction is still valid.
    using (Statistics.Extractions.UpToDateCheckDuration.Start(downloadData.Settings.Url))
    {
        var result = await CheckIfExtractIsNeededAsync(pipConstructionHelper, downloadData);
        if (result.IsErrorValue)
        {
            Statistics.Extractions.Failures.Increment();
        }

        if (result != EvaluationResult.Continue)
        {
            Statistics.Extractions.SkippedDueToManifest.Increment();
            return (result);
        }
    }

    // Phase 2: perform the extraction on a worker thread; honor cancellation.
    using (Statistics.Extractions.Duration.Start(downloadData.Settings.Url))
    {
        try
        {
            if (!await Task.Run(
                () => TryExtractToDisk(downloadData),
                m_context.CancellationToken))
            {
                Statistics.Extractions.Failures.Increment();
                return (EvaluationResult.Error);
            }
        }
        catch (TaskCanceledException)
        {
            return (EvaluationResult.Canceled);
        }
    }

    // Phase 3: record hashes of the extracted files and seal the contents folder.
    using (Statistics.Extractions.UpToDateCheckDuration.Start(downloadData.Settings.Url))
    {
        var result = await ValidateAndStoreIncrementalExtractState(pipConstructionHelper, downloadData);
        if (result.IsErrorValue)
        {
            Statistics.Extractions.Failures.Increment();
        }

        return (result);
    }
}
/// <summary>
/// Checks whether the previously extracted contents are still valid; when they are, reseals the
/// contents folder and returns it, otherwise returns EvaluationResult.Continue to trigger extraction.
/// </summary>
private async Task<EvaluationResult> CheckIfExtractIsNeededAsync(PipConstructionHelper pipConstructionHelper, DownloadData downloadData)
{
    try
    {
        if (m_context.FileSystem.IsDirectory(downloadData.ContentsFolder))
        {
            var incrementalState = await ExtractIncrementalState.TryLoadAsync(m_logger, m_context, downloadData);
            if (incrementalState != null)
            {
                // Check all files still have the same hash. This should use the hash cache based on USN so be very fast.
                foreach (var hashKv in incrementalState.Hashes)
                {
                    if (!m_context.FileSystem.Exists(hashKv.Key))
                    {
                        // File is not present, extraction is needed.
                        return (EvaluationResult.Continue);
                    }

                    var hash = await GetContentHashAsync(hashKv.Key);
                    if (hash != hashKv.Value)
                    {
                        // File has changed, extraction is needed.
                        return (EvaluationResult.Continue);
                    }
                }

                // All hashes verified, update the manifest
                await incrementalState.SaveAsync(m_context);

                return (SealDirectory(pipConstructionHelper, downloadData, downloadData.ContentsFolder, incrementalState.Files));
            }
        }
    }
    catch (IOException e)
    {
        m_logger.ErrorCheckingIncrementality(m_context.LoggingContext, downloadData.Settings.ModuleName, e.Message);
        return (EvaluationResult.Error);
    }
    catch (UnauthorizedAccessException e)
    {
        m_logger.ErrorCheckingIncrementality(m_context.LoggingContext, downloadData.Settings.ModuleName, e.Message);
        return (EvaluationResult.Error);
    }

    // Extraction is needed
    return (EvaluationResult.Continue);
}