private KeyValuePair<CacheFile, bool> EvaluateCacheFile()
{
    // Decide whether this restore can be skipped (a "no-op") by comparing the
    // hash of the current dependency graph spec against the hash persisted in
    // the cache file written by the previous restore.
    var newDgSpecHash = _request.DependencyGraphSpec.GetHash();
    CacheFile cacheFile = null;
    var noOp = false;

    if (_request.AllowNoOp && File.Exists(_request.Project.RestoreMetadata.CacheFilePath))
    {
        cacheFile = CacheFileFormat.Load(_request.Project.RestoreMetadata.CacheFilePath, _logger);

        if (cacheFile.IsValid && StringComparer.Ordinal.Equals(cacheFile.DgSpecHash, newDgSpecHash))
        {
            // Hashes match: nothing relevant changed since the last restore.
            _logger.LogVerbose(string.Format(CultureInfo.CurrentCulture, Strings.Log_RestoreNoOpFinish, _request.Project.Name));
            _success = true;
            noOp = true;
        }
        else
        {
            // Stale or invalid cache file: a full restore runs and a fresh
            // cache entry carrying the new hash will be committed.
            _logger.LogVerbose(string.Format(CultureInfo.CurrentCulture, Strings.Log_RestoreNoOpDGChanged, _request.Project.Name));
            cacheFile = new CacheFile(newDgSpecHash);
        }
    }

    if (cacheFile == null)
    {
        // No-op disabled, or no cache file exists on disk yet.
        cacheFile = new CacheFile(newDgSpecHash);
    }

    return new KeyValuePair<CacheFile, bool>(cacheFile, noOp);
}
public void Write_WhenVersionIsCurrentVersion_WritesCorrectly()
{
    // Serializes a fully-populated CacheFile and compares the raw bytes written
    // by CacheFileFormat.Write against a hand-built expected JSON payload.
    using (var testDirectory = TestDirectory.Create())
    {
        var v = "2";
        var dgSpecHash = "LhkXQGGI+FQMy9dhLYjG5sWcHX3z/copzi4hjjBiY3Fotv0i7zQCikMZQ+rOKJ03gtx0hoHwIx5oKkM7sVHu7g==";
        var success = "true";
        var projectFullPath = Path.Combine(testDirectory, "E6E7F0F96EBE438887ED7D0B9FC88AFA");
        var file1 = Path.Combine(testDirectory, "DA9707B5FCFB4DA8B8BB77AD527C778C");
        var file2 = Path.Combine(testDirectory, "C78CE6D18C604A55BECD845F4F694A4B");

        // Paths are run through JsonConvert.ToString so that the expected text
        // carries the same escaping the writer produces.
        var expected = $@"{{ ""version"": {v}, ""dgSpecHash"": ""{dgSpecHash}"", ""success"": {success}, ""projectFilePath"": {JsonConvert.ToString(projectFullPath)}, ""expectedPackageFiles"": [ {JsonConvert.ToString(file1)}, {JsonConvert.ToString(file2)} ], ""logs"": [ {{ ""code"": ""NU1000"", ""level"": ""Information"", ""message"": ""Test"" }} ] }}";

        var cacheFile = new CacheFile(dgSpecHash)
        {
            Success = bool.Parse(success),
            ProjectFilePath = projectFullPath,
            ExpectedPackageFilePaths = new List<string> { file1, file2 },
            LogMessages = new List<IAssetsLogMessage>
            {
                new AssetsLogMessage(LogLevel.Information, NuGetLogCode.NU1000, "Test")
            }
        };

        using (var stream = new MemoryStream())
        {
            CacheFileFormat.Write(stream, cacheFile);

            var actual = Encoding.UTF8.GetString(stream.ToArray());

            Assert.Equal(expected, actual);
        }
    }
}
public void CacheFileFormat_CacheFileReadCorrectly(string v, string dgSpecHash, string success, bool expectedValid)
{
    // Parametrized round-trip: build a minimal cache-file JSON payload from the
    // theory data, read it back, and verify every parsed property.
    var cacheTemplate = @"{{ ""version"": {0}, ""dgSpecHash"": ""{1}"", ""success"": {2} }}";
    var json = string.Format(cacheTemplate, v, dgSpecHash, success);

    CacheFile cacheFile;
    using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)))
    {
        cacheFile = CacheFileFormat.Read(stream, NullLogger.Instance, "emptyPath");
    }

    Assert.Equal(expectedValid, cacheFile.IsValid);
    Assert.Equal(bool.Parse(success), cacheFile.Success);
    Assert.Equal(dgSpecHash, cacheFile.DgSpecHash);
    Assert.Equal(int.Parse(v), cacheFile.Version);
}
public void CacheFileFormat_CacheFileWrittenCorrectly()
{
    // Writes a minimal CacheFile and checks that the serialized JSON matches
    // the version/hash/success template exactly.
    var v = "1";
    var dgSpecHash = "LhkXQGGI+FQMy9dhLYjG5sWcHX3z/copzi4hjjBiY3Fotv0i7zQCikMZQ+rOKJ03gtx0hoHwIx5oKkM7sVHu7g==";
    var success = "true";
    var cacheTemplate = @"{{ ""version"": {0}, ""dgSpecHash"": ""{1}"", ""success"": {2} }}";

    var cacheFile = new CacheFile(dgSpecHash)
    {
        Success = bool.Parse(success)
    };

    using (var stream = new MemoryStream())
    {
        CacheFileFormat.Write(stream, cacheFile);

        var actual = Encoding.UTF8.GetString(stream.ToArray());
        var expected = string.Format(cacheTemplate, v, dgSpecHash, success);

        Assert.Equal(expected, actual);
    }
}
private async Task CommitCacheFileAsync(ILogger log, bool toolCommit)
{
    // Nothing to commit unless both the cache file and its target path exist.
    if (CacheFile == null || CacheFilePath == null)
    {
        return;
    }

    // Tool commits historically logged a different message; this is done to
    // preserve the old behavior.
    var message = toolCommit ? Strings.Log_ToolWritingCacheFile : Strings.Log_WritingCacheFile;
    log.LogVerbose(string.Format(CultureInfo.CurrentCulture, message, CacheFilePath));

    await FileUtility.ReplaceWithLock(
        outPath => CacheFileFormat.Write(outPath, CacheFile),
        CacheFilePath);
}
public void Read_WhenVersionIs1_ReadsCorrectly()
{
    var testLogger = new TestLogger();
    // Note: the payload stores the version as a quoted string and carries a
    // trailing comma; the reader still parses it without logging diagnostics.
    var contents = $@"{{ ""version"": ""1"", ""dgSpecHash"": ""LhkXQGGI+FQMy9dhLYjG5sWcHX3z/copzi4hjjBiY3Fotv0i7zQCikMZQ+rOKJ03gtx0hoHwIx5oKkM7sVHu7g=="", ""success"": true, }}";

    CacheFile cacheFile;
    using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(contents)))
    {
        cacheFile = CacheFileFormat.Read(stream, testLogger, "emptyPath");
    }

    // A version-1 file is readable but is reported as not valid.
    Assert.False(cacheFile.IsValid);
    Assert.True(cacheFile.Success);
    Assert.Equal("LhkXQGGI+FQMy9dhLYjG5sWcHX3z/copzi4hjjBiY3Fotv0i7zQCikMZQ+rOKJ03gtx0hoHwIx5oKkM7sVHu7g==", cacheFile.DgSpecHash);
    Assert.Equal(1, cacheFile.Version);

    // Reading the old-version file must not surface any errors or warnings.
    Assert.Equal(0, testLogger.Errors);
    Assert.Equal(0, testLogger.Warnings);
}
private KeyValuePair<CacheFile, bool> EvaluateCacheFile()
{
    // Determines whether this restore can be skipped entirely (a "no-op") by
    // comparing the hash of the current restore request with the hash stored in
    // the cache file from the previous restore. Returns the cache file to commit
    // plus a flag indicating whether the restore is a no-op.
    CacheFile cacheFile;
    var noOp = false;
    // Hash is computed before any tool-path resolution below — ordering matters.
    var newDgSpecHash = NoOpRestoreUtilities.GetHash(_request);

    if (_request.ProjectStyle == ProjectStyle.DotnetCliTool && _request.AllowNoOp) // No need to attempt to resolve the tool if no-op is not allowed.
    {
        NoOpRestoreUtilities.UpdateRequestBestMatchingToolPathsIfAvailable(_request);
    }

    if (_request.AllowNoOp && File.Exists(_request.Project.RestoreMetadata.CacheFilePath))
    {
        // NOTE(review): SafeRead presumably guards against concurrent access to the
        // cache file — confirm against FileUtility.
        cacheFile = FileUtility.SafeRead(_request.Project.RestoreMetadata.CacheFilePath, (stream, path) => CacheFileFormat.Read(stream, _logger, path));

        if (cacheFile.IsValid && StringComparer.Ordinal.Equals(cacheFile.DgSpecHash, newDgSpecHash))
        {
            // Hashes match: nothing relevant changed since the last restore.
            _logger.LogVerbose(string.Format(CultureInfo.CurrentCulture, Strings.Log_RestoreNoOpFinish, _request.Project.Name));
            _success = true;
            noOp = true;
        }
        else
        {
            // Stale or invalid cache file: restore runs and a fresh cache entry
            // carrying the new hash will be committed.
            cacheFile = new CacheFile(newDgSpecHash);
            _logger.LogVerbose(string.Format(CultureInfo.CurrentCulture, Strings.Log_RestoreNoOpDGChanged, _request.Project.Name));
        }
    }
    else
    {
        // No-op disabled, or no cache file exists on disk yet.
        cacheFile = new CacheFile(newDgSpecHash);
    }

    if (_request.ProjectStyle == ProjectStyle.DotnetCliTool)
    {
        if (noOp) // Only if the hash matches, then load the lock file. This is a performance hit, so we need to delay it as much as possible.
        {
            _request.ExistingLockFile = LockFileUtilities.GetLockFile(_request.LockFilePath, _request.Log);
        }
        else
        {
            // Clean up to preserve the pre no-op behavior. This should not be used, but we want to be cautious.
            _request.LockFilePath = null;
            _request.Project.RestoreMetadata.CacheFilePath = null;
        }
    }

    return (new KeyValuePair<CacheFile, bool>(cacheFile, noOp));
}
public void Read_WhenVersionIsCurrentVersion_ReadsCorrectly(bool haveMissingPackageFiles)
{
    // Reads a version-2 cache file through CacheFileFormat.Read and verifies
    // every surfaced property, including whether expected package files that are
    // absent from disk are flagged via HasAnyMissingPackageFiles.
    using (var workingDir = TestDirectory.Create())
    {
        var logger = new TestLogger();
        var projectFullPath = Path.Combine(workingDir, "EA11D9B8013142A6B40A81FD90F57EAA");
        var dgSpecHash = "LhkXQGGI+FQMy9dhLYjG5sWcHX3z/copzi4hjjBiY3Fotv0i7zQCikMZQ+rOKJ03gtx0hoHwIx5oKkM7sVHu7g==";
        var success = "true";
        var file1 = Path.Combine(workingDir, "7A329DF71DDD41F689C9AD876DDF79F6");
        var file2 = Path.Combine(workingDir, "C16089965CF84822A71D07580B29AF0E");
        // file1 always exists on disk; file2 is created only when the test should
        // see no missing package files. Leaving file2 absent simulates an expected
        // package file that has gone missing.
        File.WriteAllText(file1, string.Empty);
        if (!haveMissingPackageFiles)
        {
            File.WriteAllText(file2, string.Empty);
        }

        var version = "2";
        var contents = $@"{{ ""version"": {version}, ""dgSpecHash"": ""{dgSpecHash}"", ""success"": {success}, ""projectFilePath"": {JsonConvert.ToString(projectFullPath)}, ""expectedPackageFiles"": [ {JsonConvert.ToString(file1)}, {JsonConvert.ToString(file2)} ], ""logs"": [ {{ ""code"": ""NU1000"", ""level"": ""Information"", ""message"": ""Test"" }} ] }}";
        CacheFile cacheFile = null;
        using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(contents)))
        {
            cacheFile = CacheFileFormat.Read(stream, logger, "emptyPath");
        }

        // A current-version file is valid and all JSON fields round-trip.
        Assert.True(cacheFile.IsValid);
        Assert.Equal(bool.Parse(success), cacheFile.Success);
        Assert.Equal(dgSpecHash, cacheFile.DgSpecHash);
        Assert.Equal(int.Parse(version), cacheFile.Version);
        // The missing-file flag must mirror whether file2 was created above.
        if (haveMissingPackageFiles)
        {
            Assert.True(cacheFile.HasAnyMissingPackageFiles);
        }
        else
        {
            Assert.False(cacheFile.HasAnyMissingPackageFiles);
        }
        Assert.Equal(projectFullPath, cacheFile.ProjectFilePath);
        Assert.Equal(1, cacheFile.LogMessages.Count);
        // Reading a well-formed file must not surface diagnostics.
        Assert.Equal(0, logger.Errors);
        Assert.Equal(0, logger.Warnings);
    }
}