public void ToReadableTimeFormat_Localized_AssumesSpainLocale_ContainsTimeNumber_Succeeds()
{
    // Arrange: es-ES renders decimal fractions with a comma separator.
    var spanishCulture = new CultureInfo("es-ES");
    var duration = TimeSpan.FromMinutes(3.6d);

    // Act
    string actual = DatetimeUtility.ToReadableTimeFormat(duration, spanishCulture);

    // Assert: exact localized rendering, comma as the decimal mark.
    Assert.Equal("3,6 min", actual);
}
public void ToReadableTimeFormat_Localized_AssumesEnglishLocale_ContainsTimeNumber_Succeeds(string timeNumber, TimeSpan time)
{
    // Act: no culture argument, so the default (English) locale applies.
    string actual = DatetimeUtility.ToReadableTimeFormat(time);

    // Assert: the formatted output carries the expected numeric portion.
    Assert.Contains(timeNumber, actual);
}
/// <summary>
/// Commits a restore result to disk, logs the per-project outcome, and
/// builds a <see cref="RestoreSummary"/> describing what happened.
/// </summary>
/// <param name="restoreResult">Pairing of the summary request and the restore result to commit.</param>
/// <param name="token">Token used to cancel the commit.</param>
/// <returns>The summary for this restore, including messages lifted from the assets file.</returns>
public static async Task <RestoreSummary> CommitAsync(RestoreResultPair restoreResult, CancellationToken token)
{
    var request = restoreResult.SummaryRequest;
    var restore = restoreResult.Result;
    var logger = request.Request.Log;

    // Persist the restore result.
    logger.LogInformation(Strings.Log_Committing);
    await restore.CommitAsync(logger, token);

    bool isDotnetTool = request.Request.ProjectStyle == ProjectStyle.DotnetToolReference;
    string elapsed = DatetimeUtility.ToReadableTimeFormat(restore.ElapsedTime);

    if (restore.Success)
    {
        // No-op restores log at Information rather than Minimal, so at Minimal
        // verbosity users only see the projects that were actually restored;
        // a summary for no-ops is logged at the end instead.
        var level = restore is NoOpRestoreResult ? LogLevel.Information : LogLevel.Minimal;
        var template = isDotnetTool ? Strings.Log_RestoreCompleteDotnetTool : Strings.Log_RestoreComplete;
        logger.Log(level, string.Format(CultureInfo.CurrentCulture, template, request.InputPath, elapsed));
    }
    else
    {
        var template = isDotnetTool ? Strings.Log_RestoreFailedDotnetTool : Strings.Log_RestoreFailed;
        logger.LogMinimal(string.Format(CultureInfo.CurrentCulture, template, request.InputPath, elapsed));
    }

    // Copy the summary messages out of the assets file. This will be removed later.
    var messages = restore.LockFile?.LogMessages
        .Select(e => new RestoreLogMessage(e.Level, e.Code, e.Message))
        ?? Enumerable.Empty <RestoreLogMessage>();

    // Build the summary
    return new RestoreSummary(
        restore,
        request.InputPath,
        request.ConfigFiles,
        request.Sources,
        messages);
}
/// <summary>
/// Commits a restore result to disk, logs the outcome at Minimal verbosity,
/// and builds a <see cref="RestoreSummary"/> describing what happened.
/// </summary>
/// <param name="restoreResult">Pairing of the summary request and the restore result to commit.</param>
/// <param name="token">Token used to cancel the commit.</param>
/// <returns>The summary for this restore, including messages lifted from the assets file.</returns>
public static async Task <RestoreSummary> CommitAsync(RestoreResultPair restoreResult, CancellationToken token)
{
    var request = restoreResult.SummaryRequest;
    var restore = restoreResult.Result;
    var logger = request.Request.Log;

    // Persist the restore result.
    logger.LogInformation(Strings.Log_Committing);
    await restore.CommitAsync(logger, token);

    // Both outcomes log at Minimal with the same argument layout; only the
    // resource template differs (tool restores have their own wording).
    bool isDotnetTool = request.Request.ProjectStyle == ProjectStyle.DotnetToolReference;
    string template = restore.Success
        ? (isDotnetTool ? Strings.Log_RestoreCompleteDotnetTool : Strings.Log_RestoreComplete)
        : (isDotnetTool ? Strings.Log_RestoreFailedDotnetTool : Strings.Log_RestoreFailed);

    logger.LogMinimal(string.Format(
        CultureInfo.CurrentCulture,
        template,
        DatetimeUtility.ToReadableTimeFormat(restore.ElapsedTime),
        request.InputPath));

    // Copy the summary messages out of the assets file. This will be removed later.
    var messages = restore.LockFile?.LogMessages
        .Select(e => new RestoreLogMessage(e.Level, e.Code, e.Message))
        ?? Enumerable.Empty <RestoreLogMessage>();

    // Build the summary
    return new RestoreSummary(
        restore,
        request.InputPath,
        request.ConfigFiles,
        request.Sources,
        messages);
}
/// <summary>
/// Commits a restore result to disk, logs the outcome at Minimal verbosity,
/// and builds a <see cref="RestoreSummary"/> from the request's settings and
/// collected errors.
/// </summary>
/// <param name="restoreResult">Pairing of the summary request and the restore result to commit.</param>
/// <param name="token">Token used to cancel the commit.</param>
/// <returns>The summary for this restore.</returns>
public static async Task <RestoreSummary> CommitAsync(RestoreResultPair restoreResult, CancellationToken token)
{
    var request = restoreResult.SummaryRequest;
    var restore = restoreResult.Result;
    var logger = request.Request.Log;

    // Persist the restore result.
    logger.LogInformation(Strings.Log_Committing);
    await restore.CommitAsync(logger, token);

    // Success and failure share the same Minimal-level log shape;
    // only the resource template differs.
    string template = restore.Success ? Strings.Log_RestoreComplete : Strings.Log_RestoreFailed;
    logger.LogMinimal(string.Format(
        CultureInfo.CurrentCulture,
        template,
        DatetimeUtility.ToReadableTimeFormat(restore.ElapsedTime),
        request.InputPath));

    // Build the summary
    return new RestoreSummary(
        restore,
        request.InputPath,
        request.Settings,
        request.Sources,
        request.CollectorLogger.Errors);
}
/// <summary>
/// Gathers dependency info for the primary targets and the full closure of
/// their dependencies from all configured sources, merging results into a
/// single distinct set keyed by package identity.
/// </summary>
/// <param name="token">Cancellation token, checked between gather passes.</param>
/// <returns>Distinct set of gathered packages, first-requested source wins on duplicates.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when a primary target cannot be found in any primary source
/// (unless this is an update-all operation).
/// </exception>
private async Task <HashSet <SourcePackageDependencyInfo> > GatherAsync(CancellationToken token)
{
    // preserve start time of gather api
    var stopWatch = new Stopwatch();
    stopWatch.Start();
    token.ThrowIfCancellationRequested();
    // get a distinct set of packages from all repos
    var combinedResults = new HashSet <SourcePackageDependencyInfo>(PackageIdentity.Comparer);
    // Initialize dependency info resources in parallel
    await InitializeResourcesAsync(token);
    var allPrimaryTargets = new HashSet <string>(StringComparer.OrdinalIgnoreCase);
    // resolve primary targets only from primary sources
    foreach (var primaryTarget in _context.PrimaryTargets)
    {
        // Add the id to the search list to block searching for all versions
        _idsSearched.Add(primaryTarget.Id);
        allPrimaryTargets.Add(primaryTarget.Id);
        QueueWork(_primaryResources, primaryTarget, ignoreExceptions: false, isInstalledPackage: false);
    }
    // null can occur for scenarios with PackageIdentities only
    if (_context.PrimaryTargetIds != null)
    {
        foreach (var primaryTargetId in _context.PrimaryTargetIds)
        {
            allPrimaryTargets.Add(primaryTargetId);
            // Id-only target: a null version means "any version" for the fetch.
            var identity = new PackageIdentity(primaryTargetId, version: null);
            QueueWork(_primaryResources, identity, ignoreExceptions: false, isInstalledPackage: false);
        }
    }
    // Start fetching the primary targets
    StartWorkerTasks(token);
    // Gather installed packages
    await GatherInstalledPackagesAsync(_context.InstalledPackages, allPrimaryTargets, token);
    // walk the dependency graph both upwards and downwards for the new package
    // this is done in multiple passes to find the complete closure when
    // new dependencies are found
    while (true)
    {
        token.ThrowIfCancellationRequested();
        // Start tasks for queued requests and process finished results.
        await StartTasksAndProcessWork(token);
        // Completed results
        var currentItems = _results.ToList();
        // Get a unique list of packages
        // Results are ordered by their request order. If the same version of package
        // exists in multiple sources the hashset will contain the package from the
        // source where it was requested from first.
        var currentResults = new HashSet <SourcePackageDependencyInfo>(
            currentItems.OrderBy(item => item.Request.Order)
                .SelectMany(item => item.Packages),
            PackageIdentity.Comparer);
        // Remove downgrades if the flag is not set, this will skip unneeded dependencies from older versions
        if (!_context.AllowDowngrades)
        {
            foreach (var installedPackage in _context.InstalledPackages)
            {
                // Clear out all versions of the installed package which are less than the installed version
                currentResults.RemoveWhere(package =>
                    string.Equals(installedPackage.Id, package.Id, StringComparison.OrdinalIgnoreCase)
                    && package.Version < installedPackage.Version);
            }
        }
        // Find all installed packages, these may have come from a remote source
        // if they were not found on disk, so it is not possible to compute this up front.
        var installedInfo = new HashSet <SourcePackageDependencyInfo>(
            currentItems.Where(item => item.Request.IsInstalledPackage)
                .OrderBy(item => item.Request.Order)
                .SelectMany(item => item.Packages),
            PackageIdentity.Comparer);
        // Find the closure of all parent and child packages around the targets
        // Skip walking dependencies when the behavior is set to ignore
        if (_context.ResolutionContext?.DependencyBehavior != Resolver.DependencyBehavior.Ignore)
        {
            var closureIds = GetClosure(currentResults, installedInfo, _idsSearched);
            // Find all ids in the closure that have not been gathered
            var missingIds = closureIds.Except(_idsSearched, StringComparer.OrdinalIgnoreCase);
            // Gather packages for all missing ids
            foreach (var missingId in missingIds)
            {
                // ignoreExceptions: a missing transitive id is not fatal here;
                // missing *primary* targets are validated after the loop instead.
                QueueWork(_allResources, missingId, ignoreExceptions: true);
            }
        }
        // We are done when the queue is empty, and the number of finished requests matches the total request count
        if (_gatherRequests.Count < 1 && _workerTasks.Count < 1)
        {
            _context.Log.LogDebug(string.Format("Total number of results gathered : {0}", _results.Count));
            break;
        }
    }
    token.ThrowIfCancellationRequested();
    // Order sources by their request order
    foreach (var result in _results.OrderBy(result => result.Request.Order))
    {
        // Merge the results, taking on the first instance of each package
        combinedResults.UnionWith(result.Packages);
    }
    // Collect the display names of all primary sources for error reporting.
    List <String> allPrimarySourcesList = new List <string>();
    foreach (var src in _primaryResources)
    {
        allPrimarySourcesList.Add(src.Source.PackageSource.Source);
    }
    var allPrimarySources = String.Join(",", allPrimarySourcesList);
    // When it's update all packages scenario, then ignore throwing error for missing primary targets in specified sources.
    if (!_context.IsUpdateAll)
    {
        // Throw if a primary target was not found
        // The primary package may be missing if there are network issues and the sources were unreachable
        foreach (var targetId in allPrimaryTargets)
        {
            if (!combinedResults.Any(package => string.Equals(package.Id, targetId, StringComparison.OrdinalIgnoreCase)))
            {
                // Prefer "id version" in the error when the target carried a version.
                string packageIdentity = targetId;
                foreach (var pid in _context.PrimaryTargets)
                {
                    if (string.Equals(targetId, pid.Id, StringComparison.OrdinalIgnoreCase))
                    {
                        packageIdentity = string.Format(CultureInfo.CurrentCulture, "{0} {1}", targetId, pid.Version);
                        break;
                    }
                }
                string message = String.Format(Strings.PackageNotFoundInPrimarySources, packageIdentity, allPrimarySources);
                throw new InvalidOperationException(message);
            }
        }
    }
    // calculate total time taken to gather all packages as well as with each source
    stopWatch.Stop();
    _context.Log.LogMinimal(
        string.Format(Strings.GatherTotalTime, DatetimeUtility.ToReadableTimeFormat(stopWatch.Elapsed)));
    _context.Log.LogDebug("Summary of time taken to gather dependencies per source :");
    foreach (var key in _timeTaken.Keys)
    {
        _context.Log.LogDebug(
            string.Format("{0}\t-\t{1}", key, DatetimeUtility.ToReadableTimeFormat(_timeTaken[key])));
    }
    return (combinedResults);
}
/// <summary>
/// Resolve a package closure
/// </summary>
/// <param name="context">Resolver inputs: required ids, available packages, behavior, preferences.</param>
/// <param name="token">Cancellation token, checked before and after the solver runs.</param>
/// <returns>Topologically sorted package identities forming the solution.</returns>
/// <exception cref="ArgumentNullException">When <paramref name="context"/> is null.</exception>
/// <exception cref="NuGetResolverInputException">When a required id has no dependency info available.</exception>
/// <exception cref="NuGetResolverConstraintException">When no solution exists or a circular dependency is found.</exception>
public IEnumerable <PackageIdentity> Resolve(PackageResolverContext context, CancellationToken token)
{
    // FIX: the stopwatch was previously constructed but never started, so the
    // ResolverTotalTime message always reported ~zero elapsed time.
    var stopWatch = Stopwatch.StartNew();
    token.ThrowIfCancellationRequested();
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }
    // validation: every required id must have at least one available package
    foreach (var requiredId in context.RequiredPackageIds)
    {
        if (!context.AvailablePackages.Any(p => StringComparer.OrdinalIgnoreCase.Equals(p.Id, requiredId)))
        {
            throw new NuGetResolverInputException(String.Format(CultureInfo.CurrentCulture, Strings.MissingDependencyInfo, requiredId));
        }
    }
    // convert the available packages into ResolverPackages
    var resolverPackages = new List <ResolverPackage>();
    // pre-process the available packages to remove any packages that can't possibly form part of a solution
    var availablePackages = RemoveImpossiblePackages(context.AvailablePackages, context.RequiredPackageIds);
    foreach (var package in availablePackages)
    {
        IEnumerable <PackageDependency> dependencies = null;
        // clear out the dependencies if the behavior is set to ignore
        if (context.DependencyBehavior == DependencyBehavior.Ignore)
        {
            dependencies = Enumerable.Empty <PackageDependency>();
        }
        else
        {
            dependencies = package.Dependencies ?? Enumerable.Empty <PackageDependency>();
        }
        resolverPackages.Add(new ResolverPackage(package.Id, package.Version, dependencies, package.Listed, false));
    }
    // Sort the packages to make this process as deterministic as possible
    resolverPackages.Sort(PackageIdentityComparer.Default);
    // Keep track of the ids we have added
    var groupsAdded = new HashSet <string>(StringComparer.OrdinalIgnoreCase);
    var grouped = new List <List <ResolverPackage> >();
    // group the packages by id
    foreach (var group in resolverPackages.GroupBy(e => e.Id, StringComparer.OrdinalIgnoreCase))
    {
        groupsAdded.Add(group.Key);
        var curSet = group.ToList();
        // add an absent package for non-targets
        // being absent allows the resolver to throw it out if it is not needed
        if (!context.RequiredPackageIds.Contains(group.Key, StringComparer.OrdinalIgnoreCase))
        {
            curSet.Add(new ResolverPackage(id: group.Key, version: null, dependencies: null, listed: true, absent: true));
        }
        grouped.Add(curSet);
    }
    // find all needed dependencies
    var dependencyIds = resolverPackages.Where(e => e.Dependencies != null)
        .SelectMany(e => e.Dependencies.Select(d => d.Id).Distinct(StringComparer.OrdinalIgnoreCase));
    foreach (string depId in dependencyIds)
    {
        // packages which are unavailable need to be added as absent packages
        // ex: if A -> B and B is not found anywhere in the source repositories we add B as absent
        if (!groupsAdded.Contains(depId))
        {
            groupsAdded.Add(depId);
            grouped.Add(new List <ResolverPackage>() { new ResolverPackage(id: depId, version: null, dependencies: null, listed: true, absent: true) });
        }
    }
    token.ThrowIfCancellationRequested();
    // keep track of the best partial solution
    var bestSolution = Enumerable.Empty <ResolverPackage>();
    Action <IEnumerable <ResolverPackage> > diagnosticOutput = (partialSolution) =>
    {
        // store each solution as they pass through.
        // the combination solver verifies that the last one returned is the best
        bestSolution = partialSolution;
    };
    // Run solver
    var comparer = new ResolverComparer(context.DependencyBehavior, context.PreferredVersions, context.TargetIds);
    var sortedGroups = ResolverInputSort.TreeFlatten(grouped, context);
    var solution = CombinationSolver <ResolverPackage> .FindSolution(
        groupedItems : sortedGroups,
        itemSorter : comparer,
        shouldRejectPairFunc : ResolverUtility.ShouldRejectPackagePair,
        diagnosticOutput : diagnosticOutput);
    // check if a solution was found
    if (solution != null)
    {
        var nonAbsentCandidates = solution.Where(c => !c.Absent);
        if (nonAbsentCandidates.Any())
        {
            // topologically sort non absent packages
            var sortedSolution = ResolverUtility.TopologicalSort(nonAbsentCandidates);
            // Find circular dependency for topologically sorted non absent packages since it will help maintain cache of
            // already processed packages
            var circularReferences = ResolverUtility.FindFirstCircularDependency(sortedSolution);
            if (circularReferences.Any())
            {
                // the resolver is able to handle circular dependencies, however we should throw here to keep these from happening
                throw new NuGetResolverConstraintException(
                    String.Format(CultureInfo.CurrentCulture, Strings.CircularDependencyDetected,
                        String.Join(" => ", circularReferences.Select(package => $"{package.Id} {package.Version.ToNormalizedString()}"))));
            }
            // solution found!
            stopWatch.Stop();
            // FIX: pass an explicit IFormatProvider (CA1305), consistent with the
            // other string.Format calls in this method.
            context.Log.LogMinimal(
                string.Format(CultureInfo.CurrentCulture, Strings.ResolverTotalTime, DatetimeUtility.ToReadableTimeFormat(stopWatch.Elapsed)));
            return sortedSolution.ToArray();
        }
    }
    // no solution was found, throw an error with a diagnostic message
    var message = ResolverUtility.GetDiagnosticMessage(bestSolution, context.AvailablePackages, context.PackagesConfig, context.TargetIds, context.PackageSources);
    throw new NuGetResolverConstraintException(message);
}
/// <summary>
/// Resolve a package closure
/// </summary>
/// <param name="context">Resolver inputs: required ids, available packages, installed packages.config entries, behavior, preferences.</param>
/// <param name="token">Cancellation token, checked before and after the solver runs.</param>
/// <returns>Topologically sorted package identities forming the solution.</returns>
/// <exception cref="ArgumentNullException">When <paramref name="context"/> is null.</exception>
/// <exception cref="NuGetResolverInputException">When a required id has no dependency info available.</exception>
/// <exception cref="NuGetResolverConstraintException">When no solution exists, absent packages remain, or a circular dependency is found.</exception>
public IEnumerable <PackageIdentity> Resolve(PackageResolverContext context, CancellationToken token)
{
    // FIX: the stopwatch was previously constructed but never started, so the
    // "Resolving dependency information took" message always reported ~zero.
    var stopWatch = Stopwatch.StartNew();
    token.ThrowIfCancellationRequested();
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }
    // validation: every required id must have at least one available package
    foreach (var requiredId in context.RequiredPackageIds)
    {
        if (!context.AvailablePackages.Any(p => StringComparer.OrdinalIgnoreCase.Equals(p.Id, requiredId)))
        {
            throw new NuGetResolverInputException(String.Format(CultureInfo.CurrentCulture, "Unable to find package '{0}'. Existing packages must be restored before performing an install or update.", requiredId));
        }
    }
    var invalidExistingPackages = new List <string>();
    var installedPackages = context.PackagesConfig.Select(p => p.PackageIdentity).ToArray();
    // validate existing package.config for any invalid dependency
    foreach (var package in installedPackages)
    {
        var existingPackage = context.AvailablePackages.FirstOrDefault(package.Equals);
        if (existingPackage != null)
        {
            // check if each dependency can be satisfied with existing packages
            var brokenDependencies = GetBrokenDependencies(existingPackage, installedPackages);
            if (brokenDependencies != null && brokenDependencies.Any())
            {
                invalidExistingPackages.AddRange(brokenDependencies.Select(dependency => FormatDependencyConstraint(existingPackage, dependency)));
            }
        }
        else
        {
            // check same package is being updated and we've a higher version then
            // ignore logging warning for that.
            existingPackage = context.AvailablePackages.FirstOrDefault(
                p => StringComparer.OrdinalIgnoreCase.Equals(p.Id, package.Id)
                     && VersionComparer.Default.Compare(p.Version, package.Version) > 0);
            if (existingPackage == null)
            {
                var packageString = $"'{package.Id} {package.Version.ToNormalizedString()}'";
                invalidExistingPackages.Add(packageString);
            }
        }
    }
    // log warning message for all the invalid package dependencies
    if (invalidExistingPackages.Count > 0)
    {
        context.Log.LogWarning(
            string.Format(
                CultureInfo.CurrentCulture,
                "One or more unresolved package dependency constraints detected in the existing packages.config file. All dependency constraints must be resolved to add or update packages. If these packages are being updated this message may be ignored, if not the following error(s) may be blocking the current package operation: {0}",
                string.Join(", ", invalidExistingPackages)));
    }
    // convert the available packages into ResolverPackages
    var resolverPackages = new List <ResolverPackage>();
    // pre-process the available packages to remove any packages that can't possibly form part of a solution
    var availablePackages = RemoveImpossiblePackages(context.AvailablePackages, context.RequiredPackageIds);
    foreach (var package in availablePackages)
    {
        IEnumerable <PackageDependency> dependencies = null;
        // clear out the dependencies if the behavior is set to ignore
        if (context.DependencyBehavior == DependencyBehavior.Ignore)
        {
            dependencies = Enumerable.Empty <PackageDependency>();
        }
        else
        {
            dependencies = package.Dependencies ?? Enumerable.Empty <PackageDependency>();
        }
        resolverPackages.Add(new ResolverPackage(package.Id, package.Version, dependencies, package.Listed, false));
    }
    // Sort the packages to make this process as deterministic as possible
    resolverPackages.Sort(PackageIdentityComparer.Default);
    // Keep track of the ids we have added
    var groupsAdded = new HashSet <string>(StringComparer.OrdinalIgnoreCase);
    var grouped = new List <List <ResolverPackage> >();
    // group the packages by id
    foreach (var group in resolverPackages.GroupBy(e => e.Id, StringComparer.OrdinalIgnoreCase))
    {
        groupsAdded.Add(group.Key);
        var curSet = group.ToList();
        // add an absent package for non-targets
        // being absent allows the resolver to throw it out if it is not needed
        if (!context.RequiredPackageIds.Contains(group.Key, StringComparer.OrdinalIgnoreCase))
        {
            curSet.Add(new ResolverPackage(id: group.Key, version: null, dependencies: null, listed: true, absent: true));
        }
        grouped.Add(curSet);
    }
    // find all needed dependencies
    var dependencyIds = resolverPackages.Where(e => e.Dependencies != null)
        .SelectMany(e => e.Dependencies.Select(d => d.Id).Distinct(StringComparer.OrdinalIgnoreCase));
    foreach (string depId in dependencyIds)
    {
        // packages which are unavailable need to be added as absent packages
        // ex: if A -> B and B is not found anywhere in the source repositories we add B as absent
        if (!groupsAdded.Contains(depId))
        {
            groupsAdded.Add(depId);
            grouped.Add(new List <ResolverPackage>() { new ResolverPackage(id: depId, version: null, dependencies: null, listed: true, absent: true) });
        }
    }
    token.ThrowIfCancellationRequested();
    // keep track of the best partial solution
    var bestSolution = Enumerable.Empty <ResolverPackage>();
    Action <IEnumerable <ResolverPackage> > diagnosticOutput = (partialSolution) =>
    {
        // store each solution as they pass through.
        // the combination solver verifies that the last one returned is the best
        bestSolution = partialSolution;
    };
    // Run solver
    var comparer = new ResolverComparer(context.DependencyBehavior, context.PreferredVersions, context.TargetIds);
    var sortedGroups = ResolverInputSort.TreeFlatten(grouped, context);
    var solution = CombinationSolver <ResolverPackage> .FindSolution(
        groupedItems : sortedGroups,
        itemSorter : comparer,
        shouldRejectPairFunc : ResolverUtility.ShouldRejectPackagePair,
        diagnosticOutput : diagnosticOutput);
    // check if a solution was found
    if (solution != null)
    {
        var nonAbsentCandidates = solution.Where(c => !c.Absent);
        if (nonAbsentCandidates.Any())
        {
            // topologically sort non absent packages
            var sortedSolution = ResolverUtility.TopologicalSort(nonAbsentCandidates);
            // Find circular dependency for topologically sorted non absent packages since it will help maintain cache of
            // already processed packages
            var circularReferences = ResolverUtility.FindFirstCircularDependency(sortedSolution);
            if (circularReferences.Any())
            {
                // the resolver is able to handle circular dependencies, however we should throw here to keep these from happening
                throw new NuGetResolverConstraintException(
                    String.Format(CultureInfo.CurrentCulture, "Circular dependency detected '{0}'.",
                        String.Join(" => ", circularReferences.Select(package => $"{package.Id} {package.Version.ToNormalizedString()}"))));
            }
            // solution found!
            stopWatch.Stop();
            // FIX: pass an explicit IFormatProvider (CA1305), consistent with the
            // other string.Format calls in this method.
            context.Log.LogMinimal(
                string.Format(CultureInfo.CurrentCulture, "Resolving dependency information took {0}", DatetimeUtility.ToReadableTimeFormat(stopWatch.Elapsed)));
            return sortedSolution.ToArray();
        }
    }
    // report any absent packages left in the best partial solution before the generic diagnostic
    var absentPackages = bestSolution.Where(x => x?.Absent == true).ToList();
    if (absentPackages.Any())
    {
        throw new NuGetResolverConstraintException($"There were {absentPackages.Count} absent packages found:\r\n{string.Join(", ", absentPackages.Select(x => x.Id))}");
    }
    // no solution was found, throw an error with a diagnostic message
    var message = ResolverUtility.GetDiagnosticMessage(bestSolution, context.AvailablePackages, context.PackagesConfig, context.TargetIds, context.PackageSources);
    throw new NuGetResolverConstraintException(message);
}