/// <summary>
/// Upload all locally changed files to the remote source.
/// </summary>
/// <param name="log">Logger for progress output.</param>
/// <param name="token">Cancellation token passed through to each upload.</param>
/// <returns>True on completion. Individual file failures surface as exceptions.</returns>
public async Task<bool> Commit(ILogger log, CancellationToken token)
{
    var perfTracker = LocalCache.PerfTracker;

    // Find all files with changes
    var withChanges = Files.Values.Where(e => e.HasChanges).ToList();

    // Order files so that nupkgs are pushed first to help clients avoid
    // missing files during the push.
    withChanges.Sort(new SleetFileComparer());

    if (withChanges.Count > 0)
    {
        // OfType<T>() replaces the manual as-cast + null filter.
        var bytes = withChanges.OfType<FileBase>()
            .Sum(e => e.LocalFileSizeIfExists);

        // Create tasks to run in parallel
        var tasks = withChanges.Select(e => GetCommitFileFunc(e, log, token));

        // The trailing "{0}" is filled in with the elapsed time by the perf timer.
        var message = $"Files committed: {withChanges.Count} Size: {PrintUtility.GetBytesString(bytes)} Total upload time: " + "{0}";

        using (var timer = PerfEntryWrapper.CreateSummaryTimer(message, perfTracker))
        {
            // Push in parallel
            await TaskUtils.RunAsync(
                tasks: tasks,
                useTaskRun: true,
                maxThreads: 8,
                token: token);
        }
    }

    return true;
}
/// <summary>
/// Upload this file to the remote source if it has local changes,
/// retrying up to RetryCount times before giving up.
/// </summary>
/// <param name="log">Logger for retry warnings.</param>
/// <param name="token">Cancellation token for the upload and the retry delay.</param>
public async Task Push(ILogger log, CancellationToken token)
{
    if (HasChanges)
    {
        using (var timer = PerfEntryWrapper.CreateFileTimer(this, PerfTracker, PerfFileEntry.FileOperation.Put))
        {
            var retry = Math.Max(RetryCount, 1);
            for (var i = 0; i < retry; i++)
            {
                try
                {
                    // Upload to remote source.
                    await CopyToSource(log, token);

                    // The file no longer has changes.
                    HasChanges = false;
                    break;
                }
                catch (Exception ex) when (i < (retry - 1))
                {
                    // The last attempt is not caught here; its exception propagates.
                    await log.LogAsync(LogLevel.Debug, ex.ToString());
                    await log.LogAsync(LogLevel.Warning, $"Failed to upload '{RootPath}'. Retrying.");

                    // Pass the token so cancellation is honored during the retry back-off.
                    await Task.Delay(TimeSpan.FromSeconds(10), token);
                }
            }
        }
    }
}
/// <summary>
/// Ensure that the file exists on disk if it exists remotely,
/// downloading it with up to RetryCount attempts.
/// </summary>
/// <param name="log">Logger for retry warnings.</param>
/// <param name="token">Cancellation token for the download and the retry delay.</param>
protected async Task EnsureFile(ILogger log, CancellationToken token)
{
    EnsureValid();

    if (!IsDownloaded)
    {
        using (var timer = PerfEntryWrapper.CreateFileTimer(this, PerfTracker, PerfFileEntry.FileOperation.Get))
        {
            var retry = Math.Max(RetryCount, 1);
            for (var i = 0; !IsDownloaded && i < retry; i++)
            {
                try
                {
                    // Delete any existing file
                    DeleteInternal();

                    // Download from the remote source.
                    await CopyFromSource(log, token);

                    IsDownloaded = true;
                }
                catch (Exception ex) when (i < (retry - 1))
                {
                    // The last attempt is not caught here; its exception propagates.
                    await log.LogAsync(LogLevel.Debug, ex.ToString());
                    await log.LogAsync(LogLevel.Warning, $"Failed to sync '{RootPath}'. Retrying.");

                    // Pass the token so cancellation is honored during the retry back-off.
                    await Task.Delay(TimeSpan.FromSeconds(5), token);
                }
            }
        }
    }
}
/// <summary>
/// Run this service step: pre-load, wait for dependency steps, then apply operations.
/// Always marks the step done so dependent steps are not blocked by a failure.
/// </summary>
/// <param name="operations">The pending feed operations to apply.</param>
/// <param name="perfTracker">Perf tracker used to time the update.</param>
public async Task RunAsync(SleetOperations operations, IPerfTracker perfTracker)
{
    try
    {
        // Pre load while waiting for dependencies
        await Service.PreLoadAsync(operations);

        if (Dependencies.Count > 0)
        {
            // Wait for each dependency step to complete before applying this one.
            foreach (var dep in Dependencies)
            {
                await dep.WaitAsync();
            }
        }

        var name = Service.GetType().ToString().Split('.').Last();

        // The trailing "{0}" is filled in with the elapsed time by the perf timer.
        var message = $"Updated {name} in " + "{0}";

        using (var timer = PerfEntryWrapper.CreateSummaryTimer(message, perfTracker))
        {
            // Update the service
            await Service.ApplyOperationsAsync(operations);
        }
    }
    finally
    {
        // Signal completion even on failure so waiters are released.
        _done = true;
    }
}
/// <summary>
/// Push command entry point: read package inputs in batches, lock the feed on
/// first use, validate it, and push each batch.
/// </summary>
/// <param name="settings">Local sleet settings.</param>
/// <param name="source">Feed file system.</param>
/// <param name="inputs">Nupkg paths or directories to push.</param>
/// <param name="force">Overwrite existing packages.</param>
/// <param name="skipExisting">Skip packages that already exist.</param>
/// <param name="log">Logger.</param>
/// <returns>Result of the final batch push.</returns>
public static async Task<bool> RunAsync(LocalSettings settings, ISleetFileSystem source, List<string> inputs, bool force, bool skipExisting, ILogger log)
{
    var token = CancellationToken.None;
    var now = DateTimeOffset.UtcNow;
    var success = false;
    var perfTracker = source.LocalCache.PerfTracker;

    await log.LogAsync(LogLevel.Minimal, $"Reading feed {source.BaseURI.AbsoluteUri}");

    using (var timer = PerfEntryWrapper.CreateSummaryTimer("Total execution time: {0}", perfTracker))
    {
        // Partition package inputs to avoid reading 100K nuspecs at the same time.
        var packagePaths = GetPackagePaths(inputs);
        var inputBatches = packagePaths.Partition(DefaultBatchSize);

        ISleetFileSystemLock feedLock = null;

        try
        {
            for (var i = 0; i < inputBatches.Count; i++)
            {
                var inputBatch = inputBatches[i];

                if (inputBatches.Count > 1)
                {
                    await log.LogAsync(LogLevel.Minimal, $"Pushing {inputBatch.Count} packages. Batch: {i + 1} / {inputBatches.Count}");
                }

                // Read packages before locking the feed the first time.
                var packages = new List<PackageInput>(await GetPackageInputs(inputBatch, now, perfTracker, log));

                if (feedLock == null)
                {
                    string lockMessage = null;
                    if (packages.Count > 0)
                    {
                        // Interpolation formats the identity; explicit ToString() was redundant.
                        lockMessage = $"Push of {packages[0].Identity}";
                    }

                    // Check if already initialized
                    feedLock = await SourceUtility.VerifyInitAndLock(settings, source, lockMessage, log, token);

                    // Validate source
                    await SourceUtility.ValidateFeedForClient(source, log, token);
                }

                // Push
                // NOTE(review): success is overwritten per batch, so only the last
                // batch's result is returned. A false from an earlier batch is lost
                // unless PushPackages throws on failure — confirm that contract.
                success = await PushPackages(settings, source, packages, force, skipExisting, log, token);
            }
        }
        finally
        {
            // Unlock the feed
            feedLock?.Dispose();
        }
    }

    // Write out perf summary
    await perfTracker.LogSummary(log);

    return success;
}
/// <summary>
/// Download command entry point: optionally lock and validate the feed,
/// then download all packages to the output path.
/// </summary>
/// <returns>True when the download completed successfully.</returns>
public static async Task<bool> RunAsync(LocalSettings settings, ISleetFileSystem source, string outputPath, bool ignoreErrors, bool noLock, bool skipExisting, ILogger log)
{
    var token = CancellationToken.None;
    var perfTracker = source.LocalCache.PerfTracker;
    var success = true;
    ISleetFileSystemLock feedLock = null;

    using (var totalTimer = PerfEntryWrapper.CreateSummaryTimer("Total execution time: {0}", perfTracker))
    {
        try
        {
            if (!noLock)
            {
                // Lock the feed and verify it is initialized.
                feedLock = await SourceUtility.VerifyInitAndLock(settings, source, "Download", log, token);

                // Verify the feed version is compatible with this client.
                await UpgradeUtility.EnsureCompatibility(source, log, token);
            }

            success = await DownloadPackages(settings, source, outputPath, ignoreErrors, log, token);
        }
        finally
        {
            // Release the feed lock if one was taken.
            feedLock?.Dispose();
        }
    }

    // Emit the collected perf summary.
    await perfTracker.LogSummary(log);

    return success;
}
/// <summary>
/// Parse input arguments for nupkg paths.
/// </summary>
private static async Task<List<PackageInput>> GetPackageInputs(List<string> packagePaths, DateTimeOffset now, IPerfTracker perfTracker, ILogger log)
{
    using (var loadTimer = PerfEntryWrapper.CreateSummaryTimer("Loaded package nuspecs in {0}", perfTracker))
    {
        // Read every nuspec on the thread pool in parallel.
        var readTasks = packagePaths
            .Select(path => new Func<Task<PackageInput>>(() => GetPackageInput(path, log)));

        var inputs = await TaskUtils.RunAsync(readTasks, useTaskRun: true, token: CancellationToken.None);

        // Return the inputs in their natural sort order.
        return inputs.OrderBy(input => input).ToList();
    }
}
/// <summary>
/// Create the file directly without loading the previous file.
/// </summary>
public async Task CreateAsync(PackageSets index)
{
    using (var modifyTimer = PerfEntryWrapper.CreateModifyTimer(File, Context))
    {
        // Build the replacement index json.
        var indexJson = CreateJson(index);

        // The file counts as empty when neither packages nor symbols have entries.
        var hasEntries = index.Packages.Index.Count > 0 || index.Symbols.Index.Count > 0;

        await SaveAsync(indexJson, !hasEntries);
    }
}
/// <summary>
/// Write json to the file.
/// </summary>
/// <param name="json">Json content to write to the local cache file.</param>
/// <param name="log">Logger passed to Delete.</param>
/// <param name="token">Cancellation token passed to Delete.</param>
public async Task Write(JObject json, ILogger log, CancellationToken token)
{
    using (var timer = PerfEntryWrapper.CreateFileTimer(this, PerfTracker, PerfFileEntry.FileOperation.LocalWrite))
    {
        // Remove the file if it exists
        Delete(log, token);

        // Await the save instead of returning the task: the original returned
        // from inside the using, disposing the timer (ending the measurement)
        // before the write had actually completed.
        await JsonUtility.SaveJsonAsync(LocalCacheFile, json);
    }
}
/// <summary>
/// Rewrite the version index file for a package id.
/// </summary>
private async Task CreateIndexAsync(string id, SortedSet<NuGetVersion> versions)
{
    // Fetch the index file for this package id.
    var indexFile = _context.Source.Get(GetIndexUri(id));

    using (var modifyTimer = PerfEntryWrapper.CreateModifyTimer(indexFile, _context.PerfTracker))
    {
        // Build and write the updated version list.
        var json = CreateIndexJson(versions);
        await indexFile.Write(json, _context.Log, _context.Token);
    }
}
/// <summary>
/// Main entry point for updating the feed.
/// All add/remove operations can be added to a single changeContext and applied in
/// a single call using this method.
/// </summary>
public static async Task ApplyPackageChangesAsync(SleetContext context, SleetOperations changeContext)
{
    using (var summaryTimer = PerfEntryWrapper.CreateSummaryTimer("Updated all files locally. Total time: {0}", context.PerfTracker))
    {
        var steps = GetSteps(context);

        var stepTasks = steps
            .Select(step => new Func<Task>(() => step.RunAsync(changeContext, context.PerfTracker)))
            .ToList();

        // Run each service on its own thread and in parallel.
        // Services with dependencies will pre-fetch files that will be used later
        // and then wait until the other services have completed.
        await TaskUtils.RunAsync(stepTasks, useTaskRun: true, maxThreads: steps.Count, token: CancellationToken.None);
    }
}
/// <summary>
/// Remove packages from index and remove details pages if they exist.
/// </summary>
private async Task RemovePackagesFromIndexAsync(string packageId, HashSet<NuGetVersion> versions)
{
    // Retrieve the index for this package id.
    var indexUri = GetIndexUri(packageId);
    var indexFile = _context.Source.Get(indexUri);

    using (var modifyTimer = PerfEntryWrapper.CreateModifyTimer(indexFile, _context))
    {
        var removedAny = false;
        var entries = new List<JObject>();
        var existingJson = await indexFile.GetJsonOrNull(_context.Log, _context.Token);

        if (existingJson != null)
        {
            // Get all entries
            entries = await GetPackageDetails(existingJson);

            // Iterate a snapshot so entries can be removed from the live list.
            foreach (var entry in entries.ToArray())
            {
                var entryVersion = GetPackageVersion(entry);

                if (versions.Contains(entryVersion))
                {
                    removedAny = true;
                    entries.Remove(entry);

                    // delete details page
                    DeletePackagePage(new PackageIdentity(packageId, entryVersion));
                }
            }
        }

        if (removedAny)
        {
            if (entries.Count > 0)
            {
                // Rewrite the index with the remaining entries.
                var updatedJson = await CreateIndexAsync(indexUri, entries);
                await indexFile.Write(updatedJson, _context.Log, _context.Token);
            }
            else
            {
                // Every version was removed; delete the index file for this id.
                indexFile.Delete(_context.Log, _context.Token);
            }
        }
    }
}
/// <summary>
/// Write a stream to the file.
/// </summary>
public async Task Write(Stream stream, ILogger log, CancellationToken token)
{
    using (var fileTimer = PerfEntryWrapper.CreateFileTimer(this, PerfTracker, PerfFileEntry.FileOperation.LocalWrite))
    {
        // Clear any existing local copy before writing.
        Delete(log, token);

        // The input stream is owned and disposed by this method.
        using (stream)
        {
            using (var output = File.OpenWrite(LocalCacheFile.FullName))
            {
                await stream.CopyToAsync(output);
            }
        }
    }
}
/// <summary>
/// Create a package details page for a package id/version.
/// </summary>
private async Task CreatePackagePageAsync(PackageInput package)
{
    // Resolve the details page file for this package identity.
    var pageUri = GetPackageUri(package.Identity);
    var pageFile = _context.Source.Get(pageUri);

    using (var modifyTimer = PerfEntryWrapper.CreateModifyTimer(pageFile, _context))
    {
        // Build and write the package details blob.
        var pageJson = await CreatePackageBlobAsync(package);
        await pageFile.Write(pageJson, _context.Log, _context.Token);
    }
}
/// <summary>
/// Add entries for the given package inputs to the registration index for a
/// package id, removing any duplicate existing entries first.
/// </summary>
private async Task CreatePackageIndexAsync(string packageId, List<PackageInput> packageInputs)
{
    // Retrieve the existing index, if one exists.
    var indexUri = GetIndexUri(packageId);
    var indexFile = _context.Source.Get(indexUri);

    using (var modifyTimer = PerfEntryWrapper.CreateModifyTimer(indexFile, _context))
    {
        var entries = new List<JObject>();
        var existingJson = await indexFile.GetJsonOrNull(_context.Log, _context.Token);

        if (existingJson != null)
        {
            // Get all entries
            entries = await GetPackageDetails(existingJson);
        }

        // Drop existing entries for versions that are being re-added.
        var incomingVersions = new HashSet<NuGetVersion>(packageInputs.Select(input => input.Identity.Version));

        foreach (var existingEntry in entries.ToArray())
        {
            var entryVersion = GetPackageVersion(existingEntry);

            if (incomingVersions.Contains(entryVersion))
            {
                entries.Remove(existingEntry);
                _context.Log.LogWarning($"Removed duplicate registration entry for: {new PackageIdentity(packageId, entryVersion)}");
            }
        }

        // Append an entry for every incoming package.
        entries.AddRange(packageInputs.Select(input => CreateItem(input)));

        // Build and write the updated index.
        var updatedJson = await CreateIndexAsync(indexUri, entries);
        await indexFile.Write(updatedJson, _context.Log, _context.Token);
    }
}
/// <summary>
/// Create the file directly without loading the previous file.
/// </summary>
public async Task CreateAsync(IEnumerable<string> packageIds)
{
    using (var modifyTimer = PerfEntryWrapper.CreateModifyTimer(RootIndexFile, _context.PerfTracker))
    {
        // De-duplicate and sort the ids case-insensitively.
        var sortedIds = new SortedSet<string>(packageIds, StringComparer.OrdinalIgnoreCase);

        // Build a fresh file from the full id set; the existing file is never read.
        var json = await GetEmptyTemplate();
        var data = new JArray(sortedIds.Take(MaxResults));
        json["data"] = data;
        json["totalHits"] = data.Count;

        var formatted = JsonLDTokenComparer.Format(json, recurse: false);
        await RootIndexFile.Write(formatted, _context.Log, _context.Token);
    }
}
/// <summary>
/// Apply feed changes to the root index: rebuild entries for changed package ids
/// and leave all other entries as-is.
/// </summary>
/// <param name="changeContext">Pending operations describing which ids changed.</param>
public async Task ApplyOperationsAsync(SleetOperations changeContext)
{
    var file = RootIndexFile;

    using (var timer = PerfEntryWrapper.CreateModifyTimer(file, _context))
    {
        var json = await file.GetJson(_context.Log, _context.Token);

        // Read existing entries
        // Modified packages will be rebuilt, other entries will be left as-is.
        var data = GetData(json);

        foreach (var packageId in changeContext.GetChangedIds())
        {
            // Remove the existing entry if it exists. Remove alone avoids the
            // ContainsKey double lookup and is a no-op for missing keys.
            // (Assumes data is dictionary-like — TODO confirm GetData's return type.)
            data.Remove(packageId);

            var packages = await changeContext.UpdatedIndex.Packages.GetPackagesByIdAsync(packageId);
            var versions = new SortedSet<NuGetVersion>(packages.Select(e => e.Version));

            // If no versions exist then there is no extra work needed.
            if (versions.Count > 0)
            {
                // Rebuild the new entry
                var newEntry = await CreatePackageEntry(packageId, versions);
                data.Add(packageId, newEntry);
            }
        }

        json = await CreatePage(data);

        // Write the result
        await file.Write(json, _context.Log, _context.Token);
    }
}