/// <summary>
///     Ensures every package listed in <paramref name="packagesLock" /> is installed locally,
///     resolving any missing package against the project's primary and dependency sources.
/// </summary>
/// <param name="packagesLock">The lock file describing the required packages.</param>
public async Task EnsurePackagesInstalled(PackagesLock packagesLock)
{
    // Map each FindPackageByIdResource back to its owning repository so an install
    // action can record where the package should be downloaded from.
    var resources = _project.PrimarySources.Concat(_project.DependencySources)
        .ToImmutableDictionary(x => x.GetResource<FindPackageByIdResource>(), x => x);

    var cache = GetDefaultResolutionContext().SourceCacheContext;

    var actions = (await TaskCombinators.ThrottledAsync(packagesLock, async (package, token) =>
    {
        if (!_project.ModulesDirectory.ModuleExists(package.Key))
        {
            // Probe the sources in order; the first one offering the exact version wins.
            foreach (var (resource, repo) in resources)
            {
                // BUGFIX: honor the throttling cancellation token instead of
                // passing CancellationToken.None into the version lookup.
                var versions = await resource.GetAllVersionsAsync(package.Key.Id, cache, _logger, token);
                if (versions.Any(x => x == package.Key.Version))
                    return ResolvedAction.CreateInstall(package.Key, repo);
            }
        }

        // Already installed or not resolvable; filtered out below.
        return null;
    }, CancellationToken.None)).Where(x => x != null).ToList();

    if (actions.Any())
        await ExecuteActionsAsync(actions, GetDefaultDownloadContext(), CancellationToken.None);
}
public void OrderByCompletion_WhenCalledWithTwoTasksFirstSucceedsSecondIsCancelled_ShouldShowFirstSucceedingSecondCancelled()
{
    // Arrange: two manually-controlled tasks.
    var sourceA = new TaskCompletionSource<int>();
    var sourceB = new TaskCompletionSource<int>();
    var ordered = TaskCombinators.OrderByCompletion(new List<Task<int>> { sourceA.Task, sourceB.Task }).ToArray();

    // The first completion (a success) must surface as ordered[0].
    sourceA.SetResult(1);
    Assert.AreEqual(1, ordered[0].Result);

    // The later cancellation must surface as a TaskCanceledException on ordered[1].
    sourceB.SetCanceled();
    try
    {
        ordered[1].Wait();
        Assert.Fail();
    }
    catch (AggregateException errors)
    {
        Assert.AreEqual(typeof(TaskCanceledException), errors.InnerExceptions.First().GetType());
    }
}
// Loads every package described by the lock file, one dependency layer at a time
// (layers are popped from the stack built by ModuleMapper.BuildMap), and appends
// the loaded carriers to the Packages list. Modules within a layer are loaded
// concurrently via ThrottledAsync into a ConcurrentBag.
// NOTE(review): the runtime is hard-coded to Runtime.Windows here; only the process
// architecture (x64 vs x86) is detected dynamically — confirm this is intended.
// NOTE(review): the worker ignores the throttle token (Task.Run gets no token), so
// in-flight module loads are not cancellable.
public async Task <IReadOnlyList <PackageCarrier> > Load(PackagesLock packagesLock) { var mapper = new ModuleMapper(_framework, _modulesDirectory, Runtime.Windows, Environment.Is64BitProcess ? Architecture.x64 : Architecture.x86); var packageStack = mapper.BuildMap(packagesLock); var loadedPackages = new ConcurrentBag <PackageCarrier>(); while (packageStack.Any()) { var dependencyLayer = packageStack.Pop(); await TaskCombinators.ThrottledAsync(dependencyLayer, (context, token) => { return(Task.Run(() => { foreach (var module in LoadModule(context)) { loadedPackages.Add(module); } })); }, CancellationToken.None); } Packages = Packages.AddRange(loadedPackages); return(loadedPackages.ToList()); }
// Loads all packages of the lock file layer by layer. For each layer: plain
// dependency DLLs are only registered by path into _dependencyAssemblies (resolved
// lazily later), while Maze-module DLLs are loaded immediately into the assembly
// load context and scanned for exported types via TypeDiscoveryService.
// NOTE(review): dependencyPaths.Add throws on a duplicate AssemblyName across
// layers/packages — presumably the mapper guarantees uniqueness; confirm.
// NOTE(review): the sequential foreach writes into dependencyPaths before the
// throttled module loads of the same layer run, so module assemblies can resolve
// their dependencies — this ordering is load-bearing.
public async Task Load(IEnumerable <PackageIdentity> primaryPackages, PackagesLock packagesLock) { var mapper = new ModuleMapper(_project.Framework, _project.ModulesDirectory, _project.Runtime, _project.Architecture); var map = mapper.BuildMap(packagesLock); var dependencyPaths = new Dictionary <AssemblyName, AssemblyInfo>(new AssemblyNameComparer()); _dependencyAssemblies = dependencyPaths; while (map.TryPop(out var dependencyLayer)) { foreach (var context in dependencyLayer.Where(x => !x.IsMazeModule)) { foreach (var file in Directory.GetFiles(context.LibraryDirectory, "*.dll")) { dependencyPaths.Add(AssemblyName.GetAssemblyName(file), new AssemblyInfo(file)); } } await TaskCombinators.ThrottledAsync(dependencyLayer.Where(x => x.IsMazeModule), (context, token) => Task.Run(() => { foreach (var file in Directory.GetFiles(context.LibraryDirectory, "*.dll")) { var assembly = _assemblyLoadContext.LoadFromAssemblyPath(file); var typeDiscoveryService = new TypeDiscoveryService(assembly, context.Package); typeDiscoveryService.DiscoverTypes(ModuleTypeMap); } }), CancellationToken.None); } }
public void OrderByCompletion_WhenCalledWithTwoTasksFirstFailsSecondCompletes_ShouldShowFirstFailingSecondCompletingSucessfully()
{
    // Arrange: two manually-controlled tasks.
    var sourceA = new TaskCompletionSource<int>();
    var sourceB = new TaskCompletionSource<int>();
    var ordered = TaskCombinators.OrderByCompletion(new List<Task<int>> { sourceA.Task, sourceB.Task }).ToArray();

    // The first completion is a failure, so ordered[0] must fault with that exception.
    sourceA.SetException(new InvalidOperationException("Boom"));
    try
    {
        ordered[0].Wait();
        Assert.Fail();
    }
    catch (AggregateException errors)
    {
        Assert.AreEqual(typeof(InvalidOperationException), errors.InnerExceptions.First().GetType());
    }

    // The later success must surface as ordered[1].
    sourceB.SetResult(2);
    Assert.AreEqual(2, ordered[1].Result);
}
// Scans all requested files concurrently (throttled to 4 workers), loading a
// FileInformation for each, then hands the successfully loaded entries to the
// check-files worker and returns its report.
// Notes on the concurrency scheme:
//  - `counter` starts at -1 so the first Interlocked.Increment yields index 0;
//    each worker claims a unique slot in `fileInfos`.
//  - Slots stay null when the file is missing or loading throws; they are filtered
//    out with Where(x => x != null) before the check step.
//  - Progress is computed from the shared counter in `finally`, so it is an
//    approximation while workers run in parallel.
// NOTE(review): the PropertyChanged subscription on _checkFilesWorker.State is never
// removed — confirm the worker's lifetime makes that acceptable.
public async Task <FileCheckReport> Execute(ImportFilesRequest request, CancellationToken cancellationToken = default) { State.Status = ImportFilesStatus.Scanning; var fileInfos = new FileInformation[request.Files.Count]; var counter = -1; // as we always increment and the first index is zero await TaskCombinators.ThrottledAsync(request.Files, async (filename, _) => { var index = Interlocked.Increment(ref counter); try { var file = request.Directory.GetFile(filename); if (file == null) { return; // not found } fileInfos[index] = await _fileInformationLoader.Load(file); } finally { State.Progress = (double)counter / request.Files.Count; } }, cancellationToken, 4); State.Status = ImportFilesStatus.Querying; _checkFilesWorker.State.PropertyChanged += (_, __) => State.Progress = _checkFilesWorker.State.Progress; return(await _checkFilesWorker.Execute(new CheckFilesRequest(fileInfos.Where(x => x != null).ToList(), request.Directory), cancellationToken)); }
public void LiftM2()
{
    // Two independent tasks that produce 1 and 2 after short delays.
    var taskOne = Task.Factory.StartNew(() =>
    {
        Thread.Sleep(100);
        return 1;
    });
    var taskTwo = Task.Factory.StartNew(() =>
    {
        Thread.Sleep(200);
        return 2;
    });

    // Lift the addition over both tasks; the combined task yields 1 + 2.
    Func<int, int, int> add = (x, y) => x + y;
    var combined = TaskCombinators.LiftM2(taskOne, taskTwo, add);

    Assert.That(combined.Result, Is.EqualTo(3));
}
// Initializes the repository with retry semantics: up to 3 attempts via
// TaskCombinators.RetryOnFault. The fault handler returns true (retry after a
// ~100ms delay) for transient exceptions and false (give up) for non-transient
// ones, logging a warning or error respectively. On success IsInitialized is set.
// Any exception escaping the retry loop is passed through the configured
// exception-manager policy; if the policy yields a replacement exception it is
// thrown, otherwise the original is rethrown with `throw;`.
// NOTE(review): `host`, `messagingPattern` and `maxWaitTime` are unused in the
// visible body — presumably consumed by an interface contract; confirm.
// NOTE(review): the handler is invoked with a possibly-null exception (x != null
// guard), suggesting RetryOnFault also calls it on non-exception retries.
public async Task <bool> InitializeAsync( ServiceHost host, string messagingPattern, int maxWaitTime) { try { // force initialization of the exception manager Facility.ExceptionManager.Policies.Count(); await TaskCombinators.RetryOnFault( // the initialization function: async() => { using (var repository = ServiceLocator.Current.GetInstance <IRepository>("transient")) await repository.InitializeAsync(); return(IsInitialized = true); }, // times to retry: 3, // process exceptions: async x => { if (x != null && !x.IsTransient()) { Facility.LogWriter .ExceptionError(x); return(false); } var delay = Task.Delay(100); Facility.LogWriter .ExceptionWarning(x); await delay; return(true); } ); return(IsInitialized); } catch (Exception x) { if (Facility.ExceptionManager.HandleException(x, ExceptionPolicyProvider.LogAndRethrowPolicyName, out var y) && y != null) { throw y; } throw; } }
public void WhenAllOrFail_WhenCalledWithTwoTasks_ShouldReturnATaskThatIsCompletedWhenBothTasksHaveCompleted()
{
    // Two already-completed tasks yielding 1 and 2.
    var completedTasks = Enumerable.Range(1, 2).Select(Task.FromResult);

    var allDone = TaskCombinators.WhenAllOrFail<int>(completedTasks);
    allDone.Wait();

    // Results must be delivered in the original task order.
    Assert.AreEqual(1, allDone.Result[0]);
    Assert.AreEqual(2, allDone.Result[1]);
}
// Executes a batch of file operations (delete / move) concurrently and afterwards
// removes any source directory that ended up empty.
//  - deletedDirectories is a ConcurrentDictionary used as a concurrent hash set of
//    directories that lost at least one file.
//  - Files whose RelativeFilename is null live OUTSIDE the photo directory; they
//    are skipped on delete and merely COPIED (not moved) on move, unless
//    request.RemoveFilesFromOutside allows touching them.
//  - Move creates the target directory first; File.Move(..) removes the source,
//    so its parent is recorded as a deletion candidate.
//  - State.OnOperationProcessed() runs in `finally` so progress advances even for
//    failed operations; errors themselves are collected by
//    ThrottledCatchErrorsAsync and returned in the response.
// NOTE(review): empty-directory cleanup only checks candidates recorded in this
// run; pre-existing empty siblings are untouched — presumably intended.
public async Task <ExecuteOperationsResponse> Execute(ExecuteOperationsRequest request, CancellationToken cancellationToken = default) { var deletedDirectories = new ConcurrentDictionary <string, byte>(); // hashset State.TotalOperations = request.Operations.Count; var result = await TaskCombinators.ThrottledCatchErrorsAsync(request.Operations, (operation, _) => { try { if (operation is DeleteFileOperation deleteFileOperation) { if (operation.File.RelativeFilename == null && !request.RemoveFilesFromOutside) { return(new ValueTask()); } _fileSystem.File.Delete(deleteFileOperation.File.Filename); deletedDirectories.TryAdd(_fileSystem.Path.GetDirectoryName(operation.File.Filename), default); } else if (operation is MoveFileOperation moveFileOperation) { var targetPath = _fileSystem.Path.Combine(request.PhotoDirectoryRoot, moveFileOperation.TargetPath); _fileSystem.Directory.CreateDirectory(_fileSystem.Path.GetDirectoryName(targetPath)); if (operation.File.RelativeFilename == null && !request.RemoveFilesFromOutside) { _fileSystem.File.Copy(moveFileOperation.File.Filename, targetPath, false); } else { _fileSystem.File.Move(moveFileOperation.File.Filename, targetPath); deletedDirectories.TryAdd(_fileSystem.Path.GetDirectoryName(operation.File.Filename), default); } } } finally { State.OnOperationProcessed(); } return(new ValueTask()); }, cancellationToken); foreach (var deletedDirectory in deletedDirectories.Keys) { if (!_fileSystem.Directory.EnumerateFiles(deletedDirectory, "*", SearchOption.AllDirectories).Any()) { _fileSystem.Directory.Delete(deletedDirectory, true); } } return(new ExecuteOperationsResponse(result)); }
/// <summary>
///     Executes all registered actions against <paramref name="context" /> in a throttled
///     fashion and logs a warning for every action that threw during execution.
/// </summary>
public async Task Invoke(TContext context, CancellationToken cancellationToken)
{
    var failures = await TaskCombinators.ThrottledCatchErrorsAsync(_actions,
        (action, token) => action.Execute(context), cancellationToken);

    foreach (var (failedAction, error) in failures)
    {
        Logger.Warn(error, "The action '{actionName}' threw an error on execution.",
            failedAction.GetType().FullName);
    }
}
public async Task<IActionResult> GetPackageInfo([FromBody] List<PackageIdentity> packages, [FromServices] IModuleProject moduleProject)
{
    // Resolve the metadata resource of the local repository once for the whole batch.
    var metadataResource = await moduleProject.LocalSourceRepository.GetResourceAsync<PackageMetadataResource>();
    var cacheContext = new SourceCacheContext();

    // Query metadata for each requested package concurrently, bounded by the combinator
    // and cancelled when the HTTP request is aborted.
    var metadata = await TaskCombinators.ThrottledAsync(packages,
        (identity, token) => metadataResource.GetMetadataAsync(identity, cacheContext, NullLogger.Instance, token),
        HttpContext.RequestAborted);

    return Ok(metadata.Select(Mapper.Map<PackageSearchMetadata>).ToList());
}
/// <summary>
///     Enumerates the immediate subdirectories of <paramref name="directory" /> and converts
///     each one into a <see cref="DirectoryEntry" /> on the thread pool.
/// </summary>
public Task<IEnumerable<DirectoryEntry>> GetDirectoryEntries(DirectoryInfoEx directory, CancellationToken token)
{
    // Lazy enumeration; the callback aborts the walk once the token is cancelled.
    var subDirectories = directory.EnumerateDirectories("*", SearchOption.TopDirectoryOnly, () => token.IsCancellationRequested);

    // Each DirectoryInfoEx is disposed right after its entry has been produced.
    return TaskCombinators.ThrottledAsync(subDirectories, (subDirectory, _) => Task.Run(() =>
    {
        using (subDirectory)
            return GetDirectoryEntry(subDirectory, directory);
    }), token);
}
/// <summary>
///     Runs <c>ExecuteAttender</c> for every attender concurrently, reusing a pre-created
///     service scope when one exists and creating (and disposing) a fresh one otherwise.
/// </summary>
public Task Execute(IEnumerable<TargetId> attenders, ConcurrentDictionary<TargetId, IServiceScope> attenderScopes, CancellationToken cancellationToken)
{
    return TaskCombinators.ThrottledAsync(attenders, async (attenderId, token) =>
    {
        // Fall back to a new scope when no scope was prepared for this attender.
        if (!attenderScopes.TryGetValue(attenderId, out var scope))
            scope = _services.CreateScope();

        // Whether reused or fresh, the scope is disposed once the attender has run.
        using (scope)
            await ExecuteAttender(attenderId, _services, token);
    }, cancellationToken);
}
public async Task<IActionResult> GetFileProperties([FromQuery] string path, [FromServices] IEnumerable<IFilePropertyValueProvider> propertyValueProviders)
{
    var result = new FilePropertiesDto();
    var fileInfo = new FileInfo(path);
    var collectedProperties = new ConcurrentBag<FileProperty>();

    // Query all providers concurrently. NOTE(review): provider exceptions are caught
    // by the combinator but the returned error collection is discarded here.
    await TaskCombinators.ThrottledCatchErrorsAsync(propertyValueProviders, (provider, token) => Task.Run(() =>
    {
        foreach (var fileProperty in provider.ProvideValues(fileInfo, result).ToList())
            collectedProperties.Add(fileProperty);
    }), CancellationToken.None);

    result.Properties = collectedProperties.ToList();
    return Ok(result);
}
/// <summary>
///     Enumerates all file-system entries of <paramref name="directory" /> and converts each
///     into a <see cref="FileExplorerEntry" /> (directory or file) on the thread pool.
/// </summary>
public Task<IEnumerable<FileExplorerEntry>> GetEntries(DirectoryInfoEx directory, CancellationToken token)
{
    var entries = directory.EnumerateFileSystemInfos("*", SearchOption.TopDirectoryOnly, () => token.IsCancellationRequested);

    // BUGFIX: forward the caller's token to ThrottledAsync (previously
    // CancellationToken.None, so cancellation only stopped Task.Run scheduling).
    // This matches GetDirectoryEntries and GetEntriesKeepOrder.
    return TaskCombinators.ThrottledAsync(entries, (entry, _) => Task.Run(() =>
    {
        using (entry)
        {
            if (entry.IsFolder)
                return GetDirectoryEntry((DirectoryInfoEx) entry, directory);

            return GetFileEntry((FileInfoEx) entry);
        }
    }, token), token);
}
/// <summary>
///     Collects system information from all registered providers concurrently and returns
///     the aggregated result. Provider failures are logged but do not fail the request.
/// </summary>
public async Task<IActionResult> GetSystemInfo([FromServices] IEnumerable<ISystemInfoProvider> systemInfoProviders)
{
    var info = new ConcurrentBag<SystemInfoDto>();
    var result = await TaskCombinators.ThrottledCatchErrorsAsync(systemInfoProviders, (provider, token) => Task.Run(() =>
    {
        foreach (var systemInfoDto in provider.FetchInformation())
            info.Add(systemInfoDto);
    }), MazeContext.RequestAborted);

    foreach (var error in result)
    {
        // BUGFIX: corrected the typo "occurrred" in the log message.
        _logger.LogDebug(error.Value, "Exception occurred when invoking {service}", error.Key.GetType().FullName);
    }

    return Ok(info);
}
public void OrderByCompletion_WhenCalledWithTwoTasks_ShouldShowTasksCompletingOutOfOrder()
{
    // Arrange: two manually-controlled tasks.
    var sourceA = new TaskCompletionSource<int>();
    var sourceB = new TaskCompletionSource<int>();
    var ordered = TaskCombinators.OrderByCompletion(new List<Task<int>> { sourceA.Task, sourceB.Task }).ToArray();

    // Completing the SECOND source first makes its result surface as ordered[0].
    sourceB.SetResult(1);
    Assert.AreEqual(1, ordered[0].Result);

    // The remaining completion surfaces as ordered[1].
    sourceA.SetResult(2);
    Assert.AreEqual(2, ordered[1].Result);
}
/// <summary>
///     Scans every loaded package assembly in parallel for exported types deriving from
///     <c>MazeController</c> and groups them by their owning package.
/// </summary>
public async Task<IReadOnlyDictionary<PackageIdentity, List<Type>>> GetControllers()
{
    var controllersByPackage = new ConcurrentDictionary<PackageIdentity, List<Type>>();
    var controllerBaseType = typeof(MazeController);

    await TaskCombinators.ThrottledAsync(_catalog.Packages, (carrier, token) => Task.Run(() =>
    {
        var exportedTypes = carrier.Assembly.GetExportedTypes();
        var controllers = exportedTypes.Where(type => controllerBaseType.IsAssignableFrom(type)).ToList();

        // Merge with any controllers another carrier already registered for this package.
        controllersByPackage.AddOrUpdate(carrier.Context.Package, controllers, (_, existing) =>
        {
            existing.AddRange(controllers);
            return existing;
        });
    }), CancellationToken.None);

    return controllersByPackage;
}
public void WhenAllOrFail_WhenCalledWithTwoTasksFirstOneSucceedsSecondIsCancelled_ShouldThrowAggregateExceptionContainingTaskCancelledExceptionAndHaveAStatusOfCancelled()
{
    var cancelledSource = new TaskCompletionSource<int>();
    var allDone = TaskCombinators.WhenAllOrFail<int>(new[] { Task.FromResult(1), cancelledSource.Task });

    // Cancelling one member task must fail the combined task.
    cancelledSource.SetCanceled();
    try
    {
        allDone.Wait(3000);
        Assert.Fail();
    }
    catch (AggregateException errors)
    {
        Assert.AreEqual(typeof(TaskCanceledException), errors.Flatten().InnerExceptions.First().GetType());
    }
}
public void WhenAllOrFail_WhenCalledWithTwoTasksFirstOneFailsSecondNeverCompletes_ShouldThrowAggregateExceptionContainingTheFailedTaskException()
{
    var neverCompletingSource = new TaskCompletionSource<int>();
    var failingSource = new TaskCompletionSource<int>();
    var allDone = TaskCombinators.WhenAllOrFail<int>(new[] { neverCompletingSource.Task, failingSource.Task });

    // A single failure must fail the combined task even though the other
    // member task never completes.
    failingSource.SetException(new InvalidOperationException("Boom"));
    try
    {
        allDone.Wait();
        Assert.Fail();
    }
    catch (AggregateException errors)
    {
        Assert.AreEqual(typeof(InvalidOperationException), errors.Flatten().InnerExceptions.First().GetType());
    }
}
/// <summary>
///     Like <c>GetEntries</c>, but preserves the enumeration order of the directory by
///     writing each converted entry into its original slot of a result array.
/// </summary>
public async Task<IEnumerable<FileExplorerEntry>> GetEntriesKeepOrder(DirectoryInfoEx directory, CancellationToken token)
{
    var entries = directory.EnumerateFileSystemInfos("*", SearchOption.TopDirectoryOnly).ToList();
    var result = new FileExplorerEntry[entries.Count];

    // PERF/BUGFIX: pair every entry with its index up front instead of calling
    // List.IndexOf inside the worker — that was O(n) per entry (O(n²) total) and
    // depended on DirectoryInfoEx equality semantics to find the right slot.
    await TaskCombinators.ThrottledAsync(entries.Select((entry, index) => (entry, index)), (item, _) => Task.Run(() =>
    {
        var (entry, index) = item;
        using (entry)
        {
            if (entry.IsFolder)
                result[index] = GetDirectoryEntry((DirectoryInfoEx) entry, directory);
            else
                result[index] = GetFileEntry((FileInfoEx) entry);
        }
    }), token);

    return result;
}
/// <summary>
///     Builds a path tree response: optionally the entries of the requested path itself
///     (ordered or unordered) plus the entries of any requested ancestor directories.
/// </summary>
public async Task<IActionResult> GetPathTree([FromBody] PathTreeRequestDto request, [FromQuery] bool keepOrder, CancellationToken cancellationToken)
{
    var response = new PathTreeResponseDto();
    var directoryHelper = new DirectoryHelper();

    // Kick off the entries query early and await it last so it overlaps
    // with the requested-directories work below.
    Task<IEnumerable<FileExplorerEntry>> entriesTask = null;
    if (request.RequestEntries)
    {
        entriesTask = keepOrder
            ? directoryHelper.GetEntriesKeepOrder(request.Path, cancellationToken)
            : directoryHelper.GetEntries(request.Path, cancellationToken);
    }

    if (request.RequestedDirectories?.Count > 0)
    {
        var pathDirectories = PathHelper.GetPathDirectories(request.Path).ToList();
        var directories = new ConcurrentDictionary<int, List<DirectoryEntry>>();

        // BUGFIX: forward the request's cancellation token to the combinator
        // (previously CancellationToken.None, so aborting the HTTP request did not
        // stop scheduling further directory queries).
        await TaskCombinators.ThrottledAsync(request.RequestedDirectories, async (i, token) =>
        {
            var directoryPath = pathDirectories[i];
            directories.TryAdd(i, (await directoryHelper.GetDirectoryEntries(directoryPath, cancellationToken)).ToList());
        }, cancellationToken);

        response.Directories = directories;
    }

    if (entriesTask != null)
        response.Entries = (await entriesTask).ToList();

    return Ok(response);
}
/// <summary>
///     Queries all processes via WMI under a semaphore, converts them to DTOs concurrently,
///     records the latest set of process ids and returns the DTO list.
/// </summary>
public async Task<List<ProcessDto>> GetProcesses()
{
    await _getProcessesLock.WaitAsync();
    try
    {
        using (var processCollection = _searcher.Get())
        {
            // CreateProcessDto may yield null for processes that vanished mid-query.
            var dtos = (await TaskCombinators.ThrottledAsync(processCollection.Cast<ManagementObject>(), CreateProcessDto, CancellationToken))
                .Where(x => x != null).ToList();

            _latestProcessIds = new HashSet<int>(dtos.Select(x => x.ProcessId));
            return dtos;
        }
    }
    finally
    {
        // BUGFIX: removed a pointless `catch (Exception e) { throw; }` (unused variable,
        // bare rethrow). Exceptions still propagate and the semaphore is still released.
        _getProcessesLock.Release();
    }
}
/// <summary>
///     Checks the integrity of all requested files concurrently and returns a report of the
///     distinct issues found (deduplicated by issue identity).
/// </summary>
public async Task<FileCheckReport> Execute(CheckFilesRequest request, CancellationToken cancellationToken = default)
{
    var directory = request.Directory;
    var fileInfos = request.Files;

    // get all files from the repository
    var context = await _fileBaseContextFactory.BuildContext(directory);

    State.Status = CheckFilesStatus.Querying;

    var result = new ConcurrentBag<IFileIssue>();

    // BUGFIX: start at 0 so the first Interlocked.Increment yields 1 processed file;
    // the previous start value of -1 made State.FilesProcessed lag one behind.
    var counter = 0;
    State.TotalFiles = context.IndexedFiles.Count;

    await TaskCombinators.ThrottledAsync(fileInfos, async (fileInformation, token) =>
    {
        var useCase = _serviceProvider.GetRequiredService<ICheckFileIntegrityUseCase>();
        var response = await useCase.Handle(new CheckFileIntegrityRequest(fileInformation, context));
        if (useCase.HasError)
        {
            // NOTE(review): State.Errors is written from multiple workers — confirm it is thread-safe.
            State.Errors.Add(fileInformation, useCase.Error !);
        }
        else
        {
            foreach (var fileIssue in response !.Issues)
                result.Add(fileIssue);
        }

        State.FilesProcessed = Interlocked.Increment(ref counter);
    }, cancellationToken);

    return new FileCheckReport(result.Distinct(new EqualityComparerByValue<IFileIssue, string>(x => x.Identity)).ToList());
}
public void OrderByCompletion_WhenCalledWithNoTasks_ShouldThrowArgumentException()
{
    // An empty sequence is expected to throw; presumably the expected-exception
    // attribute on this test method (not visible here) asserts that.
    var emptyTasks = Enumerable.Empty<Task<object>>();
    TaskCombinators.OrderByCompletion(emptyTasks);
}
public void WhenAllOrFail_WhenCalledWithNoTasks_ShouldThrowArgumentException()
{
    // An empty sequence is expected to throw; presumably the expected-exception
    // attribute on this test method (not visible here) asserts that.
    var emptyTasks = Enumerable.Empty<Task<int>>();
    TaskCombinators.WhenAllOrFail<int>(emptyTasks);
}
// Synchronizes the file index with the actual directory content. High-level flow:
//  1. Scanning: load indexed files (with locations) and enumerate files on disk.
//  2. Diff: CollectionDiff.Create yields new files (on disk, not indexed) and
//     removed files (indexed, no longer on disk).
//  3. Removed files are dropped from the index sequentially; files fully removed
//     from the index are remembered so they can be marked as "deleted" later.
//  4. IndexingNewFiles: new files are indexed concurrently (8 workers). Each
//     indexed file is matched (by content, via _fileContentComparer) against the
//     removed files to classify the operation as FileChanged, FileMoved or NewFile;
//     matched removed files are taken out of removedFileInformation under
//     removedFilesLock. Re-appearing files are removed from formerlyDeletedFiles
//     under deletedFilesLock. Errors are recorded into State.Errors under stateLock.
//     CancellationToken.None is deliberate here: cancelling mid-way would lose
//     move/change pairing because the removals were already committed.
//  5. Whatever is left in removedFileInformation becomes FileRemoved operations;
//     completely-removed files are added to the deleted-files memory.
// NOTE(review): the immutable formerlyDeletedFiles is re-read/re-assigned under two
// different code paths (locked in the parallel section, unlocked in the final
// loop, which runs after the parallel section completes) — confirm no overlap.
// WARN (original): if a file changes, the previous file is not marked as deleted;
// the original author was unsure whether that is desired.
public async Task <SynchronizeIndexResponse> InternalExecute(SynchronizeIndexRequest request, CancellationToken cancellationToken = default) { var directory = request.Directory; await using var dataContext = directory.GetDataContext(); var operations = new ConcurrentBag <FileOperation>(); State.Status = SynchronizeIndexStatus.Scanning; // get all files from the repository var indexedFiles = await dataContext.FileRepository.GetAllReadOnlyBySpecs(new IncludeFileLocationsSpec()); var indexedFileInfos = indexedFiles.SelectMany(x => x.ToFileInfos(directory)); // get all files from the actual directory var localFiles = directory.EnumerateFiles().WithCancellation(cancellationToken).ToList(); State.Status = SynchronizeIndexStatus.Synchronizing; // get changes var(newFiles, removedFiles) = CollectionDiff.Create(indexedFileInfos, localFiles, new FileInfoComparer()); // files that are completely removed from the directory var completelyRemovedFiles = new List <IFileInfo>(); // remove files from index foreach (var removedFile in removedFiles) { var action = _serviceProvider.GetRequiredService <IRemoveFileFromIndexUseCase>(); var response = await action.Handle(new RemoveFileFromIndexRequest(removedFile.RelativeFilename !, directory)); if (action.HasError) { State.Errors.Add(removedFile.RelativeFilename !, action.Error !); } if (response !.IsCompletelyRemoved) { completelyRemovedFiles.Add(removedFile); } } IImmutableList <FileInformation> removedFileInformation = removedFiles .Select(x => GetFileInformationFromPath(x.RelativeFilename !, indexedFiles, directory)) .ToImmutableList(); var formerlyDeletedFiles = directory.MemoryManager.DirectoryMemory.DeletedFiles; var deletedFilesLock = new object(); State.Status = SynchronizeIndexStatus.IndexingNewFiles; State.TotalFiles = newFiles.Count; var processedFilesCount = 0; var removedFilesLock = new object(); var stateLock = new object(); await TaskCombinators.ThrottledCatchErrorsAsync(newFiles, async (newFile, _) => { var(action, 
response) = await IndexFile(newFile.Filename, directory); if (action.HasError) { lock (stateLock) { State.Errors.Add(newFile.Filename, action.Error !); } return; } var(indexedFile, fileLocation) = response !; // remove from formerly deleted files if (formerlyDeletedFiles.ContainsKey(indexedFile.Hash)) { lock (deletedFilesLock) { formerlyDeletedFiles = formerlyDeletedFiles.Remove(indexedFile.Hash); } } FileOperation fileOperation; // get operation var removedFile = removedFileInformation.FirstOrDefault(x => _fileContentComparer.Equals(x, indexedFile)); if (removedFile != null) { lock (removedFilesLock) { removedFileInformation = removedFileInformation.Remove(removedFile); } if (directory.PathComparer.Equals(fileLocation.RelativeFilename, removedFile.RelativeFilename !)) { fileOperation = FileOperation.FileChanged(fileLocation, ToFileReference(removedFile)); } else { fileOperation = FileOperation.FileMoved(fileLocation, ToFileReference(removedFile)); } } else { fileOperation = FileOperation.NewFile(fileLocation); } await using (var context = directory.GetDataContext()) { await context.OperationRepository.Add(fileOperation); operations.Add(fileOperation); } var processedFiles = Interlocked.Increment(ref processedFilesCount); State.ProcessedFiles = processedFiles; State.Progress = (double)processedFiles / newFiles.Count; }, CancellationToken.None, 8); // do not use cancellation token here as a cancellation would destroy all move/change operations as all files were already removed foreach (var removedFile in removedFileInformation) { var operation = FileOperation.FileRemoved(ToFileReference(removedFile)); await dataContext.OperationRepository.Add(operation); operations.Add(operation); // add the file to deleted files, if it was completely removed from index // WARN: if a file changes, the previous file is not marked as deleted. 
Idk if that is actually desired if (completelyRemovedFiles.Any(x => x.Filename == removedFile.Filename)) { formerlyDeletedFiles = formerlyDeletedFiles.Add(removedFile.Hash.ToString(), new DeletedFileInfo(removedFile.RelativeFilename !, removedFile.Length, removedFile.Hash, removedFile.PhotoProperties, removedFile.FileCreatedOn, DateTimeOffset.UtcNow)); } } await directory.MemoryManager.Update(directory.MemoryManager.DirectoryMemory.SetDeletedFiles(formerlyDeletedFiles)); return(new SynchronizeIndexResponse(operations.ToList())); }