private static async Task <int> RefactorCommand(IHost host, FileInfo solutionPath, string projectName) { return(await AnsiConsole.Progress().AutoClear(false) .Columns(new TaskDescriptionColumn(), new ProgressBarColumn(), new PercentageColumn(), new ElapsedTimeColumn()).StartAsync(async progressContext => { var logger = host.Services.GetRequiredService <ILogger <Program> >(); var progressSubscriber = new AnsiConsoleProgressSubscriber(progressContext); var workspace = CreateWorkspace(progressSubscriber, logger); var(solution, proj) = await OpenSolutionAndProject(solutionPath, projectName, workspace, progressSubscriber, logger); if (proj == null) { return -1; } progressSubscriber.Report( new FindInternalClassesProgress(FindInternalTypesStep.LoadSolution, 100)); var port = host.Services.GetRequiredService <IFindInternalTypesPort>(); var internalSymbols = await port.FindProjClassesWhichCanBeInternalAsync(solution, proj.Id, progressSubscriber, CancellationToken.None); AnsiConsole.WriteLine($"Found {internalSymbols.Count()} public types which can be made internal"); var fix = host.Services.GetRequiredService <IPublicToInternalFixPort>(); var newSolution = await fix.MakePublicTypesInternal(solution, internalSymbols); return workspace.TryApplyChanges(newSolution) ? 0 : -1; })); }
private static void Bar()
{
    Thread.Sleep(1300);
    AnsiConsole.Progress().Start(ctx =>
    {
        // Define tasks
        var task1 = ctx.AddTask("[blue]CPU[/]");
        var task2 = ctx.AddTask("[red]RAM[/]");
        float LastCPU = 0f;
        float LastRAM = 0f, PercentRAM = 0f;
        while (true)
        {
            // CPU, RAM and TotalRAM are fields defined elsewhere in the containing type.
            //Console.WriteLine("CPU: {0}, LastCPU: {1}", CPU, LastCPU);
            PercentRAM = 100f - (RAM / TotalRAM * 100);
            task1.Increment(CPU - LastCPU);
            task2.Increment(PercentRAM - LastRAM);
            LastCPU = CPU;
            LastRAM = PercentRAM;
            //Console.WriteLine("RAM: {2}, Percent: {0}, Last: {1}", PercentRAM, LastRAM, RAM);
            Thread.Sleep(1000);
        }
    });
}
public static void Main() { AnsiConsole.MarkupLine("[yellow]Initializing warp drive[/]..."); // Show progress AnsiConsole.Progress() .AutoClear(false) .Columns(new ProgressColumn[] { new TaskDescriptionColumn(), // Task description new ProgressBarColumn(), // Progress bar new PercentageColumn(), // Percentage new RemainingTimeColumn(), // Remaining time new SpinnerColumn(), // Spinner }) .Start(ctx => { var random = new Random(DateTime.Now.Millisecond); // Create some tasks var tasks = CreateTasks(ctx, random); var warpTask = ctx.AddTask("Going to warp", autoStart: false).IsIndeterminate(); // Wait for all tasks (except the indeterminate one) to complete while (!ctx.IsFinished) { // Increment progress foreach (var(task, increment) in tasks) { task.Increment(random.NextDouble() * increment); } // Write some random things to the terminal if (random.NextDouble() < 0.1) { WriteLogMessage(); } // Simulate some delay Thread.Sleep(100); } // Now start the "warp" task warpTask.StartTask(); warpTask.IsIndeterminate(false); while (!ctx.IsFinished) { warpTask.Increment(12 * random.NextDouble()); // Simulate some delay Thread.Sleep(100); } }); // Done AnsiConsole.MarkupLine("[green]Done![/]"); }
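// The example above starts the "warp" task as indeterminate and only later turns it into a
// normal, measurable task. A minimal sketch of just that pattern, assuming nothing beyond
// Spectre.Console's standard API; the work being measured is simulated with Thread.Sleep.
using System;
using System.Threading;
using Spectre.Console;

static class IndeterminateDemo
{
    public static void Run()
    {
        AnsiConsole.Progress().Start(ctx =>
        {
            // While the total amount of work is unknown, show a pulsing, indeterminate bar.
            var task = ctx.AddTask("Preparing", autoStart: true).IsIndeterminate();
            Thread.Sleep(1500); // stand-in for work of unknown length

            // Once the total is known, switch to a normal bar and report real progress.
            task.IsIndeterminate(false);
            task.MaxValue = 10;
            for (var i = 0; i < 10; i++)
            {
                Thread.Sleep(100);
                task.Increment(1);
            }
        });
    }
}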
private static Progress Progress() =>
    AnsiConsole.Progress()
        .Columns(
            new TaskDescriptionColumn(),
            new ProgressBarColumn(),
            new PercentageColumn(),
            new RemainingTimeColumn(),
            new SpinnerColumn());
/// <summary> /// Runs all puzzles found by the <see cref="Infrastructure.PuzzleLocator"/> and displays their output. /// </summary> public void Run() { var output = new ConcurrentBag <(PuzzleOutput sample, PuzzleOutput puzzle)>(); var sw = new Stopwatch(); var cts = new CancellationTokenSource(); CancellationToken token = cts.Token; // Give us 20s to run all tests, and bail after that. cts.CancelAfter(20_000); int completed = 0; var progressTask = Task.Run(() => AnsiConsole.Progress() .AutoClear(true) .Columns(new ProgressColumn[] { new TaskDescriptionColumn(), // Task description new ProgressBarColumn(), // Progress bar new SpinnerColumn(), // Spinner }) .Start(ctx => { ProgressTask task1 = ctx.AddTask("[blue]Vacationing in the tropics[/]"); task1.MaxValue = PuzzleLocator.Puzzles.Count - 100; task1.StartTask(); while (!ctx.IsFinished && !token.IsCancellationRequested) { double increment = completed - task1.Value; task1.Increment(increment); } }), token); sw.Start(); PuzzleLocator.Puzzles.ParallelForEachAsync(async(puzzleGenericType) => { dynamic puzzle = PuzzleFactory.Build(puzzleGenericType); string?name = puzzleGenericType?.FullName ?? "N/A"; output.Add( ( await RunAsync(name, () => puzzle.ValidateSample(), token), await RunAsync(name, () => puzzle.Solve(), token) )); Interlocked.Increment(ref completed); }).Wait(); sw.Stop(); progressTask.GetAwaiter().GetResult(); OutputRenderer.RenderResults(output); AnsiConsole.Console.MarkupLine($"[yellow]Advent of Code 2020 - Total Run Time: [/][teal]{sw.ElapsedMilliseconds}ms[/]"); }
public static async Task <EnvironmentCheckResults> ExecuteAllEnvironmentChecks(IServiceProvider services, CancellationToken token = default(CancellationToken)) { var results = new EnvironmentCheckResults(); var checks = services.discoverChecks().ToArray(); if (!checks.Any()) { AnsiConsole.WriteLine("No environment checks."); return(results); } await AnsiConsole.Progress().StartAsync(async c => { var task = c.AddTask("[bold]Running Environment Checks[/]", new ProgressTaskSettings { MaxValue = checks.Length }); for (int i = 0; i < checks.Length; i++) { var check = checks[i]; try { await check.Assert(services, token); AnsiConsole.MarkupLine( $"[green]{(i + 1).ToString().PadLeft(4)}.) Success: {check.Description}[/]"); results.RegisterSuccess(check.Description); } catch (Exception e) { AnsiConsole.MarkupLine( $"[red]{(i + 1).ToString().PadLeft(4)}.) Failed: {check.Description}[/]"); AnsiConsole.WriteException(e); results.RegisterFailure(check.Description, e); } finally { task.Increment(1); } } task.StopTask(); }); return(results); }
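// The environment-check runner above sizes the task to the number of checks and ticks once per
// completed item. A minimal sketch of that "one increment per work item" pattern, assuming an
// arbitrary list of items and using Task.Delay as a placeholder for the real per-item call.
using System.Threading.Tasks;
using Spectre.Console;

static class PerItemProgress
{
    public static async Task RunAsync(string[] items)
    {
        await AnsiConsole.Progress().StartAsync(async ctx =>
        {
            var task = ctx.AddTask("Processing items", new ProgressTaskSettings { MaxValue = items.Length });
            foreach (var item in items)
            {
                await Task.Delay(50); // placeholder for the real per-item work
                task.Increment(1);    // one tick per completed item
            }
            task.StopTask();
        });
    }
}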
private static void SynchronousProgressBars()
{
    AnsiConsole.Progress()
        .Start(ctx =>
        {
            var task1 = ctx.AddTask("[green]Reticulating splines[/]");
            var task2 = ctx.AddTask("[green]Folding space[/]");
            while (!ctx.IsFinished)
            {
                task1.Increment(0.5);
                task2.Increment(0.1);
            }
        });
}
public async Task RunAsync(DirectoryInfo toolsDirectory, CancellationToken cancellationToken)
{
    await AnsiConsole
        .Progress()
        .Columns(
            new ProgressBarColumn(),
            new PercentageColumn(),
            new RemainingTimeColumn(),
            new DownloadedColumn(),
            new TaskDescriptionColumn { Alignment = Justify.Left })
        .StartAsync(async context => await RunAsync(context, toolsDirectory, cancellationToken));
}
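// The column set above includes DownloadedColumn, which renders the task's Value/MaxValue as byte
// sizes. A sketch of feeding those columns from an HTTP download, assuming the server reports a
// Content-Length; the URL and output file name here are placeholders, not part of the original.
using System;
using System.IO;
using System.Net.Http;
using System.Threading.Tasks;
using Spectre.Console;

static class DownloadProgressDemo
{
    public static async Task RunAsync()
    {
        using var http = new HttpClient();
        await AnsiConsole.Progress()
            .Columns(new ProgressColumn[]
            {
                new TaskDescriptionColumn(),
                new ProgressBarColumn(),
                new DownloadedColumn(),
                new TransferSpeedColumn()
            })
            .StartAsync(async ctx =>
            {
                using var response = await http.GetAsync("https://example.com/file.bin", HttpCompletionOption.ResponseHeadersRead);
                // Size the task in bytes so DownloadedColumn can show e.g. "3.1/12.0 MB".
                var task = ctx.AddTask("file.bin", maxValue: response.Content.Headers.ContentLength ?? 0);
                await using var source = await response.Content.ReadAsStreamAsync();
                await using var target = File.Create("file.bin");
                var buffer = new byte[81920];
                int read;
                while ((read = await source.ReadAsync(buffer)) > 0)
                {
                    await target.WriteAsync(buffer.AsMemory(0, read));
                    task.Increment(read); // advance by the number of bytes just written
                }
            });
    }
}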
private static async Task <int> AnalyzeCommand(IHost host, FileInfo solutionPath, string projectName) { try { return(await AnsiConsole.Progress().AutoClear(false) .Columns(new TaskDescriptionColumn(), new ProgressBarColumn(), new PercentageColumn(), new ElapsedTimeColumn()). StartAsync(async progressContext => { var logger = host.Services.GetRequiredService <ILogger <Program> >(); var progressSubscriber = new AnsiConsoleProgressSubscriber(progressContext); var workspace = CreateWorkspace(progressSubscriber, logger); var(solution, proj) = await OpenSolutionAndProject(solutionPath, projectName, workspace, progressSubscriber, logger); if (proj == null) { return -1; } progressSubscriber.Report( new FindInternalClassesProgress(FindInternalTypesStep.LoadSolution, 100)); var port = host.Services.GetRequiredService <IFindInternalTypesPort>(); var internalSymbols = await port.FindProjClassesWhichCanBeInternalAsync(solution, proj.Id, progressSubscriber, CancellationToken.None); AnsiConsole.WriteLine($"Found {internalSymbols.Count()} public types which can be made internal"); var table = new Table(); table.AddColumn("№").AddColumn("Type").AddColumn("Location"); var i = 0; foreach (var symbol in internalSymbols) { table.AddRow($"{++i}", $"{symbol.TypeKind} {symbol.Name}", symbol.Locations.FirstOrDefault()?.GetLineSpan().ToString() ?? string.Empty); } AnsiConsole.Render(table); return 0; })); } catch (Exception e) { AnsiConsole.WriteException(e); return(-1); } }
public override int Execute(CommandContext context, Settings settings) { if (string.IsNullOrWhiteSpace(settings.Input)) { settings.Input = _inputOptions.Filename; } settings.Framerate ??= _inputOptions.Framerate; if (string.IsNullOrWhiteSpace(settings.Output)) { settings.Output = _outputOptions.Filename; } AnsiConsole.Progress() .AutoRefresh(false) .AutoClear(false) .HideCompleted(true) .Columns(new TaskDescriptionColumn { Alignment = Justify.Left }) .Start(ctx => { var loopTask = ctx.AddTask("Starting message loop"); if (!_messageLoopService.Start(loopTask)) { return; } while (!loopTask.IsFinished) { Thread.Sleep(20); } var playerTask = ctx.AddTask("Starting track list player"); if (_trackListPlayer.Setup(playerTask, settings.Input, settings.Output, _outputOptions.Format, FrameRateExtensions.FromDouble((double)settings.Framerate))) { _trackListPlayer.Render(ctx); } }); return(1); }
private static async Task AsynchronousProgressBars()
{
    await AnsiConsole.Progress()
        .StartAsync(async ctx =>
        {
            var task1 = ctx.AddTask("[green]Reticulating splines[/]");
            var task2 = ctx.AddTask("[green]Folding space[/]");
            while (!ctx.IsFinished)
            {
                await Task.Delay(100);
                task1.Increment(2);
                task2.Increment(1);
            }
        });
}
public override void Run(BuildContext context) { if (context.GitHubActions().IsRunningOnGitHubActions) { BuildSolution(context, color: false); GenerateDocumentation(context); } else { AnsiConsole.Progress() .AutoClear(false) .AutoRefresh(true) .Columns(new ProgressColumn[] { new TaskDescriptionColumn(), new ProgressBarColumn(), new PercentageColumn(), new RemainingTimeColumn(), new SpinnerColumn(Spinner.Known.CircleQuarters), }) .Start(ctx => { var buildTask = ctx.AddTask("Build", new ProgressTaskSettings() { MaxValue = context.ProjectsToBuild.Count(), AutoStart = false }); var buildDocTask = ctx.AddTask("Build Doc.", new ProgressTaskSettings() { MaxValue = 1, AutoStart = false }); buildTask.StartTask(); BuildSolution(context, color: true, new Progress <int>(p => buildTask.Increment(p))); buildTask.StopTask(); buildDocTask.StartTask(); GenerateDocumentation(context); buildDocTask.Increment(1); buildDocTask.StopTask(); }); } }
public Task StartProgressContext(RunnerOptions options, Func<IProgressContext, Task> predicate) =>
    AnsiConsole.Progress()
        .AutoRefresh(true)
        .AutoClear(options.Verbosity != TraceLevel.Verbose)
        .Columns(
            new TaskDescriptionColumn { Alignment = Justify.Left },
            new ProgressBarColumn { Width = null },
            new PercentageColumn(),
            new ElapsedTimeColumn(),
            new SpinnerColumn(Spinner.Known.Dots12))
        .StartAsync(ctx => predicate(new SpectreProgressContext(ctx)));
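// The snippet above hides Spectre.Console behind an IProgressContext abstraction. A related,
// smaller bridge is wrapping a ProgressTask in System.IProgress<double> so library code can report
// progress without referencing Spectre.Console at all. A sketch, assuming a 0-100 percentage scale;
// the type and method names are illustrative.
using System;
using System.Threading.Tasks;
using Spectre.Console;

sealed class ProgressTaskAdapter : IProgress<double>
{
    private readonly ProgressTask _task;
    public ProgressTaskAdapter(ProgressTask task) => _task = task;

    // Treat the reported value as an absolute percentage rather than a delta.
    public void Report(double value) => _task.Value = value;
}

static class AdapterDemo
{
    public static Task RunAsync(Func<IProgress<double>, Task> work) =>
        AnsiConsole.Progress().StartAsync(ctx =>
        {
            var task = ctx.AddTask("Working"); // default MaxValue is 100
            return work(new ProgressTaskAdapter(task));
        });
}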
public async Task GetPackagesDataFromNugetRepositoryAsync(string pathToCsProjFile, List<Package> packages)
{
    await AnsiConsole.Progress()
        .Columns(new ProgressColumn[]
        {
            new TaskDescriptionColumn(),
            new ProgressBarColumn(),
            new PercentageColumn()
        })
        .StartAsync(async ctx =>
        {
            var task = ctx.AddTask(">");
            while (!ctx.IsFinished)
            {
                await FetchPackagesDataAsync(task, packages);
            }
        });
}
static async Task <int> Main(string[] args) { var startTime = args.Length != 1 ? "19:00" : args[0]; if (!TimeSpan.TryParse(startTime, out var time)) { Console.Error.WriteLine("Invalid time"); return(1); } var timeToGo = time - DateTimeOffset.Now.TimeOfDay; var secondsToGo = timeToGo.TotalSeconds; AnsiConsole.WriteLine(); AnsiConsole.MarkupLine($"[lime]Mark Rendle will be live at {time.Hours:00}:{time.Minutes:00}...[/]"); AnsiConsole.WriteLine(); await AnsiConsole.Progress() .Columns(new ProgressColumn[] { new TaskDescriptionColumn(), new ProgressBarColumn(), new PercentageColumn(), new RemainingTimeColumn(), new SpinnerColumn() }) .StartAsync(async ctx => { // Add items in random order. Only first 7 will be added. var items = new ProgressItems(ctx, secondsToGo, TaskNames.OrderBy(_ => Guid.NewGuid())); while (!ctx.IsFinished) { await Task.Delay(1000); items.Increment(); } }); AnsiConsole.MarkupLine("Intro made with Spectre.Console. Check it out: [deepskyblue1]https://spectresystems.github.io/spectre.console/[/]"); return(0); }
// Recursively copies everything inside a folder.
private static void CopyAll(DirectoryInfo source, DirectoryInfo target)
{
    Directory.CreateDirectory(target.FullName);
    // Copy the files
    foreach (FileInfo fi in source.GetFiles())
    {
        // Note: this bar spins to completion before the actual copy below starts,
        // so it is decorative rather than tied to the bytes copied.
        AnsiConsole.Progress().Start(ctx =>
        {
            var task = ctx.AddTask($"[green]Copying {target.FullName}\\{fi.Name}[/]");
            while (!ctx.IsFinished)
            {
                task.Increment(1.5);
            }
        });
        try
        {
            fi.CopyTo(Path.Combine(target.FullName, fi.Name), true);
        }
        catch (UnauthorizedAccessException)
        {
            Console.WriteLine($"Couldn't copy {target.FullName}: Access Denied");
        }
    }
    // Copy the subfolders
    foreach (DirectoryInfo diSourceSubDir in source.GetDirectories())
    {
        try
        {
            DirectoryInfo nextTargetSubDir = target.CreateSubdirectory(diSourceSubDir.Name);
            CopyAll(diSourceSubDir, nextTargetSubDir);
        }
        catch (UnauthorizedAccessException)
        {
            Console.WriteLine("Couldn't access directory: Access Denied");
        }
    }
}
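// In the example above the bar completes before the copy begins. A sketch of an alternative that
// ties the bar to actual bytes, sizing one task to the directory's total file size and advancing it
// by each file's length as it is copied. The class and method names here are illustrative only.
using System.IO;
using System.Linq;
using Spectre.Console;

static class ByteBasedCopy
{
    public static void CopyWithProgress(DirectoryInfo source, DirectoryInfo target)
    {
        var files = source.GetFiles();
        Directory.CreateDirectory(target.FullName);
        AnsiConsole.Progress().Start(ctx =>
        {
            var task = ctx.AddTask("Copying files",
                new ProgressTaskSettings { MaxValue = files.Sum(f => (double)f.Length) });
            foreach (var file in files)
            {
                file.CopyTo(Path.Combine(target.FullName, file.Name), overwrite: true);
                task.Increment(file.Length); // advance by the size of the file just copied
            }
        });
    }
}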
public static async Task <PingSession> StartAsync(PingRequestOptions options) { var pingRequestAgent = new PingRequestAgent(); var cancellationTokenSource = new CancellationTokenSource(); Console.CancelKeyPress += (sender, e) => { e.Cancel = true; cancellationTokenSource.Cancel(); }; PingSession results = null; if (options.NumberOfPings != -1) { await AnsiConsole.Progress() .Columns(new ProgressColumn[] { new TaskDescriptionColumn(), new ProgressBarColumn(), new PercentageColumn(), new RemainingTimeColumn() }) .StartAsync(async ctx => { var requestsRemaining = ctx.AddTask($"Sending {options.NumberOfPings} pings to [yellow]{options.Address}[/]", new ProgressTaskSettings { MaxValue = options.NumberOfPings }); pingRequestAgent.PingCompleted += (sender, e) => { requestsRemaining.Increment(1); }; results = await pingRequestAgent.StartAsync(options, cancellationTokenSource.Token); }); } else { await AnsiConsole.Status() .Spinner(Spinner.Known.Dots8Bit) .StartAsync($"Pinging {options.Address}...", async ctx => { pingRequestAgent.PingCompleted += (sender, e) => { if (e.CompletedPing.Status != IPStatus.Success) { AnsiConsole.MarkupLine("[grey54]{0:yyyy-MM-ddTHH:mm:ss}: {1}[/]", e.CompletedPing.RequestTime, e.CompletedPing.Status); } var packetsLostColour = "grey54"; if (e.Session.PacketsLostPercentage > 5) { packetsLostColour = "red"; } else if (Math.Round(e.Session.PacketsLostPercentage, 2) > 0) { packetsLostColour = "maroon"; } ctx.Status($"Continuously pinging [yellow]{options.Address}[/] [grey54]({e.Session.PacketsSent} sent, [{packetsLostColour}]{e.Session.PacketsLostPercentage:0.00}% lost[/], {e.Session.AverageRoundtrip}ms average, {(int)e.Session.Elapsed.TotalMinutes}:{e.Session.Elapsed.Seconds:00} elapsed)[/]"); }; results = await pingRequestAgent.StartAsync(options, cancellationTokenSource.Token); }); } if (results != null && results.PacketsSent > 0) { AnsiConsole.WriteLine(); AnsiConsole.Render(new Rule($"[white]Ping results for [yellow]{options.Address}[/][/]").RuleStyle("grey54")); AnsiConsole.WriteLine(); var table = new Table() .Centered() .AddColumns( new TableColumn("Packets (Sent/Received/Lost)").Centered(), new TableColumn("Minimum Roundtrip").Centered(), new TableColumn("Maximum Roundtrip").Centered(), new TableColumn("Average Roundtrip").Centered(), new TableColumn("Elapsed Time").Centered() ) .SimpleBorder(); table.AddRow( $"{results.PacketsSent} / {results.PacketsReceived} / {results.PacketsLost}", results.MinimumRoundtrip.ToString("0ms"), results.MaximumRoundtrip.ToString("0ms"), results.AverageRoundtrip.ToString("0ms"), $"{(int)results.Elapsed.TotalMinutes}:{results.Elapsed.Seconds:00}" ); AnsiConsole.Render(table); } else { AnsiConsole.WriteLine("No results available."); } AnsiConsole.WriteLine(); return(results); }
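// For open-ended work with no meaningful percentage, the example above falls back to
// AnsiConsole.Status() instead of Progress(). A minimal sketch of that spinner-only path,
// with Task.Delay standing in for the real work.
using System.Threading.Tasks;
using Spectre.Console;

static class StatusDemo
{
    public static Task RunAsync() =>
        AnsiConsole.Status()
            .Spinner(Spinner.Known.Dots)
            .StartAsync("Waiting for work to finish...", async ctx =>
            {
                await Task.Delay(500);          // stand-in for the real work
                ctx.Status("Still waiting..."); // the status text can be updated as work proceeds
                await Task.Delay(500);
            });
}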
static void Main(string[] args) { Log.Logger = new LoggerConfiguration() .MinimumLevel.Debug() .WriteTo.Console(theme: ConsoleTheme.None) .CreateLogger(); var log = Log.ForContext <Program>(); var tempDir = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N")); Directory.CreateDirectory(tempDir); log.Information($"Temp dir: {tempDir}"); string outPath = Path.Combine(tempDir, "cooking.bin"); var fastText = new FastTextWrapper(loggerFactory: new LoggerFactory(new[] { new SerilogLoggerProvider() })); AnsiConsole.Progress() .Start(ctx => { var task = ctx.AddTask("Training"); var ftArgs = new SupervisedArgs { epoch = 15, lr = 1, dim = 300, wordNgrams = 2, minn = 3, maxn = 6, verbose = 0, TrainProgressCallback = (progress, loss, wst, lr, eta) => { task.Value = Math.Ceiling(progress * 100); task.Description = $"Loss: {loss:N3}, words/thread/sec: {wst}, LR: {lr:N5}, ETA: {eta}"; } }; fastText.Supervised("cooking.train.txt", outPath, ftArgs); }); try { File.Delete("_debug.txt"); } catch { } log.Information("Validating model on the test set"); var result = fastText.TestInternal("cooking.valid.txt", 1, 0.0f, true); log.Information($"Results:\n\tPrecision: {result.GlobalMetrics.GetPrecision()}" + $"\n\tRecall: {result.GlobalMetrics.GetRecall()}" + $"\n\tF1: {result.GlobalMetrics.GetF1()}"); var curve = result.GetPrecisionRecallCurve(); var(_, debugCurve) = TestResult.LoadDebugResult("_debug.txt", fastText.GetLabels()); string plotPath = PlotCurves(tempDir, new [] { curve, debugCurve }); log.Information($"Precision-Recall plot: {plotPath}"); Console.WriteLine("\nPress any key to exit."); Console.ReadKey(); Directory.Delete(tempDir, true); }
public static async Task Start()
{
    string checkAgainResponse = "";
    string filepath = null;
    using var cancellationTokenSource = new CancellationTokenSource();
    ConsoleCancelEventHandler cancelHandler = (sender, args) => cancellationTokenSource.Cancel();
    Console.CancelKeyPress += cancelHandler;
    do
    {
        Console.WriteLine("your password:");
        // The password-reading and hashing step was redacted in the source snippet;
        // ComputeHash is a hypothetical placeholder for it.
        var password = Console.ReadLine();
        var hash = ComputeHash(password);
        Console.WriteLine($"the hash is: {hash}");
        if (filepath == null)
        {
            Console.WriteLine("file path to hash file:");
            filepath = Console.ReadLine();
        }
        if (File.Exists(filepath))
        {
            filepath = Path.GetFullPath(filepath);
            Console.WriteLine(filepath);
            Console.WriteLine("search started");
            var time = new Stopwatch();
            time.Start();
            await AnsiConsole.Progress()
                .StartAsync(async ctx =>
                {
                    // Define tasks
                    var progressTask = ctx.AddTask("[green]Searching for password hash[/]");
                    // Forward reported progress (0-100) into the Spectre task.
                    var progressHandler = new Progress<double>(value => progressTask.Value = value);
                    int occurrences = 0;
                    try
                    {
                        occurrences = await PasswordChecker.FindPassword(hash, filepath, progressHandler, cancellationTokenSource.Token);
                    }
                    catch (OperationCanceledException)
                    {
                        Console.WriteLine("Canceled.");
                        return;
                    }
                    time.Stop();
                    Console.WriteLine($"Search took {time.Elapsed}");
                    if (occurrences > 0)
                    {
                        Console.WriteLine($"Bad news: Your password was found inside the database. This password has been seen {occurrences} times before");
                    }
                    else
                    {
                        Console.WriteLine("Good news: No occurrences found");
                    }
                });
            do
            {
                Console.WriteLine("Check another password? (Y/N)");
                checkAgainResponse = Console.ReadLine();
            } while (!checkAgainResponse.Equals("Y", StringComparison.CurrentCultureIgnoreCase) && !checkAgainResponse.Equals("N", StringComparison.CurrentCultureIgnoreCase));
        }
        else
        {
            Console.WriteLine("filepath invalid or not found");
        }
    } while (checkAgainResponse.Equals("Y", StringComparison.CurrentCultureIgnoreCase));
    Console.CancelKeyPress -= cancelHandler;
}
public override int Execute([NotNull] CommandContext context, [NotNull] SdkDownloadCommandSettings settings) { if (string.IsNullOrEmpty(settings?.Home)) { throw new ArgumentException(nameof(settings.Home)); } try { var dir = new DirectoryInfo(settings.Home); if (settings.Force) { if (dir.Exists) { dir.Delete(true); } } if (dir.Exists && ((dir.GetDirectories()?.Any() ?? false) || (dir.GetFiles()?.Any() ?? false))) { throw new InvalidOperationException("Directory already exists and is not empty!"); } if (!dir.Exists) { dir.Create(); } var m = new SdkManager(dir); m.FindToolPath(new DirectoryInfo(settings.Home)); var tcsResult = new TaskCompletionSource <int>(); var px = AnsiConsole.Progress(); ProgressTask dlTask = null; var progress = 0; m.DownloadSdk(dir, progressHandler: (p) => { progress = p; }).ContinueWith(t => { dlTask?.StopTask(); tcsResult.TrySetResult(0); }); AnsiConsole.Progress() .Start(ctx => { // Define tasks dlTask = ctx.AddTask("Downloading Android SDK..."); while (!ctx.IsFinished) { dlTask.Value = progress; } }); return(tcsResult.Task.Result); } catch (SdkToolFailedExitException sdkEx) { Program.WriteException(sdkEx); return(1); } }
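// The render loop above spins on a shared "progress" variable that the download callback updates.
// A sketch of the same callback-to-bar bridge with a short sleep so the polling loop doesn't
// consume a full core; startDownload here is a stand-in for whatever API reports percentages via
// a callback, not part of the original code.
using System;
using System.Threading;
using Spectre.Console;

static class CallbackBridgeDemo
{
    public static void Run(Action<Action<int>> startDownload)
    {
        var percent = 0;
        var done = new ManualResetEventSlim(false);

        // Hypothetical: the callback receives 0-100 and we remember the latest value.
        startDownload(p => { percent = p; if (p >= 100) done.Set(); });

        AnsiConsole.Progress().Start(ctx =>
        {
            var task = ctx.AddTask("Downloading...");
            while (!done.IsSet)
            {
                task.Value = percent;   // push the latest reported percentage into the bar
                Thread.Sleep(100);      // avoid a tight busy-wait
            }
            task.Value = task.MaxValue; // make sure the bar lands on 100%
        });
    }
}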
public static async Task Main(string outDir) { Directory.CreateDirectory(outDir); await using var services = new Container(); try { await AnsiConsole.Progress() .StartAsync( async ctx => { var achievements = ctx.AddTask("Downloading achievements."); await using (var file = CreateTextCompressed(Path.Combine(outDir, "achievements.json.gz"))) { var service = services.Resolve <JsonAchievementService>(); var documents = await service.GetAllJsonAchievements(Update(achievements)); foreach (var document in documents) { await file.WriteLineAsync(document); } } var items = ctx.AddTask("Downloading items."); await using (var file = CreateTextCompressed(Path.Combine(outDir, "items.json.gz"))) { var service = services.Resolve <JsonItemService>(); var documents = await service.GetAllJsonItems(Update(items)); foreach (var document in documents) { await file.WriteLineAsync(document); } } var recipes = ctx.AddTask("Downloading recipes."); await using (var file = CreateTextCompressed(Path.Combine(outDir, "recipes.json.gz"))) { var service = services.Resolve <JsonRecipeService>(); var documents = await service.GetAllJsonRecipes(Update(recipes)); foreach (var document in documents) { await file.WriteLineAsync(document); } } var skins = ctx.AddTask("Downloading skins."); await using (var file = CreateTextCompressed(Path.Combine(outDir, "skins.json.gz"))) { var service = services.Resolve <JsonSkinService>(); var documents = await service.GetAllJsonSkins(Update(skins)); foreach (var document in documents) { await file.WriteLineAsync(document); } } var floors1 = ctx.AddTask("Downloading floors.").MaxValue(2); await using (var file = CreateTextCompressed( Path.Combine(outDir, "continents_1_floors.json.gz") )) { var service = services.Resolve <JsonFloorService>(); var documents = await service.GetAllJsonFloors(1); foreach (var document in documents) { await file.WriteLineAsync(document); } floors1.Increment(1); } await using (var file = CreateTextCompressed( Path.Combine(outDir, "continents_2_floors.json.gz") )) { var service = services.Resolve <JsonFloorService>(); var documents = await service.GetAllJsonFloors(2); foreach (var document in documents) { await file.WriteLineAsync(document); } floors1.Increment(1); } var prices = ctx.AddTask("Downloading prices."); await using (var file = CreateTextCompressed(Path.Combine(outDir, "prices.json.gz"))) { var service = services.Resolve <JsonItemPriceService>(); var documents = await service.GetJsonItemPrices(Update(prices)); foreach (var document in documents) { await file.WriteLineAsync(document); } } var orders = ctx.AddTask("Downloading orders."); await using (var file = CreateTextCompressed(Path.Combine(outDir, "listings.json.gz"))) { var service = services.Resolve <JsonOrderBookService>(); var documents = await service.GetJsonOrderBooks(Update(orders)); foreach (var document in documents) { await file.WriteLineAsync(document); } } } ); } catch (Exception crash) { AnsiConsole.WriteException(crash); } ; }
public override async Task <int> ExecuteAsync([NotNull] CommandContext context, [NotNull] DeleteVersionsSettings settings) { var logger = NullLogger.Instance; var cancellationToken = CancellationToken.None; var cache = new SourceCacheContext(); var repository = Repository.Factory.GetCoreV3(settings.PackageSource); var metadataResource = await repository.GetResourceAsync <PackageMetadataResource>(); var updateResource = await repository.GetResourceAsync <PackageUpdateResource>(); var packages = await metadataResource.GetMetadataAsync( settings.PackageId, includePrerelease : true, includeUnlisted : true, cache, logger, cancellationToken); if (!packages.Any()) { AnsiConsole.MarkupLine($"[yellow]Warning:[/] Could not find package named [green]{settings.PackageId}[/]."); return(0); } var pending = packages .Where(p => settings.VersionRange.Includes(p.Identity.Version)) .ToList(); if (!pending.Any()) { AnsiConsole.MarkupLine( $"[yellow]Warning:[/] Could not find versions that satisfies version range [green]{settings.VersionRange}[/]."); return(0); } await AnsiConsole.Progress() .StartAsync(async ctx => { var task = ctx.AddTask("Deleting packages"); foreach (var package in pending) { var packageId = package.Identity.Id; var packageVersion = package.Identity.Version.ToNormalizedString(); AnsiConsole.MarkupLine($"Deleting package [green]{packageId} {packageVersion}[/]..."); if (!settings.DryRun) { await updateResource.Delete( packageId, packageVersion, getApiKey: packageSource => settings.ApiKey, confirm: packageSource => true, noServiceEndpoint: false, logger); } task.Increment(100.0 / pending.Count); } task.Value = task.MaxValue; task.StopTask(); AnsiConsole.WriteLine("Done!"); }); return(0); }
public override int Execute( CommandContext context, ImportSettings settings ) { double last = 0.0; double prog = 0.0; ProgressTask progress = null; var mre = new ManualResetEvent(false); void UpdateProgress(double p) { prog = p * 100d; mre.Set(); } var task = Task.Run(() => { CsvDataReader csv = null; var database = settings.Database; var filename = settings.File; var loader = GetLoader(settings.Provider); var tableName = settings.Table ?? Path.GetFileNameWithoutExtension(filename); Stream s = File.OpenRead(settings.File); s = s.WithReadProgress(UpdateProgress, 0.001); var tr = new StreamReader(s); for (int i = 0; i < settings.Skip; i++) { tr.ReadLine(); } string schemaSpec = null; if (settings.Schema != null) { var schemaFile = settings.Schema; schemaSpec = File.ReadAllText(schemaFile); } else { var schemaFile = filename + ".schema"; if (File.Exists(schemaFile)) { schemaSpec = File.ReadAllText(schemaFile); } } var explicitSchema = schemaSpec == null ? null : Schema.Parse(schemaSpec); var schema = explicitSchema == null ? CsvSchema.Nullable : new CsvSchema(explicitSchema); var opts = new CsvDataReaderOptions { BufferSize = 0x100000, Schema = schema, }; csv = CsvDataReader.Create(tr, opts); loader.Load(csv, tableName, database); mre.Set(); }); // ensures that the progress loop finishes. task.ContinueWith(t => mre.Set()); AnsiConsole.Progress() .Columns(new ProgressColumn[] { new TaskDescriptionColumn(), // Task description new ProgressBarColumn(), // Progress bar new PercentageColumn(), // Percentage new RemainingTimeColumn(), // Remaining time new SpinnerColumn(), } ) .Start(ctx => { progress = ctx.AddTask("Import"); while (!task.IsCompleted) { mre.WaitOne(); var inc = prog - last; last = prog; progress.Increment(inc); mre.Reset(); } if (task.IsFaulted) { throw task.Exception; } else { // make sure it arrives at 100% if (last < 100d) { progress.Increment(100d - last); } } }); return(0); }
public static async Task <int> Main(string[] args) { var rootDirectory = Path.GetDirectoryName(Assembly.GetEntryAssembly()?.Location) ?? string.Empty; var projectRoot = Path.Combine(rootDirectory, "../../.."); var allowListFile = Path.Combine(projectRoot, "allow-list.txt"); var lines = await File.ReadAllLinesAsync(allowListFile); var knownFailing = new HashSet <string>(lines .Where(x => !string.IsNullOrWhiteSpace(x) && !x.StartsWith("#")) ); // this should be same in both Test262Harness.settings.json and here const string Sha = "08a9fc2b974f83a9835174cede20a7935f126015"; var stream = await Test262StreamExtensions.FromGitHub(Sha); // we materialize to give better feedback on progress var test262Files = new ConcurrentBag <Test262File>(); TestExecutionSummary?summary = null; AnsiConsole.Progress() .Columns( new TaskDescriptionColumn(), new ProgressBarColumn(), new PercentageColumn(), new SpinnerColumn(), new ElapsedTimeColumn() ) .Start(ctx => { var readTask = ctx.AddTask("Loading tests", maxValue: 90_000); readTask.StartTask(); test262Files = new ConcurrentBag <Test262File>(stream.GetTestFiles()); readTask.Value = 100; readTask.StopTask(); AnsiConsole.WriteLine(); AnsiConsole.MarkupLine("Found [green]{0}[/] test cases to test against", test262Files.Count); var testTask = ctx.AddTask("Running tests", maxValue: test262Files.Count); var options = new Test262RunnerOptions { Execute = static file => { var parser = new JavaScriptParser(file.Program); if (file.Type == ProgramType.Script) { parser.ParseScript(file.Strict); } else { parser.ParseModule(); } }, IsIgnored = file => knownFailing.Contains(file.ToString()), IsParseError = exception => exception is ParserException, ShouldThrow = file => file.NegativeTestCase?.Type == ExpectedErrorType.SyntaxError || file.NegativeTestCase?.Phase == TestingPhase.Parse, OnTestExecuted = _ => testTask.Increment(1) }; var executor = new Test262Runner(options); summary = executor.Run(test262Files); testTask.StopTask(); });
private static void Main(string[] args) { // needs to: // know where to find source files // know where to output generates files to // generate a selection of output images using different encoders and settings // stats we want: // Images // >>> File Specs // >>> Generation time // >>> Byte size // Overall tool run time // Input Image // ------------------------------ // File Spec | time | size // ------------------------------ // spec3840 | 0m 0s 50ms | 34kb // ------------------------------ var inputPath = string.Empty; Parser.Default.ParseArguments <Options>(args).WithParsed(o => { inputPath = o.InputPath; }); if (!Directory.Exists(inputPath)) { Console.WriteLine($"Input path \"{inputPath}\" does not exist. Cannot continue."); return; } // start an overall timer var overallStopwatch = new Stopwatch(); overallStopwatch.Start(); // get a list of all images in the input folder var files = Directory.GetFiles(inputPath, "*.jp*g", SearchOption.TopDirectoryOnly); if (files.Length == 0) { Console.WriteLine($"Input path \"{inputPath}\" doesn't contain any jpg/jpeg files. Cannot continue."); return; } // create the output folder if necessary // under this we will create a timestamped folder for each run of the program to allow for comparisons over time var outputPath = Path.Combine(inputPath, "output", DateTime.Now.Ticks.ToString()); Directory.CreateDirectory(outputPath); Console.WriteLine($"Created the output folder: {outputPath}"); // get the list of specifications for images we want to produce and compare with each other //var specs = ImageFileSpecsTestingQuality.ProduceImageFileSpecs(); //var specs = ImageFileSpecsTestingSharpnessWithRobidoux.ProduceImageFileSpecs(); //var specs = ImageFileSpecsTestingSharpnessWithMitchell.ProduceImageFileSpecs(); //var specs = ImageFileSpecsTesting05s.ProduceImageFileSpecs(); //var specs = ImageFileSpecsTesting10s.ProduceImageFileSpecs(); //var specs = ImageFileSpecsTesting15s.ProduceImageFileSpecs(); //var specs = ImageFileSpecsTesting20s.ProduceImageFileSpecs(); var specs = ImageFileSpecsTestingFormats.ProduceImageFileSpecs(); var imagesToGenerate = specs.Count * files.Length; OverallProgressIncrementAmount = 100d / imagesToGenerate; Console.WriteLine($"Generating {imagesToGenerate} images..."); AnsiConsole.Progress().Start(ctx => { // define progress tasks var overallProgress = ctx.AddTask("[green]Generating images[/]"); Parallel.ForEach(files, file => { var table = new Table(); table.Title(file); table.AddColumn("Image File Spec"); table.AddColumn("Time (ms)"); table.AddColumn("Size (kb)"); ProcessInputFileAsync(file, specs, outputPath, table, overallProgress).GetAwaiter().GetResult(); AnsiConsole.Render(table); }); }); overallStopwatch.Stop(); Console.WriteLine($"Tool took {overallStopwatch.Elapsed.Minutes}m {overallStopwatch.Elapsed.Seconds}s {overallStopwatch.Elapsed.Milliseconds}ms to complete."); }
/// <summary> /// Installs JQ /// </summary> /// <returns>A new awaitable <see cref="Task"/></returns> protected virtual async Task InstallJQIfNotExistsAsync() { if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { var directory = new DirectoryInfo(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles), "jq")); if (directory.Exists) { return; //already installed } directory.Create(); var process = Process.Start(new ProcessStartInfo("cmd.exe", @"/c echo ;%PATH%; | find /C /I ""jq""") { RedirectStandardOutput = true }); var output = await process !.StandardOutput.ReadToEndAsync(); process.Dispose(); if (int.Parse(output.Trim()) > 0) { return; //already installed } using var stream = new MemoryStream(); await AnsiConsole.Progress() .Columns(new ProgressColumn[] { new TaskDescriptionColumn(), new ProgressBarColumn(), new PercentageColumn(), new RemainingTimeColumn(), new SpinnerColumn() }) .HideCompleted(true) .StartAsync(async context => { await Task.Run(async() => { var task = context.AddTask($"Downloading [u]jq[/]", new ProgressTaskSettings { AutoStart = false }); await this.HttpClient.DownloadAsync($"https://github.com/stedolan/jq/releases/latest/download/jq-win64.exe", stream, task); }); }); var file = new FileInfo(Path.Combine(directory.FullName, "jq.exe")); using var fileStream = file.Create(); stream.Position = 0; await stream.CopyToAsync(fileStream); await fileStream.FlushAsync(); var pathValue = Environment.GetEnvironmentVariable("PATH") !; if (!pathValue.Trim().EndsWith(';')) { pathValue += ";"; } pathValue += directory.FullName; Environment.SetEnvironmentVariable("PATH", pathValue, EnvironmentVariableTarget.User); } else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) { var process = Process.Start("bash", @"-c ""$ command -v foo >/dev/null 2>&1 || { echo 'I require foo but it's not installed. Aborting.' >&2; exit 1; }"""); await process.WaitForExitAsync(); if (process.ExitCode == 0) { return; } process.Dispose(); process = Process.Start("bash", @"-c ""apt-get update"""); await process.WaitForExitAsync(); process.Dispose(); process = Process.Start("bash", @"-c ""apt-get install jq -y"""); await process.WaitForExitAsync(); process.Dispose(); } else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) { var process = Process.Start("brew install jq"); await process.WaitForExitAsync(); process.Dispose(); } else { throw new PlatformNotSupportedException(); } }
internal static Command Create() { var today = DateTime.Today; var aWeekAgo = today.AddDays(-7); var exportCommand = new Command("export") { new Option <string>("--apiKey", description: "An API key with permission to execute the command") { IsRequired = true, }, new Option <Guid>("--logId", "The ID of the log to export messages from") { IsRequired = true }, new Option <DateTime>("--dateFrom", $"Defines the Date from which the logs start. Ex. \" --dateFrom {aWeekAgo:yyyy-MM-dd}\"") { IsRequired = true, }, new Option <DateTime>("--dateTo", $"Defines the Date from which the logs end. Ex. \" --dateTo {today:yyyy-MM-dd}\"") { IsRequired = true, }, new Option <string>( "--filename", getDefaultValue: () => Path.Combine(Directory.GetCurrentDirectory(), $"Export-{DateTime.Now.Ticks}.json"), "Defines the path and filename of the file to export to. Ex. \" -Filename C:\\myDirectory\\myFile.json\""), new Option <string>("--query", getDefaultValue: () => "*", "Defines the query that is passed to the API"), new Option <bool>("--includeHeaders", "Include headers, cookies, etc. in output (will take longer to export)"), }; exportCommand.Description = "Export log messages from a specified log"; exportCommand.Handler = CommandHandler.Create <string, Guid, DateTime, DateTime, string, string, bool>((apiKey, logId, dateFrom, dateTo, filename, query, includeHeaders) => { var api = Api(apiKey); try { var startResult = api.Messages.GetAll(logId.ToString(), 0, 1, query, dateFrom, dateTo, includeHeaders); if (startResult == null) { AnsiConsole.MarkupLine("[#ffc936]Could not find any messages for this API key and log ID combination[/]"); } else { int messSum = startResult.Total.Value; if (messSum > 10000) { AnsiConsole.MarkupLine("[#ffc936]Query returned more than 10,000 messages. The exporter will cap at 10,000 messages. Consider using the -DateFrom, -DateTo, and/or the -Query parameters to limit the search result.[/]"); messSum = 10000; } AnsiConsole .Progress() .Start(ctx => { // Define tasks var task = ctx.AddTask("Exporting log messages", new ProgressTaskSettings { MaxValue = messSum, }); if (File.Exists(filename)) { File.Delete(filename); } using (StreamWriter w = File.AppendText(filename)) { int i = 0; w.WriteLine("["); while (i < messSum) { var respons = api.Messages.GetAll(logId.ToString(), i / 10, 10, query, dateFrom, dateTo, includeHeaders); foreach (Client.MessageOverview message in respons.Messages) { w.WriteLine(JValue.Parse(JsonConvert.SerializeObject(message)).ToString(Formatting.Indented)); i++; if (i != messSum) { w.WriteLine(","); } task.Increment(1); } } w.WriteLine("]"); } task.StopTask(); }); AnsiConsole.MarkupLine($"[green]Done with export to [/][grey]{filename}[/]"); } } catch (Exception e) { AnsiConsole.MarkupLine($"[red]{e.Message}[/]"); } }); return(exportCommand); }
static void Main(string[] args)
{
    var timer = new Stopwatch();
    timer.Start();
    var src = args.Length > 0 ? args[0] : "O:/Old software/Amiga/TOSEC 2016-11-11/Commodore Amiga - Demos - Animations and Videos";
    var dest = args.Length > 1 ? args[1] : "./files";
    var noSplit = args.Any(x => x.Equals("--nosplit", StringComparison.InvariantCultureIgnoreCase));
    var unpack = args.Any(x => x.Equals("--unpack", StringComparison.InvariantCultureIgnoreCase));
    var useFilter = args.Any(x => x.Equals("--filter", StringComparison.InvariantCultureIgnoreCase));
    var dryrun = args.Any(x => x.Equals("--dryrun", StringComparison.InvariantCultureIgnoreCase));
    var includeFolders = args.Any(x => x.Equals("--folders", StringComparison.InvariantCultureIgnoreCase));
    var flattenSubdirs = args.Any(x => x.Equals("--flatten", StringComparison.InvariantCultureIgnoreCase));
    List<FileSystemInfo> fsItems = null;
    // List<DirectoryInfo> dirs = null;
    AnsiConsole.Status()
        .Start($"Reading files from [bold]{src.Replace("[", "[[").Replace("]", "]]")}[/]", ctx =>
        {
            ctx.Spinner(Spinner.Known.Dots);
            var srcDir = new DirectoryInfo(src);
            if (!includeFolders)
                fsItems = srcDir.GetFiles()
                    .Select(x => x as FileSystemInfo)
                    .Where(f => !useFilter || !Filter.IsMatch(f.Name))
                    .OrderBy(x => x.Name)
                    .ToList();
            else
                fsItems = srcDir.GetFileSystemInfos()
                    .Select(x => x as FileSystemInfo)
                    .Where(f => !useFilter || !Filter.IsMatch(f.Name))
                    .OrderBy(x => x.Name)
                    .ToList();
        });
    AnsiConsole.MarkupLine($"Found [green]{fsItems.Count}[/] files in [bold]{src.Replace("[", "[[").Replace("]", "]]")}[/].");
    var duplicates = 0;
    var useSubFolders = !noSplit && fsItems.Count > 200;
    AnsiConsole.Progress()
        .AutoClear(false)
        .Columns(new ProgressColumn[]
        {
            new TaskDescriptionColumn(),    // Task description
            new ProgressBarColumn(),        // Progress bar
            new PercentageColumn(),         // Percentage
            new RemainingTimeColumn(),      // Remaining time
            new SpinnerColumn(),            // Spinner
        })
        .Start(ctx =>
        {
            var task1 = ctx.AddTask("Removing duplicates", new ProgressTaskSettings { MaxValue = fsItems.Count });
            var uniqueFiles = new ConcurrentBag<FileSystemInfo>();
            Parallel.ForEach(fsItems.GroupBy(x => x.Name[0]), (itemsByFirstLetter, _, _) =>
            {
                Parallel.ForEach(itemsByFirstLetter, (file, state, index) =>
                {
                    var relevantName = RelevantName(file.Name);
                    var isDuplicate = itemsByFirstLetter.Take((int)index).Any(x => RelevantName(x.Name).Equals(relevantName, StringComparison.InvariantCultureIgnoreCase));
                    if (isDuplicate)
                    {
                        duplicates++;
                    }
                    else
                        uniqueFiles.Add(file);
                    task1.Increment(1);
                });
            });
            task1.StopTask();
            var task3 = ctx.AddTask($"Copying files to [bold]{dest}[/]", new ProgressTaskSettings { AutoStart = false, MaxValue = uniqueFiles.Count });
            task3.Description = $"{(unpack ? "Unpacking" : "Copying")} {uniqueFiles.Count} files to [bold]{dest}[/]";
            task3.StartTask();
            foreach (var file in uniqueFiles)
            {
                var destination = dest;
                if (useSubFolders)
                {
                    var group = Words.Match(file.Name).Value.ToUpperInvariant();
                    if (Digits.IsMatch(group)) group = "0";
                    destination = Path.Combine(dest, group);
                }
                if (!dryrun)
                {
                    if (!Directory.Exists(destination)) Directory.CreateDirectory(destination);
                    if (!unpack)
                    {
                        if (file is FileInfo f)
                            f.CopyTo(Path.Combine(destination, file.Name), true);
                        else if (file is DirectoryInfo d)
                        {
                            if (!flattenSubdirs) destination = Directory.CreateDirectory(Path.Combine(destination, d.Name)).FullName;
                            foreach (var fileInDir in d.GetFiles())
                                fileInDir.CopyTo(Path.Combine(destination, fileInDir.Name));
                        }
                    }
                    else
                        ZipFile.ExtractToDirectory(file.FullName, destination, true);
                }
                task3.Increment(1);
            }
        });
    timer.Stop();
    AnsiConsole.MarkupLine($"Found [red]{duplicates}[/] duplicates in [green]{fsItems.Count}[/] files");
    AnsiConsole.MarkupLine($"Time elapsed: [bold]{timer.Elapsed} [/]seconds");
}
internal static Command Create() { var dataloaderCommand = new Command("dataloader") { new Option <string>("--apiKey", description: "An API key with permission to execute the command") { IsRequired = true, }, new Option <Guid>("--logId", "The log ID of the log to import messages into") { IsRequired = true }, }; dataloaderCommand.Description = "Load 50 log messages into the specified log"; dataloaderCommand.Handler = CommandHandler.Create <string, Guid>((apiKey, logId) => { var api = Api(apiKey); var random = new Random(); var yesterday = DateTime.UtcNow.AddDays(-1); AnsiConsole .Progress() .Start(ctx => { var numberOfMessages = 50; // Define tasks var task = ctx.AddTask("Loading log messages", new ProgressTaskSettings { MaxValue = numberOfMessages, }); try { for (var i = 0; i < numberOfMessages; i++) { var r = random.NextDouble(); api.Messages.CreateAndNotify(logId, new CreateMessage { //Application = "Elmah.Io.DataLoader", Cookies = new[] { new Item("ASP.NET_SessionId", "lm5lbj35ehweehwha2ggsehh"), new Item("_ga", "GA1.3.1580453215.1783132008"), }, Data = new[] { new Item("Father", "Stephen Falken"), }, DateTime = yesterday.AddMinutes(random.Next(1440)), Detail = DotNetStackTrace, Form = new[] { new Item("Username", "Joshua"), new Item("Password", "********"), }, QueryString = new[] { new Item("logid", logId.ToString()) }, ServerVariables = new[] { new Item("REMOTE_ADDR", "1.1.1.1"), new Item("CERT_KEYSIZE", "256"), new Item("CONTENT_LENGTH", "0"), new Item("QUERY_STRING", "logid=" + logId), new Item("REQUEST_METHOD", Method(r)), new Item("HTTP_USER_AGENT", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36"), new Item("HTTP_CF_IPCOUNTRY", "AU"), new Item("URL", Url(r)), new Item("HTTP_HOST", "foo.bar"), }, Hostname = "Web01", Severity = Severity(r), Source = "Elmah.Io.Cli.exe", StatusCode = StatusCode(r), Title = Title(r), Type = Type(r), Url = Url(r), Method = Method(r), User = User(r), Version = "1.1.0", Application = "Dataloader", }); task.Increment(1); } } catch (Exception e) { AnsiConsole.MarkupLine($"[red]{e.Message}[/]"); } finally { task.StopTask(); } }); AnsiConsole.MarkupLine("[green]Successfully loaded [/][grey]50[/][green] log messages[/]"); }); return(dataloaderCommand); }
/// <summary> /// Handles the <see cref="NativeInstallCommand"/> /// </summary> /// <param name="directory">The directory to install Synapse to</param> /// <returns>A new awaitable <see cref="Task"/></returns> public async Task HandleAsync(string directory) { var process = Process.Start(new ProcessStartInfo("cmd.exe", @"/c echo ;%PATH%; | find /C /I ""synapse""") { RedirectStandardOutput = true }); var output = await process !.StandardOutput.ReadToEndAsync(); process.Dispose(); if (int.Parse(output.Trim()) > 0) { return; //already installed } if (string.IsNullOrWhiteSpace(directory)) { if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { directory = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles), "CNCF", "Synapse"); } else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) { directory = Path.Combine("usr", "local", "cncf", "synapse"); } else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) { directory = Path.Combine("Applications", "CNCF", "Synapse"); } else { throw new PlatformNotSupportedException(); } } var directoryInfo = new DirectoryInfo(directory); if (!directoryInfo.Exists) { directoryInfo.Create(); } await this.InstallJQIfNotExistsAsync(); var target = null as string; if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { target = "win-x64.zip"; } else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) { target = "linux-x64.tar.gz"; } else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) { target = "osx-64.tar.gz"; } else { throw new PlatformNotSupportedException(); } using var packageStream = new MemoryStream(); await AnsiConsole.Progress() .Columns(new ProgressColumn[] { new TaskDescriptionColumn(), new ProgressBarColumn(), new PercentageColumn(), new RemainingTimeColumn(), new SpinnerColumn(), }) .HideCompleted(true) .StartAsync(async context => { await Task.Run(async() => { var task = context.AddTask($"Downloading [u]synapse-{target}[/]", new ProgressTaskSettings { AutoStart = false }); await this.HttpClient.DownloadAsync($"https://github.com/serverlessworkflow/synapse/releases/download/{typeof(NativeInstallCommand).Assembly.GetName()!.Version!.ToString(3)}/synapse-{target}", packageStream, task); }); }); AnsiConsole.Status() .Start("Extracting [u]synapse-{target}[/]...", ctx => { using var archive = new ZipArchive(packageStream, ZipArchiveMode.Read); archive.ExtractToDirectory(directoryInfo.FullName, true); }); if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { var pathValue = Environment.GetEnvironmentVariable("PATH") !; if (!pathValue.Trim().EndsWith(';')) { pathValue += ";"; } pathValue += directoryInfo.FullName; Environment.SetEnvironmentVariable("PATH", pathValue, EnvironmentVariableTarget.User); } }