/// <summary>
/// Creates a new operation of type <typeparamref name="TOperation"/> and registers it
/// together with the given execution constraint.
/// </summary>
/// <param name="constraint">Constraint checked before the operation is executed.</param>
public override void AddOperation<TOperation>(ICheckConstraint constraint)
{
    // Activator.CreateInstance<TOperation>() needs a public parameterless constructor.
    var operation = Activator.CreateInstance<TOperation>() as BasicOperation<T>;

    // BUG FIX: the `as` cast was used unchecked; a TOperation that does not derive
    // from BasicOperation<T> produced a null invoker target and a deferred
    // NullReferenceException. Fail fast with a clear message instead.
    if (operation == null)
        throw new InvalidOperationException($"{typeof(TOperation).Name} must derive from BasicOperation<{typeof(T).Name}>.");

    var operationPair = new OperationDuplex<T>(new OperationInvoker<T>(operation), constraint);
    ParallelOperations.Add(operationPair);
}
/// <summary>
/// Creates a new operation of type <typeparamref name="TOperation"/> and registers it
/// without an execution constraint.
/// </summary>
public override void AddOperation<TOperation>()
{
    // Activator.CreateInstance<TOperation>() needs a public parameterless constructor.
    var operation = Activator.CreateInstance<TOperation>() as BasicOperation<T>;

    // BUG FIX: the `as` cast was previously passed to the invoker unchecked; a
    // TOperation that does not derive from BasicOperation<T> silently produced a
    // null target and a deferred NullReferenceException. Fail fast instead.
    if (operation == null)
        throw new InvalidOperationException($"{typeof(TOperation).Name} must derive from BasicOperation<{typeof(T).Name}>.");

    var operationPair = new OperationDuplex<T>(new OperationInvoker<T>(operation));
    ParallelOperations.Add(operationPair);
}
/// <summary>
/// Applies a client-supplied configuration to the named module and, depending on the
/// requested update mode, persists it live and/or restarts ("reincarnates") the module.
/// On any failure the error is logged and an HTTP 500 status is returned to the caller.
/// </summary>
/// <param name="moduleName">Name of the module whose configuration is updated.</param>
/// <param name="request">Carries the new config entries and the desired update mode.</param>
public void SetConfig(string moduleName, SaveConfigRequest request)
{
    try
    {
        var module = GetModuleFromManager(moduleName);
        var serialization = CreateSerialization(module);
        var config = GetConfig(module, true);
        // Copy the transmitted entry tree onto the module's config object.
        EntryConvert.UpdateInstance(config, request.Config.Root, serialization);
        // Persist; apply to the running module only for UpdateLiveAndSave.
        ConfigManager.SaveConfiguration(config, request.UpdateMode == ConfigUpdateMode.UpdateLiveAndSave);
        if (request.UpdateMode == ConfigUpdateMode.SaveAndReincarnate)
        {
            // This has to be done parallel so we can also reincarnate the Maintenance itself
            ParallelOperations.ExecuteParallel(() => ModuleManager.ReincarnateModule(module));
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(LogLevel.Warning, ex, "Failed to save config of {0}", moduleName);
        // Two hosting stacks: WCF sets the status on the outgoing response context,
        // the ASP.NET-style build writes it onto the Response object directly.
#if USE_WCF
        var ctx = WebOperationContext.Current;
        // ReSharper disable once PossibleNullReferenceException
        ctx.OutgoingResponse.StatusCode = HttpStatusCode.InternalServerError;
#else
        Response.StatusCode = (int)HttpStatusCode.InternalServerError;
#endif
    }
}
/// <summary>
/// Per-test setup: provides a fresh ParallelOperations instance wired to a dummy logger.
/// </summary>
public void Setup()
{
    var operations = new ParallelOperations();
    operations.Logger = new DummyLogger();
    _parallelOperations = operations;
}
/// <summary>
/// Squares(1, 5) must yield the squares 1..25; the result is sorted before comparison
/// because the test does not assert ordering of the produced sequence.
/// </summary>
public void Squares_given_1_and_5_returns_1_4_9_16_25()
{
    // Act
    var actual = ParallelOperations.Squares(1, 5);

    // Assert (order-independent)
    var expected = new List<long> { 1, 4, 9, 16, 25 };
    Assert.Equal(expected, actual.OrderBy(value => value));
}
/// <summary>
/// Per-test setup: clears the captured log buffer, creates a fresh ParallelOperations
/// instance bound to the test logger, and resets the callback signal.
/// </summary>
public void Setup()
{
    _logger.ClearBuffer();

    var factory = new ParallelOperations();
    factory.Logger = _logger;
    _threadFactory = factory;

    _callbackReceivedEvent.Reset();
}
/// <summary>
/// Verifies that CreateThumbnails forwards each source file to IPictureModule.Resize
/// with the combined output path and the requested thumbnail size.
/// </summary>
public void CreateThumbnails_resizer_called_with_right_parameters()
{
    // Arrange
    var sources = new string[] { "file000132701536" };
    var targetFolder = "OutputFolder";
    var thumbSize = new Size(34, 34);
    var moduleMock = new Mock<IPictureModule>();

    // Act
    ParallelOperations.CreateThumbnails(moduleMock.Object, sources, targetFolder, thumbSize);

    // Assert – the resizer received source, destination and size unchanged.
    moduleMock.Verify(m => m.Resize(sources[0], Path.Combine(targetFolder, Path.GetFileName(sources[0])), thumbSize));
}
/// <summary>
/// Runs CreateThumbnails over the sample images and verifies that a thumbnail file is
/// produced for every source image in the output folder.
/// </summary>
public void TestRezized()
{
    // Locate the assignment folder relative to the test assembly's location.
    string directory = Directory
        .GetParent(Assembly.GetExecutingAssembly().Location)
        .Parent.Parent.Parent.Parent.FullName + @"\BDSA2017.Assignment06";
    var outputFolder = directory + @"\imageRezized";
    IEnumerable<string> test = Directory.GetFiles(directory + @"\images");

    ParallelOperations.CreateThumbnails(new PictureModule(), test, outputFolder, new Size(1000, 1000));

    // BUG FIX: this test previously ended in Assert.False(true) — an unconditional
    // failure placeholder — and constructed a Mock<IPictureModule> it never used.
    // Assert the actual outcome instead: every input image has a corresponding
    // output file after thumbnail creation.
    foreach (var source in test)
    {
        Assert.True(File.Exists(Path.Combine(outputFolder, Path.GetFileName(source))));
    }
}
/// <summary>
/// Renders the current configuration as an aligned plain-text table for feedback output:
/// a header with the application version/description, one row per configuration entry
/// showing name, description, current value and the default taken from a fresh
/// <c>Repository</c> instance, and a trailing documentation link.
/// Columns start at the offsets in <c>colInfo</c> (0, 30, 65, 75); when a cell overruns
/// its column, the line built so far is flushed and the row continues on a new line.
/// </summary>
/// <returns>The formatted multi-line feedback string.</returns>
// NOTE(review): rows shorter than 4 cells (separators, the final link line) are safe
// because the inner loop iterates only over row.Length — colInfo is merely indexed
// in step with it. Empty rows produce blank lines in the output.
internal string ToFeedbackString() { var info = $@"{ApplicationFullVersion} {ApplicationDescription} "; var repoDefaults = new Repository(); var colInfo = new int[] { 0, 30, 65, 75 }; var table = new List <string[]>() { new string[] { "Configuration entry", "Description", "Value", "Default" }, new string[] { "===================", "===========", "=====", "=======" }, Array.Empty <string>(), new string[] { nameof(UploadFolder), "Upload Folder", UploadFolder, repoDefaults.UploadFolder }, new string[] { nameof(ArchiveFolder), "Archive Folder", ArchiveFolder, repoDefaults.ArchiveFolder }, new string[] { nameof(TransferCheckpointFilename), "Transfer Checkpoint Filename", TransferCheckpointFilename, repoDefaults.TransferCheckpointFilename }, new string[] { nameof(BlockSize), "Tx Block Size", BlockSize.ToSizeSuffix(), repoDefaults.BlockSize.ToSizeSuffix() }, new string[] { nameof(ParallelOperations), "Parallel Operations", ParallelOperations.ToString(), repoDefaults.ParallelOperations.ToString() }, new string[] { nameof(DefaultConnectionLimit), "Default Connection Limit", DefaultConnectionLimit.ToString(), repoDefaults.DefaultConnectionLimit.ToString() }, new string[] { nameof(Expect100Continue), "Wait for '100' response?", Expect100Continue.ToString(), repoDefaults.Expect100Continue.ToString() }, new string[] { nameof(Recursive), "Recurse the upload folder", Recursive.ToString(), repoDefaults.Recursive.ToString() }, Array.Empty <string>(), new string[] { nameof(BlobContainerUri), "Azure Blob Container", BlobDirectory?.Uri.ToString(), repoDefaults.BlobDirectory?.Uri.ToString() }, new string[] { nameof(FileContainerUri), "Azure File Directory", FileDirectory?.Uri.ToString(), repoDefaults.FileDirectory?.Uri.ToString() }, Array.Empty <string>(), new string[] { "For details of the configuration options see: https://github.com/Azure/azure-storage-net-data-movement/" }, }; table.ForEach(row => { var line = ""; for (int i = 0; i < row.Length; i++) { if (line.Length > colInfo[i]) 
{ info = info.TrimEnd(' ') + $"\n{line}"; line = string.Empty; } line = $"{line.PadRight(colInfo[i])}{row[i]} "; } info = info.TrimEnd(' ') + $"\n{line}"; }); return(info); }
/// <summary>
/// Schedules two recurring enqueue operations, stops the queue mid-stream and verifies
/// that no further messages are delivered after Stop().
/// </summary>
public void InterruptQueue()
{
    // Arrange
    var scheduler = new ParallelOperations();

    // Act: two recurring schedules offset around Delay, then stop the queue.
    scheduler.ScheduleExecution(_queue.Enqueue, new DummyMessage(), (int)(Delay * 0.5), -1);
    scheduler.ScheduleExecution(_queue.Enqueue, new DummyMessage(), (int)(Delay * 1.5), -1);
    Thread.Sleep(Delay * 2);
    _queue.Stop();
    var receivedBeforeStop = _times.Count;
    _times.Clear();
    Thread.Sleep(Delay * 2);

    // Assert: nothing arrives once the queue is stopped.
    // NOTE(review): GreaterOrEqual(x, 0) on a Count is trivially true — possibly
    // meant to assert at least one delivery before Stop(); kept as-is.
    Assert.GreaterOrEqual(receivedBeforeStop, 0);
    Assert.AreEqual(0, _times.Count);
}
/// <summary>
/// Applies a client-supplied configuration to the named module and, depending on the
/// requested update mode, persists it live and/or restarts ("reincarnates") the module.
/// Failures are logged and reported to the caller as HTTP 500.
/// </summary>
/// <param name="moduleName">Name of the module whose configuration is updated.</param>
/// <param name="request">Carries the new config entries and the desired update mode.</param>
public void SetConfig(string moduleName, SaveConfigRequest request)
{
    try
    {
        var module = GetModuleFromManager(moduleName);
        var serialization = CreateSerialization(module);
        var liveConfig = GetConfig(module, true);

        // Copy the transmitted entry tree onto the module's config object.
        EntryConvert.UpdateInstance(liveConfig, request.Config.Root, serialization);

        var applyLive = request.UpdateMode == ConfigUpdateMode.UpdateLiveAndSave;
        ConfigManager.SaveConfiguration(liveConfig, applyLive);

        if (request.UpdateMode == ConfigUpdateMode.SaveAndReincarnate)
        {
            // This has to be done parallel so we can also reincarnate the Maintenance itself
            ParallelOperations.ExecuteParallel(() => ModuleManager.ReincarnateModule(module));
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(LogLevel.Warning, ex, "Failed to save config of {0}", moduleName);
        HttpHelper.SetStatusCode(HttpStatusCode.InternalServerError);
    }
}
/// <summary>
/// Registers the given workflow for parallel execution without a constraint.
/// </summary>
/// <param name="workflow">Workflow to execute.</param>
public override void AddOperation(IWorkflow<T> workflow)
{
    var invoker = new WorkflowInvoker<T>(workflow);
    ParallelOperations.Add(new OperationDuplex<T>(invoker));
}
/// <summary>
/// Squares(1, 5) must produce exactly the squares of 1 through 5.
/// </summary>
public void TestSquared()
{
    var expected = new long[] { 1, 4, 9, 16, 25 };
    var actual = ParallelOperations.Squares(1, 5);
    Assert.Equal(expected, actual);
}
/// <summary>
/// Schedules a single deferred HelloCallback for the given name.
/// </summary>
/// <param name="name">Name forwarded to the callback.</param>
public void TriggerHelloCallback(string name)
{
    // 100 ms initial delay; Timeout.Infinite as the period means it fires once.
    const int delayMs = 100;
    ParallelOperations.ScheduleExecution(() => ServiceManager.HelloCallback(name), delayMs, Timeout.Infinite);
}
/// <summary>
/// Restarts ("reincarnates") the named module asynchronously.
/// </summary>
/// <param name="moduleName">Name of the module to restart.</param>
public void Reincarnate(string moduleName)
{
    // Resolve first so an unknown module name fails before work is dispatched.
    var module = GetModuleFromManager(moduleName);
    ParallelOperations.ExecuteParallel(ModuleManager.ReincarnateModule, module);
}
/// <summary>
/// Registers the given operation for parallel execution without a constraint.
/// </summary>
/// <param name="operation">Operation to execute; must derive from BasicOperation&lt;T&gt;.</param>
public override void AddOperation(IOperation<T> operation)
{
    // BUG FIX: the `as` cast was previously passed unchecked into the invoker,
    // deferring a NullReferenceException when the operation was not a
    // BasicOperation<T>. Validate the argument up front instead.
    var basicOperation = operation as BasicOperation<T>;
    if (basicOperation == null)
        throw new ArgumentException($"Operation must derive from BasicOperation<{typeof(T).Name}>.", nameof(operation));

    var operationPair = new OperationDuplex<T>(new OperationInvoker<T>(basicOperation));
    ParallelOperations.Add(operationPair);
}
/// <summary>
/// Registers the given operation for parallel execution together with an execution constraint.
/// </summary>
/// <param name="operation">Operation to execute; must derive from BasicOperation&lt;T&gt;.</param>
/// <param name="constraint">Constraint checked before the operation is executed.</param>
public override void AddOperation(IOperation<T> operation, ICheckConstraint constraint)
{
    // BUG FIX: the `as` cast was previously passed unchecked into the invoker,
    // deferring a NullReferenceException when the operation was not a
    // BasicOperation<T>. Validate the argument up front instead.
    var basicOperation = operation as BasicOperation<T>;
    if (basicOperation == null)
        throw new ArgumentException($"Operation must derive from BasicOperation<{typeof(T).Name}>.", nameof(operation));

    var operationPair = new OperationDuplex<T>(new OperationInvoker<T>(basicOperation), constraint);
    ParallelOperations.Add(operationPair);
}
/// <summary>
/// Registers the given transformation function for parallel execution without a constraint.
/// </summary>
/// <param name="function">Function applied to the input value.</param>
public override void AddOperation(Func<T, T> function)
{
    var invoker = new FunctionInvoker<T>(function);
    ParallelOperations.Add(new OperationDuplex<T>(invoker));
}
/// <summary>
/// Registers the given transformation function together with an execution constraint.
/// </summary>
/// <param name="function">Function applied to the input value.</param>
/// <param name="constraint">Constraint checked before the function is executed.</param>
public override void AddOperation(Func<T, T> function, ICheckConstraint constraint)
{
    var invoker = new FunctionInvoker<T>(function);
    ParallelOperations.Add(new OperationDuplex<T>(invoker, constraint));
}
/// <summary>
/// Handle multiple validations, running the Prepare and Answer phases either in
/// parallel (via Task.WhenAll) or serially, according to the flags set in
/// <paramref name="level"/>. Commit happens once for the shared validation plugin
/// (taken from the first context); cleanup always runs in the finally block.
/// Errors from each context are transferred into <c>context.Result</c>, and the
/// method returns early as soon as the accumulated result is no longer successful.
/// </summary>
/// <param name="level">Flags selecting which phases (Prepare/Answer) may run in parallel.</param>
/// <param name="scope">Lifetime scope used to construct a ValidationContext per parameter set.</param>
/// <param name="context">Execution context whose Result collects errors and success state.</param>
/// <param name="parameters">One parameter set per validation to perform.</param>
/// <returns>A task that completes when validation and cleanup have finished.</returns>
private async Task ParallelValidation(ParallelOperations level, ILifetimeScope scope, ExecutionContext context, List <ValidationContextParameters> parameters) { var contexts = parameters.Select(parameter => new ValidationContext(scope, parameter)).ToList(); var plugin = contexts.First().ValidationPlugin; try { // Prepare for challenge answer if (level.HasFlag(ParallelOperations.Prepare)) { // Parallel _log.Verbose("Handle {n} preparation(s)", contexts.Count); var prepareTasks = contexts.Select(vc => PrepareChallengeAnswer(vc, context.RunLevel)); await Task.WhenAll(prepareTasks); foreach (var ctx in contexts) { TransferErrors(ctx, context.Result); } if (!context.Result.Success) { return; } } else { // Serial foreach (var ctx in contexts) { await PrepareChallengeAnswer(ctx, context.RunLevel); TransferErrors(ctx, context.Result); if (!context.Result.Success) { return; } } } // Commit var commited = await CommitValidation(plugin); if (!commited) { context.Result.AddErrorMessage("Commit failed"); return; } // Submit challenge answer var contextsWithChallenges = contexts.Where(x => x.Challenge != null).ToList(); if (contextsWithChallenges.Any()) { if (level.HasFlag(ParallelOperations.Answer)) { // Parallel _log.Verbose("Handle {n} answers(s)", contextsWithChallenges.Count); var answerTasks = contextsWithChallenges.Select(vc => AnswerChallenge(vc)); await Task.WhenAll(answerTasks); foreach (var ctx in contextsWithChallenges) { TransferErrors(ctx, context.Result); } if (!context.Result.Success) { return; } } else { // Serial foreach (var ctx in contextsWithChallenges) { await AnswerChallenge(ctx); TransferErrors(ctx, context.Result); if (!context.Result.Success) { return; } } } } } finally { // Cleanup await CleanValidation(plugin); } }
/// <summary>
/// Registers the given workflow together with an execution constraint.
/// </summary>
/// <param name="workflow">Workflow to execute.</param>
/// <param name="constraint">Constraint checked before the workflow is executed.</param>
public override void AddOperation(IWorkflow<T> workflow, ICheckConstraint constraint)
{
    var invoker = new WorkflowInvoker<T>(workflow);
    ParallelOperations.Add(new OperationDuplex<T>(invoker, constraint));
}