/// <summary>
/// Computes the average count per second from a map of second-bucket -> count.
/// </summary>
/// <param name="entries">Map keyed by a time bucket in whole seconds; values are counts for that second.</param>
/// <returns>
/// Total of all counts divided by the elapsed seconds between the earliest and
/// latest bucket, or 0 when the map is empty or spans a single second.
/// </returns>
private double GetPerSecond(ConcurrentDictionary <int, int> entries)
{
    // Snapshot the keys once. Min/Max are O(n); the original sorted the keys
    // (O(n log n)) and enumerated the ordered sequence twice just to read the
    // first and last elements.
    var keys = entries.Keys;
    if (keys.Count == 0)
    {
        return 0; // no samples at all
    }

    var elapsedSeconds = keys.Max() - keys.Min();
    if (elapsedSeconds == 0)
    {
        return 0; // all samples fall in the same second; a rate is undefined
    }

    return entries.Sum(x => x.Value) / (double)elapsedSeconds;
}
/// <summary>
/// Builds a snapshot of this cache's estimated memory footprint for the
/// low-memory handler.
/// </summary>
/// <returns>Statistics naming the cache and estimating its memory use in bytes.</returns>
public LowMemoryHandlerStatistics GetStats()
{
    // Estimate = chars of every outer key, plus per entry the chars of each
    // inner key and the ints of each inner value array.
    var estimatedUsedMemory = Results.Sum(
        outer => outer.Key.Length * sizeof(char)
                 + outer.Value.Results.Sum(inner => inner.Key.Length * sizeof(char) + inner.Value.Length * sizeof(int)));

    return new LowMemoryHandlerStatistics
    {
        Name = "CachedIndexedTerms",
        Metadata = new { IndexName = indexName },
        DatabaseName = "CachedIndexedTerms",
        EstimatedUsedMemory = estimatedUsedMemory
    };
}
/// <summary>
/// Stress test for ConcurrentDictionaryActionQueue: 1000 rounds of 1000
/// parallel adds across 7 users, verifying that per-user action execution is
/// serialized (the unsynchronized values[] increments must match the
/// Interlocked per-user counters exactly).
/// </summary>
public void ConcurrentDictionaryActionQueueTest()
{
    ConcurrentDictionaryActionQueue <int> userActions = new ConcurrentDictionaryActionQueue <int>(3);
    for (int k = 0; k < 1000; k++)
    {
        System.Diagnostics.Stopwatch sw = new Stopwatch();
        ConcurrentDictionary <int, ValueObject> userValues = new ConcurrentDictionary <int, ValueObject>();
        int total = 1000;
        int[] userids = new[] { 1, 2, 3, 4, 5, 6, 7 };
        int[] values = new int[userids.Length];
        int done = 0;
        // System.Random is not thread-safe; unsynchronized parallel calls can
        // corrupt its state (it may then return 0 forever). Guard it with a lock.
        Random random = new Random();
        object randomGate = new object();
        sw.Start();
        Parallel.For(0, total, (index) =>
        {
            int userid;
            lock (randomGate)
            {
                userid = random.Next(1, 8);
            }
            var userValue = userValues.GetOrAdd(userid, (u) => new ValueObject());
            Interlocked.Increment(ref userValue.Value);
            // The queue must serialize actions per user id, which is what makes
            // the plain (non-interlocked) values[] increment safe.
            userActions.Add(userid, () =>
            {
                values[userid - 1]++;
                Interlocked.Increment(ref done);
            });
        });
        userActions.WaitAll();
        var sum1 = values.Sum();
        var sum2 = userValues.Sum(m => m.Value.Value);
        if (sum1 != total) { throw new Exception("total error"); }
        for (int i = 0; i < values.Length; i++)
        {
            var value1 = values[i];
            // A user id may never be drawn by the RNG; treat a missing entry as
            // 0 instead of letting the indexer throw KeyNotFoundException.
            ValueObject vo;
            var value2 = userValues.TryGetValue(userids[i], out vo) ? vo.Value : 0;
            if (value1 != value2) { throw new Exception("value error"); }
        }
        sw.Stop();
        System.Diagnostics.Debug.WriteLine("第{0}欑:{1}ms", k + 1, sw.ElapsedMilliseconds);
    }
}
/// <summary>
/// (Method name is mojibake-garbled Japanese — presumably "works even when the
/// sync context passed to ObserveOn is null"; TODO confirm original intent.)
/// Subscribes a handler through ObserveOn(null), pumps NumExecuteLoop message
/// pairs through the channel from an executor, and asserts that both the
/// subscriber and the executor ran on more than one thread and tallied exactly
/// the expected number of times.
/// </summary>
public void ζγ³γ³γγγΉγγnullγ§γεγ()
{
    const int NumExecuteLoop = 30;
    var c = new Channel <Message>();
    // ObserveOn(null): no synchronization context, so continuations may resume
    // on arbitrary thread-pool threads.
    ISender <Holder <Message> > s = c.ObserveOn(null);

    // Tally: thread id -> number of times the subscriber body ran on it.
    var subscriberThreadIds = new ConcurrentDictionary <int, int>();
    s.Subscribe(async(m, ct) =>
    {
        // Three await points, each followed by a tally — 3 increments per message.
        await Task.Yield();
        subscriberThreadIds.AddOrUpdate(Thread.CurrentThread.ManagedThreadId, 1, (k, v) => v + 1);
        await Task.Run(() => { });
        subscriberThreadIds.AddOrUpdate(Thread.CurrentThread.ManagedThreadId, 1, (k, v) => v + 1);
        await Task.Delay(1);
        subscriberThreadIds.AddOrUpdate(Thread.CurrentThread.ManagedThreadId, 1, (k, v) => v + 1);
    });

    // Tally: thread id -> number of times the executor loop observed it.
    var executorThreadIds = new ConcurrentDictionary <int, int>();
    Func <CancellableReceiver <Message>, Task> executor = async r =>
    {
        // Hop off the starting thread before the loop begins.
        await Task.Run(() => { }).ConfigureAwait(false);
        for (int i = 0; i < NumExecuteLoop; i++)
        {
            await Task.Delay(1);
            executorThreadIds.AddOrUpdate(Thread.CurrentThread.ManagedThreadId, 1, (k, v) => v + 1);
            // Two messages per iteration: a plain send and a request/response.
            await r.SendAsync(new MessageA("abc"));
            executorThreadIds.AddOrUpdate(Thread.CurrentThread.ManagedThreadId, 1, (k, v) => v + 1);
            var res = (await r.SendAsync(new MessageB(0, i))).GetResponse();
            executorThreadIds.AddOrUpdate(Thread.CurrentThread.ManagedThreadId, 1, (k, v) => v + 1);
        }
    };
    c.Execute(executor, CancellationToken.None);
    // Block until the channel drains — acceptable inside a test method.
    c.Completed.Wait();

    // Without a sync context, work should have spread across multiple threads.
    Assert.IsTrue(subscriberThreadIds.Keys.Count() > 1);
    Assert.IsTrue(executorThreadIds.Keys.Count() > 1);
    // Subscriber tallies 3 times per message, 2 messages per loop iteration.
    Assert.AreEqual(2 * 3 * NumExecuteLoop, subscriberThreadIds.Sum(x => x.Value));
    // Executor tallies 3 times per loop iteration.
    Assert.AreEqual(3 * NumExecuteLoop, executorThreadIds.Sum(x => x.Value));
}
/// <summary>
/// Assembles a view model describing this node's current state: identity,
/// peer/block/transaction/address counts and the total coin amount.
/// </summary>
/// <returns>A freshly populated <c>NodeInfoVM</c>.</returns>
public NodeInfoVM GetInfo()
{
    //TODO: this number can be cached, changes only when the blockchain is modified
    var totalCoins = _addresses.Sum(a => a.Value.Amount);

    return new NodeInfoVM
    {
        About = _nodeSettings.About,
        NodeName = _nodeSettings.Name,
        Peers = _peersByAddress.Count,
        Blocks = _blockchain.Count,
        ConfirmedTransactions = _confirmedTransactionsById.Count,
        PendingTransactions = _pendingTransactionsById.Count,
        Addresses = _addresses.Count,
        Coins = totalCoins
    };
}
/// <summary>
/// Produces a human-readable report: how many times OnModelCreating ran, the
/// tenant count, the total call count, then one line per tenant key.
/// </summary>
/// <returns>The multi-line report text.</returns>
public string getReport()
{
    var report = new StringBuilder();
    // NOTE(review): "runed" is a typo in the emitted text; kept so the output
    // stays byte-identical.
    var onModelCreatingCount = demoContext.CountDown;
    report.AppendLine($"onModelCreating runed times: {onModelCreatingCount}\n");
    report.AppendLine($"tenant count: {dic.Count}");
    report.AppendLine($"calling count: {dic.Sum(r => r.Value)}");
    foreach (var entry in dic)
    {
        report.AppendLine($"{entry.Key}");
    }
    return report.ToString();
}
/// <summary>
/// Unsubscribe all subscriptions by closing every tracked socket.
/// </summary>
/// <returns>A task that completes when every socket has closed.</returns>
public virtual async Task UnsubscribeAll()
{
    log.Write(LogVerbosity.Debug, $"Closing all {sockets.Sum(s => s.Value.handlers.Count(h => h.UserSubscription))} subscriptions");

    // Close() already returns a Task, so await them all directly. The original
    // wrapped a synchronous Task.WaitAll inside Task.Run, which blocked a
    // thread-pool thread for the whole shutdown for no benefit.
    var closeTasks = sockets.Values.Select(socket => socket.Close()).ToList();
    await Task.WhenAll(closeTasks).ConfigureAwait(false);
}
/// <summary>
/// Performs one generation step of the genetic algorithm: keeps the elite
/// chromosomes, breeds the remainder via roulette-wheel selection, crossover
/// and mutation, and returns the fittest member of the new population.
/// </summary>
/// <returns>The (chromosome, fitness) pair with the highest fitness.</returns>
public KeyValuePair <IEnumerable <int>, double> Reproduction()
{
    // Normalize, accumulate and order fitness by descending
    // Each roulette entry carries its cumulative normalized fitness, so a
    // uniform draw in [0,1) maps to the first entry whose cumulative value
    // reaches the draw (classic roulette-wheel selection).
    // NOTE(review): "AccumulatudFitness" is a typo ("Accumulated"); left as-is
    // since renaming would be a code change.
    var fitnessSum = population.Sum(pair => pair.Value);
    var accumulator = 0.0;
    var roulette = (from pair in population orderby pair.Value descending select new { Chromosome = pair.Key, Fitness = pair.Value, AccumulatudFitness = (accumulator += pair.Value / fitnessSum) }).ToArray();

    // Count of children pairs to be produced
    var childrenPairs = (int)(populationSize * (1 - elitismRate) / 2);

    // Take count of elite chromosomes
    population = new ConcurrentDictionary <IEnumerable <int>, double>(roulette.Take(populationSize - childrenPairs * 2).ToDictionary(pair => pair.Chromosome, pair => pair.Fitness));

    // Produce children and fill population with them
    // NOTE(review): System.Random is not thread-safe; calling random.NextDouble()
    // from Parallel.For can corrupt its internal state — consider a per-thread
    // Random instance. Confirm before relying on result quality.
    Parallel.For(0, childrenPairs, _ =>
    {
        foreach (var child in from chromosome in Crossover(roulette.First(pair => pair.AccumulatudFitness >= random.NextDouble()).Chromosome, roulette.First(pair => pair.AccumulatudFitness >= random.NextDouble()).Chromosome) select Mutation(chromosome))
        {
            population[child] = Fitness(child);
        }
    });

    // Return fittest cluster
    return(population.Aggregate((max, next) => next.Value > max.Value ? next : max));
}
/// <summary>
/// Evicts the oldest cache entries (by their Item1 timestamp) until the total
/// cached source length fits within GlobalOptions.CompilerCacheFileMaxSizeBytes.
/// </summary>
private void LimitSize()
{
    var size = compilerCache.Sum(tuple => tuple.Value.Item2.Length);
    while (size > GlobalOptions.Instance.CompilerCacheFileMaxSizeBytes)
    {
        // Find the entry with the oldest timestamp. Start from MaxValue so an
        // entry stamped "in the future" can still be selected.
        string oldestKey = null;
        var oldestTime = DateTime.MaxValue;
        foreach (var tuple in compilerCache)
        {
            if (tuple.Value.Item1 < oldestTime)
            {
                oldestTime = tuple.Value.Item1;
                oldestKey = tuple.Key;
            }
        }

        // Nothing left to evict — bail out instead of looping forever (the
        // original indexed compilerCache[""] here and threw on an empty cache).
        if (oldestKey == null)
        {
            break;
        }

        // Remove first, then subtract the size of what was actually removed.
        // The original read the indexer before TryRemove and could throw if a
        // concurrent writer removed the entry in between.
        Tuple <DateTime, string> deleted;
        if (compilerCache.TryRemove(oldestKey, out deleted))
        {
            size -= deleted.Item2.Length;
        }
    }
}
/// <summary>
/// Reads every configured document type from the source collection in parallel
/// (bounded by MaxDegreeOfParallelism) and groups the results by type.
/// </summary>
/// <param name="cancellation">Cancels both the queries and the dataflow block.</param>
/// <returns>A map from document type to the documents read for that type.</returns>
private async Task <ConcurrentDictionary <string, List <Document> > > ReadDocuments(CancellationToken cancellation = default)
{
    using (this.StartOperation(_telemetry))
    {
        _logger.LogInformation($"Started reading documents from {_source.Db}/{_source.Collection}");

        var resultsByType = new ConcurrentDictionary <string, List <Document> >();

        // Fetches all documents of one type and records them in the result map.
        async Task FetchType(string docType)
        {
            var query = new SqlQuerySpec(
                "select * from c where c.documentType = @documentType",
                new SqlParameterCollection(new[] { new SqlParameter("@documentType", docType) }));

            var documents = await _sourceClient.Query <Document>(query, cancel: cancellation);
            var docs = documents.ToList();
            resultsByType.AddOrUpdate(docType, docs, (k, v) => docs);

            _logger.LogInformation($"Read {docs.Count} documents for type {docType}.");
            _docCount.TrackValue(docs.Count, $"read_{docType}");
        }

        var blockOptions = new ExecutionDataflowBlockOptions()
        {
            MaxDegreeOfParallelism = _syncSettings.MaxDegreeOfParallelism,
            CancellationToken = cancellation
        };
        var fetchBlock = new ActionBlock <string>(FetchType, blockOptions);

        foreach (var docType in _syncSettings.DocumentTypes)
        {
            fetchBlock.Post(docType);
        }

        fetchBlock.Complete();
        await fetchBlock.Completion;

        _logger.LogInformation($"Total of {resultsByType.Sum(kvp => kvp.Value.Count)} documents found.");
        return resultsByType;
    }
}
/// <summary>
/// Resolves xrefs in every input document in parallel, then fails the pipeline
/// if any document contained an xref that could not be resolved.
/// </summary>
/// <param name="context">Execution context providing the inputs and logging.</param>
/// <returns>The documents with their xrefs resolved.</returns>
protected override async Task <IEnumerable <Common.IDocument> > ExecuteContextAsync(IExecutionContext context)
{
    // Key = source, Value = tag HTML
    var xrefFailures = new ConcurrentDictionary <string, ConcurrentBag <string> >();

    // Resolve the xrefs in parallel
    IEnumerable <Common.IDocument> results = await context.Inputs
        .ParallelSelectAsync(async doc => await ResolveDocumentXrefsAsync(doc, context, xrefFailures));

    // Early exit on the happy path.
    if (xrefFailures.Count == 0)
    {
        return results;
    }

    // Log every failure grouped by source document, then abort the execution.
    int failureCount = xrefFailures.Sum(x => x.Value.Count);
    string failureMessage = string.Join(
        Environment.NewLine,
        xrefFailures.Select(x => $"{x.Key}{Environment.NewLine} - {string.Join(Environment.NewLine + " - ", x.Value)}"));
    context.LogError($"{failureCount} xref resolution failures:{Environment.NewLine}{failureMessage}");
    throw new ExecutionException("Encountered some invalid xrefs");
}
/// <summary>
/// Stress test: three WorkItemJob instances drain the same queue of 1000 work
/// items; verifies every item either completed (progress == 100) or errored,
/// that all three job instances did work, and that the per-job counts sum to
/// the total.
/// </summary>
public async Task CanHandleMultipleWorkItemInstances()
{
    const int workItemCount = 1000;
    using (var metrics = new InMemoryMetricsClient(new InMemoryMetricsClientOptions { LoggerFactory = Log }))
    {
        // No retries: a thrown handler exception counts as a terminal error.
        var options = new InMemoryQueueOptions <WorkItemData> { Retries = 0, RetryDelay = TimeSpan.Zero, LoggerFactory = Log };
        using (var queue = new InMemoryQueue <WorkItemData>(options))
        {
            queue.AttachBehavior(new MetricsQueueBehavior <WorkItemData>(metrics, loggerFactory: Log));
            using (var messageBus = new InMemoryMessageBus(new InMemoryMessageBusOptions { LoggerFactory = Log }))
            {
                var handlerRegistry = new WorkItemHandlers();
                // Three competing consumers over the same queue.
                var j1 = new WorkItemJob(queue, messageBus, handlerRegistry, Log);
                var j2 = new WorkItemJob(queue, messageBus, handlerRegistry, Log);
                var j3 = new WorkItemJob(queue, messageBus, handlerRegistry, Log);
                int errors = 0;
                // jobId -> number of work items processed by that job instance.
                var jobIds = new ConcurrentDictionary <string, int>();
                handlerRegistry.Register <MyWorkItem>(async ctx =>
                {
                    var jobData = ctx.GetData <MyWorkItem>();
                    Assert.Equal("Test", jobData.SomeData);
                    int jobWorkTotal = jobIds.AddOrUpdate(ctx.JobId, 1, (key, value) => value + 1);
                    if (jobData.Index % 100 == 0)
                    {
                        _logger.Trace("Job {jobId} processing work item #: {jobWorkTotal}", ctx.JobId, jobWorkTotal);
                    }
                    // Intermediate progress 0..90 only; 100 is published by the
                    // job itself once the handler completes.
                    for (int i = 0; i < 10; i++)
                    {
                        await ctx.ReportProgressAsync(10 * i);
                    }
                    // ~1% of items deliberately fail to exercise the error path.
                    if (RandomData.GetBool(1))
                    {
                        Interlocked.Increment(ref errors);
                        throw new Exception("Boom!");
                    }
                });
                for (int i = 0; i < workItemCount; i++)
                {
                    await queue.EnqueueAsync(new MyWorkItem { SomeData = "Test", Index = i }, true);
                }
                var completedItems = new List <string>();
                object completedItemsLock = new object();
                await messageBus.SubscribeAsync <WorkItemStatus>(status =>
                {
                    if (status.Progress == 100)
                    {
                        _logger.Trace("Progress: {progress}", status.Progress);
                    }
                    // Only progress == 100 marks a completed item.
                    if (status.Progress < 100)
                    {
                        return;
                    }
                    // List<T> is not thread-safe; serialize writers.
                    lock (completedItemsLock) completedItems.Add(status.WorkItemId);
                });
                // Hard deadline so a stuck queue cannot hang the test run; the
                // first job to drain the queue cancels the other two.
                var cancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(10));
                var tasks = new List <Task> {
                    Task.Run(async() => { await j1.RunUntilEmptyAsync(cancellationTokenSource.Token); cancellationTokenSource.Cancel(); }, cancellationTokenSource.Token),
                    Task.Run(async() => { await j2.RunUntilEmptyAsync(cancellationTokenSource.Token); cancellationTokenSource.Cancel(); }, cancellationTokenSource.Token),
                    Task.Run(async() => { await j3.RunUntilEmptyAsync(cancellationTokenSource.Token); cancellationTokenSource.Cancel(); }, cancellationTokenSource.Token)
                };
                try
                {
                    await Task.WhenAll(tasks);
                }
                catch (OperationCanceledException ex)
                {
                    _logger.Error(ex, $"One or more tasks were cancelled: {ex.Message}");
                }
                // Give in-flight status messages a moment to be delivered.
                await SystemClock.SleepAsync(100);
                _logger.Info("Completed: {completedItems} Errors: {errors}", completedItems.Count, errors);
                Assert.Equal(workItemCount, completedItems.Count + errors);
                Assert.Equal(3, jobIds.Count);
                Assert.Equal(workItemCount, jobIds.Sum(kvp => kvp.Value));
            }
        }
    }
}
/// <summary>
/// Opens the modification-import dialog for the given mod directory. On a
/// background thread, resolves every file in the mod against the Air and Game
/// client release manifests, then guesses the mod's category from where the
/// resolved files live (ward/champion/map/UI/other) and marshals the result
/// back to the UI dispatcher while the dialog is showing.
/// </summary>
/// <param name="modificationPath">Root directory of the modification to import.</param>
public void ShowModificationImportWindowDialog(string modificationPath)
{
    var rootNodeViewModel = modificationImportViewModelFactory.FromDirectory(modificationPath);
    // NOTE(review): hard-coded default RADS install path — breaks for
    // non-default installs; confirm whether this is intentional.
    var solution = riotSolutionLoader.Load(@"C:\Riot Games\League of Legends\RADS", RiotProjectType.AirClient | RiotProjectType.GameClient);
    var airResolver = new Resolver(solution.ProjectsByType[RiotProjectType.AirClient].ReleaseManifest.Root);
    var gameResolver = new Resolver(solution.ProjectsByType[RiotProjectType.GameClient].ReleaseManifest.Root);
    var fileNodes = rootNodeViewModel.EnumerateFileNodes().ToArray();
    var importWindow = new ModificationImportWindow();
    var modificationImportViewModel = new ModificationImportViewModel(this, importWindow, rootNodeViewModel);
    modificationImportViewModel.ModificationFriendlyName = fileSystemProxy.GetDirectoryInfo(modificationPath).Name;
    importWindow.DataContext = modificationImportViewModel;
    // Resolution can be slow, so run it off the UI thread while the dialog shows.
    new Thread(() =>
    {
        foreach (var fileNode in fileNodes)
        {
            var path = fileNode.Path;
            // Try the Air client manifest first, then fall back to the Game client.
            var airResolution = airResolver.Resolve(path);
            if (airResolution.Any())
            {
                fileNode.ResolutionPath = airResolution.First().GetPath();
                fileNode.ResolutionState = ResolutionState.ResolutionSuccessful;
            }
            else
            {
                var gameResolutions = gameResolver.Resolve(path);
                if (gameResolutions.Any())
                {
                    fileNode.ResolutionPath = gameResolutions.First().GetPath();
                    fileNode.ResolutionState = ResolutionState.ResolutionSuccessful;
                }
                else
                {
                    fileNode.ResolutionState = ResolutionState.ResolutionFailed;
                }
            }
        }
        LeagueModificationCategory modificationType = LeagueModificationCategory.Other;
        if (fileNodes.Any(node => node.ResolutionState == ResolutionState.ResolutionSuccessful))
        {
            // One "vote" per resolved file for the category its path suggests.
            var modificationTypeCounts = new ConcurrentDictionary <LeagueModificationCategory, int>();
            foreach (var file in fileNodes)
            {
                if (file.ResolutionState == ResolutionState.ResolutionSuccessful)
                {
                    if (file.ResolutionPath.IndexOf("DATA/Characters", StringComparison.OrdinalIgnoreCase) != -1 || file.ResolutionPath.IndexOf("assets/images/champions", StringComparison.OrdinalIgnoreCase) != -1)
                    {
                        // Character assets: ward skins live alongside champion
                        // assets, so check for them before counting a champion.
                        if (file.ResolutionPath.IndexOf("ward", StringComparison.OrdinalIgnoreCase) != -1)
                        {
                            modificationTypeCounts.AddOrUpdate(LeagueModificationCategory.Ward, 1, (existing, count) => count + 1);
                        }
                        else
                        {
                            modificationTypeCounts.AddOrUpdate(LeagueModificationCategory.Champion, 1, (existing, count) => count + 1);
                        }
                    }
                    else if (file.ResolutionPath.IndexOf("LEVELS") != -1)
                    {
                        modificationTypeCounts.AddOrUpdate(LeagueModificationCategory.Map, 1, (existing, count) => count + 1);
                    }
                    else if (file.ResolutionPath.IndexOf("Menu", StringComparison.OrdinalIgnoreCase) != -1)
                    {
                        modificationTypeCounts.AddOrUpdate(LeagueModificationCategory.UserInterface, 1, (existing, count) => count + 1);
                    }
                    else
                    {
                        modificationTypeCounts.AddOrUpdate(LeagueModificationCategory.Other, 1, (existing, count) => count + 1);
                    }
                }
            }
            // Only commit to a category when it holds at least a 2/3
            // supermajority of the votes; otherwise stay at "Other".
            var categorizationCounts = modificationTypeCounts.Sum(x => x.Value);
            var highestCategorization = modificationTypeCounts.MaxBy(key => key.Value, Comparer <int> .Default);
            if (highestCategorization.Value >= categorizationCounts * 2.0 / 3.0)
            {
                modificationType = modificationTypeCounts.MaxBy(key => key.Value, Comparer <int> .Default).Key;
            }
            Console.WriteLine("Highest categorization: " + highestCategorization.Key.Name);
            modificationTypeCounts.ForEach(x => Console.WriteLine(x.Key.Name + ": " + x.Value));
            // Marshal the categorization back onto the UI thread.
            Application.Current.Dispatcher.BeginInvoke(DispatcherPriority.Send, new Action(() =>
            {
                modificationImportViewModel.ModificationCategorization = modificationType;
            }));
        }
    }).Start();
    importWindow.ShowDialog();
}
/// <summary>
/// Returns the number of entries in <c>ConcurrencyDictionary</c> whose value is
/// currently <c>true</c>.
/// </summary>
/// <returns>The count of true-valued entries.</returns>
public static int ConcurrentCount()
{
    // Count with a predicate states the intent directly; the original summed
    // 1/0 flags to the same effect.
    return ConcurrencyDictionary.Count(kv => kv.Value);
}
/// <summary>
/// Legacy (synchronous-handler) variant: three WorkItemJob instances drain a
/// queue of 100 work items; asserts every item completed or errored, all three
/// jobs participated, and the per-job totals sum to 100.
/// </summary>
public async Task CanHandleMultipleWorkItemInstances()
{
    var metrics = new InMemoryMetricsClient();
    // No retries: a thrown handler exception counts as a terminal error.
    var queue = new InMemoryQueue<WorkItemData>(retryDelay: TimeSpan.Zero, retries: 0);
    queue.AttachBehavior(new MetricsQueueBehavior<WorkItemData>(metrics));
    var messageBus = new InMemoryMessageBus();
    var handlerRegistry = new WorkItemHandlers();
    // Three competing consumers over the same queue.
    var j1 = new WorkItemJob(queue, messageBus, handlerRegistry);
    var j2 = new WorkItemJob(queue, messageBus, handlerRegistry);
    var j3 = new WorkItemJob(queue, messageBus, handlerRegistry);
    int errors = 0;
    // jobId -> number of items handled by that job instance.
    var jobIds = new ConcurrentDictionary<string, int>();
    handlerRegistry.Register<MyWorkItem>(ctx => {
        var jobData = ctx.GetData<MyWorkItem>();
        Assert.Equal("Test", jobData.SomeData);
        jobIds.AddOrUpdate(ctx.JobId, 1, (key, value) => value + 1);
        // Intermediate progress 0..90 only; 100 is published on completion.
        for (int i = 0; i < 10; i++)
            ctx.ReportProgress(10 * i);
        // ~1% of items deliberately fail to exercise the error path.
        if (RandomData.GetBool(1)) {
            Interlocked.Increment(ref errors);
            throw new ApplicationException("Boom!");
        }
        return TaskHelper.Completed();
    });
    for (int i = 0; i < 100; i++)
        queue.Enqueue(new MyWorkItem { SomeData = "Test", Index = i }, true);
    var completedItems = new List<string>();
    object completedItemsLock = new object();
    messageBus.Subscribe<WorkItemStatus>(status => {
        // Only progress == 100 marks a completed item.
        if (status.Progress < 100)
            return;
        // List<T> is not thread-safe; serialize writers.
        lock (completedItemsLock)
            completedItems.Add(status.WorkItemId);
    });
    // Hard deadline so a stuck queue cannot hang the test run.
    var cancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(10));
    var token = cancellationTokenSource.Token;
    var tasks = new List<Task>();
    tasks.AddRange(new[] {
        Task.Run(async () => await j1.RunUntilEmptyAsync(token), token),
        Task.Run(async () => await j2.RunUntilEmptyAsync(token), token),
        Task.Run(async () => await j3.RunUntilEmptyAsync(token), token),
    });
    await Task.WhenAll(tasks);
    // NOTE(review): fixed sleep to let in-flight status messages land —
    // inherently timing-sensitive and a potential source of flakiness.
    Thread.Sleep(10);
    Assert.Equal(100, completedItems.Count + errors);
    Assert.Equal(3, jobIds.Count);
    Assert.Equal(100, jobIds.Sum(kvp => kvp.Value));
}
/// <summary>
/// Returns the number of entries in <c>ParallelismDictionary</c> whose value is
/// currently <c>true</c>.
/// </summary>
/// <returns>The count of true-valued entries.</returns>
public static int ParallelismCount()
{
    // Count with a predicate states the intent directly; the original summed
    // 1/0 flags to the same effect.
    return ParallelismDictionary.Count(kv => kv.Value);
}
/// <summary>
/// Spins up <paramref name="threadCount"/> competing receivers on a single
/// input queue, sends <paramref name="messageCount"/> messages, and prints how
/// the messages were distributed among the receivers. Receivers stop once no
/// message has arrived for roughly 3 seconds.
/// </summary>
public void CanDoCompetingConsumers(int messageCount, int threadCount)
{
    // Purge leftovers from earlier runs; the queue handle is disposed immediately.
    using (CreateQueue(InputQueueName).Purge()) { }
    // NOTE(review): keepRunning is read by the worker threads without
    // volatile/lock — visibility relies on runtime behavior; consider a
    // CancellationToken instead.
    var keepRunning = true;
    var lastMessageReceivedTime = DateTime.UtcNow;
    // receiver number -> how many messages it consumed.
    var receivedMessagesDistribution = new ConcurrentDictionary <int, int>();
    var receivers = Enumerable.Range(0, threadCount)
        .Select(i =>
        {
            var queue = TrackDisposable(CreateQueue(InputQueueName));
            var number = i + 1;
            return(new Thread(() =>
            {
                Console.WriteLine("Receiver {0} started", number);
                while (keepRunning)
                {
                    // Each receive runs inside its own ambient transaction.
                    using (var tx = new TransactionScope())
                    {
                        var receivedMessage = queue.ReceiveMessage(new AmbientTransactionContext());
                        if (receivedMessage != null)
                        {
                            receivedMessagesDistribution.AddOrUpdate(number, (key) => 1, (key, value) => value + 1);
                            Console.Write(".");
                            lastMessageReceivedTime = DateTime.UtcNow;
                        }
                        tx.Complete();
                    }
                }
                Console.WriteLine("Receiver {0} stopped", number);
            }));
        })
        .ToList();
    var sender = CreateQueue("test_competing_sender");
    Console.WriteLine("Sending {0} messages", messageCount);
    messageCount.Times(() => sender.Send(InputQueueName, new TransportMessageToSend { Headers = new Dictionary <string, object>(), Body = Encoding.UTF8.GetBytes("w00000t!") }, new NoTransaction()));
    Console.WriteLine("Starting {0} receivers", receivers.Count);
    receivers.ForEach(r => r.Start());
    lastMessageReceivedTime = DateTime.UtcNow;
    // Idle detection: keep waiting while messages are still flowing.
    while (lastMessageReceivedTime.ElapsedUntilNow() < 3.Seconds())
    {
        Console.WriteLine("Waiting...");
        Thread.Sleep(2.Seconds());
    }
    Console.WriteLine("Stopping receivers...");
    keepRunning = false;
    receivers.ForEach(r => r.Join());
    Console.WriteLine("Got {0} messages distributed among workers like this:", receivedMessagesDistribution.Sum(d => d.Value));
    // One line per receiver: a bar of '=' per message consumed.
    Console.WriteLine(string.Join(Environment.NewLine, receivedMessagesDistribution.Select(kvp => string.Format("{0:000}: {1}", kvp.Key, new string('=', kvp.Value)))));
}
/// <summary>
/// End-to-end word-count over Kafka: a tokenizer consumer splits incoming text
/// messages into single words and republishes them; a word-counter consumer
/// tallies each word in memory; finally the tallies are printed.
/// </summary>
public async Task WordCountExample()
{
    var topicForText = GetNewTopic();
    var topicForWords = GetNewTopic();

    using var textProducer = Configure.Producer(c => c.UseKafka(KafkaTestConfig.Address))
        .Logging(l => l.UseConsole(minimumLogLevel: LogLevel.Info))
        .Serialization(s => s.UseNewtonsoftJson())
        .Create();

    // Stage 1: split each text message into words and forward them.
    using var tokenizerConsumer = Configure.Consumer("tokenizer", c => c.UseKafka(KafkaTestConfig.Address))
        .Logging(l => l.UseConsole(minimumLogLevel: LogLevel.Info))
        .Topics(t => t.Subscribe(topicForText))
        .Positions(p => p.StoreInMemory())
        .Serialization(s => s.UseNewtonsoftJson())
        // Make the producer reachable from the handler via the context.
        .Options(o => o.AddContextInitializer(c => c.SetItem(textProducer)))
        .Handle(async(messages, context, _) =>
        {
            var producer = context.GetItem <IToposProducer>();
            var words = messages.Select(m => m.Body).OfType <MessageWithText>()
                .SelectMany(m => m.Text.Split(' '));
            var toposMessages = words
                .Select(word => new ToposMessage(new MessageWithSingleWord(word)));
            await producer.SendMany(topicForWords, toposMessages);
        })
        .Create();

    // Stage 2: tally every single-word message.
    var wordCounts = new ConcurrentDictionary <string, int>();
    using var wordCounterConsumer = Configure.Consumer("word-counter", c => c.UseKafka(KafkaTestConfig.Address))
        .Logging(l => l.UseConsole(minimumLogLevel: LogLevel.Info))
        .Topics(t => t.Subscribe(topicForWords))
        .Positions(p => p.StoreInMemory())
        .Serialization(s => s.UseNewtonsoftJson())
        .Handle(async(messages, _, _) =>
        {
            var words = messages.Select(m => m.Body)
                .OfType <MessageWithSingleWord>().Select(m => m.Word);
            foreach (var word in words)
            {
                wordCounts.AddOrUpdate(word, _ => 1, (_, value) => value + 1);
            }
        })
        .Create();

    const string textFromGitHub = @"Nuget packages corresponding to all commits to release branches are available from the following nuget package source (Note: this is not a web URL - you should specify it in the nuget package manger): https://ci.appveyor.com/nuget/confluent-kafka-dotnet. The version suffix of these nuget packages matches the appveyor build number. 
You can see which commit a particular build number corresponds to by looking at the AppVeyor build history";

    tokenizerConsumer.Start();
    wordCounterConsumer.Start();

    // Push 1000 copies of the paragraph through the pipeline.
    await textProducer.SendMany(topicForText, Enumerable.Range(0, 1000).Select(_ => new ToposMessage(new MessageWithText(textFromGitHub))));

    // NOTE(review): fixed 10 s wait for the pipeline to drain — timing-based
    // and may flake on slow machines.
    await Task.Delay(TimeSpan.FromSeconds(10));

    Console.WriteLine($@"Got these word counts: {string.Join(Environment.NewLine, wordCounts.OrderByDescending(kvp => kvp.Value).Select(kvp => $" {kvp.Value}: '{kvp.Key}'"))} SUM: {wordCounts.Sum(kvp => kvp.Value)}");
}
/// <summary>
/// Variant of the competing-consumers test: starts
/// <paramref name="threadCount"/> receivers on one input queue, sends
/// <paramref name="messageCount"/> messages, and prints the resulting
/// distribution. Receivers stop once no message has arrived for ~3 seconds.
/// </summary>
public void CanDoCompetingConsumers(int messageCount, int threadCount)
{
    // Purge leftovers from earlier runs; the queue handle is disposed immediately.
    using (CreateQueue(InputQueueName).Purge()) { }
    // NOTE(review): keepRunning is read by worker threads without
    // volatile/lock — consider a CancellationToken.
    var keepRunning = true;
    var lastMessageReceivedTime = DateTime.UtcNow;
    // receiver number -> how many messages it consumed.
    var receivedMessagesDistribution = new ConcurrentDictionary<int, int>();
    var receivers = Enumerable.Range(0, threadCount)
        .Select(i =>
        {
            var queue = TrackDisposable(CreateQueue(InputQueueName));
            var number = i + 1;
            return new Thread(() =>
            {
                Console.WriteLine("Receiver {0} started", number);
                while (keepRunning)
                {
                    // Each receive runs inside its own ambient transaction.
                    using (var tx = new TransactionScope())
                    {
                        var receivedMessage = queue.ReceiveMessage(new AmbientTransactionContext());
                        if (receivedMessage != null)
                        {
                            receivedMessagesDistribution.AddOrUpdate(number, (key) => 1, (key, value) => value + 1);
                            Console.Write(".");
                            lastMessageReceivedTime = DateTime.UtcNow;
                        }
                        tx.Complete();
                    }
                }
                Console.WriteLine("Receiver {0} stopped", number);
            });
        })
        .ToList();
    var sender = CreateQueue("test_competing_sender");
    Console.WriteLine("Sending {0} messages", messageCount);
    messageCount.Times(() => sender.Send(InputQueueName, new TransportMessageToSend { Headers = new Dictionary<string, object>(), Body = Encoding.UTF8.GetBytes("w00000t!") }, new NoTransaction()));
    Console.WriteLine("Starting {0} receivers", receivers.Count);
    receivers.ForEach(r => r.Start());
    lastMessageReceivedTime = DateTime.UtcNow;
    // Idle detection: keep waiting while messages are still flowing.
    while (lastMessageReceivedTime.ElapsedUntilNow() < 3.Seconds())
    {
        Console.WriteLine("Waiting...");
        Thread.Sleep(2.Seconds());
    }
    Console.WriteLine("Stopping receivers...");
    keepRunning = false;
    receivers.ForEach(r => r.Join());
    Console.WriteLine("Got {0} messages distributed among workers like this:", receivedMessagesDistribution.Sum(d => d.Value));
    // One line per receiver: a bar of '=' per message consumed.
    Console.WriteLine(string.Join(Environment.NewLine, receivedMessagesDistribution.Select(kvp => string.Format("{0:000}: {1}", kvp.Key, new string('=', kvp.Value)))));
}
/// <inheritdoc />
/// <summary>
/// Gathers every link from the input documents and validates them (relative
/// and/or absolute, per configuration), reporting all failures at the end.
/// </summary>
public IEnumerable <IDocument> Execute(IReadOnlyList <IDocument> inputs, IExecutionContext context)
{
#pragma warning disable RCS1163 // Unused parameter.
    // Handle invalid HTTPS certificates and allow alternate security protocols (see http://stackoverflow.com/a/5670954/807064)
    // NOTE(review): this disables certificate validation process-wide — plausible
    // for a link checker, but confirm it is intended.
    ServicePointManager.ServerCertificateValidationCallback = (s, cert, chain, ssl) => true;
#pragma warning restore RCS1163 // Unused parameter.

    // Key = link, Value = source, tag HTML
    ConcurrentDictionary <string, ConcurrentBag <(string documentSource, string outerHtml)> > links =
        new ConcurrentDictionary <string, ConcurrentBag <(string documentSource, string outerHtml)> >();

    // Key = source, Value = tag HTML
    ConcurrentDictionary <string, ConcurrentBag <string> > failures = new ConcurrentDictionary <string, ConcurrentBag <string> >();

    // Gather all links
    HtmlParser parser = new HtmlParser();
    context.ParallelForEach(inputs, input => GatherLinks(input, parser, links));

    // NOTE(review): a throttling policy was mentioned here in the original but
    // never implemented — every link validation runs concurrently.
    Task[] tasks = links.Select(
        async link =>
        {
            // Attempt to parse the URI. An unparseable link is a failure and
            // there is nothing further to validate.
            // BUG FIX: the original fell through here and dereferenced the null uri.
            if (!Uri.TryCreate(link.Key, UriKind.RelativeOrAbsolute, out Uri uri))
            {
                AddOrUpdateFailure(link.Value, failures);
                return;
            }

            // Adjustment for double-slash link prefix which means use http:// or https:// depending on current protocol
            // The Uri class treats these as relative, but they're really absolute
            // BUG FIX: likewise return on failure instead of continuing with a bad uri.
            if (uri.ToString().StartsWith("//") && !Uri.TryCreate($"http:{link.Key}", UriKind.Absolute, out uri))
            {
                AddOrUpdateFailure(link.Value, failures);
                return;
            }

            // Relative
            if (!uri.IsAbsoluteUri && _validateRelativeLinks && !(await ValidateRelativeLink(uri, context).ConfigureAwait(false)))
            {
                AddOrUpdateFailure(link.Value, failures);
            }

            // Absolute
            if (uri.IsAbsoluteUri && _validateAbsoluteLinks && !(await ValidateAbsoluteLink(uri, context).ConfigureAwait(false)))
            {
                AddOrUpdateFailure(link.Value, failures);
            }
        }).ToArray();
    Task.WaitAll(tasks);

    // Report failures
    if (failures.Count > 0)
    {
        int failureCount = failures.Sum(x => x.Value.Count);
        string failureMessage = string.Join(
            Environment.NewLine,
            failures.Select(x => $"{x.Key}{Environment.NewLine} - {string.Join(Environment.NewLine + " - ", x.Value)}"));
        Trace.TraceEvent(
            _asError ? TraceEventType.Error : TraceEventType.Warning,
            $"{failureCount} link validation failures:{Environment.NewLine}{failureMessage}");
    }

    return inputs;
}
/// <summary>
/// Generates the user types.
/// Pipeline: load modules in parallel → enumerate requested symbols per module
/// → interleave and deduplicate symbols across modules → seed the global cache
/// → partition symbols into template vs. simple types → populate templates,
/// specialized classes and template arguments → post-process (DeclaredInType).
/// Progress and elapsed time are written to <c>logger</c> after each phase.
/// </summary>
private void GenerateUserTypes()
{
    // Verify that included files exist
    if (!string.IsNullOrEmpty(xmlConfig.GeneratedAssemblyName))
    {
        foreach (var file in includedFiles)
        {
            if (!File.Exists(file.Path))
            {
                throw new FileNotFoundException("Included file not found", file.Path);
            }
        }
    }

    // Loading modules
    ConcurrentDictionary <Module, XmlModule> modules = new ConcurrentDictionary <Module, XmlModule>();
    ConcurrentDictionary <XmlModule, Symbol[]> globalTypesPerModule = new ConcurrentDictionary <XmlModule, Symbol[]>();

    logger.Write("Loading modules...");
    Parallel.ForEach(xmlModules, (xmlModule) =>
    {
        Module module = moduleProvider.Open(xmlModule);
        modules.TryAdd(module, xmlModule);
    });
    logger.WriteLine(" {0}", stopwatch.Elapsed);

    // Enumerating symbols: for each module, find the configured type names
    // (falling back to every type when nothing matched) and keep only
    // class/struct/union/enum symbols.
    logger.Write("Enumerating symbols...");
    Parallel.ForEach(modules, (mm) =>
    {
        XmlModule xmlModule = mm.Value;
        Module module = mm.Key;
        string moduleName = xmlModule.Name;
        string nameSpace = xmlModule.Namespace;
        HashSet <Symbol> symbols = new HashSet <Symbol>();

        foreach (var type in typeNames)
        {
            Symbol[] foundSymbols = module.FindGlobalTypeWildcard(type.NameWildcard);

            if (foundSymbols.Length == 0)
            {
                errorLogger.WriteLine("Symbol not found: {0}", type.Name);
            }
            else
            {
                foreach (Symbol symbol in foundSymbols)
                {
                    symbols.Add(symbol);
                }
            }

            if (type.ExportDependentTypes)
            {
                foreach (Symbol symbol in foundSymbols)
                {
                    symbol.ExtractDependentSymbols(symbols, xmlConfig.Transformations);
                }
            }
        }

        // No explicit matches — export every type in the module instead.
        if (symbols.Count == 0)
        {
            foreach (Symbol symbol in module.GetAllTypes())
            {
                symbols.Add(symbol);
            }
        }

        globalTypesPerModule.TryAdd(
            xmlModule,
            symbols.Where(t => t.Tag == CodeTypeTag.Class || t.Tag == CodeTypeTag.Structure || t.Tag == CodeTypeTag.Union || t.Tag == CodeTypeTag.Enum).ToArray());
    });

    // Interleave the per-module symbol arrays round-robin so deduplication
    // below sees symbols from all modules early.
    List <Symbol> allSymbols = new List <Symbol>();
    Symbol[][] symbolsPerModule = globalTypesPerModule.Select(ss => ss.Value).ToArray();
    int maxSymbols = symbolsPerModule.Max(ss => ss.Length);

    for (int i = 0; i < maxSymbols; i++)
    {
        for (int j = 0; j < symbolsPerModule.Length; j++)
        {
            if (i < symbolsPerModule[j].Length)
            {
                allSymbols.Add(symbolsPerModule[j][i]);
            }
        }
    }

    logger.WriteLine(" {0}", stopwatch.Elapsed);

#if false
    // Initialize symbol fields and base classes
    logger.Write("Initializing symbol values...");
    Parallel.ForEach(Partitioner.Create(allSymbols), (symbol) =>
    {
        var fields = symbol.Fields;
        var baseClasses = symbol.BaseClasses;
    });
    logger.WriteLine(" {0}", sw.Elapsed);
#endif

    logger.Write("Deduplicating symbols...");

    // Group duplicated symbols: same-named symbols count as the same type when
    // their sizes are compatible (size 0 means "unknown" and is superseded by
    // a sized symbol of the same name).
    Dictionary <string, List <Symbol> > symbolsByName = new Dictionary <string, List <Symbol> >();
    Dictionary <Symbol, List <Symbol> > duplicatedSymbols = new Dictionary <Symbol, List <Symbol> >();

    foreach (var symbol in allSymbols)
    {
        List <Symbol> symbols;

        if (!symbolsByName.TryGetValue(symbol.Name, out symbols))
        {
            symbolsByName.Add(symbol.Name, symbols = new List <Symbol>());
        }

        bool found = false;

        foreach (var s in symbols.ToArray())
        {
            // Conflicting non-zero sizes — treat as distinct types.
            if (s.Size != 0 && symbol.Size != 0 && s.Size != symbol.Size)
            {
#if DEBUG
                logger.WriteLine("{0}!{1} ({2}) {3}!{4} ({5})", s.Module.Name, s.Name, s.Size, symbol.Module.Name, symbol.Name, symbol.Size);
#endif
                continue;
            }

            if (s.Size == 0 && symbol.Size != 0)
            {
                // The sized symbol becomes the representative; the unsized one
                // joins its duplicate list.
                List <Symbol> duplicates;

                if (!duplicatedSymbols.TryGetValue(s, out duplicates))
                {
                    duplicatedSymbols.Add(s, duplicates = new List <Symbol>());
                }

                duplicatedSymbols.Remove(s);
                duplicates.Add(s);
                duplicatedSymbols.Add(symbol, duplicates);
                symbols.Remove(s);
                symbols.Add(symbol);
            }
            else
            {
                List <Symbol> duplicates;

                if (!duplicatedSymbols.TryGetValue(s, out duplicates))
                {
                    duplicatedSymbols.Add(s, duplicates = new List <Symbol>());
                }

                duplicates.Add(symbol);
            }

            found = true;
            break;
        }

        if (!found)
        {
            symbols.Add(symbol);
        }
    }

    // Unlink duplicated symbols if two or more are named the same
    foreach (var symbols in symbolsByName.Values)
    {
        if (symbols.Count <= 1)
        {
            continue;
        }

        foreach (var s in symbols.ToArray())
        {
            List <Symbol> duplicates;

            if (!duplicatedSymbols.TryGetValue(s, out duplicates))
            {
                continue;
            }

            symbols.AddRange(duplicates);
            duplicatedSymbols.Remove(s);
        }
    }

    // Extracting deduplicated symbols: a unique name shared across modules is
    // moved to the common-types namespace; everything else keeps its module's
    // namespace.
    Dictionary <string, Symbol[]> deduplicatedSymbols = new Dictionary <string, Symbol[]>();
    Dictionary <Symbol, string> symbolNamespaces = new Dictionary <Symbol, string>();

    foreach (var symbols in symbolsByName.Values)
    {
        if (symbols.Count != 1 || modules.Count == 1)
        {
            foreach (var s in symbols)
            {
                symbolNamespaces.Add(s, modules[s.Module].Namespace);
            }
        }
        else
        {
            Symbol symbol = symbols.First();
            List <Symbol> duplicates;

            if (!duplicatedSymbols.TryGetValue(symbol, out duplicates))
            {
                duplicates = new List <Symbol>();
            }

            duplicates.Insert(0, symbol);
            deduplicatedSymbols.Add(symbol.Name, duplicates.ToArray());

            foreach (var s in duplicates)
            {
                symbolNamespaces.Add(s, xmlConfig.CommonTypesNamespace);
            }
        }
    }

    var globalTypes = symbolsByName.SelectMany(s => s.Value).ToArray();

    logger.WriteLine(" {0}", stopwatch.Elapsed);
    logger.WriteLine(" Total symbols: {0}", globalTypesPerModule.Sum(gt => gt.Value.Length));
    logger.WriteLine(" Unique symbol names: {0}", symbolsByName.Count);
    // NOTE(review): "Dedupedlicated" is a typo in the emitted log text; left
    // unchanged so the output stays byte-identical.
    logger.WriteLine(" Dedupedlicated symbols: {0}", globalTypes.Length);

    // Initialize GlobalCache with deduplicatedSymbols
    GlobalCache.Update(deduplicatedSymbols);

    // Collecting types
    logger.Write("Collecting types...");

    // One "ModuleGlobals" user type per module for its global scope.
    foreach (var module in modules.Keys)
    {
        userTypes.Add(userTypeFactory.AddSymbol(module.GlobalScope, new XmlType() { Name = "ModuleGlobals" }, modules[module].Namespace, generationOptions));
    }

    ConcurrentBag <Symbol> simpleSymbols = new ConcurrentBag <Symbol>();
    Dictionary <Tuple <string, string>, List <Symbol> > templateSymbols = new Dictionary <Tuple <string, string>, List <Symbol> >();

    Parallel.ForEach(Partitioner.Create(globalTypes), (symbol) =>
    {
        string symbolName = symbol.Name;

        // TODO: Add configurable filter
        //
        if (symbolName.StartsWith("$") || symbolName.StartsWith("__vc_attributes") || symbolName.Contains("`anonymous-namespace'") || symbolName.Contains("`anonymous namespace'") || symbolName.Contains("::$") || symbolName.Contains("`"))
        {
            return;
        }

        // Do not handle template referenced arguments
        if (symbolName.Contains("&"))
        {
            // TODO: Convert this to function pointer
            return;
        }

        // TODO: For now remove all unnamed-type symbols
        string scopedClassName = symbol.Namespaces.Last();

        if (scopedClassName.StartsWith("<") || symbolName.Contains("::<"))
        {
            return;
        }

        // Check if symbol contains template type.
        if (SymbolNameHelper.ContainsTemplateType(symbolName) && (symbol.Tag == CodeTypeTag.Class || symbol.Tag == CodeTypeTag.Structure || symbol.Tag == CodeTypeTag.Union))
        {
            List <string> namespaces = symbol.Namespaces;
            string className = namespaces.Last();
            var symbolId = Tuple.Create(symbolNamespaces[symbol], SymbolNameHelper.CreateLookupNameForSymbol(symbol));

            // NOTE(review): the "} ; else" token sequence below is preserved
            // verbatim from the source and looks like a formatting/extraction
            // artifact — verify against the original file.
            lock (templateSymbols) { if (templateSymbols.ContainsKey(symbolId) == false) { templateSymbols[symbolId] = new List <Symbol>() { symbol } } ; else { templateSymbols[symbolId].Add(symbol); } }

            // TODO:
            // Do not add physical types for template specialization (not now)
            // do if types contains static fields
            // nested in templates
        }
        else
        {
            simpleSymbols.Add(symbol);
        }
    });

    logger.WriteLine(" {0}", stopwatch.Elapsed);

    // Populate Templates
    logger.Write("Populating templates...");

    foreach (List <Symbol> symbols in templateSymbols.Values)
    {
        Symbol symbol = symbols.First();
        string symbolName = SymbolNameHelper.CreateLookupNameForSymbol(symbol);
        XmlType type = new XmlType() { Name = symbolName };

        userTypes.AddRange(userTypeFactory.AddSymbols(symbols, type, symbolNamespaces[symbol], generationOptions));
    }

    logger.WriteLine(" {0}", stopwatch.Elapsed);

    // Specialized class
    logger.Write("Populating specialized classes...");

    foreach (Symbol symbol in simpleSymbols)
    {
        userTypes.Add(userTypeFactory.AddSymbol(symbol, null, symbolNamespaces[symbol], generationOptions));
    }

    logger.WriteLine(" {0}", stopwatch.Elapsed);

    // To solve template dependencies: update specialization arguments once all the templates have been populated.
    logger.Write("Updating template arguments...");

    foreach (TemplateUserType templateUserType in userTypes.OfType <TemplateUserType>())
    {
        foreach (TemplateUserType specializedTemplateUserType in templateUserType.SpecializedTypes)
        {
            if (!specializedTemplateUserType.UpdateTemplateArguments(userTypeFactory))
            {
#if DEBUG
                logger.WriteLine("Template user type cannot be updated: {0}", specializedTemplateUserType.Symbol.Name);
#endif
            }
        }
    }

    logger.WriteLine(" {0}", stopwatch.Elapsed);

    // Post processing user types (filling DeclaredInType)
    logger.Write("Post processing user types...");

    var namespaceTypes = userTypeFactory.ProcessTypes(userTypes, symbolNamespaces).ToArray();
    userTypes.AddRange(namespaceTypes);

    logger.WriteLine(" {0}", stopwatch.Elapsed);
}
/// <summary>
/// Totals the counts across all tracked versions.
/// This method is added for unit testing purposes (virtual so tests can override it).
/// </summary>
/// <param name="versions">Map of version identifiers to their counts.</param>
/// <returns>The sum of every value in <paramref name="versions"/>; 0 when empty.</returns>
protected virtual int CalculateSum(ConcurrentDictionary <string, int> versions)
{
    var total = 0;
    foreach (var entry in versions)
    {
        total += entry.Value;
    }
    return total;
}
/// <summary>
/// Records the most recent progress value for <paramref name="id"/> and returns
/// the average progress across all ids seen so far.
/// </summary>
/// <param name="id">Key identifying the operation being tracked.</param>
/// <param name="newValue">Latest progress value for that operation.</param>
/// <returns>The mean of all stored progress values.</returns>
private double CalculateTotalProgress(object id, double newValue)
{
    // Always overwrite any previously stored value for this id.
    _progressValues.AddOrUpdate(id, newValue, (key, previous) => newValue);

    double accumulated = 0;
    foreach (var entry in _progressValues)
    {
        accumulated += entry.Value;
    }
    return accumulated / _progressValues.Count;
}
/**
 * <summary>Run method.
 * Invokes the set of examples: counts words from a large text buffer first
 * sequentially, then via Parallel.ForEach (map/reduce over Memory&lt;char&gt;
 * word blocks), printing duration and word statistics for each approach.
 * </summary>
 */
public static void Run()
{
    Console.WriteLine("\n\n" + ("ParallelForEach - MapReduce (Memory<T>)" + " " + new string('=', 115)).Substring(0, 115));
    Program.HaltIfDebug();
    Dictionary<string, int> wordStoreDict = new Dictionary<string, int>();
    ConcurrentDictionary<string, int> wordStore = new ConcurrentDictionary<string, int>();
    ParallelOptions parallelOptions = new ParallelOptions() { MaxDegreeOfParallelism = -1 }; // Set to -1 for .NET to determine
    int totalWords;
    KeyValuePair<string, int> minKvp;
    KeyValuePair<string, int> maxKvp;
    Stopwatch stopwatch = new Stopwatch();
    // Get the text string for a long text file
    int multiplier = 200;
    Memory<char> fileText = GetFileText(multiplier);
    //Thread.Sleep(5 * 1000);
    GCSettings.LargeObjectHeapCompactionMode = GCLargeObjectHeapCompactionMode.CompactOnce;
    GC.Collect();

    Console.WriteLine("\nSequential For: \n");
    wordStore.Clear(); // NOTE(review): the sequential pass fills wordStoreDict; presumably this meant wordStoreDict.Clear() — harmless here since both are still empty.
    stopwatch.Reset();
    stopwatch.Start();
    // Map the words
    foreach (Memory<char> wordBlock in ProduceWordBlocks(fileText))
    {
        StringBuilder wordBuffer = new StringBuilder();
        List<string> wordsList = new List<string>();
        // Split the words, cleanup each word and map it
        char character;
        for (int i = 0; i < wordBlock.Length; i++)
        {
            character = wordBlock.Span[i];
            if (character == ' ')
            {
                continue;
            }
            while (character != ' ')
            {
                if (char.IsLetterOrDigit(character) || character == '\'' || character == '-')
                {
                    wordBuffer.Append(character);
                }
                if (++i >= wordBlock.Length)
                {
                    break;
                }
                character = Convert.ToChar(wordBlock.Slice(i, 1).ToString());
            }
            // Save the word
            if (wordBuffer.Length > 0)
            {
                wordsList.Add(wordBuffer.ToString());
                wordBuffer.Clear();
            }
        }
        //Console.WriteLine($"Aggregating {wordsList.Count} words for thread: {Thread.CurrentThread.ManagedThreadId}");
        foreach (string word in wordsList)
        {
            int value = 0;
            wordStoreDict.TryGetValue(word, out value); // Get the current value for this word if it exists or zero
            wordStoreDict[word] = ++value;              // Increment the value and update or add the word
        }
    }
    stopwatch.Stop();
    Console.WriteLine($"Duration: {stopwatch.Elapsed}");
    // Report word counts
    totalWords = wordStoreDict.Sum(kvp => kvp.Value);
    Console.WriteLine($"\nNumber of unique words found: {wordStoreDict.Count} - Total words found: {totalWords,1:#,###,###,###,##0}");
    minKvp = wordStoreDict.OrderBy(kvp => kvp.Key).Aggregate((l_kvp, r_kvp) => l_kvp.Value < r_kvp.Value ? l_kvp : r_kvp);
    maxKvp = wordStoreDict.OrderBy(kvp => kvp.Key).Aggregate((l_kvp, r_kvp) => l_kvp.Value > r_kvp.Value ? l_kvp : r_kvp);
    Console.WriteLine($"Word: {minKvp.Key,15:s1} - occurs the minimum number of times: {minKvp.Value,1:#,###,###,###,##0}");
    // BUG FIX: this line previously said "minimum" while reporting the maximum-count word.
    Console.WriteLine($"Word: {maxKvp.Key,15:s1} - occurs the maximum number of times: {maxKvp.Value,1:#,###,###,###,##0}");
    //Thread.Sleep(5 * 1000);
    GCSettings.LargeObjectHeapCompactionMode = GCLargeObjectHeapCompactionMode.CompactOnce;
    GC.Collect();

    Console.WriteLine("\nParallel ForEach: \n");
    wordStore.Clear();
    stopwatch.Reset();
    stopwatch.Start();
    // Map the words
    Parallel.ForEach<Memory<char>, List<string>>(ProduceWordBlocks(fileText) // input enumerator
        , parallelOptions                      // parallel options - MaxDegreeOfParallelism
        , () =>                                // initialize the ThreadLocal variable
        {
            return new List<string>();
        }
        , (wordBlock, loopState, wordsList) => // body delegate
        {
            StringBuilder wordBuffer = new StringBuilder();
            // Split the words, cleanup each word and map it
            char character;
            for (int i = 0; i < wordBlock.Length; i++)
            {
                character = wordBlock.Span[i];
                if (character == ' ')
                {
                    continue;
                }
                while (character != ' ')
                {
                    if (char.IsLetterOrDigit(character) || character == '\'' || character == '-')
                    {
                        wordBuffer.Append(character);
                    }
                    if (++i >= wordBlock.Length)
                    {
                        break;
                    }
                    character = Convert.ToChar(wordBlock.Slice(i, 1).ToString());
                }
                // Save the word
                if (wordBuffer.Length > 0)
                {
                    wordsList.Add(wordBuffer.ToString());
                    wordBuffer.Clear();
                }
            }
            return wordsList;
        }
        , (wordsList) => // thread aggregator
        {
            //Console.WriteLine($"Aggregating {wordsList.Count} words for thread: {Thread.CurrentThread.ManagedThreadId}");
            Parallel.ForEach(wordsList
                , parallelOptions
                , word =>
                {
                    // If the word exists, bump its count; otherwise add it with a value of 1.
                    // FIX: the original used Interlocked.Increment(ref oldValue), which only
                    // incremented a local copy of the lambda parameter and provided no extra
                    // atomicity — AddOrUpdate already retries the delegate on contention.
                    wordStore.AddOrUpdate(word, 1, (key, oldValue) => oldValue + 1);
                }
            );
        }
    );
    stopwatch.Stop();
    Console.WriteLine($"Duration: {stopwatch.Elapsed}");
    // Report word counts
    totalWords = wordStore.Sum(kvp => kvp.Value);
    Console.WriteLine($"\nNumber of unique words found: {wordStore.Count} - Total words found: {totalWords,1:#,###,###,###,##0}");
    minKvp = wordStore.OrderBy(kvp => kvp.Key).Aggregate((l_kvp, r_kvp) => l_kvp.Value < r_kvp.Value ? l_kvp : r_kvp);
    maxKvp = wordStore.OrderBy(kvp => kvp.Key).Aggregate((l_kvp, r_kvp) => l_kvp.Value > r_kvp.Value ? l_kvp : r_kvp);
    Console.WriteLine($"Word: {minKvp.Key,15:s1} - occurs the minimum number of times: {minKvp.Value,1:#,###,###,###,##0}");
    // BUG FIX: previously said "minimum" for the maximum-count word.
    Console.WriteLine($"Word: {maxKvp.Key,15:s1} - occurs the maximum number of times: {maxKvp.Value,1:#,###,###,###,##0}");
    return;
}
/// <summary>
/// Runs one ore-detector scan cycle: validates power and detector spacing,
/// refreshes the in-range voxel maps and deposits, then broadcasts progress
/// and the aggregated ore list to players in sync range when either changed.
/// </summary>
public void CheckScan()
{
    // Invoked on a parallel thread in LargeNaniteOreDetectorLogic, hence the
    // InvokeOnGameThread marshalling for state that the game thread owns.
    if (!m_block.Enabled || !m_block.IsFunctional || !Sink.IsPoweredByType(MyResourceDistributorComponent.ElectricityId))
    {
        MyAPIGateway.Utilities.InvokeOnGameThread(() =>
        {
            m_depositGroupsByEntity.Clear();
            m_inRangeCache.Clear();
        });
        return;
    }
    CheckIsTooCloseToOtherDetector();
    if (m_tooCloseToOtherDetector)
    {
        // Overlapping detectors are not allowed: clear state and switch the block off.
        MyAPIGateway.Utilities.InvokeOnGameThread(() =>
        {
            m_depositGroupsByEntity.Clear();
            m_inRangeCache.Clear();
            m_block.Enabled = false;
        });
        return;
    }
    Vector3D position = m_block.GetPosition();
    BoundingSphereD sphere = new BoundingSphereD(position, Range);
    MyGamePruningStructure.GetAllVoxelMapsInSphere(ref sphere, m_inRangeCache);
    RemoveVoxelMapsOutOfRange();
    AddVoxelMapsInRange();
    UpdateDeposits(ref sphere);
    m_inRangeCache.Clear();
    // Progress = processed / initial tasks across all deposit groups.
    int totalInitialTasks = m_depositGroupsByEntity.Sum((x) => x.Value.InitialTasks);
    int totalProcessedTasks = m_depositGroupsByEntity.Sum((x) => x.Value.ProcessedTasks);
    float scanProgress = 0f;
    if (totalInitialTasks != 0)
    {
        scanProgress = (float)totalProcessedTasks / (float)totalInitialTasks;
    }
    if (scanProgress != m_scanProgress)
    {
        m_scanProgress = scanProgress;
        MessageHub.SendToPlayerInSyncRange(new MessageOreDetectorScanProgress()
        {
            EntityId = m_block.EntityId,
            Progress = m_scanProgress
        }, m_block.GetPosition());
    }
    // Build "- <ore>: <count>" lines grouped by mined ore type.
    StringBuilder oreListCache = new StringBuilder();
    foreach (var item in m_depositGroupsByEntity.SelectMany((x) => x.Value.Materials.GetMaterialList()).GroupBy((x) => x.Material.MinedOre))
    {
        oreListCache.Append($"- {item.Key}: {item.Sum((x) => x.Count)}\n");
    }
    // BUG FIX: the original compared StringBuilder instances with !=, which is a
    // reference comparison (StringBuilder does not overload equality operators);
    // since oreListCache is freshly allocated every scan, the condition was always
    // true and the "scan complete" message was broadcast on every cycle.
    if (m_oreListCache == null || oreListCache.ToString() != m_oreListCache.ToString())
    {
        m_oreListCache = oreListCache;
        MessageHub.SendToPlayerInSyncRange(new MessageOreDetectorScanComplete()
        {
            EntityId = m_block.EntityId,
            OreListCache = m_oreListCache.ToString()
        }, m_block.GetPosition());
    }
}
/// <summary>
/// Verifies that three WorkItemJob instances can cooperatively drain a shared
/// in-memory queue of 1000 work items, with progress observed on the message bus.
/// Asserts every item either completed or errored, that all three jobs did some
/// of the work, and that the per-job counts add up to the total.
/// </summary>
public async Task CanHandleMultipleWorkItemInstances() { const int workItemCount = 1000;
// Shared infrastructure: metrics client, a queue with no retries, and a message bus.
using (var metrics = new InMemoryMetricsClient(loggerFactory: Log)) { using (var queue = new InMemoryQueue<WorkItemData>(retries: 0, retryDelay: TimeSpan.Zero, loggerFactory: Log)) { queue.AttachBehavior(new MetricsQueueBehavior<WorkItemData>(metrics, loggerFactory: Log)); using (var messageBus = new InMemoryMessageBus(Log)) { var handlerRegistry = new WorkItemHandlers();
// Three competing job instances share the same queue and handler registry.
var j1 = new WorkItemJob(queue, messageBus, handlerRegistry, Log); var j2 = new WorkItemJob(queue, messageBus, handlerRegistry, Log); var j3 = new WorkItemJob(queue, messageBus, handlerRegistry, Log); int errors = 0; var jobIds = new ConcurrentDictionary<string, int>();
// Handler: counts work per job id, reports progress 0..90 in steps of 10, and
// randomly throws (RandomData.GetBool(1) — presumably a small percentage chance;
// TODO confirm against RandomData's documentation).
handlerRegistry.Register<MyWorkItem>(async ctx => { var jobData = ctx.GetData<MyWorkItem>(); Assert.Equal("Test", jobData.SomeData); var jobWorkTotal = jobIds.AddOrUpdate(ctx.JobId, 1, (key, value) => value + 1); if (jobData.Index % 100 == 0) _logger.Trace("Job {jobId} processing work item #: {jobWorkTotal}", ctx.JobId, jobWorkTotal); for (int i = 0; i < 10; i++) await ctx.ReportProgressAsync(10 * i); if (RandomData.GetBool(1)) { Interlocked.Increment(ref errors); throw new Exception("Boom!"); } });
// Enqueue all work items up front.
for (int i = 0; i < workItemCount; i++) await queue.EnqueueAsync(new MyWorkItem { SomeData = "Test", Index = i }, true);
// Track completions: a WorkItemStatus with Progress == 100 marks an item done.
var completedItems = new List<string>(); object completedItemsLock = new object(); messageBus.Subscribe<WorkItemStatus>(status => { if (status.Progress == 100) _logger.Trace("Progress: {progress}", status.Progress); if (status.Progress < 100) return; lock (completedItemsLock) completedItems.Add(status.WorkItemId); });
// Run all three jobs concurrently until the queue is empty (10s safety timeout);
// the first job to finish cancels the remaining ones.
var cancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(10)); var tasks = new List<Task> { Task.Run(async () => { await j1.RunUntilEmptyAsync(cancellationTokenSource.Token); cancellationTokenSource.Cancel(); }, cancellationTokenSource.Token), Task.Run(async () => { await j2.RunUntilEmptyAsync(cancellationTokenSource.Token); cancellationTokenSource.Cancel(); }, cancellationTokenSource.Token), Task.Run(async () => { await j3.RunUntilEmptyAsync(cancellationTokenSource.Token); cancellationTokenSource.Cancel(); }, cancellationTokenSource.Token) }; try { await Task.WhenAll(tasks); await SystemClock.SleepAsync(100); } catch (TaskCanceledException) {} _logger.Info("Completed: {completedItems} Errors: {errors}", completedItems.Count, errors);
// Every enqueued item must have either completed or errored, the work must have
// reached all three job instances, and per-job counts must sum to the total.
Assert.Equal(workItemCount, completedItems.Count + errors); Assert.Equal(3, jobIds.Count); Assert.Equal(workItemCount, jobIds.Sum(kvp => kvp.Value)); } } } }
/// <summary>
/// Counts the messages currently held across every queue in <c>_items</c>.
/// </summary>
/// <returns>The total number of queued messages.</returns>
public long CurrentMessageCount()
{
    long total = 0;
    foreach (var pair in _items)
    {
        total += pair.Value.Count;
    }
    return total;
}
/// <summary>
/// Registers the "!m" music command group: playback control (next/stop/pause),
/// queueing and queue inspection, volume commands, playlist/radio queueing,
/// queue maintenance, and owner-only utilities. Most handlers look up the
/// per-server player in <c>musicPlayers</c> and silently return when the server
/// has no active player.
/// </summary>
public override void Install(ModuleManager manager) { var client = NadekoBot.client; manager.CreateCommands("!m", cgb => { cgb.AddCheck(Classes.Permissions.PermissionChecker.Instance); commands.ForEach(cmd => cmd.Init(cgb));
// Playback control: next / stop / pause.
cgb.CreateCommand("n") .Alias("next") .Description("Goes to the next song in the queue.") .Do(async e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } await musicPlayers[e.Server].LoadNextSong(); });
cgb.CreateCommand("s") .Alias("stop") .Description("Completely stops the music and unbinds the bot from the channel and cleanes up files.") .Do(e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } musicPlayers[e.Server].Stop(); });
cgb.CreateCommand("p") .Alias("pause") .Description("Pauses or Unpauses the song") .Do(async e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } if (musicPlayers[e.Server].TogglePause()) { await e.Channel.SendMessage("π΅`Music player paused.`"); } else { await e.Channel.SendMessage("π΅`Music player unpaused.`"); } });
// Queueing and queue inspection.
cgb.CreateCommand("q") .Alias("yq") .Description("Queue a song using keywords or link. Bot will join your voice channel. **You must be in a voice channel**.\n**Usage**: `!m q Dream Of Venice`") .Parameter("query", ParameterType.Unparsed) .Do(async e => await QueueSong(e, e.GetArg("query")));
cgb.CreateCommand("lq") .Alias("ls").Alias("lp") .Description("Lists up to 10 currently queued songs.") .Do(async e => { if (musicPlayers.ContainsKey(e.Server) == false) { await e.Channel.SendMessage("π΅ No active music player."); return; } var player = musicPlayers[e.Server]; string toSend = "π΅ **" + player.SongQueue.Count + "** `videos currently queued.` "; if (player.SongQueue.Count >= 25) { toSend += "**Song queue is full!**\n"; } await e.Channel.SendMessage(toSend); int number = 1; await e.Channel.SendMessage(string.Join("\n", player.SongQueue.Take(10).Select(v => $"`{number++}.` {v.FullPrettyName}"))); });
cgb.CreateCommand("np") .Alias("playing") .Description("Shows the song currently playing.") .Do(async e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } var player = musicPlayers[e.Server]; await e.Channel.SendMessage($"π΅`Now Playing` {player.CurrentSong.FullPrettyName}"); });
// Volume commands: explicit value, per-server default, and presets (0/100/50).
cgb.CreateCommand("vol") .Description("Sets the music volume 0-150%") .Parameter("val", ParameterType.Required) .Do(async e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } var player = musicPlayers[e.Server]; var arg = e.GetArg("val"); int volume; if (!int.TryParse(arg, out volume)) { await e.Channel.SendMessage("Volume number invalid."); return; } volume = player.SetVolume(volume); await e.Channel.SendMessage($"π΅ `Volume set to {volume}%`"); });
cgb.CreateCommand("dv") .Alias("defvol") .Description("Sets the default music volume when music playback is started (0-100). Does not persist through restarts.\n**Usage**: !m dv 80") .Parameter("val", ParameterType.Required) .Do(async e => { var arg = e.GetArg("val"); float volume; if (!float.TryParse(arg, out volume) || volume < 0 || volume > 100) { await e.Channel.SendMessage("Volume number invalid."); return; } musicVolumes.AddOrUpdate(e.Server.Id, volume / 100, (key, newval) => volume / 100); await e.Channel.SendMessage($"π΅ `Default volume set to {volume}%`"); });
cgb.CreateCommand("min").Alias("mute") .Description("Sets the music volume to 0%") .Do(e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } var player = musicPlayers[e.Server]; player.SetVolume(0); });
cgb.CreateCommand("max") .Description("Sets the music volume to 100% (real max is actually 150%).") .Do(e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } var player = musicPlayers[e.Server]; player.SetVolume(100); });
cgb.CreateCommand("half") .Description("Sets the music volume to 50%.") .Do(e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } var player = musicPlayers[e.Server]; player.SetVolume(50); });
cgb.CreateCommand("sh") .Description("Shuffles the current playlist.") .Do(async e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } var player = musicPlayers[e.Server]; if (player.SongQueue.Count < 2) { await e.Channel.SendMessage("Not enough songs in order to perform the shuffle."); return; } player.SongQueue.Shuffle(); await e.Channel.SendMessage("π΅ `Songs shuffled.`"); });
// Timer that periodically sets the bot's "game" status to the current playing /
// queued song counts; it is toggled by the owner-only "setgame" command below.
bool setgameEnabled = false; Timer setgameTimer = new Timer(); setgameTimer.Interval = 20000; setgameTimer.Elapsed += (s, e) => { int num = musicPlayers.Where(kvp => kvp.Value.CurrentSong != null).Count(); NadekoBot.client.SetGame($"{num} songs".SnPl(num) + $", {musicPlayers.Sum(kvp => kvp.Value.SongQueue.Count())} queued"); };
cgb.CreateCommand("setgame") .Description("Sets the game of the bot to the number of songs playing.**Owner only**") .Do(async e => { if (NadekoBot.OwnerID != e.User.Id) { return; } setgameEnabled = !setgameEnabled; if (setgameEnabled) { setgameTimer.Start(); } else { setgameTimer.Stop(); } await e.Channel.SendMessage("`Music status " + (setgameEnabled ? "enabled`" : "disabled`")); });
// Bulk queueing from a youtube playlist, and direct radio streams.
cgb.CreateCommand("pl") .Description("Queues up to 25 songs from a youtube playlist specified by a link, or keywords.") .Parameter("playlist", ParameterType.Unparsed) .Do(async e => { if (e.User.VoiceChannel?.Server != e.Server) { await e.Channel.SendMessage("π’ You need to be in the voice channel on this server."); return; } var ids = await SearchHelper.GetVideoIDs(await SearchHelper.GetPlaylistIdByKeyword(e.GetArg("playlist"))); //todo TEMPORARY SOLUTION, USE RESOLVE QUEUE IN THE FUTURE
var msg = await e.Channel.SendMessage($"π΅ `Attempting to queue **{ids.Count}** songs".SnPl(ids.Count) + "...`"); foreach (var id in ids) { Task.Run(async() => await QueueSong(e, id, true)).ConfigureAwait(false); await Task.Delay(150); } msg?.Edit("π΅ `Playlist queue complete.`"); });
cgb.CreateCommand("radio").Alias("ra") .Description("Queues a direct radio stream from a link.") .Parameter("radio_link", ParameterType.Required) .Do(async e => { if (e.User.VoiceChannel?.Server != e.Server) { await e.Channel.SendMessage("π’ You need to be in the voice channel on this server."); return; } await QueueSong(e, e.GetArg("radio_link"), radio: true); });
// Queue maintenance and diagnostics.
cgb.CreateCommand("mv") .Description("Moves the bot to your voice channel. (works only if music is already playing)") .Do(async e => { MusicControls mc; if (e.User.VoiceChannel == null || e.User.VoiceChannel.Server != e.Server || !musicPlayers.TryGetValue(e.Server, out mc)) { return; } mc.VoiceChannel = e.User.VoiceChannel; mc.VoiceClient = await mc.VoiceChannel.JoinAudio(); });
cgb.CreateCommand("rm") .Description("Removes a song by a # from the queue or 'all' to remove whole queue.") .Parameter("num", ParameterType.Required) .Do(async e => { var arg = e.GetArg("num"); MusicControls mc; if (!musicPlayers.TryGetValue(e.Server, out mc)) { return; } if (arg?.ToLower() == "all") { mc.SongQueue?.Clear(); await e.Channel.SendMessage($"π΅Queue cleared!"); return; } int num; if (!int.TryParse(arg, out num)) { return; } if (num <= 0 || num > mc.SongQueue.Count) { return; } mc.SongQueue.RemoveAt(num - 1); await e.Channel.SendMessage($"π΅Song at position `{num}` has been removed."); });
cgb.CreateCommand("debug") .Description("Writes some music data to console. **BOT OWNER ONLY**") .Do(e => { if (NadekoBot.OwnerID != e.User.Id) { return; } var output = "SERVER_NAME---SERVER_ID-----USERCOUNT----QUEUED\n" + string.Join("\n", musicPlayers.Select(kvp => kvp.Key.Name + "--" + kvp.Key.Id + " --" + kvp.Key.Users.Count() + "--" + kvp.Value.SongQueue.Count)); Console.WriteLine(output); }); }); }
/// <summary>
/// Total plotting capacity contributed by all entries in <c>ActivePlotters</c>.
/// </summary>
public int GetPlottingCapacity()
{
    var capacity = 0;
    foreach (var plotter in ActivePlotters)
    {
        capacity += plotter.Value.Capacity;
    }
    return capacity;
}
/// <summary>
/// Explicit IModule implementation that registers the "music" command group:
/// playback control (next/stop/pause), queueing and queue inspection, volume
/// commands, shuffle, the "setgame" status toggle, playlist queueing and a debug
/// dump. Handlers look up the per-server player in <c>musicPlayers</c> and
/// silently return when the server has no active player.
/// </summary>
void IModule.Install(ModuleManager manager) { _manager = manager; _client = manager.Client; manager.CreateCommands("music", cgb => {
//queue all more complex commands
// commands.ForEach(cmd => cmd.Init(cgb));
// Playback control: next / stop / pause.
cgb.CreateCommand("next") .Description("Goes to the next song in the queue.") .Do(async e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } await musicPlayers[e.Server].LoadNextSong(); });
cgb.CreateCommand("stop") .Description("Completely stops the music and unbinds the bot from the channel and cleanes up files.") .Do(e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } musicPlayers[e.Server].Stop(); });
cgb.CreateCommand("pause") .Description("Pauses or Unpauses the song") .Do(async e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } if (musicPlayers[e.Server].TogglePause()) { await _client.Reply(e, "π΅`Music player paused.`"); } else { await _client.Reply(e, "π΅`Music player unpaused.`"); } });
// Queueing and queue inspection.
cgb.CreateCommand("q") .Alias("yq") .Description("Queue a song using keywords or link. Bot will join your voice channel. **You must be in a voice channel**.\n**Usage**: `!m q Dream Of Venice`") .Parameter("query", ParameterType.Unparsed) .Do(async e => await QueueSong(e, e.GetArg("query")));
cgb.CreateCommand("listq") .Alias("ls").Alias("lp") .Description("Lists up to 10 currently queued songs.") .Do(async e => { if (musicPlayers.ContainsKey(e.Server) == false) { await _client.Reply(e, "π΅ No active music player."); return; } var player = musicPlayers[e.Server]; string toSend = "π΅ **" + player.SongQueue.Count + "** `videos currently queued.` "; if (player.SongQueue.Count >= 25) { toSend += "**Song queue is full!**\n"; } await _client.Reply(e, toSend); int number = 1; await _client.Reply(e, string.Join("\n", player.SongQueue.Take(10).Select(v => $"`{number++}.` {v.FullPrettyName}"))); });
cgb.CreateCommand("nowplaying") .Alias("playing") .Description("Shows the song currently playing.") .Do(async e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } var player = musicPlayers[e.Server]; await _client.Reply(e, $"π΅`Now Playing` {player.CurrentSong.FullPrettyName}"); });
// Volume commands: explicit value and presets (0/100/50).
cgb.CreateCommand("vol") .Description("Sets the music volume 0-150%") .Parameter("val", ParameterType.Required) .Do(async e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } var player = musicPlayers[e.Server]; var arg = e.GetArg("val"); int volume; if (!int.TryParse(arg, out volume)) { await _client.Reply(e, "Volume number invalid."); return; } volume = player.SetVolume(volume); await _client.Reply(e, $"π΅ `Volume set to {volume}%`"); });
cgb.CreateCommand("min").Alias("mute") .Description("Sets the music volume to 0%") .Do(e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } var player = musicPlayers[e.Server]; player.SetVolume(0); });
cgb.CreateCommand("max") .Description("Sets the music volume to 100% (real max is actually 150%).") .Do(e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } var player = musicPlayers[e.Server]; player.SetVolume(100); });
cgb.CreateCommand("half") .Description("Sets the music volume to 50%.") .Do(e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } var player = musicPlayers[e.Server]; player.SetVolume(50); });
cgb.CreateCommand("shuffle") .Description("Shuffles the current playlist.") .Do(async e => { if (musicPlayers.ContainsKey(e.Server) == false) { return; } var player = musicPlayers[e.Server]; if (player.SongQueue.Count < 2) { await _client.Reply(e, "Not enough songs in order to perform the shuffle."); return; } player.SongQueue.Shuffle(); await _client.Reply(e, "π΅ `Songs shuffled.`"); });
// Timer that periodically sets the bot's "game" status to playing/queued song
// counts; toggled by the "setgame" command below. NOTE(review): setgameEnabled
// starts true here, but the timer is only started once the command flips it —
// verify the intended initial state.
bool setgameEnabled = true; Timer setgameTimer = new Timer(); setgameTimer.Interval = 20000; setgameTimer.Elapsed += (s, e) => { int num = musicPlayers.Where(kvp => kvp.Value.CurrentSong != null).Count(); _client.SetGame($"{num} songs".SnPl(num) + $", {musicPlayers.Sum(kvp => kvp.Value.SongQueue.Count())} queued"); };
cgb.CreateCommand("setgame") .Description("Sets the game of the bot to the number of songs playing.**Owner only**") .Do(async e => { setgameEnabled = !setgameEnabled; if (setgameEnabled) { setgameTimer.Start(); } else { setgameTimer.Stop(); } await _client.Reply(e, "`Music status " + (setgameEnabled ? "enabled`" : "disabled`")); });
// Bulk queueing from a youtube playlist.
cgb.CreateCommand("playlist") .Description("Queues up to 25 songs from a youtube playlist specified by a link, or keywords.") .Parameter("playlist", ParameterType.Unparsed) .Do(async e => { if (e.User.VoiceChannel?.Server != e.Server) { await _client.Reply(e, "π’ You need to be in the voice channel on this server."); return; } var ids = await Searches.GetVideoIDs(await Searches.GetPlaylistIdByKeyword(e.GetArg("playlist"))); //todo TEMPORARY SOLUTION, USE RESOLVE QUEUE IN THE FUTURE
await _client.Reply(e, $"π΅ Attempting to queue **{ids.Count}** songs".SnPl(ids.Count)); foreach (var id in ids) { Task.Run(async() => await QueueSong(e, id, true)).ConfigureAwait(false); await Task.Delay(150); } await _client.Reply(e, $"π΅ Attempting to queue **{ids.Count}** songs".SnPl(ids.Count)); });
/* cgb.CreateCommand("radio").Alias("ra") * .Description("Queues a direct radio stream from a link.") * .Parameter("radio_link", ParameterType.Required) * .Do(async e => { * if (e.User.VoiceChannel?.Server != e.Server) { * await _client.Reply(e, "π’ You need to be in the voice channel on this server."); * return; * } * await QueueSong(e, e.GetArg("radio_link"),_client, radio: true); * });*/
// Diagnostics.
cgb.CreateCommand("debug") .Description("Writes some music data to console.") .Do(e => { var output = "SERVER_NAME---SERVER_ID-----USERCOUNT----QUEUED\n" + string.Join("\n", musicPlayers.Select(kvp => kvp.Key.Name + "--" + kvp.Key.Id + " --" + kvp.Key.Users.Count() + "--" + kvp.Value.SongQueue.Count)); Console.WriteLine(output); }); }); }
/// <summary>
/// Counts the tasks across every active synchronization queue.
/// </summary>
public int GetTotalActiveTasks() => activeSynchronizations.Select(queue => queue.Value.Count).Sum();
/// <summary>
/// Counts the tasks across every pending synchronization queue.
/// </summary>
public int GetTotalPendingTasks()
{
    var total = 0;
    foreach (var queue in pendingSynchronizations)
    {
        total += queue.Value.Count;
    }
    return total;
}
/// <summary>
/// Total number of views accumulated across all recorded dates.
/// </summary>
public int GetTotalViews()
{
    var total = 0;
    foreach (var entry in _viewsPerDate)
    {
        total += entry.Value;
    }
    return total;
}
/// <summary>
/// Opens the modification-import dialog for the modification located at
/// <paramref name="modificationPath"/>. File resolution against the League
/// AirClient/GameClient release manifests and the category guess run on a
/// background thread while the dialog is shown.
/// </summary>
/// <param name="modificationPath">Directory containing the modification to import.</param>
public void ShowModificationImportWindowDialog(string modificationPath)
{
    var rootNodeViewModel = modificationImportViewModelFactory.FromDirectory(modificationPath);
    // TODO(review): hard-coded RADS install path; should come from configuration/discovery.
    var solution = riotSolutionLoader.Load(@"V:\Riot Games\League of Legends\RADS", RiotProjectType.AirClient | RiotProjectType.GameClient);
    var airResolver = new Resolver(solution.ProjectsByType[RiotProjectType.AirClient].ReleaseManifest.Root);
    var gameResolver = new Resolver(solution.ProjectsByType[RiotProjectType.GameClient].ReleaseManifest.Root);
    var fileNodes = rootNodeViewModel.EnumerateFileNodes().ToArray();
    var importWindow = new ModificationImportWindow();
    var modificationImportViewModel = new ModificationImportViewModel(this, importWindow, rootNodeViewModel);
    modificationImportViewModel.ModificationFriendlyName = fileSystemProxy.GetDirectoryInfo(modificationPath).Name;
    importWindow.DataContext = modificationImportViewModel;
    new Thread(() =>
    {
        // Resolve every file: first against the Air manifest, then the Game manifest.
        foreach (var fileNode in fileNodes)
        {
            var path = fileNode.Path;
            var airResolution = airResolver.Resolve(path);
            if (airResolution.Any())
            {
                fileNode.ResolutionPath = airResolution.First().GetPath();
                fileNode.ResolutionState = ResolutionState.ResolutionSuccessful;
            }
            else
            {
                var gameResolutions = gameResolver.Resolve(path);
                if (gameResolutions.Any())
                {
                    fileNode.ResolutionPath = gameResolutions.First().GetPath();
                    fileNode.ResolutionState = ResolutionState.ResolutionSuccessful;
                }
                else
                {
                    fileNode.ResolutionState = ResolutionState.ResolutionFailed;
                }
            }
        }
        // Guess the modification category from where the resolved files live.
        LeagueModificationCategory modificationType = LeagueModificationCategory.Other;
        if (fileNodes.Any(node => node.ResolutionState == ResolutionState.ResolutionSuccessful))
        {
            var modificationTypeCounts = new ConcurrentDictionary<LeagueModificationCategory, int>();
            foreach (var file in fileNodes)
            {
                if (file.ResolutionState == ResolutionState.ResolutionSuccessful)
                {
                    if (file.ResolutionPath.IndexOf("DATA/Characters", StringComparison.OrdinalIgnoreCase) != -1 || file.ResolutionPath.IndexOf("assets/images/champions", StringComparison.OrdinalIgnoreCase) != -1)
                    {
                        if (file.ResolutionPath.IndexOf("ward", StringComparison.OrdinalIgnoreCase) != -1)
                        {
                            modificationTypeCounts.AddOrUpdate(LeagueModificationCategory.Ward, 1, (existing, count) => count + 1);
                        }
                        else
                        {
                            modificationTypeCounts.AddOrUpdate(LeagueModificationCategory.Champion, 1, (existing, count) => count + 1);
                        }
                    }
                    // FIX: this check used the culture-sensitive IndexOf(string) overload (CA1307)
                    // while every sibling check passes an explicit StringComparison; Ordinal keeps
                    // the original case-sensitive matching without culture-dependent surprises.
                    else if (file.ResolutionPath.IndexOf("LEVELS", StringComparison.Ordinal) != -1)
                    {
                        modificationTypeCounts.AddOrUpdate(LeagueModificationCategory.Map, 1, (existing, count) => count + 1);
                    }
                    else if (file.ResolutionPath.IndexOf("Menu", StringComparison.OrdinalIgnoreCase) != -1)
                    {
                        modificationTypeCounts.AddOrUpdate(LeagueModificationCategory.UserInterface, 1, (existing, count) => count + 1);
                    }
                    else
                    {
                        modificationTypeCounts.AddOrUpdate(LeagueModificationCategory.Other, 1, (existing, count) => count + 1);
                    }
                }
            }
            // Adopt the dominant category only when it accounts for >= 2/3 of categorized files.
            var categorizationCounts = modificationTypeCounts.Sum(x => x.Value);
            var highestCategorization = modificationTypeCounts.MaxBy(key => key.Value, Comparer<int>.Default);
            if (highestCategorization.Value >= categorizationCounts * 2.0 / 3.0)
            {
                modificationType = modificationTypeCounts.MaxBy(key => key.Value, Comparer<int>.Default).Key;
            }
            Console.WriteLine("Highest categorization: " + highestCategorization.Key.Name);
            modificationTypeCounts.ForEach(x => Console.WriteLine(x.Key.Name + ": " + x.Value));
            // Marshal the result back onto the UI thread.
            Application.Current.Dispatcher.BeginInvoke(DispatcherPriority.Send, new Action(() =>
            {
                modificationImportViewModel.ModificationCategorization = modificationType;
            }));
        }
    }).Start();
    importWindow.ShowDialog();
}