/// <summary>
/// Wraps every non-null span event wire model in a prioritized node and hands the
/// batch to the span event reservoir.
/// </summary>
/// <param name="wireModels">Span event wire models to enqueue; null entries are skipped.</param>
/// <returns>The result of the reservoir's Add operation.</returns>
private int AddWireModels(IEnumerable<ISpanEventWireModel> wireModels)
{
    var prioritizedNodes = from model in wireModels
                           where model != null
                           select new PrioritizedNode<ISpanEventWireModel>(model);
    return _spanEvents.Add(prioritizedNodes);
}
/// <summary>
/// Records a collected custom event: reports the collection to agent health, then
/// adds the event to the reservoir while holding the read lock.
/// </summary>
/// <param name="customEventWireModel">The custom event to enqueue.</param>
// NOTE(review): a READ lock guards an Add. Presumably the reservoir's Add is itself
// thread-safe, and the WRITE lock is reserved for operations that swap or clear the
// whole collection — so many collectors may add concurrently while a harvest/reset
// excludes them all. Confirm against the other users of _readerWriterLockSlim.
public override void Collect(CustomEventWireModel customEventWireModel)
{
    _agentHealthReporter.ReportCustomEventCollected();
    _readerWriterLockSlim.EnterReadLock();
    try
    {
        _customEvents.Add(new PrioritizedNode <CustomEventWireModel>(customEventWireModel));
    }
    finally
    {
        // Always release, even if Add throws.
        _readerWriterLockSlim.ExitReadLock();
    }
}
/// <summary>
/// Stores an error event: synthetics events go to the dedicated synthetics reservoir
/// while it has room under its cap; everything else (including overflow synthetics
/// events) is stored as a prioritized error event.
/// </summary>
/// <param name="errorEvent">The error event to store.</param>
private void AddEventToCollection(ErrorEventWireModel errorEvent)
{
    if (errorEvent.IsSynthetics && _syntheticsErrorEvents.Count < SyntheticsHeader.MaxEventCount)
    {
        _syntheticsErrorEvents.Add(errorEvent);
        return;
    }

    _errorEvents.Add(new PrioritizedNode<ErrorEventWireModel>(errorEvent));
}
/// <summary>
/// Single-threaded timing benchmark: measures Add, GetEnumerator, Take, and re-filling
/// past the initial capacity, printing elapsed times and asserting exact counts.
/// </summary>
public void SingleThreadTiming()
{
    const int count = 1000000;
    // Deliberately tiny initial capacity so the Add phase includes growth cost.
    var target = new ConcurrentPriorityQueue<int>(2);
    var watcher = new Stopwatch();

    // Phase 1: add `count` elements.
    watcher.Start();
    for (int i = 0; i < count; i++)
    {
        target.Add(i);
    }
    watcher.Stop();
    Assert.AreEqual(count, target.Count);
    Console.WriteLine("Add {0} elements: {1}", count, watcher.Elapsed);

    // Phase 2: time obtaining an enumerator.
    watcher.Restart();
    // ReSharper disable once UnusedVariable
    var enumerator = target.GetEnumerator();
    watcher.Stop();
    Console.WriteLine("Get enumerator for {0} elements: {1}", count, watcher.Elapsed);

    // Phase 3: drain the queue.
    watcher.Restart();
    for (int i = 0; i < count; i++)
    {
        target.Take();
    }
    watcher.Stop();
    Assert.AreEqual(0, target.Count);
    Console.WriteLine("Take {0} elements: {1}", count, watcher.Elapsed);

    // Phase 4: add twice as many elements.
    // BUG FIX: the original called watcher.Start() here, which RESUMES the stopped
    // stopwatch and folds the Take phase's elapsed time into this measurement.
    // Restart() zeroes it so this phase is timed in isolation.
    watcher.Restart();
    for (int i = 0; i < 2 * count; i++)
    {
        target.Add(i);
    }
    watcher.Stop();
    Assert.AreEqual(2 * count, target.Count);
    Console.WriteLine("Add twice the capacity of {0} elements: {1}", count, watcher.Elapsed);
}
/// <summary>
/// Verifies that an element added to an empty queue is the first one handed back
/// by Dequeue.
/// </summary>
public void Add()
{
    var queue = new ConcurrentPriorityQueue<int>();

    // A freshly constructed queue must start out empty.
    Assert.That(queue.Count, Is.EqualTo(0));

    // Insert one element wrapped in a priority/value pair.
    queue.Add(new PriorityValuePair<int>(1.0, 2));

    // Dequeue should return the value that was just stored.
    Assert.That(queue.Dequeue().Value, Is.EqualTo(2));
}
/// <summary>
/// Queues a search state for further exploration if this cube has never been seen,
/// or if this state reaches the cube in strictly fewer moves than the best recorded
/// attempt. Safe for concurrent callers via the ConcurrentDictionary CAS loop below.
/// </summary>
/// <param name="searchState">Candidate state; its move count is the comparison key.</param>
public void MaybeAddSearch(SearchState searchState)
{
    // Fast path: first sighting of this cube — record its move count and queue it.
    if (_seenCubes.TryAdd(searchState.Cube, searchState.MovesSoFar.Count))
    {
        DepthCubes.Add(searchState);
        return;
    }
    // Slow path: another entry exists. Lock-free retry loop — re-read the recorded
    // move count and attempt a compare-and-swap update until we either lose (an
    // equal-or-better path is recorded) or win (our shorter path replaces it).
    while (true)
    {
        if (!_seenCubes.TryGetValue(searchState.Cube, out var previousMoves) || previousMoves <= searchState.MovesSoFar.Count)
        {
            return; //Previous solution is as good as or better
        }
        // TryUpdate only succeeds if the stored value still equals previousMoves,
        // guarding against a racing writer between the read and this update.
        if (!_seenCubes.TryUpdate(searchState.Cube, searchState.MovesSoFar.Count, previousMoves))
        {
            continue; //Failure updating - try again
        }
        DepthCubes.Add(searchState);
        return;
    }
}
/// <summary>
/// ToArray must yield the queued items in priority order — largest first, since this
/// queue is max-based.
/// </summary>
public void ToArray()
{
    const int count = 10;
    var queue = new ConcurrentPriorityQueue<int>();
    for (var value = 0; value < count; value++)
    {
        queue.Add(value);
    }

    var snapshot = queue.ToArray();

    // Descending order expected: count-1, count-2, ..., 0.
    for (var index = 0; index < count; index++)
    {
        Assert.AreEqual(count - index - 1, snapshot[index]);
    }
}
/// <summary>
/// Splits the given line length into a 5x5 grid of square sub-areas anchored at the
/// parent area's origin and explores each cell, queuing any cell report with treasure
/// when the cell size is exactly 3.
/// </summary>
/// <param name="area">Parent area whose PosX/PosY anchor the grid.</param>
/// <param name="line">Parent edge length; each cell is line / 5 on a side.</param>
public async Task Xy2(Area area, int line)
{
    var cellSize = line / 5;
    for (int x = 0; x < 5; x++)
    {
        for (int y = 0; y < 5; y++)
        {
            var posX = area.PosX.Value + (x * cellSize);
            var posY = area.PosY.Value + (y * cellSize);
            // IMPROVEMENT: await the exploration directly instead of the original
            // `await ...ContinueWith(result => ... result.Result ...)` — the direct
            // await is the idiomatic form, and a faulted task surfaces its original
            // exception rather than an AggregateException from accessing .Result.
            var report = await this.Explore(new Area(posX, posY, cellSize, cellSize));
            if (cellSize == 3 && report.Amount > 0)
            {
                exploreQueue.Add(report);
            }
        }
    }
}
/// <summary>
/// Multi-threaded stress test: 100 threads enqueue in parallel, then 100 threads
/// dequeue in parallel; asserts the exact element count after each phase and prints
/// wall-clock timings plus per-thread stats.
/// </summary>
public void MultiThreadEnqueue()
{
    const int capacity = 1000000;
    const int threadsCount = 100;
    const int count = capacity / threadsCount;
    var target = new ConcurrentPriorityQueue<DateTime>();
    var execStats = new ExecWithStats[threadsCount];
    var watcher = new Stopwatch();

    // Phase 1: several threads enqueue elements.
    watcher.Start();
    Parallel.For(0, threadsCount, index =>
    {
        execStats[index] = new ExecWithStats(string.Format("Add {0}", count), count, () => target.Add(new DateTime()));
        execStats[index].Exec();
    });
    watcher.Stop();
    Assert.AreEqual(capacity, target.Count);
    Console.WriteLine("{0} threads each enqueue {1} elements. total time: {2}\n", threadsCount, count, watcher.Elapsed);
    ExecWithStats.OutputStatsSummary(execStats);

    // Phase 2: several threads dequeue elements.
    // BUG FIX: the original called watcher.Start(), which RESUMES the stopwatch and
    // folds the enqueue phase into the dequeue timing. Restart() zeroes it first.
    watcher.Restart();
    Parallel.For(0, threadsCount, index =>
    {
        execStats[index] = new ExecWithStats(string.Format("Take {0}", count), count, () => target.Take());
        execStats[index].Exec();
    });
    watcher.Stop();
    Assert.AreEqual(0, target.Count);
    Console.WriteLine("\n{0} threads each dequeue {1} elements. total time: {2}\n", threadsCount, count, watcher.Elapsed);
    ExecWithStats.OutputStatsSummary(execStats);
}
/// <summary>
/// Exercises the basic list surface in one pass — Add, non-generic GetEnumerator,
/// Count, element projection, Contains, and Clear — since below the size limit the
/// reservoir should behave like an ordinary collection.
/// </summary>
public void FunctionsAsNormalList_ForSingleThreadedAccess()
{
    var eventsToAdd = new[]
    {
        Create(0.3f),
        Create(0.2f),
        Create(0.1f),
    };

    // Add
    foreach (var ev in eventsToAdd)
    {
        ConcurrentPriorityQueue.Add(ev);
    }

    // GetEnumerator — go through the non-generic IEnumerable path on purpose.
    var index = 0;
    var enumerator = ((IEnumerable)ConcurrentPriorityQueue).GetEnumerator();
    while (index < eventsToAdd.Length && enumerator.MoveNext())
    {
        Assert.AreEqual(eventsToAdd[index], enumerator.Current);
        index++;
    }
    Assert.AreEqual(eventsToAdd.Length, index);

    // Count
    Assert.AreEqual(ConcurrentPriorityQueue.Count, eventsToAdd.Length);

    // CopyTo (via projection of each node's data)
    var actualData = ConcurrentPriorityQueue.Select(node => node.Data).ToArray();
    var expectedData = eventsToAdd.Select(node => node.Data).ToArray();
    Assert.That(actualData, Is.EquivalentTo(expectedData));

    // Contains
    Assert.True(eventsToAdd.All(ConcurrentPriorityQueue.Contains));

    // Clear
    ConcurrentPriorityQueue.Clear();
    Assert.AreEqual(0, ConcurrentPriorityQueue.Count);
    Assert.False(eventsToAdd.Any(ConcurrentPriorityQueue.Contains));
}
/// <summary>
/// Digs out an area column by column: explores each 1-wide column of the report's
/// area in parallel, then for every column with treasure walks its cells and digs
/// downward (depth 1..10) until the column's remaining amount is exhausted, queuing
/// each recovered treasure with the depth it was found at.
/// </summary>
/// <param name="report">Report whose Area bounds the dig and whose coordinates anchor it.</param>
public async Task GoDig(Report report)
{
    var initX = report.Area.PosX.Value;
    var sizeX = report.Area.SizeX;
    var initY = report.Area.PosY.Value;
    var sizeY = report.Area.SizeY;
    // Kick off one column-wide Explore per x and wait for all of them.
    // NOTE(review): Task.WaitAll blocks synchronously inside an async method —
    // presumably acceptable for this client, but await Task.WhenAll would avoid
    // tying up the thread; confirm against the caller's threading model.
    var list = new List <Task <Report> >();
    for (int x = initX; x < (initX + sizeX); x++) { list.Add(this.Explore(new Area(x, initY, 1, sizeY))); }
    Task.WaitAll(list.ToArray());
    foreach (var result in list)
    {
        if (result.Result.Amount > 0)
        {
            // `left` = treasure remaining in this whole column; stop early once it hits 0.
            var left = result.Result.Amount;
            for (int y = initY; y < (initY + sizeY) && left > 0; y++)
            {
                // Narrow down to the single cell before digging.
                var explore = await this.Explore(new Area(result.Result.Area.PosX, y, 1, 1));
                var depth = 1;
                if (explore.Amount > 0)
                {
                    // Placeholder license; replaced under the semaphore below.
                    var license = new License(0, 0, 0);
                    try
                    {
                        // Throttle concurrent diggers and acquire a dig license.
                        await _digSignal.WaitAsync();
                        license = await UpdateLicense();
                        // Dig this cell one depth level at a time until the cell or
                        // the column is exhausted, or max depth (10) is reached.
                        while (depth <= 10 && left > 0 && explore.Amount > 0)
                        {
                            // Refresh the license once its dig quota is spent.
                            if (license.DigUsed >= license.DigAllowed) { license = await UpdateLicense(); }
                            var result1 = await Dig(new Dig(license.Id.Value, result.Result.Area.PosX, y, depth));
                            license.DigUsed += 1;
                            if (result1 != null)
                            {
                                // Each returned item is one treasure; decrement both
                                // the cell's and the column's remaining counts.
                                explore.Amount -= result1.Count;
                                left -= result1.Count;
                                foreach (var treasure in result1) { treasureQueue.Add(new Gold() { Money = treasure, Depth = depth }); }
                            }
                            depth += 1;
                        }
                    }
                    finally
                    {
                        // Return a license that still has unused digs to the pool,
                        // and always release the dig slot.
                        if (license.DigUsed < license.DigAllowed) { licenses.Add(license); }
                        _digSignal.Release();
                    }
                }
            }
        }
    }
}
/// <summary>
/// Races producers against consumers: odd-indexed threads add elements while
/// even-indexed threads take them, then prints per-thread stats and the final queue
/// state. Early takes on an empty queue are expected to throw; the total exception
/// count corresponds to the number of items left in the queue at the end.
/// </summary>
public void RaceWithStats()
{
    const int capacity = 1000000;
    const int threadsCount = 100;
    const int count = capacity / threadsCount;
    var target = new ConcurrentPriorityQueue <DateTime>();
    var execStats = new List <ExecWithStats>();
    var threadWait = new CountdownEvent(threadsCount);

    for (var i = 0; i < threadsCount; i++)
    {
        var isProducer = i % 2 != 0;
        var exec = isProducer
            ? new ExecWithStats(string.Format("Add {0} elements", count), count, () => target.Add(new DateTime()), threadWait)
            : new ExecWithStats(string.Format("Take {0} elements", count), count, () => target.Take(), threadWait);
        execStats.Add(exec);
        new Thread(() => exec.Exec()).Start();
    }

    // Block until every worker has signaled the countdown.
    threadWait.Wait();

    ExecWithStats.OutputStatsSummary(execStats);
    Console.WriteLine("Queue count:{0}, capacity:{1}", target.Count, target.Capacity);
}
/// <summary>
/// Parallel enqueue/dequeue stress test: 100 threads fill the queue, then 100 threads
/// drain it; asserts exact counts after each phase and reports timings and per-thread
/// execution stats.
/// </summary>
public void MultiThreadEnqueue()
{
    const int capacity = 1000000;
    const int threadsCount = 100;
    const int count = capacity / threadsCount;
    var target = new ConcurrentPriorityQueue <DateTime>();
    var execStats = new ExecWithStats[threadsCount];
    var watcher = new Stopwatch();

    // Phase 1: parallel enqueue.
    watcher.Start();
    Parallel.For(0, threadsCount, index =>
    {
        execStats[index] = new ExecWithStats(string.Format("Add {0}", count), count, () => target.Add(new DateTime()));
        execStats[index].Exec();
    });
    watcher.Stop();
    Assert.AreEqual(capacity, target.Count);
    Console.WriteLine("{0} threads each enqueue {1} elements. total time: {2}\n", threadsCount, count, watcher.Elapsed);
    ExecWithStats.OutputStatsSummary(execStats);

    // Phase 2: parallel dequeue.
    // BUG FIX: Restart() instead of the original Start() — Start() resumes a stopped
    // stopwatch, so the dequeue timing silently included the enqueue phase.
    watcher.Restart();
    Parallel.For(0, threadsCount, index =>
    {
        execStats[index] = new ExecWithStats(string.Format("Take {0}", count), count, () => target.Take());
        execStats[index].Exec();
    });
    watcher.Stop();
    Assert.AreEqual(0, target.Count);
    Console.WriteLine("\n{0} threads each dequeue {1} elements. total time: {2}\n", threadsCount, count, watcher.Elapsed);
    ExecWithStats.OutputStatsSummary(execStats);
}
/// <summary>
/// Concurrency race test. Half the threads (odd indices) enqueue elements while the
/// other half (even indices) dequeue; dequeues on an empty queue are expected to
/// throw, and the total number of such exceptions matches the items left in the
/// queue. Prints a stats summary and the final queue count/capacity.
/// </summary>
public void RaceWithStats()
{
    const int capacity = 1000000;
    const int threadsCount = 100;
    const int count = capacity / threadsCount;
    var target = new ConcurrentPriorityQueue<DateTime>();
    var execStats = new List<ExecWithStats>();
    var threadWait = new CountdownEvent(threadsCount);

    for (var i = 0; i < threadsCount; i++)
    {
        ExecWithStats exec;
        if (i % 2 == 0)
        {
            // Even index: consumer thread.
            exec = new ExecWithStats(string.Format("Take {0} elements", count), count, () => target.Take(), threadWait);
        }
        else
        {
            // Odd index: producer thread.
            exec = new ExecWithStats(string.Format("Add {0} elements", count), count, () => target.Add( new DateTime()), threadWait);
        }
        execStats.Add(exec);
        var worker = new Thread(() => exec.Exec());
        worker.Start();
    }

    // Wait for every thread to count down before reading results.
    threadWait.Wait();

    ExecWithStats.OutputStatsSummary(execStats);
    Console.WriteLine("Queue count:{0}, capacity:{1}", target.Count, target.Capacity);
}
/// <summary>
/// Single-threaded benchmark covering Add (with growth from a tiny initial capacity),
/// GetEnumerator, Take, and re-filling with twice as many elements; prints each phase's
/// elapsed time and asserts exact counts.
/// </summary>
public void SingleThreadTiming()
{
    const int count = 1000000;
    // Capacity of 2 forces the queue to grow during the first Add phase.
    var target = new ConcurrentPriorityQueue<int>(2);
    var watcher = new Stopwatch();

    // Phase 1: add `count` elements.
    watcher.Start();
    for (int i = 0; i < count; i++)
    {
        target.Add(i);
    }
    watcher.Stop();
    Assert.AreEqual(count, target.Count);
    Console.WriteLine("Add {0} elements: {1}", count, watcher.Elapsed);

    // Phase 2: time enumerator construction.
    watcher.Restart();
    // ReSharper disable once UnusedVariable
    var enumerator = target.GetEnumerator();
    watcher.Stop();
    Console.WriteLine("Get enumerator for {0} elements: {1}", count, watcher.Elapsed);

    // Phase 3: drain the queue.
    watcher.Restart();
    for (int i = 0; i < count; i++)
    {
        target.Take();
    }
    watcher.Stop();
    Assert.AreEqual(0, target.Count);
    Console.WriteLine("Take {0} elements: {1}", count, watcher.Elapsed);

    // Phase 4: add 2*count elements.
    // BUG FIX: the original used watcher.Start(), which resumes the stopped stopwatch
    // and folds the Take phase into this measurement; Restart() zeroes it first.
    watcher.Restart();
    for (int i = 0; i < 2 * count; i++)
    {
        target.Add(i);
    }
    watcher.Stop();
    Assert.AreEqual(2 * count, target.Count);
    Console.WriteLine("Add twice the capacity of {0} elements: {1}", count, watcher.Elapsed);
}