static void Main(string[] args) { /* * Subscription Details * Max delivery count: 10 * Lock Duration: 2minutes */ var receiver = CreateMessageReceiver(); var options = new MessageHandlerOptions(ExceptionReceivedHandler) { MaxConcurrentCalls = 5, AutoComplete = false, }; receiver.RegisterMessageHandler(async(message, cancellation) => { try { messageRefs.Add(new Tuple <string, string>(message.MessageId.ToString(), message.SystemProperties.LockToken)); throw new Exception("something bad happens..."); await receiver.CompleteAsync(message.SystemProperties.LockToken); } catch (Exception) { //we do not want to return the message immediately (receiver.AbandonAsync) //let the message lock (2min) expire before this message being re-processed } }, options); while (true) //monitor { Console.WriteLine($"Message read: {messageRefs.Count}"); if (MONITOR_VERBOSE) { Console.WriteLine( String.Join( Environment.NewLine, messageRefs .GroupBy(x => x.Item1) .OrderByDescending(x => x.Count()) .Take(MONITOR_TOP_TAKE) .Select(x => $"Id: {x.Key} | Locks: {x.Count()}"))); } Thread.Sleep(5000); } }
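The snippet above relies on a few members it never declares: the messageRefs bag, the MONITOR_VERBOSE and MONITOR_TOP_TAKE constants, and the ExceptionReceivedHandler passed to MessageHandlerOptions. A minimal sketch of what they might look like, assuming the Microsoft.Azure.ServiceBus client; the names appear in the snippet, but the values and the handler body are assumptions:

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;
using Microsoft.Azure.ServiceBus;

// Class-level members that Main and the monitor loop read (assumed declarations).
static readonly ConcurrentBag<Tuple<string, string>> messageRefs = new ConcurrentBag<Tuple<string, string>>();
const bool MONITOR_VERBOSE = true;  // assumed default: print the per-message breakdown
const int MONITOR_TOP_TAKE = 10;    // assumed default: how many message ids to list

// Invoked by the message pump when receiving or lock renewal fails outside the user callback.
static Task ExceptionReceivedHandler(ExceptionReceivedEventArgs args)
{
    Console.WriteLine($"Message handler exception: {args.Exception.Message}");
    return Task.CompletedTask;
}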
public virtual async Task <List <DiscoveredPlace> > DiscoverOnHere(IEnumerable <KmlPlacemark> placemarks, string language, IDiscoveringProgress progressTracker, CancellationToken cancellationToken) { var result = new ConcurrentBag <DiscoveredPlace>(); await placemarks.ForEachAsync(DEGREE_OF_PARALLELISM_PER_SERVICE, async (placemark) => { if (cancellationToken.IsCancellationRequested) { return; } var venue = await _here.LookupMatchingVenue(placemark, language, cancellationToken); if (venue != null) { result.Add(new DiscoveredPlace { Venue = venue, AttachedToPlacemark = placemark }); _logger.Info($"Found a matching venue on HERE: {venue.Title}"); } progressTracker.ReportItemProcessed(); }); return(result .GroupBy(x => x.Venue.Id) .Select(x => x.FirstOrDefault(dp => dp.IsForPlacemark) ?? x.First()) .ToList()); }
public async Task FlushAsync() { foreach (var group in _items.GroupBy(x => x.At)) { await SaveReportFileAsync(group.Key, group.ToArray()); } }
public void DCAwareRoundRobinPolicyCachesLocalNodes() { var hostList = new List <Host> { TestHelper.CreateHost("0.0.0.1", "dc1"), TestHelper.CreateHost("0.0.0.2", "dc2"), TestHelper.CreateHost("0.0.0.3", "dc1"), TestHelper.CreateHost("0.0.0.4", "dc2"), TestHelper.CreateHost("0.0.0.5", "dc1"), TestHelper.CreateHost("0.0.0.6", "dc2"), TestHelper.CreateHost("0.0.0.7", "dc1"), TestHelper.CreateHost("0.0.0.8", "dc2"), TestHelper.CreateHost("0.0.0.9", "dc1"), TestHelper.CreateHost("0.0.0.10", "dc2") }; const string localDc = "dc1"; var clusterMock = new Mock <ICluster>(); clusterMock .Setup(c => c.AllHosts()) .Returns(hostList); //Initialize the balancing policy var policy = new DCAwareRoundRobinPolicy(localDc, 1); policy.Initialize(clusterMock.Object); var instances = new ConcurrentBag <object>(); Action action = () => instances.Add(policy.GetHosts()); TestHelper.ParallelInvoke(action, 100); Assert.AreEqual(1, instances.GroupBy(i => i.GetHashCode()).Count()); }
public int VMCSScan() { if (Phase >= 2 && OverRidePhase) { return(VMCSs.Count()); } scan.ScanMode = PTType.VMCS; //scan.VMCSScanSet = (from dp in Processes // group dp by dp.CR3Value into CR3Masters // select new KeyValuePair<long, DetectedProc>(CR3Masters.Key, CR3Masters.First())).AsParallel(); scan.VMCSScanSet = Processes.GroupBy(p => p.CR3Value).Select(pg => pg.First()).ToArray(); var rv = scan.Analyze(); foreach (var vm in scan.HVLayer) { VMCSs.Add(vm); } Phase = 3; return(rv); }
private void ScrollAll(string index, int size, int numberOfShards, int numberOfDocuments) { var seenDocuments = 0; var seenSlices = new ConcurrentBag <int>(); Client.ScrollAll <SmallObject>("1m", numberOfShards, s => s .MaxDegreeOfParallelism(numberOfShards / 2) .Search(search => search .Size(size / 2) .Index(index) .MatchAll() ) ) .Wait(TimeSpan.FromMinutes(5), r => { seenSlices.Add(r.Slice); Interlocked.Add(ref seenDocuments, r.SearchResponse.Hits.Count); }); seenDocuments.Should().Be(numberOfDocuments); var groups = seenSlices.GroupBy(s => s).ToList(); groups.Count.Should().Be(numberOfShards); groups.Should().OnlyContain(g => g.Count() > 1); }
public void Commit() { if (disposed) { return; } if (Guid.Empty.Equals(transactionId)) { return; } var instanceEventsChains = unsavedEvents.GroupBy(e => e.ResourceInstanceId) .Select(group => group.OrderBy(e => e.MutationEvent.BaseVersion)).ToList(); lock (locker) { foreach (var instanceEventsChain in instanceEventsChains) { var firstEvent = instanceEventsChain.First(); var resource = new Resource() { TypeId = firstEvent.ResourceTypeId, InstanceId = firstEvent.ResourceInstanceId }; GetEventsCollection(firstEvent.ResourceTypeId).InsertMany(instanceEventsChain); } } Dispose(true); }
static void Main(string[] args) { DateTime time = DateTime.Now; double ProgramStart = time.ToUniversalTime().Subtract(new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc)).TotalMilliseconds; PerformWithoutPartitioner(100); Console.WriteLine("id главного потока " + Thread.CurrentThread.ManagedThreadId); var Threads = bag.GroupBy(p => p.thrID); foreach (var thread in Threads) { Console.WriteLine("Поток: " + thread.Key); var res = bag.Select(p => new { thrID = p.thrID, taskID = p.taskID }).Where(p => p.thrID == thread.Key).Distinct(); foreach (var item in res) { Console.WriteLine(" Задание: " + item.taskID); var minVal = bag.Select(f => new { msec1 = f.msec1, thrID = f.thrID, f.taskID }).Where((f => (f.thrID == thread.Key & f.taskID == item.taskID))).Min(s => s.msec1) - ProgramStart; var maxVal = bag.Select(f => new { msec2 = f.msec2, thrID = f.thrID, f.taskID }).Where((f => (f.thrID == thread.Key & f.taskID == item.taskID))).Max(s => s.msec2) - ProgramStart; Console.WriteLine(" Количество элементов в задании " + bag.Count(p => (p.taskID == item.taskID)) + " Начало " + minVal + " Конец " + maxVal); } } Console.ReadLine(); }
public static List <List <MethodDefinition> > TrimEnd(this IEnumerable <IEnumerable <MethodDefinition> > chains, Func <MethodDefinition, bool> shouldTrim) { var ret = new ConcurrentBag <List <MethodDefinition> >(); Parallel.ForEach(chains, c => { var trimmed = c.ToList(); trimmed.Reverse(); var take = trimmed.Count; var didBreak = false; for (int i = 0; i < trimmed.Count; i++) { if (shouldTrim(trimmed[i])) { take--; } else { didBreak = true; break; } } if (didBreak) { trimmed.Reverse(); ret.Add(trimmed.Take(take).ToList()); } }); return(ret.GroupBy(x => string.Join("", x)).Select(x => x.First()).ToList()); }
private async Task PersistResults(ConcurrentBag <Result> results) { var queryGroups = results.GroupBy(x => x.Query); foreach (var queryGroup in queryGroups) { var query = queryGroup.Key; var resultGroups = queryGroup.ToLookup( x => x.IsMatch, x => x.PackageIdentity); if (resultGroups[true].Any()) { await _queryService.AddQueryAsync(query.Name, query.CursorName); await _queryService.AddMatchesAsync(query.Name, resultGroups[true].ToList()); } if (resultGroups[false].Any()) { await _queryService.RemoveMatchesAsync(query.Name, resultGroups[false].ToList()); } } }
/// <summary>Finds all anagram pairs in the given word list by comparing every word against every other word in parallel.</summary> /// <remarks> /// Writes the total number of pairs found and the elapsed time in milliseconds to the console. /// </remarks> /// <param name="dictionary">Word list to search pairwise (N*N comparisons).</param> /// <param name="prepare">Prepares a word for comparison (for example, a normalized key).</param> /// <param name="compare">Compares a prepared word against a candidate word.</param> /// <returns>Anagrams found, keyed by word.</returns> internal static Dictionary <string, string[]> FindAllParallel <T>(IEnumerable <string> dictionary, Func <string, T> prepare, Func <T, string, bool> compare) { var stopwatch = new Stopwatch(); stopwatch.Start(); var anagrams = new ConcurrentBag <KeyValuePair <string, string> >(); Parallel.ForEach(dictionary, w1 => { var w1Len = w1.Length; T w1p = prepare(w1); Parallel.ForEach(dictionary, w2 => { if ( w1Len == w2.Length && compare(w1p, w2) && 0 != string.Compare(w1, w2, StringComparison.InvariantCultureIgnoreCase) ) { anagrams.Add(new KeyValuePair <string, string>(w1, w2)); } }); }); stopwatch.Stop(); Console.WriteLine(stopwatch.ElapsedMilliseconds); Console.WriteLine(anagrams.Count); var result = anagrams.GroupBy(kv => kv.Key, kv => kv.Value).ToDictionary(i => i.Key, i => i.ToArray()); return(result); }
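One possible prepare/compare pair for calling FindAllParallel is a sorted-character key; this usage example is an assumption for illustration (and presumes the method is in scope), not part of the original code:

using System;
using System.Linq;

// Prepare each word as its lower-cased characters sorted alphabetically; two words are
// anagram candidates when their sorted keys match (the length check is done by the method).
var words = new[] { "listen", "silent", "enlist", "google" };
var anagrams = FindAllParallel(
    words,
    prepare: w => new string(w.ToLowerInvariant().OrderBy(c => c).ToArray()),
    compare: (key, candidate) => key == new string(candidate.ToLowerInvariant().OrderBy(c => c).ToArray()));
foreach (var pair in anagrams)
    Console.WriteLine($"{pair.Key}: {string.Join(", ", pair.Value)}");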
public Dictionary <string, List <int> > Parse(IBuffer <ILineItem> inputbuffer) { bool _exit = false; Dictionary <string, List <int> > parseResult = new Dictionary <string, List <int> >(); while (!_exit) { if (inputbuffer.ParserMayWork) { var linesToProcess = new List <ILineItem>(); while (!inputbuffer.ConBag.IsEmpty && linesToProcess.Count < 30) { ILineItem itm; inputbuffer.ConBag.TryDequeue(out itm); linesToProcess.Add(itm); } var wordsLocations = new ConcurrentBag <ILineItem>(); Parallel.ForEach(linesToProcess, line => { Parse(line, wordsLocations); }); var res = wordsLocations.GroupBy(x => new { word = x.TextLine }).Select(x => new { word = x.Key.word, list = x.Select(c => c.Number) }) .OrderBy(x => x.word); foreach (var re in res) { List <int> positions; if (parseResult.TryGetValue(re.word, out positions)) { parseResult[re.word] = positions.Concat(re.list).ToList(); } else { parseResult.Add(re.word, re.list.ToList()); } } } if (inputbuffer.ConBag.IsEmpty && inputbuffer.LoadConpleted) { _exit = true; inputbuffer.ParserMayWork = false; } else if (inputbuffer.ConBag.IsEmpty) { inputbuffer.ReaderMayWork = true; inputbuffer.ParserMayWork = false; } if (!_exit) { Thread.Sleep(50); } } return(parseResult); }
public void SimpleTimer() { var singleValues = new ConcurrentBag <SingleMetricHolder>(); var summaryValues = new ConcurrentBag <SummaryMetricHolder>(); using (var factory = new MetricFactory()) { factory.AddCallback("callback1", 1, (object userState, string name, string[] labels, double?value, CancellationToken cancellationToken) => { singleValues.Add(new SingleMetricHolder { SampleTime = DateTime.UtcNow, Labels = labels, Name = name, UserState = userState, Value = value }); return(Task.CompletedTask); }, (object userState, string name, string[] labels, int?count, double?minimum, double?maximum, double?mean, double?standardDeviation, double?p50, double?p90, double?p95, double?p98, double?p99, CancellationToken cancellationToken) => { summaryValues.Add(new SummaryMetricHolder { SampleTime = DateTime.UtcNow, Labels = labels, Name = name, UserState = userState, Count = count, Minimum = minimum, Maximum = maximum, Mean = mean, StandardDeviation = standardDeviation, p50 = p50, p90 = p90, p95 = p95, p98 = p98, p99 = p99 }); return(Task.CompletedTask); }, "test"); // Added to force aggregated to be added while allowing for callback tracking factory.AddDebug("debug", 1, false); var summary = factory.CreateSummary("Metric1", "Test metric for #1", 5, true, new string[] { "label1", "label2" }); for (var index = 0; index < 1000; index++) { using (summary.Time()) { Task.Delay(10).GetAwaiter().GetResult(); } } Task.Delay(1000).GetAwaiter().GetResult(); } var groupings = summaryValues.GroupBy(item => item.Name).ToArray(); Assert.IsTrue(groupings.Length == 1, "Metric count not expected"); var start = 0; foreach (var item in groupings) { var itemList = item.OrderBy(item => item.SampleTime).ToArray(); var average = itemList.Average(item => item.p50); Assert.IsTrue(itemList.Length > 5, "More than 5 items were expected"); // Assert.IsTrue(average >= 10 && average <= 20, "Average was not expected when more than 5 values in frame"); start += itemList.Length; } Assert.IsTrue(singleValues.IsEmpty, "single value count not expected"); }
private async Task StartSimulation() { if (Simulator.SongData == null) { MessageBox.Show("楽曲を選んでください"); return; } if (Simulator.Unit == null) { MessageBox.Show("ユニットを選んでください"); return; } if (Runs < 1 || Runs > 1000000) { MessageBox.Show("試行回数は1から1,000,000までである必要があります"); return; } Note[] pattern = null; if (UtilizeActualPattern) { pattern = await new PatternProvider().GetPattern(Simulator.Song, Simulator.SongData.Difficulty, Simulator.SongData.Notes); if (pattern == null) { MessageBox.Show($"{Simulator.Song.Title}({Simulator.SongData.Difficulty})の譜面データが見つかりませんでした。"); return; } } SimulationCompleted = false; var results = new ConcurrentBag <SimulationResult>(); await Task.Run(() => Parallel.For(1, Runs + 1, i => results.Add(Simulator.StartSimulation(RandomFactory.Create(), i, pattern == null ? null : new Queue <Note>(pattern))))); MaxScore = results.Max(x => x.Score); MaxScorePerNote = results.Max(x => x.ScorePerNote); MinScore = results.Min(x => x.Score); MinScorePerNote = results.Min(x => x.ScorePerNote); AverageScore = (int)results.Average(x => x.Score); AverageScorePerNote = (int)results.Average(x => x.ScorePerNote); ScoreDistribution = results.GroupBy(x => (int)Math.Floor(x.Score / 10000.0)).OrderBy(x => x.Key).ToDictionary(x => x.Key, x => (double)x.Count() / results.Count); StandardDeviation = Math.Round(Math.Sqrt(results.Sum(x => Math.Pow(x.Score - AverageScore, 2))) / results.Count); int idx = 1; var duration = results.First().Duration; ActualTriggerRatio = Simulator.Unit.Slots.ToDictionary(s => $"スロット{idx++}", s => s == null ? 0 : results.SelectMany(x => x.TriggeredSkills).Where(x => x.Who == s).Count() / (results.Count * Math.Floor((duration - 1.0) / s.Skill.Interval))); SimulationResults = results.OrderBy(x => x.Id).Take(100).ToList(); SelectedResult = SimulationResults[0]; SimulationCompleted = true; }
public void SimpleCountSetMultiProvider() { var singleValues = new ConcurrentBag <SingleMetricHolder>(); var summaryValues = new ConcurrentBag <SummaryMetricHolder>(); var testValue = 123.5678; using (var factory = new MetricFactory()) { factory.AddCallback("callback1", 1, (object userState, string name, string[] labels, double?value, CancellationToken cancellationToken) => { singleValues.Add(new SingleMetricHolder { SampleTime = DateTime.UtcNow, Labels = labels, Name = name, UserState = userState, Value = value }); return(Task.CompletedTask); }, (object userState, string name, string[] labels, int?count, double?minimum, double?maximum, double?mean, double?standardDeviation, double?p50, double?p90, double?p95, double?p98, double?p99, CancellationToken cancellationToken) => { summaryValues.Add(new SummaryMetricHolder { SampleTime = DateTime.UtcNow, Labels = labels, Name = name, UserState = userState, Count = count, Minimum = minimum, Maximum = maximum, Mean = mean, StandardDeviation = standardDeviation, p50 = p50, p90 = p90, p95 = p95, p98 = p98, p99 = p99 }); return(Task.CompletedTask); }, "test"); factory.AddProvider("debug", new DebugMetricsProvider(1, true)); factory.AddProvider("trace", new TraceMetricsProvider(1, true)); factory.AddProvider("console", new ConsoleMetricsProvider(1, true)); var counter = factory.CreateCounter("Metric1", "Test metric for #1", true, new string[] { "label1", "label2" }); for (var index = 0; index < 500; index++) { counter.SetTo(testValue); if (index < 499) { Task.Delay(10).GetAwaiter().GetResult(); } } Task.Delay(1000).GetAwaiter().GetResult(); } var groupings = singleValues.GroupBy(item => item.Name).ToArray(); Assert.IsTrue(groupings.Length == 1, "Metric count not expected"); foreach (var item in groupings) { var itemList = item.OrderBy(item => item.SampleTime).ToArray(); var maxValue = itemList.Max(item => item.Value); Assert.IsTrue(itemList.Length >= 6, $"Single value count within groupings not within expected tolerance (value { itemList.Length })"); Assert.IsTrue(maxValue.HasValue, "Max value should not be null"); Assert.IsTrue(maxValue == testValue, "Maximum value not expected"); } Assert.IsTrue(summaryValues.Count == 0, "Summary value count not expected"); }
public async Task <IEnumerable <IInspectionResult> > FindIssuesAsync(RubberduckParserState state, CancellationToken token) { if (state == null || !state.AllUserDeclarations.Any()) { return(new IInspectionResult[] { }); } state.OnStatusMessageUpdate(RubberduckUI.CodeInspections_Inspecting); var config = _configService.LoadConfiguration(); UpdateInspectionSeverity(config); var allIssues = new ConcurrentBag <IInspectionResult>(); // Prepare ParseTreeWalker based inspections var parseTreeWalkResults = GetParseTreeResults(config, state); foreach (var parseTreeInspection in _inspections.OfType <IParseTreeInspection>().Where(inspection => inspection.Severity != CodeInspectionSeverity.DoNotShow)) { parseTreeInspection.SetResults(parseTreeWalkResults); } var inspections = _inspections.Where(inspection => inspection.Severity != CodeInspectionSeverity.DoNotShow) .Select(inspection => Task.Run(() => { token.ThrowIfCancellationRequested(); var inspectionResults = inspection.GetInspectionResults(); foreach (var inspectionResult in inspectionResults) { allIssues.Add(inspectionResult); } }, token)).ToList(); try { await Task.WhenAll(inspections); } catch (Exception e) { LogManager.GetCurrentClassLogger().Error(e); } var issuesByType = allIssues.GroupBy(issue => issue.GetType()) .ToDictionary(grouping => grouping.Key, grouping => grouping.ToList()); var results = issuesByType.Where(kv => kv.Value.Count <= AGGREGATION_THRESHOLD) .SelectMany(kv => kv.Value) .Union(issuesByType.Where(kv => kv.Value.Count > AGGREGATION_THRESHOLD) .Select(kv => new AggregateInspectionResult(kv.Value.OrderBy(i => i.QualifiedSelection).First(), kv.Value.Count))) .ToList(); state.OnStatusMessageUpdate(RubberduckUI.ResourceManager.GetString("ParserState_" + state.Status, CultureInfo.CurrentUICulture)); // should be "Ready" return(results); }
public void FactoryCreateCountIteration() { var singleValues = new ConcurrentBag <SingleMetricHolder>(); var summaryValues = new ConcurrentBag <SummaryMetricHolder>(); using (var factory = new MetricFactory()) { factory.AddCallback("callback1", 1, (object userState, string name, string[] labels, double?value, CancellationToken cancellationToken) => { singleValues.Add(new SingleMetricHolder { SampleTime = DateTime.UtcNow, Labels = labels, Name = name, UserState = userState, Value = value }); return(Task.CompletedTask); }, (object userState, string name, string[] labels, int?count, double?minimum, double?maximum, double?mean, double?standardDeviation, double?p50, double?p90, double?p95, double?p98, double?p99, CancellationToken cancellationToken) => { summaryValues.Add(new SummaryMetricHolder { SampleTime = DateTime.UtcNow, Labels = labels, Name = name, UserState = userState, Count = count, Minimum = minimum, Maximum = maximum, Mean = mean, StandardDeviation = standardDeviation, p50 = p50, p90 = p90, p95 = p95, p98 = p98, p99 = p99 }); return(Task.CompletedTask); }, "test"); factory.AddDebug("debug", 1, true); factory.AddTrace("trace", 1, true); factory.AddConsole("console", 1, true); var summary = factory.CreateSummary("Metric1", "Test metric for #1", 5, true, new string[] { "label1", "label2" }); for (var index = 0; index < 1500; index++) { summary.Observe(_doubleValues[index % _doubleValues.Length]); if (index < 1499) { Task.Delay(10).GetAwaiter().GetResult(); } } Task.Delay(1000).GetAwaiter().GetResult(); } var groupings = summaryValues.GroupBy(item => item.Name).ToArray(); Assert.IsTrue(groupings.Length == 1, "Metric count not expected"); var start = 0; foreach (var item in groupings) { var itemList = item.OrderBy(item => item.SampleTime).ToArray(); start += itemList.Length; } Assert.IsTrue(singleValues.IsEmpty, "single value count not expected"); }
public async ValueTask <(Status, IDictionary <FileType, IList <FileToken> >)> TokenizeAsync(Options options, DocFxConfig config) { var dirUri = options.DirectoryUri; var dir = options.ExplicitScope ? options.Directory : options.DocFxJsonDirectory; if (dir is null) { return(Status.Error, null); } var count = 0; Spinner.Start("Gathering files...", spinner => { spinner.Color = ConsoleColor.Blue; count = dir.EnumerateDirectories() .AsParallel() .SelectMany(d => d.EnumerateFiles("*.*", SearchOption.AllDirectories)) .Count(); spinner.Succeed(); }, Patterns.Arc); var tokens = new ConcurrentBag <FileToken>(); var destination = config.Build.Dest; using (var progressBar = new ProgressBar(count, "Tokenizing files...")) { var searchPattern = options.ReportFreshness ? "*.md" : "*.*"; await dir.EnumerateFiles(searchPattern, SearchOption.AllDirectories) .ForEachAsync( Environment.ProcessorCount, async fileInfo => { FileToken fileToken = fileInfo; await fileToken.InitializeAsync(options); var relyingOnCache = options.EnableCaching ? " (relying on cache)" : string.Empty; progressBar.Tick($"Materialzing file tokens{relyingOnCache}...{dirUri.ToRelativePath(fileToken.FilePath)}"); tokens.Add(fileToken); }); progressBar.Tick("Materialization complete..."); } var cachedCount = FileTokenCacheUtility.CachedCount; if (cachedCount > 0) { ConsoleColor.Green.WriteLine($"Materialized {cachedCount:#,#} file tokens from the local cache rather than re-reading and parsing them."); } return(Status.Success, tokens.GroupBy(t => t.FileType).ToDictionary(grp => grp.Key, grp => grp.ToList() as IList <FileToken>)); }
public void AssertCreateView(IConnectionPool pool) { /** * Here we have setup a static connection pool seeded with 10 nodes. We force randomization OnStartup to false * so that we can test the nodes being returned are int the order we expect them to. * So what order we expect? Imagine the following: * * Thread A calls GetNext first without a local cursor and takes the current from the internal global cursor which is 0. * Thread B calls GetNext() second without a local cursor and therefor starts at 1. * After this each thread should walk the nodes in successive order using their local cursor * e.g Thread A might get 0,1,2,3,5 and thread B will get 1,2,3,4,0. */ var startingPositions = Enumerable.Range(0, NumberOfNodes) .Select(i => pool.CreateView().First()) .Select(n => n.Uri.Port) .ToList(); var expectedOrder = Enumerable.Range(9200, NumberOfNodes); startingPositions.Should().ContainInOrder(expectedOrder); /** * What the above code just proved is that each call to GetNext(null) gets assigned the next available node. * * Lets up the ante: * - call get next over `NumberOfNodes * 2` threads * - on each thread call getnext `NumberOfNodes * 10` times using a local cursor. * We'll validate that each thread sees all the nodes and they they wrap over e.g after node 9209 * comes 9200 again */ var threadedStartPositions = new ConcurrentBag <int>(); var threads = Enumerable.Range(0, 20) .Select(i => CreateThreadCallingGetNext(pool, threadedStartPositions)) .ToList(); foreach (var t in threads) { t.Start(); } foreach (var t in threads) { t.Join(); } /** * Each thread reported the first node it started off lets make sure we see each node twice as the first node * because we started `NumberOfNodes * 2` threads */ var grouped = threadedStartPositions.GroupBy(p => p).ToList(); grouped.Count().Should().Be(NumberOfNodes); grouped.Select(p => p.Count()).Should().OnlyContain(p => p == 2); }
static Edges() { Console.WriteLine("Starting to load Edges"); var edgeLines = File.ReadAllLines(AppSettings.Current.EdgesFile); ConcurrentBag <Edge> edges = new ConcurrentBag <Edge>(); Parallel.ForEach(edgeLines, edgeLine => { var edge = JsonConvert.DeserializeObject <Edge>(edgeLine); edges.Add(edge); }); foreach (var item in edges.GroupBy(e => e.Source, (a, b) => new{ Source = a, Edges = b })) { _sourceDictionary.Add(item.Source, item.Edges); } foreach (var item in edges.GroupBy(e => e.Target, (a, b) => new{ Target = a, Edges = b })) { _targedDictionary.Add(item.Target, item.Edges); } Console.WriteLine("Edges loaded"); }
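Both loops above use the GroupBy overload that takes a result selector in addition to the key selector. A small stand-alone illustration of that overload, using value tuples as a stand-in for the deserialized Edge type:

using System;
using System.Collections.Generic;
using System.Linq;

// Group edges by their source node and project each group into a result in one call.
var edges = new List<(int Source, int Target)> { (1, 2), (1, 3), (2, 3) };
var bySource = edges
    .GroupBy(e => e.Source, (source, grouped) => new { Source = source, Edges = grouped })
    .ToDictionary(x => x.Source, x => x.Edges.ToList());
Console.WriteLine(bySource[1].Count); // 2: the two edges leaving node 1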
public void AssertCreateView(IConnectionPool pool) { /** So what order do we expect? Imagine the following: * * . Thread A calls `CreateView()` first without a local cursor and takes the current value from the internal global cursor, which is `0` * . Thread B calls `CreateView()` second without a local cursor and therefore starts at `1` * . After this, each thread should walk the nodes in successive order using their local cursor. For example, Thread A might * get 0,1,2,3,5 and thread B will get 1,2,3,4,0. */ var startingPositions = Enumerable.Range(0, NumberOfNodes) .Select(i => pool.CreateView().First()) .Select(n => n.Uri.Port) .ToList(); var expectedOrder = Enumerable.Range(9200, NumberOfNodes); startingPositions.Should().ContainInOrder(expectedOrder); /** * What the above code just proved is that each call to `CreateView()` gets assigned the next available node. * * Lets up the ante: * * . Call `CreateView()` over `NumberOfNodes * 2` threads * . On each thread, call `CreateView()` `NumberOfNodes * 10` times using a local cursor. * * We'll validate that each thread sees all the nodes and that they wrap over, for example, after node 9209 * comes 9200 again */ var threadedStartPositions = new ConcurrentBag <int>(); var threads = Enumerable.Range(0, 20) .Select(i => CreateThreadCallingCreateView(pool, threadedStartPositions)) .ToList(); foreach (var t in threads) { t.Start(); } foreach (var t in threads) { t.Join(); } /** * Each thread reported the first node it started off. Let's make sure we see each node twice * because we started `NumberOfNodes * 2` threads */ var grouped = threadedStartPositions.GroupBy(p => p).ToList(); grouped.Count.Should().Be(NumberOfNodes); grouped.Select(p => p.Count()).Should().OnlyContain(p => p == 2); }
public void SimpleCountIteration() { var singleValues = new ConcurrentBag <SingleMetricHolder>(); var summaryValues = new ConcurrentBag <SummaryMetricHolder>(); using (var factory = new MetricFactory()) { factory.AddCallback("callback1", 1, (object userState, string name, string[] labels, double?value, CancellationToken cancellationToken) => { singleValues.Add(new SingleMetricHolder { SampleTime = DateTime.UtcNow, Labels = labels, Name = name, UserState = userState, Value = value }); return(Task.CompletedTask); }, (object userState, string name, string[] labels, int?count, double?minimum, double?maximum, double?mean, double?standardDeviation, double?p50, double?p90, double?p95, double?p98, double?p99, CancellationToken cancellationToken) => { summaryValues.Add(new SummaryMetricHolder { SampleTime = DateTime.UtcNow, Labels = labels, Name = name, UserState = userState, Count = count, Minimum = minimum, Maximum = maximum, Mean = mean, StandardDeviation = standardDeviation, p50 = p50, p90 = p90, p95 = p95, p98 = p98, p99 = p99 }); return(Task.CompletedTask); }, "test"); var pulse = factory.CreatePulse("Metric1", "Test metric for #1", true, new string[] { "label1", "label2" }); for (var index = 0; index < 500; index++) { pulse.Observe(); if (index < 499) { Task.Delay(10).GetAwaiter().GetResult(); } } Task.Delay(1000).GetAwaiter().GetResult(); } var groupings = singleValues.GroupBy(item => item.Name).ToArray(); Assert.IsTrue(groupings.Length == 1, "Metric count not expected"); foreach (var item in groupings) { var itemList = item.OrderBy(item => item.SampleTime).ToArray(); var mean = itemList.Average(item => item.Value ?? 0); Assert.IsTrue(itemList.Length >= 6, $"Single value count within groupings not within expected tolerance (value { itemList.Length })"); Assert.IsTrue(mean >= 50, $"Mean value count not in tolerance (value { mean })"); } Assert.IsTrue(summaryValues.Count == 0, "Summary value count not expected"); }
/// <summary> /// Makes a call to a group of phone numbers. /// </summary> /// <param name="sentFrom">phone number the calls are placed from</param> /// <param name="receipientPhoneNbrs">list of phone numbers to call</param> /// <param name="contentId">database ID of the message to send</param> /// <returns>a summary of the calls made, grouped by status</returns> public string GroupCall(string sentFrom, IEnumerable<string> receipientPhoneNbrs, string contentId) { var statuses = new ConcurrentBag<string>(); Parallel.ForEach(receipientPhoneNbrs, (phone) => { var result = CallPhoneNumber(sentFrom, phone, contentId); statuses.Add(result); }); var finalResult = string.Join(Environment.NewLine, statuses.GroupBy(c => c) .Select(c => $"{c.Count()} were sent with a status of {c.Key}")); return finalResult; }
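A hypothetical call to GroupCall; the phone numbers, content id, and the notifier instance are illustrative only:

// Assuming 'notifier' is an instance of the class that defines GroupCall and CallPhoneNumber.
var summary = notifier.GroupCall(
    sentFrom: "+15550100",
    receipientPhoneNbrs: new[] { "+15550101", "+15550102", "+15550103" },
    contentId: "42");
Console.WriteLine(summary);
// Example output (depends on what CallPhoneNumber returns): "3 were sent with a status of Queued"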
public static async Task <List <UpdateTask> > GetUpdates(CancellationToken cancellationToken, IUpdateSource[] updateSources, string[] filterByGuids = null) { var results = new ConcurrentBag <UpdateTask>(); // First start all of the sources, then wait until they all finish var concurrentTasks = updateSources.Select(source => RetryHelper.RetryOnExceptionAsync( async() => { foreach (var task in await source.GetUpdateItems(cancellationToken)) { // todo move further inside or decouple getting update tasks and actually processing them if (filterByGuids != null && filterByGuids.Length > 0 && !filterByGuids.Contains(task.Info.GUID)) { continue; } task.Items.RemoveAll(x => x.UpToDate); results.Add(task); } }, 3, TimeSpan.FromSeconds(3), cancellationToken)).ToList(); foreach (var task in concurrentTasks) { try { await task; } catch (Exception e) { Console.WriteLine("[ERROR] Unexpected error while collecting updates from one of the sources, skipping the source. " + e); } } cancellationToken.ThrowIfCancellationRequested(); var filteredTasks = new List <UpdateTask>(); foreach (var modGroup in results.GroupBy(x => x.Info.GUID)) { var ordered = modGroup.OrderByDescending(x => x.ModifiedTime ?? DateTime.MinValue).ToList(); if (ordered.Count > 1) { ordered[0].AlternativeSources.AddRange(ordered.Skip(1)); Console.WriteLine($"Found {ordered.Count} entries for mod GUID {modGroup.Key} - latest is from {ordered[0].Info.Origin}"); } filteredTasks.Add(ordered[0]); } return(filteredTasks); }
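RetryHelper.RetryOnExceptionAsync is referenced above but not shown; a plausible shape, inferred from the call site (three attempts, a fixed delay, and a cancellation token). This is a sketch, not the project's actual helper:

using System;
using System.Threading;
using System.Threading.Tasks;

// Retry the delegate a fixed number of times, waiting between attempts,
// and let the exception propagate once the attempts are exhausted.
internal static class RetryHelper
{
    public static async Task RetryOnExceptionAsync(Func<Task> action, int maxAttempts, TimeSpan delay, CancellationToken token)
    {
        for (var attempt = 1; ; attempt++)
        {
            try { await action(); return; }
            catch when (attempt < maxAttempts)
            {
                await Task.Delay(delay, token);
            }
        }
    }
}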
private void Count() { var codeGroup = _counter.GroupBy(x => x.Response.StatusCode); var codeGroupList = new List <string>(); foreach (var item in codeGroup) { codeGroupList.Add(string.Format("[{0}]:{1}", item.Key, item.Count())); } _console("HTTP status code summary: " + string.Join(",", codeGroupList)); var total = _counter.Count; var avgElapsed = _counter.Average(x => x.MilliSeconds); _console(string.Format("Total requests: {0}, average elapsed: {1}ms", total, avgElapsed.ToString("0.000"))); }
public IObservable <IResult> GetResults(IObservable <IPEndPoint> epsObs, UdpClient socket, QuerySettings settings) { if (settings == null) { settings = new QuerySettings(); } var mapping = new ConcurrentBag <EpState>(); var obs = Observable.Create <IResult>(o => { var count = 0; var count2 = 0; var count3 = 0; var dsp = new CompositeDisposable(); //var scheduler = new EventLoopScheduler(); //dsp.Add(scheduler); var listener = CreateListener(socket) .Publish(); dsp.Add(listener.Connect()); var sender = epsObs //.ObserveOn(scheduler) .Select(x => new EpState2(x, settings, listener.Where(r => r.RemoteEndPoint.Equals(x)) .Select(r => r.Buffer) .ObserveOn(TaskPoolScheduler.Default), d => socket.SendAsync(d, d.Length, x))) .Do(x => mapping.Add(x)) .Select(x => x.Results) //.Select(x => Intercept(_ => Console.WriteLine($"Sending initial packet: {Interlocked.Increment(ref count)}"), x)) //.ObserveOn(scheduler) .Merge(settings.DegreeOfParallelism); // TODO: Instead try to limit sends at a time? hm //.Do(x => Console.WriteLine($"Finished Processing: {Interlocked.Increment(ref count3)}. {x.Success} {x.Ping}ms")); var sub = sender .Subscribe(o.OnNext, o.OnError, () => { Console.WriteLine($"" + $"Stats: total count: {mapping.Count}\n" + $"{string.Join("\n", mapping.GroupBy(x => x.State).OrderByDescending(x => x.Count()).Select(x => x.Key + " " + x.Count()))}"); o.OnCompleted(); }); dsp.Add(sub); return(dsp); }); // http://stackoverflow.com/questions/12270642/reactive-extension-onnext return(obs.Synchronize()); // Or use lock on the onNext call.. }
public PublishedProviderFundingCount GetTotal() => new PublishedProviderFundingCount { Count = _fundings.Count(), TotalFunding = _fundings.Sum(_ => _.TotalFunding.GetValueOrDefault()), ProviderTypes = _fundings.Select(_ => _.ProviderTypeSubType).Distinct().ToArray(), FundingStreamsFundings = _fundings.GroupBy(fundingStream => fundingStream.FundingStreamId) .Select(fundingStream => new PublishedProivderFundingStreamFunding { FundingStreamId = fundingStream.Key, TotalFunding = fundingStream.Sum(_ => _.TotalFunding.GetValueOrDefault()) }) .ToArray(), LocalAuthorities = _fundings.Select(_ => _.LaCode).Distinct().ToArray() };
public void ExceptionEventsRaised() { var actual = _batchExceptionEvents.GroupBy(e => e.Item1).ToDictionary(g => g.Key, g => g.Select(i => i.Item2).ToList()); var start = _setup.StartItems; Assert.That(actual.Any(kvp => kvp.Value.Count == start.Count(ThrowWhenSquaring))); var squared = start.FindAll(i => !ThrowWhenSquaring(i)).Select(i => i * i).ToList(); Assert.That(actual.Any(kvp => kvp.Value.Count == squared.Count(ThrowWhenConvertingToString))); var convertedToString = squared.FindAll(i => !ThrowWhenConvertingToString(i)).Select(i => i.ToString()).ToList(); Assert.That(actual.Any(kvp => kvp.Value.Count == convertedToString.Count(ThrowWhenPuttingInResultsBag))); }
private void AssertNoViolationsOfUnsupportedTermInRazorFiles(string unsupportedTerm) { var violations = new ConcurrentBag <Tuple <int, string, string> >(); var razorFiles = GetRazorFiles(); // Catch working directory issues. Assert.NotEmpty(razorFiles); Parallel.ForEach( razorFiles, file => { var lineNumber = 0; foreach (var line in File.ReadLines(file)) { lineNumber++; if (line.Contains(unsupportedTerm)) { violations.Add(Tuple.Create(lineNumber, file, line.TrimStart(' ').TrimEnd(' '))); } } }); if (violations.Any()) { _testOutputHelper.WriteLine( $"Avoid usage of '{unsupportedTerm}' in .cshtml files! Consider using a method from 'UrlExtensions.cs' to ensure usage of configured 'SiteRoot' setting."); // Pretty-print any violations: group by file foreach (var violationsInFile in violations.GroupBy(t => t.Item2).OrderBy(g => g.Key)) { _testOutputHelper.WriteLine($"Violation(s) in file '{violationsInFile.Key}':"); // Order by line number foreach (var violation in violationsInFile.OrderBy(v => v.Item1)) { _testOutputHelper.WriteLine( $" Line #{violation.Item1}: \"{violation.Item3}\""); } } // Fail the test Assert.Empty(violations); } }
public IEnumerable<ExternalIssueDetails> GetDetails(IEnumerable<Issue> issues) { int ignoredOut; var tfsIssues = issues.Where(i => IsTfsUrl(i.Url) && int.TryParse(i.Id, out ignoredOut)).ToList(); var queriedIssues = new ConcurrentBag<ExternalIssueDetails>(); Parallel.ForEach(tfsIssues, issue => queriedIssues.Add(GetDetails(issue))); return queriedIssues .GroupBy(issue => issue.Id) .Select(g => { var issue = g.First(); issue.SubIssues = g.SelectMany(i => i.SubIssues).ToList(); return issue; }) .ToList(); }
public void AssertCreateView(IConnectionPool pool) { /** * Here we have setup a static connection pool seeded with 10 nodes. We force randomization OnStartup to false * so that we can test the nodes being returned are int the order we expect them to. * So what order we expect? Imagine the following: * * Thread A calls GetNext first without a local cursor and takes the current from the internal global cursor which is 0. * Thread B calls GetNext() second without a local cursor and therefor starts at 1. * After this each thread should walk the nodes in successive order using their local cursor * e.g Thread A might get 0,1,2,3,5 and thread B will get 1,2,3,4,0. */ var startingPositions = Enumerable.Range(0, NumberOfNodes) .Select(i => pool.CreateView().First()) .Select(n => n.Uri.Port) .ToList(); var expectedOrder = Enumerable.Range(9200, NumberOfNodes); startingPositions.Should().ContainInOrder(expectedOrder); /** * What the above code just proved is that each call to GetNext(null) gets assigned the next available node. * * Lets up the ante: * - call get next over `NumberOfNodes * 2` threads * - on each thread call getnext `NumberOfNodes * 10` times using a local cursor. * We'll validate that each thread sees all the nodes and they they wrap over e.g after node 9209 * comes 9200 again */ var threadedStartPositions = new ConcurrentBag<int>(); var threads = Enumerable.Range(0, 20) .Select(i => CreateThreadCallingGetNext(pool, threadedStartPositions)) .ToList(); foreach (var t in threads) t.Start(); foreach (var t in threads) t.Join(); /** * Each thread reported the first node it started off lets make sure we see each node twice as the first node * because we started `NumberOfNodes * 2` threads */ var grouped = threadedStartPositions.GroupBy(p => p).ToList(); grouped.Count().Should().Be(NumberOfNodes); grouped.Select(p => p.Count()).Should().OnlyContain(p => p == 2); }
public static void PscFailedAndSucceededBagsPerFlight(StatisticsData data, ConcurrentBag <Baggage> baggages) { var bagsGroupedPerFlight = baggages.GroupBy(b => b.Flight.FlightNumber); if (data.PscSucceededBagsPerFlight != null && data.PscSucceededBagsPerFlight.Count() > 0) { data.PscSucceededBagsPerFlight.Clear(); data.PscFailedBagsPerFlight.Clear(); } foreach (var group in bagsGroupedPerFlight) { var succeededBagsPerFlight = group.Where(b => b.Logs.Any(log => log.Description.Contains(LoggingConstants.PrimarySecurityCheckSucceeded))).ToList(); var failedBagsPerFlight = group.Where(b => b.Logs.Any(log => log.Description.Contains(LoggingConstants.PrimarySecurityCheckFailed))).ToList(); data.PscSucceededBagsPerFlight.Add(group.Key, succeededBagsPerFlight.Count()); data.PscFailedBagsPerFlight.Add(group.Key, failedBagsPerFlight.Count()); } }
public List <Tuple <CypherTypeItem, List <CypherProperty> > > Set() { //set the properties var returnValue = _properties.GroupBy(x => x.Item1) .Select(x => new Tuple <CypherTypeItem, List <CypherProperty> >(new CypherTypeItem() { AttributeType = x.Key, Type = typeof(T) }, x.Select(y => y.Item2).Distinct(new CypherPropertyComparer()).ToList())).ToList(); returnValue.ForEach(x => CypherExtension.AddConfigProperties(x.Item1, x.Item2)); //set the label if (!string.IsNullOrWhiteSpace(_label)) { CypherExtension.SetConfigLabel(typeof(T), _label); } return(returnValue); }
private async Task StartSimulation() { if(Simulator.SongData==null) { MessageBox.Show("楽曲を選んでください"); return; } if (Simulator.Unit == null) { MessageBox.Show("ユニットを選んでください"); return; } if (Runs < 1 || Runs > 1000000) { MessageBox.Show("試行回数は1から1,000,000までである必要があります"); return; } Note[] pattern = null; if (UtilizeActualPattern) { pattern = await new PatternProvider().GetPattern(Simulator.Song, Simulator.SongData.Difficulty, Simulator.SongData.Notes); if (pattern == null) { MessageBox.Show($"{Simulator.Song.Title}({Simulator.SongData.Difficulty})の譜面データが見つかりませんでした。"); return; } } SimulationCompleted = false; var results = new ConcurrentBag<SimulationResult>(); await Task.Run(() => Parallel.For(1, Runs+1, i => results.Add(Simulator.StartSimulation(RandomFactory.Create(), i, pattern == null ? null : new Queue<Note>(pattern))))); MaxScore = results.Max(x=>x.Score); MaxScorePerNote = results.Max(x => x.ScorePerNote); MinScore = results.Min(x => x.Score); MinScorePerNote = results.Min(x => x.ScorePerNote); AverageScore = (int)results.Average(x => x.Score); AverageScorePerNote = (int)results.Average(x => x.ScorePerNote); ScoreDistribution = results.GroupBy(x => (int)Math.Floor(x.Score / 10000.0)).OrderBy(x => x.Key).ToDictionary(x => x.Key, x => (double)x.Count() / results.Count); StandardDeviation = Math.Round(Math.Sqrt(results.Sum(x => Math.Pow(x.Score - AverageScore, 2))) / results.Count); int idx = 1; var duration = results.First().Duration; ActualTriggerRatio = Simulator.Unit.Slots.ToDictionary(s => $"スロット{idx++}", s => s == null ? 0 : results.SelectMany(x => x.TriggeredSkills).Where(x => x.Who == s).Count() / (results.Count * Math.Floor((duration - 1.0) / s.Skill.Interval))); SimulationResults = results.OrderBy(x => x.Id).Take(100).ToList(); SelectedResult = SimulationResults[0]; SimulationCompleted = true; }
/// <summary> /// Applies the loaded templates to <paramref name="templateData"/>. /// </summary> /// <param name="templateData"> /// Instance of <see cref="TemplateData"/> containing the various input data needed. /// </param> public virtual TemplateOutput Generate(TemplateData templateData) { Stopwatch timer = Stopwatch.StartNew(); ParsedTemplate tmpl = this.PrepareTemplate(templateData); // collect all work that has to be done List<UnitOfWork> work = new List<UnitOfWork>(); // resource work units work.AddRange(this.DiscoverWork(templateData, tmpl.Resources)); // stylesheet work units { List<StylesheetApplication> stylesheetApplications = new List<StylesheetApplication>(); foreach (Stylesheet stylesheet in tmpl.Stylesheets) { stylesheetApplications.AddRange(this.DiscoverWork(templateData, stylesheet)); } var duplicates = stylesheetApplications.GroupBy(sa => sa.SaveAs, StringComparer.OrdinalIgnoreCase) .Where(g => g.Count() > 1); foreach (var group in duplicates) { TraceSources.TemplateSource.TraceCritical("Duplicate work unit target ({0}) generated from: {1}", group.Key, string.Join(", ", group.Select( sa => '\'' + sa.StylesheetName + '\''))); // TODO replace this with something more specific // throw new Exception("Critical error, continuing is not safe."); } work.AddRange(stylesheetApplications); } TraceSources.TemplateSource.TraceInformation("Generating {0:N0} documents from {1:N0} stylesheets.", work.Count, tmpl.Stylesheets.Length); ConcurrentBag<WorkUnitResult> results = new ConcurrentBag<WorkUnitResult>(); // create context ITemplatingContext context = new TemplatingContext(this._basePath, templateData, this._resolvers, this._fileProvider); // process all units of work Parallel.ForEach(work, uow => results.Add(uow.Execute(context))); // stop timing timer.Stop(); // prepare stats Dictionary<Type, WorkUnitResult[]> resultGroups = results.GroupBy(ps => ps.WorkUnit.GetType()).ToDictionary(g => g.Key, g => g.ToArray()); var stylesheetStats = resultGroups[typeof(StylesheetApplication)] .GroupBy(r => ((StylesheetApplication)r.WorkUnit).StylesheetName); foreach (var statGroup in stylesheetStats) { TraceSources.TemplateSource.TraceInformation("Applied stylesheet '{0}' {1:N0} times in {2:N0} ms (min: {3:N0}, mean {4:N0}, max {5:N0}, avg: {6:N0})", statGroup.Key, statGroup.Count(), statGroup.Sum(ps => ps.Duration) / 1000.0, statGroup.Min(ps => ps.Duration) / 1000.0, statGroup.Skip(statGroup.Count() / 2).Take(1).Single().Duration / 1000.0, statGroup.Max(ps => ps.Duration) / 1000.0, statGroup.Average(ps => ps.Duration) / 1000.0); } var resourceStats = resultGroups[typeof(ResourceDeployment)]; foreach (var statGroup in resourceStats) { TraceSources.TemplateSource.TraceInformation("Deployed resource '{0}' in {1:N0} ms", ((ResourceDeployment)statGroup.WorkUnit).ResourcePath, statGroup.Duration); } TraceSources.TemplateSource.TraceInformation("Documentation generated in {0:N1} seconds (processing time: {1:N1} seconds)", timer.Elapsed.TotalSeconds, results.Sum(ps => ps.Duration) / 1000000.0); return new TemplateOutput(results.ToArray()); }
static void Main(string[] args) { // to cover our back for all those fire and forgets TaskScheduler.UnobservedTaskException += TaskScheduler_UnobservedTaskException; Console.ForegroundColor = ConsoleColor.Gray; ThreadPool.SetMinThreads(200, 100); ThreadPool.SetMaxThreads(1000, 200); var statusCodes = new ConcurrentBag<HttpStatusCode>(); var commandLineOptions = new CommandLineOptions(); bool isHelp = args.Any(x => x == "-?"); var success = Parser.Default.ParseArguments(args, commandLineOptions); var then = DateTime.Now; ConsoleWriteLine(ConsoleColor.DarkCyan, "Starting at {0}", then); if (!success || isHelp) { if (!isHelp && args.Length > 0) ConsoleWriteLine(ConsoleColor.Red, "error parsing command line"); return; } try { var requester = new Requester(commandLineOptions); var writer = new StreamWriter(commandLineOptions.LogFile) { AutoFlush = true }; var stopwatch = Stopwatch.StartNew(); var timeTakens = new ConcurrentBag<double>(); if (commandLineOptions.SaveResponses) { if (string.IsNullOrEmpty(commandLineOptions.ResponseFolder)) { commandLineOptions.ResponseFolder = Path.Combine(Environment.CurrentDirectory, "Responses"); } if (!Directory.Exists(commandLineOptions.ResponseFolder)) Directory.CreateDirectory(commandLineOptions.ResponseFolder); } ConsoleWriteLine(ConsoleColor.Yellow, "[Press C to stop the test]"); int total = 0; bool disrupted = false; var stop = new ConsoleKeyInfo(); Console.ForegroundColor = ConsoleColor.Cyan; var source = new CancellationTokenSource(TimeSpan.FromDays(7)); Task.Run(() => { stop = Console.ReadKey(true); disrupted = true; }, source.Token); var result = Parallel.For(0, commandLineOptions.IsDryRun ? 1 : commandLineOptions.NumberOfRequests, new ParallelOptions() { MaxDegreeOfParallelism = commandLineOptions.Concurrency }, (i, loopstate) => { if (disrupted) { ConsoleWriteLine(ConsoleColor.Red, "..."); ConsoleWriteLine(ConsoleColor.Green, "Exiting.... please wait! (it might throw a few more requests)"); ConsoleWriteLine(ConsoleColor.Red, ""); loopstate.Stop(); source.Cancel(); } var sw = Stopwatch.StartNew(); IDictionary<string, object> parameters; var statusCode = requester.Next(i, out parameters); sw.Stop(); if (commandLineOptions.DelayInMillisecond > 0) { Thread.Sleep(commandLineOptions.DelayInMillisecond); } statusCodes.Add(statusCode); timeTakens.Add(sw.ElapsedTicks); var n = Interlocked.Increment(ref total); // fire and forget not to affect time taken or TPS Task.Run(() => WriteLine(writer, n, (int)statusCode, sw.ElapsedMilliseconds, parameters)); if (!commandLineOptions.Verbose) Console.Write("\r" + total); } ); stopwatch.Stop(); double[] orderedList = (from x in timeTakens orderby x select x).ToArray<double>(); Console.WriteLine(); ConsoleWriteLine(ConsoleColor.Magenta, "---------------Finished!----------------"); var now = DateTime.Now; ConsoleWriteLine(ConsoleColor.DarkCyan, "Finished at {0} (took {1})", now, now - then); // ----- adding stats of statuses returned var stats = statusCodes.GroupBy(x => x) .Select(y => new { Status = y.Key, Count = y.Count() }).OrderByDescending(z => z.Count); foreach (var stat in stats) { int statusCode = (int)stat.Status; if (statusCode >= 400 && statusCode < 600) { ConsoleWriteLine(ConsoleColor.Red, string.Format("Status {0}: {1}", statusCode, stat.Count)); } else { ConsoleWriteLine(ConsoleColor.Green, string.Format("Status {0}: {1}", statusCode, stat.Count)); } } Console.WriteLine(); Console.ForegroundColor = ConsoleColor.Yellow; Console.Write("TPS: " + Math.Round(total * 1000f / stopwatch.ElapsedMilliseconds, 1)); Console.WriteLine(" (requests/second)"); Console.WriteLine("Max: " + (timeTakens.Max() * 1000 / Stopwatch.Frequency) + "ms"); Console.WriteLine("Min: " + (timeTakens.Min() * 1000 / Stopwatch.Frequency) + "ms"); Console.WriteLine("Avg: " + (timeTakens.Average() * 1000 / Stopwatch.Frequency) + "ms"); Console.ForegroundColor = ConsoleColor.DarkGreen; Console.WriteLine(); Console.WriteLine(" 50%\tbelow " + Math.Round((double)((orderedList.Percentile<double>(50M) * 1000.0) / ((double)Stopwatch.Frequency))) + "ms"); Console.WriteLine(" 60%\tbelow " + Math.Round((double)((orderedList.Percentile<double>(60M) * 1000.0) / ((double)Stopwatch.Frequency))) + "ms"); Console.WriteLine(" 70%\tbelow " + Math.Round((double)((orderedList.Percentile<double>(70M) * 1000.0) / ((double)Stopwatch.Frequency))) + "ms"); Console.WriteLine(" 80%\tbelow " + Math.Round((double)((orderedList.Percentile<double>(80M) * 1000.0) / ((double)Stopwatch.Frequency))) + "ms"); Console.WriteLine(" 90%\tbelow " + Math.Round((double)((orderedList.Percentile<double>(90M) * 1000.0) / ((double)Stopwatch.Frequency))) + "ms"); Console.WriteLine(" 95%\tbelow " + Math.Round((double)((orderedList.Percentile<double>(95M) * 1000.0) / ((double)Stopwatch.Frequency))) + "ms"); Console.WriteLine(" 98%\tbelow " + Math.Round((double)((orderedList.Percentile<double>(98M) * 1000.0) / ((double)Stopwatch.Frequency))) + "ms"); Console.WriteLine(" 99%\tbelow " + Math.Round((double)((orderedList.Percentile<double>(99M) * 1000.0) / ((double)Stopwatch.Frequency))) + "ms"); Console.WriteLine("99.9%\tbelow " + Math.Round((double)((orderedList.Percentile<double>(99.9M) * 1000.0) / ((double)Stopwatch.Frequency))) + "ms"); } catch (Exception exception) { Console.ForegroundColor = ConsoleColor.Red; Console.WriteLine(exception); } Console.ResetColor(); }
/// <summary> /// Applies the loaded templates to <paramref name="templateData"/>. /// </summary> /// <param name="templateData"> /// Instance of <see cref="TemplateData"/> containing the various input data needed. /// </param> public virtual TemplateOutput Generate(TemplateData templateData) { Stopwatch timer = Stopwatch.StartNew(); ParsedTemplate tmpl = this.PrepareTemplate(templateData); // collect all work that has to be done List<UnitOfWork> work = new List<UnitOfWork>(); // resource work units work.AddRange(this.DiscoverWork(templateData, tmpl.Resources)); // stylesheet work units { List<StylesheetApplication> stylesheetApplications = new List<StylesheetApplication>(); foreach (Stylesheet stylesheet in tmpl.Stylesheets) { stylesheetApplications.AddRange(this.DiscoverWork(templateData, stylesheet)); } var duplicates = stylesheetApplications.GroupBy(sa => sa.SaveAs, StringComparer.OrdinalIgnoreCase) .Where(g => g.Count() > 1); foreach (var group in duplicates) { TraceSources.TemplateSource.TraceError("Duplicate work unit target ({0}) generated from: {1}", group.Key, string.Join(", ", group.Select(sa => '\'' + sa.StylesheetName + '\''))); foreach (var workunit in group.Skip(1)) { stylesheetApplications.Remove(workunit); } } work.AddRange(stylesheetApplications); } TraceSources.TemplateSource.TraceInformation("Generating {0:N0} documents from {1:N0} stylesheets.", work.Count, tmpl.Stylesheets.Length); ConcurrentBag<WorkUnitResult> results = new ConcurrentBag<WorkUnitResult>(); // create context ITemplatingContext context = new TemplatingContext(this._cache, this._basePath, templateData, this._resolvers, this._fileProvider); // fill indices using (TraceSources.TemplateSource.TraceActivity("Indexing input document")) { var customXsltContext = CreateCustomXsltContext(templateData.IgnoredVersionComponent); foreach (var index in tmpl.Indices) { TraceSources.TemplateSource.TraceVerbose("Adding index {0} (match: '{1}', key: '{1}')", index.Name, index.MatchExpr, index.KeyExpr); context.DocumentIndex.AddKey(index.Name, index.MatchExpr, index.KeyExpr, customXsltContext); } TraceSources.TemplateSource.TraceInformation("Indexing..."); context.DocumentIndex.BuildIndexes(); } int totalCount = work.Count; long lastProgress = Stopwatch.GetTimestamp(); int processed = 0; // process all units of work ParallelOptions parallelOptions = new ParallelOptions { //MaxDegreeOfParallelism = 1 }; Parallel.ForEach(work, parallelOptions, uow => { results.Add(uow.Execute(context)); int c = Interlocked.Increment(ref processed); long lp = Interlocked.Read(ref lastProgress); if ((Stopwatch.GetTimestamp() - lp) / (double)Stopwatch.Frequency > 5.0) { if (Interlocked.CompareExchange(ref lastProgress, Stopwatch.GetTimestamp(), lp) == lp) { TraceSources.TemplateSource.TraceInformation( "Progress: {0:P1} ({1:N0}/{2:N0})", c / (double)totalCount, c, totalCount); } } }); // stop timing timer.Stop(); // prepare stats Dictionary<Type, WorkUnitResult[]> resultGroups = results.GroupBy(ps => ps.WorkUnit.GetType()).ToDictionary(g => g.Key, g => g.ToArray()); var stylesheetStats = resultGroups[typeof(StylesheetApplication)] .GroupBy(r => ((StylesheetApplication)r.WorkUnit).StylesheetName); foreach (var statGroup in stylesheetStats) { TraceSources.TemplateSource.TraceInformation("Applied stylesheet '{0}' {1:N0} times in {2:N0} ms (min: {3:N0}, mean {4:N0}, max {5:N0}, avg: {6:N0})", statGroup.Key, statGroup.Count(), statGroup.Sum(ps => ps.Duration) / 1000.0, statGroup.Min(ps => ps.Duration) / 1000.0, statGroup.Skip(statGroup.Count() / 
2).Take(1).Single().Duration / 1000.0, statGroup.Max(ps => ps.Duration) / 1000.0, statGroup.Average(ps => ps.Duration) / 1000.0); } var resourceStats = resultGroups[typeof(ResourceDeployment)]; foreach (var statGroup in resourceStats) { TraceSources.TemplateSource.TraceInformation("Deployed resource '{0}' in {1:N0} ms", ((ResourceDeployment)statGroup.WorkUnit).ResourcePath, statGroup.Duration); } TraceSources.TemplateSource.TraceInformation("Documentation generated in {0:N1} seconds (processing time: {1:N1} seconds)", timer.Elapsed.TotalSeconds, results.Sum(ps => ps.Duration) / 1000000.0); return new TemplateOutput(results.ToArray()); }
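A note on the per-stylesheet stats line above: the value printed under "mean" is taken with Skip(Count/2).Take(1), i.e. the middle element of the group in enumeration order, and the group is not sorted by duration first, so it is neither a mean nor a true median. If a median is what was intended, a small helper along these lines could be used; this is a suggestion, not part of the original template engine:

using System;
using System.Collections.Generic;
using System.Linq;

static class StatsExtensions
{
    // Median of a sequence of durations (same tick/microsecond unit as used above).
    public static double Median(this IEnumerable<double> source)
    {
        var sorted = source.OrderBy(x => x).ToArray();
        if (sorted.Length == 0) throw new InvalidOperationException("Sequence contains no elements");
        int mid = sorted.Length / 2;
        return sorted.Length % 2 == 1
            ? sorted[mid]
            : (sorted[mid - 1] + sorted[mid]) / 2.0;
    }
}
// Hypothetical usage: statGroup.Select(ps => (double)ps.Duration).Median() / 1000.0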
public void RoundRobinShouldEvenlyDistributeAcrossManyPartitions() { const int TotalPartitions = 100; var selector = new DefaultPartitionSelector(); var partitions = new List<Partition>(); for (int i = 0; i < TotalPartitions; i++) { partitions.Add(new Partition { LeaderId = i, PartitionId = i }); } var topic = new Topic { Name = "a", Partitions = partitions }; var bag = new ConcurrentBag<Partition>(); Parallel.For(0, TotalPartitions * 3, x => bag.Add(selector.Select(topic, null))); var eachPartitionHasThree = bag.GroupBy(x => x.PartitionId).Count(); Assert.That(eachPartitionHasThree, Is.EqualTo(TotalPartitions), "Each partition should have received three selections."); }
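The assertion above verifies only that all 100 partition ids appear at least once among the 300 selections; the failure message claims each partition received exactly three. If that stronger property is the intent, a stricter check could look like this (a sketch, not part of the original test):

// Count selections per partition and require exactly three for every partition.
var selectionCounts = bag.GroupBy(x => x.PartitionId).ToDictionary(g => g.Key, g => g.Count());
Assert.That(selectionCounts.Count, Is.EqualTo(TotalPartitions));
Assert.That(selectionCounts.Values, Is.All.EqualTo(3));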
private static void PrintResults(int itemsToStore, ConcurrentBag<Stopwatch> individualTimings, Stopwatch stopwatch, int totalSize) { Console.WriteLine("Elapsed: " + stopwatch.Elapsed); Console.WriteLine("Bytes: " + totalSize / 1024 / 1024 + "MB"); // redis-benchmark style latency breakdown int runningTotal = 0; foreach (var timingGroup in individualTimings .GroupBy(x => x.ElapsedMilliseconds <= 10 ? x.ElapsedMilliseconds : ((int)(x.ElapsedMilliseconds / 10)) * 10 + 10) // normalize to 10 millisecond intervals, rounded up .OrderBy(x => x.Key)) { runningTotal += timingGroup.Count(); Console.WriteLine("{0}% <= {1} milliseconds", runningTotal / (double)itemsToStore * 100, timingGroup.Key); } }
internal static void Search(string folder) { var hoods = new ConcurrentBag<string>(); Parallel.ForEach(Directory.EnumerateFiles(Path.Combine(folder, "tweets")), f => { using (var fs = new FileStream(f, FileMode.Open)) { var buf = new byte[BufSize]; var bufOffset = 0; // offset into the buffer of the chunk currently read (0 or BufSize / 2) var start = 0; // offset into the buffer where the current field started var cur = 0; // number of bytes in current field var field = 0; // field number var hoodStart = 0; // offset into the buffer where the hood field started var hoodCount = 0; // number of bytes in hood field for (;;) { var read = fs.Read(buf, bufOffset, BufSize / 2); if (read == 0) break; for (var i = 0; i < read; i++, cur++) { var b = buf[bufOffset + i]; if (b == '\t' || b == '\n') { if (field == 1) { hoodStart = start; hoodCount = cur - 1; } else if (field == 3) { if (NaiveSearch(buf, start, start + cur)) { var hood = (hoodStart + hoodCount) <= BufSize ? Encoding.UTF8.GetString(buf, hoodStart, hoodCount) : Encoding.UTF8.GetString(buf, hoodStart, BufSize - hoodStart) + Encoding.UTF8.GetString(buf, 0, (hoodStart + hoodCount) - BufSize); hoods.Add(hood); } } start = bufOffset + i + 1; cur = 0; field = (field + 1) % 4; } } bufOffset = (bufOffset + BufSize / 2) % BufSize; } } }); var counts = hoods.GroupBy(t => t) .OrderByDescending(g => g.Count()) .ThenBy(g => g.Key) .Select(h => $"{h.Key}\t{h.Count()}"); File.WriteAllLines(Path.Combine(folder, "cs_binary_output"), counts); }
public void DCAwareRoundRobinPolicyCachesLocalNodes() { var hostList = new List<Host> { TestHelper.CreateHost("0.0.0.1", "dc1"), TestHelper.CreateHost("0.0.0.2", "dc2"), TestHelper.CreateHost("0.0.0.3", "dc1"), TestHelper.CreateHost("0.0.0.4", "dc2"), TestHelper.CreateHost("0.0.0.5", "dc1"), TestHelper.CreateHost("0.0.0.6", "dc2"), TestHelper.CreateHost("0.0.0.7", "dc1"), TestHelper.CreateHost("0.0.0.8", "dc2"), TestHelper.CreateHost("0.0.0.9", "dc1"), TestHelper.CreateHost("0.0.0.10", "dc2") }; const string localDc = "dc1"; var clusterMock = new Mock<ICluster>(); clusterMock .Setup(c => c.AllHosts()) .Returns(hostList); //Initialize the balancing policy var policy = new DCAwareRoundRobinPolicy(localDc, 1); policy.Initialize(clusterMock.Object); var instances = new ConcurrentBag<object>(); Action action = () => instances.Add(policy.GetHosts()); TestHelper.ParallelInvoke(action, 100); Assert.AreEqual(1, instances.GroupBy(i => i.GetHashCode()).Count()); }
static void Main(string[] args) { Console.WriteLine("Crank v{0}", typeof(Program).Assembly.GetName().Version); if (args.Length < 2) { Console.WriteLine("Usage: crank [url] [numclients] <batchSize> <batchInterval>"); return; } ServicePointManager.DefaultConnectionLimit = Int32.MaxValue; string url = args[0]; int clients = Int32.Parse(args[1]); int batchSize = args.Length < 3 ? 50 : Int32.Parse(args[2]); int batchInterval = args.Length < 4 ? 500 : Int32.Parse(args[3]); // Increase the number of min threads in the threadpool ThreadPool.SetMinThreads(clients, 2); TaskScheduler.UnobservedTaskException += OnUnobservedTaskException; var connections = new ConcurrentBag<Connection>(); var totalRunStopwatch = Stopwatch.StartNew(); Task.Run(async () => { Console.WriteLine("Ramping up connections. Batch size {0}.", batchSize); var rampupStopwatch = Stopwatch.StartNew(); await ConnectBatches(url, clients, batchSize, batchInterval, connections); Console.WriteLine("Started {0} connection(s).", connections.Count); Console.WriteLine("Setting up event handlers"); rampupStopwatch.Stop(); Console.WriteLine("Ramp up complete in {0}.", rampupStopwatch.Elapsed); }); Console.WriteLine("Press 'q' to quit."); while (true) { var keyInfo = Console.ReadKey(intercept: true); if (keyInfo.Key == ConsoleKey.Q) { break; } Console.WriteLine("Total Running time: {0}", totalRunStopwatch.Elapsed); Console.WriteLine("End point: {0}", url); Console.WriteLine("Total connections: {0}", clients); foreach (var g in connections.GroupBy(c => c.State)) { Console.WriteLine(g.Key + " connections: {0}", g.Count()); } foreach (var g in connections.GroupBy(c => c.Transport.Name)) { Console.WriteLine(g.Key + " connections: {0}", g.Count()); } } totalRunStopwatch.Stop(); _running = false; Console.WriteLine("Closing connection(s)."); Parallel.ForEach(connections, connection => connection.Stop()); }
/// <summary>
/// Applies the loaded templates to <paramref name="templateData"/>.
/// </summary>
/// <param name="templateData">
/// Instance of <see cref="TemplateData"/> containing the various input data needed.
/// </param>
public virtual TemplateOutput Generate(TemplateData templateData)
{
    Stopwatch timer = Stopwatch.StartNew();

    this._fileResolver.Clear();

    ParsedTemplate tmpl = this.PrepareTemplate(templateData);

    // collect all work that has to be done
    List<UnitOfWork> work = new List<UnitOfWork>();

    // resource work units
    work.AddRange(this.DiscoverWork(templateData, tmpl.Parameters, tmpl.Resources));

    // stylesheet work units
    {
        List<StylesheetApplication> stylesheetApplications = new List<StylesheetApplication>();
        foreach (Stylesheet stylesheet in tmpl.Stylesheets)
        {
            stylesheetApplications.AddRange(this.DiscoverWork(templateData, tmpl.Parameters, stylesheet));
        }

        // drop work units that would write to the same output path, keeping the first one
        var duplicates = stylesheetApplications.GroupBy(sa => sa.SaveAs, StringComparer.OrdinalIgnoreCase)
                                               .Where(g => g.Count() > 1);

        foreach (var group in duplicates)
        {
            TraceSources.TemplateSource.TraceError("Duplicate work unit target ({0}) generated from: {1}",
                                                   group.Key,
                                                   string.Join(", ", group.Select(sa => '\'' + sa.StylesheetName + '\'')));

            foreach (var workunit in group.Skip(1))
            {
                stylesheetApplications.Remove(workunit);
            }
        }

        work.AddRange(stylesheetApplications);
    }

    TraceSources.TemplateSource.TraceInformation("Generating {0:N0} documents from {1:N0} stylesheets.",
                                                 work.Count, tmpl.Stylesheets.Length);

    ConcurrentBag<WorkUnitResult> results = new ConcurrentBag<WorkUnitResult>();

    // create context
    ITemplatingContext context = new TemplatingContext(this._cache,
                                                       this._container,
                                                       templateData.OutputFileProvider, // TODO fix this (this._basePath)
                                                       templateData,
                                                       this._resolvers,
                                                       this._templateInfo.Source);

    // fill indices
    using (TraceSources.TemplateSource.TraceActivity("Indexing input document"))
    {
        var customXsltContext = CreateCustomXsltContext(templateData.IgnoredVersionComponent);
        foreach (var index in tmpl.Indices)
        {
            TraceSources.TemplateSource.TraceVerbose("Adding index {0} (match: '{1}', key: '{2}')",
                                                     index.Name, index.MatchExpr, index.KeyExpr);
            context.DocumentIndex.AddKey(index.Name, index.MatchExpr, index.KeyExpr, customXsltContext);
        }

        TraceSources.TemplateSource.TraceInformation("Indexing...");
        context.DocumentIndex.BuildIndexes();
    }

    int totalCount = work.Count;
    long lastProgress = Stopwatch.GetTimestamp();
    int processed = 0;

    // process all units of work
    ParallelOptions parallelOptions = new ParallelOptions
    {
        //MaxDegreeOfParallelism = 1
    };

    IEnumerable<UnitOfWork> unitsOfWork = work;
    if (templateData.Filter != null)
    {
        unitsOfWork = unitsOfWork
            .Where(uow =>
            {
                if (templateData.Filter(uow))
                    return true;

                TraceSources.TemplateSource.TraceInformation("Filtered unit of work: [{0}] {1}",
                                                             uow.GetType().Name, uow.ToString());
                return false;
            });
    }

    Parallel.ForEach(unitsOfWork, parallelOptions, uow =>
    {
        results.Add(uow.Execute(context));

        int c = Interlocked.Increment(ref processed);
        long lp = Interlocked.Read(ref lastProgress);
        // report progress at most every five seconds
        if ((Stopwatch.GetTimestamp() - lp) / (double)Stopwatch.Frequency > 5.0)
        {
            if (Interlocked.CompareExchange(ref lastProgress, Stopwatch.GetTimestamp(), lp) == lp)
            {
                TraceSources.TemplateSource.TraceInformation("Progress: {0:P1} ({1:N0}/{2:N0})",
                                                             c / (double)totalCount, c, totalCount);
            }
        }
    });

    // stop timing
    timer.Stop();

    // time the statistics pass
    Stopwatch statsTimer = Stopwatch.StartNew();

    // prepare stats
    Dictionary<Type, WorkUnitResult[]> resultGroups =
        results.GroupBy(ps => ps.WorkUnit.GetType()).ToDictionary(g => g.Key, g => g.ToArray());

    var stylesheetStats = resultGroups[typeof(StylesheetApplication)]
        .GroupBy(r => ((StylesheetApplication)r.WorkUnit).StylesheetName);

    foreach (var statGroup in stylesheetStats)
    {
        long min = statGroup.Min(ps => ps.Duration);
        long max = statGroup.Max(ps => ps.Duration);
        TraceSources.TemplateSource.TraceInformation("Applied stylesheet '{0}' {1:N0} times in {2:N0} ms (min: {3:N0}, mean {4:N0}, max {5:N0}, avg: {6:N0})",
                                                     statGroup.Key,
                                                     statGroup.Count(),
                                                     statGroup.Sum(ps => ps.Duration) / 1000.0,
                                                     min / 1000.0,
                                                     statGroup.Skip(statGroup.Count() / 2).Take(1).Single().Duration / 1000.0,
                                                     max / 1000.0,
                                                     statGroup.Average(ps => ps.Duration) / 1000.0);

        // TODO this is quick and dirty, should be cleaned up
        long[] buckets = new long[20];
        int rows = 6;
        /*  ┌────────────────────┐ ◄ 230
            │█                  █│
            │█                  █│
            │█                  █│
            │█                  █│
            │█                  █│
            │█__________________█│
            └────────────────────┘ ◄ 0
            ▲ 12ms               ▲ 12ms
         */
        // this is a little hacky, but it will do for now
        WorkUnitResult[] sortedResults = statGroup.OrderBy(r => r.Duration).ToArray();
        double bucketSize = (max - min) / (double)buckets.Length;

        // bucket the sorted durations into the histogram bins
        int bucketNum = 0;
        long bucketMax = 0;
        foreach (WorkUnitResult result in sortedResults)
        {
            while ((result.Duration - min) > (bucketNum + 1) * bucketSize)
                bucketNum++;

            buckets[bucketNum] += 1;
            bucketMax = Math.Max(buckets[bucketNum], bucketMax);
        }

        double rowHeight = bucketMax / (double)rows;

        StringBuilder graph = new StringBuilder();
        graph.AppendLine("Graph:");
        const int gutter = 2;
        int columnWidth = graph.Length;
        graph.Append('┌').Append('─', buckets.Length).Append('┐').Append('◄').Append(' ').Append(bucketMax.ToString("N0"));
        int firstLineLength = graph.Length - columnWidth;
        columnWidth = graph.Length - columnWidth + gutter;

        StringBuilder lastLine = new StringBuilder();
        lastLine.Append('▲').Append(' ').Append((min / 1000.0).ToString("N0")).Append("ms");
        lastLine.Append(' ', (buckets.Length + 2) - lastLine.Length - 1);
        lastLine.Append('▲').Append(' ').Append((max / 1000.0).ToString("N0")).Append("ms");
        columnWidth = Math.Max(columnWidth, lastLine.Length + gutter);

        if (columnWidth > firstLineLength)
            graph.Append(' ', columnWidth - firstLineLength);

        graph.AppendLine("Percentage of the applications processed within a certain time (ms)");

        for (int row = 0; row < rows; row++)
        {
            // │┌┐└┘─
            graph.Append('│');
            for (int col = 0; col < buckets.Length; col++)
            {
                if (buckets[col] > (rowHeight * (rows - (row + 1)) + rowHeight / 2.0))
                    graph.Append('█');
                else if (buckets[col] > rowHeight * (rows - (row + 1)))
                    graph.Append('▄');
                else if (row == rows - 1)
                    graph.Append('_');
                else
                    graph.Append(' ');
            }
            graph.Append('│');

            graph.Append(' ', columnWidth - (buckets.Length + 2));

            switch (row)
            {
                case 0:
                    graph.Append(" 100% ").Append((max / 1000.0).ToString("N0"));
                    break;
                case 1:
                    graph.Append(" 95% ").Append((sortedResults[((int)Math.Floor(sortedResults.Length * 0.95))].Duration / 1000.0).ToString("N0"));
                    break;
                case 2:
                    graph.Append(" 90% ").Append((sortedResults[((int)Math.Floor(sortedResults.Length * .9))].Duration / 1000.0).ToString("N0"));
                    break;
                case 3:
                    graph.Append(" 80% ").Append((sortedResults[((int)Math.Floor(sortedResults.Length * 0.8))].Duration / 1000.0).ToString("N0"));
                    break;
                case 4:
                    graph.Append(" 70% ").Append((sortedResults[((int)Math.Floor(sortedResults.Length * 0.7))].Duration / 1000.0).ToString("N0"));
                    break;
                case 5:
                    graph.Append(" 50% ").Append((sortedResults[((int)Math.Floor(sortedResults.Length * 0.5))].Duration / 1000.0).ToString("N0"));
                    break;
            }

            graph.AppendLine();
        }

        int len = graph.Length;
        graph.Append('└').Append('─', buckets.Length).Append('┘').Append('◄').Append(" 0");
        len = graph.Length - len;
        if (columnWidth > len)
            graph.Append(' ', columnWidth - len);

        graph.Append(" 10% ").Append((sortedResults[((int)Math.Floor(sortedResults.Length * .1))].Duration / 1000.0).ToString("N0"));
        graph.AppendLine();

        lastLine.Append(' ', columnWidth - lastLine.Length);
        lastLine.Append(" 1% ").Append((sortedResults[((int)Math.Floor(sortedResults.Length * .01))].Duration / 1000.0).ToString("N0"));

        graph.Append(lastLine.ToString());

        TraceSources.TemplateSource.TraceVerbose(graph.ToString());
    }

    var resourceStats = resultGroups[typeof(ResourceDeployment)];

    foreach (var statGroup in resourceStats)
    {
        TraceSources.TemplateSource.TraceInformation("Deployed resource '{0}' in {1:N0} ms",
                                                     ((ResourceDeployment)statGroup.WorkUnit).ResourcePath,
                                                     statGroup.Duration);
    }

    statsTimer.Stop();

    TraceSources.TemplateSource.TraceInformation("Documentation generated in {0:N1} seconds (processing time: {1:N1} seconds)",
                                                 timer.Elapsed.TotalSeconds,
                                                 results.Sum(ps => ps.Duration) / 1000000.0);

    TraceSources.TemplateSource.TraceInformation("Statistics generated in {0:N1} seconds",
                                                 statsTimer.Elapsed.TotalSeconds);

    return new TemplateOutput(results.ToArray(), tmpl.TemporaryFiles);
}
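The percentile rows in the histogram above are read straight out of the duration array once it has been sorted. A standalone sketch of that lookup, with illustrative names and sample data that are not part of the original code:

using System;
using System.Linq;

static class PercentileDemo
{
    // Value at the given percentile of an already-sorted array, using the same
    // floor-based index as the histogram code above (with a guard for p = 1.0).
    static long Percentile(long[] sortedDurations, double percentile)
    {
        int index = (int)Math.Floor(sortedDurations.Length * percentile);
        return sortedDurations[Math.Min(index, sortedDurations.Length - 1)];
    }

    static void Main()
    {
        // Illustrative sample durations (same unit as WorkUnitResult.Duration).
        long[] durations = { 120, 80, 450, 95, 300, 110, 70, 220 };
        long[] sorted = durations.OrderBy(d => d).ToArray();

        foreach (var p in new[] { 0.5, 0.8, 0.95 })
            Console.WriteLine($"{p:P0} of applications finished within {Percentile(sorted, p)}");
    }
}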