public NaiveBayesClassifier(IDictionary<TCategory, IDistribution<TFeature>> knownInstances, double? kSmoothFactor)
{
    if (knownInstances == null)
    {
        throw new ArgumentNullException("knownInstances");
    }

    _kSmoothFactor = kSmoothFactor;

    //throw new NotImplementedException("bug: this looks like a total repeating event count!");
    _globalTotalEventCount = knownInstances.Sum(d => d.Value.TotalEventCountWithNoise);

    // Not exactly true, because some features might appear in multiple categories and should be counted only once.
    _globalDistinctEventCount = knownInstances.Sum(d => d.Value.DistinctEventsCountWithNoise);

    _categoryAndFeatures = knownInstances.ToDictionary(
        _ => _.Key,
        distr => new DistributionAndMinEventCount<TFeature>
        {
            Distribution = distr.Value,

            // APPROXIMATION! Assume the noise feature occurs on average half as often as the least frequent represented feature.
            // This may overstate the significance of noise in terms of frequencies (noise follows Zipf's law rather than a linear distribution),
            // so a denominator greater than 2.0 might work better.
            AverageNoiseFeatureFrequencyLog = Math.Log(distr.Value.DistinctRepresentedEvents.Select(distr.Value.GetEventCount).DefaultIfEmpty(0).Min() / 2.0 / distr.Value.TotalNoiseEventsCount),

            TotalEventCountWithNoiseLog = Math.Log(distr.Value.TotalEventCountWithNoise),
            TotalEventCountWithKSmoothingLog = _kSmoothFactor.HasValue
                ? Math.Log(distr.Value.TotalEventCountWithNoise + _globalDistinctEventCount * _kSmoothFactor.Value)
                : (double?)null,
            CategoryFrequencyLog = Math.Log(distr.Value.TotalEventCountWithNoise) - Math.Log(_globalTotalEventCount),
        });
}
public string Day()
{
    var sb = new StringBuilder();
    var dayOre = 0d;

    var dayEnergy = providers.Sum(x => x.Value.EnergyOutput);
    totalEnergyStored += dayEnergy;

    var harvestNeededEnergyForDay = harvesters.Sum(x => x.Value.EnergyRequirement);

    if (totalEnergyStored >= harvestNeededEnergyForDay)
    {
        if (this.mode == "Full")
        {
            dayOre += harvesters.Sum(x => x.Value.OreOutput);
            totalEnergyStored -= harvestNeededEnergyForDay;
        }
        else if (this.mode == "Half")
        {
            // Half mode: harvest 50% of the ore output while spending 60% of the required energy.
            dayOre += harvesters.Sum(x => (x.Value.OreOutput * 50) / 100);
            totalEnergyStored -= (harvestNeededEnergyForDay * 60) / 100;
        }

        totalMinedOre += dayOre;
    }

    sb.AppendLine("A day has passed.");
    sb.AppendLine($"Energy Provided: {dayEnergy}");
    sb.AppendLine($"Plumbus Ore Mined: {dayOre}");

    return sb.ToString().Trim();
}
public string PrintFooter(IDictionary<IGeometricShape, decimal> shapes)
{
    StringBuilder sb = new StringBuilder();
    sb.Append("<br/>TOTAL:<br/>");
    sb.Append($"{shapes.Count} shapes ");
    sb.Append($"Perimeter: {shapes.Sum(x => x.Key.CalculatePerimeter()):#.##} ");
    sb.Append($"Area: {shapes.Sum(x => x.Key.CalculateArea()):#.##}");
    return sb.ToString();
}
/// <summary>
/// Invoked during fetch (i.e. repository load)
/// </summary>
/// <param name="values"></param>
public void LoadValues(SimilarityValue[] values)
{
    if (values == null)
    {
        throw new ArgumentNullException("values");
    }

    foreach (var value in values)
    {
        //TODO: Find out why this bug occurs,
        //      i.e. sometimes the exact same value is repeated over multiple values.
        if (_similarityValues.ContainsKey(value.Value))
        {
            _similarityValues[value.Value].IncreaseUsage(ValueCount);
        }
        else
        {
            _similarityValues.Add(value.Value, value);
        }
    }

    if (ValueCount == 0)
    {
        ValueCount = _similarityValues.Sum(x => x.Value.Count);
    }
}
public Result Run(decimal principal, DateTime? startTime = null, DateTime? endTime = null)
{
    if (_weightings == null || !_weightings.Any())
    {
        throw new ArgumentException("You should have at least one candle set for calculation");
    }

    // Distribute principal to each candle set
    decimal totalWeight = _weightings.Sum(w => w.Value);
    IReadOnlyDictionary<IEnumerable<IOhlcv>, decimal> preAssetCashMap =
        _weightings.ToDictionary(w => w.Key, w => principal * w.Value / totalWeight);
    var assetCashMap = preAssetCashMap.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);

    // Init transaction history
    var transactions = new List<Transaction>();

    // Loop with each asset
    for (int i = 0; i < _weightings.Count; i++)
    {
        var asset = assetCashMap.ElementAt(i).Key;
        var startIndex = asset.FindIndexOrDefault(c => c.DateTime >= (startTime ?? DateTimeOffset.MinValue), 0).Value;
        var endIndex = asset.FindLastIndexOrDefault(c => c.DateTime <= (endTime ?? DateTimeOffset.MaxValue), asset.Count() - 1).Value;

        using (var context = new AnalyzeContext(asset))
        {
            var executor = CreateBuySellRuleExecutor(context, _calculator, assetCashMap, transactions);
            executor.Execute(startIndex, endIndex);
        }
    }

    return new Result(preAssetCashMap, assetCashMap, transactions);
}
private void CalculatePayments()
{
    // find total shares within the round.
    var totalShares = _shares.Sum(pair => pair.Value);

    // loop through user shares and calculate the payouts.
    foreach (var pair in _shares)
    {
        var percent = pair.Value / totalShares;
        var amount = (decimal)percent * Block.Reward;

        // get the user id for the payment.
        var user = _accountManager.GetAccountByUsername(pair.Key);

        if (user == null)
        {
            _accountManager.AddAccount(new Account(-1, pair.Key, pair.Key));
            user = _accountManager.GetAccountByUsername(pair.Key);
        }

        // if we can't find a user for the given username, just skip.
        if (user == null)
        {
            continue;
        }

        Payments.Add(new Payment(Block, user.Id, amount));
    }

    // mark the block as accounted
    Block.Accounted = true;
}
public override void LifeCycle(float delta)
{
    time += delta;
    if (time < LIFECYCLE_TICK_TIME || plantBlocks.Count == 0)
    {
        return;
    }
    time = 0;

    List<Tuple<IntVector3, byte>> grassThatDied = new List<Tuple<IntVector3, byte>>();

    // kill off some grass if there is too little gas
    float numberToDie = GRASS_DEATH_RATE * GAS_REQUIREMENTS.Sum(kvPair => Mathf.Max(kvPair.Value - atmosphere.GetGasProgress(kvPair.Key), 0));
    while (numberToDie > 0 && plantBlocks.Count > 0)
    {
        if (numberToDie < 1 && randGen.NextDouble() > numberToDie)
        {
            break;
        }

        int idx = randGen.Next(plantBlocks.Count);
        IntVector3 block = plantBlocks.ElementAt(idx);
        DeregisterGrassAt(block);
        grassThatDied.Add(Tuple.Create(block, RED_ROCK_ID));
        numberToDie--;
    }

    terrain.SetBlocks(grassThatDied);
    Spread();
}
public Order(Psorder dao, ILocation location, IUser user)
{
    Dao = dao ?? throw new ArgumentNullException(paramName: nameof(dao));

    if (location.Dao.LocationId != dao.Location.LocationId)
    {
        throw new InvalidOperationException("location inconsistent between dto and dao.");
    }
    Location = location;

    if (user.Dao.UserId != dao.User.UserId)
    {
        throw new InvalidOperationException("user inconsistent between dto and dao.");
    }
    User = user;

    Time = Dao.OrderTime;
    ID = Dao.OrderId;

    IDictionary<decimal, int> pizzasByPrice = Dao.PsorderPart.ToDictionary(x => x.Price, x => x.Qty);
    ValidatePizzas(pizzasByPrice);
    PizzasByPrice = pizzasByPrice.Where(p => p.Value != 0).ToImmutableDictionary(p => p.Key, p => p.Value);

    TotalValueUsd = pizzasByPrice.Sum(x => x.Key * x.Value);
    if (TotalValueUsd > MaxTotalValueUsd)
    {
        throw new ArgumentException(
            message: $"order should not exceed ${MaxTotalValueUsd} in total value.",
            paramName: nameof(pizzasByPrice));
    }
}
public BorderAnalysis(IDictionary<Color, int> colors, IDictionary<int, int> buckets, int colorThreshold, float bucketThreshold)
{
    if (colors == null)
    {
        throw new ArgumentNullException(nameof(colors));
    }

    Colors = colors.Count;

    var mostPresentColor = colors.OrderByDescending(x => x.Value).First();
    var mostPresentBucket = mostPresentColor.Key.ToColorBucket();
    Background = mostPresentColor.Key;

    if (buckets.Count > 0)
    {
        BucketRatio = buckets[mostPresentBucket] / (float)buckets.Sum(x => x.Value);
    }
    else
    {
        BucketRatio = 1;
    }

    if (bucketThreshold >= 1.0f)
    {
        Success = colors.Count > 0 && colors.Count < colorThreshold;
    }
    else
    {
        Success = colors.Count > 0 && (colors.Count < colorThreshold || BucketRatio > bucketThreshold);
    }
}
public decimal ApplyPromotion(IList<Product> products, IDictionary<string, int> groupedItem)
{
    int comboUnit = 0;
    try
    {
        decimal unitPrice = groupedItem.ContainsKey("d")
            ? products.Where(x => x.Sku.Equals("d", System.StringComparison.InvariantCultureIgnoreCase)).FirstOrDefault().Unitprice
            : 0;

        // Check the number of C and D pairs.
        if (groupedItem.ContainsKey("c") && groupedItem.ContainsKey("d"))
        {
            comboUnit = ((groupedItem["c"] + groupedItem["d"]) - Math.Abs(groupedItem["c"] - groupedItem["d"])) / 2;
        }

        // If pairs were found, subtract the discount on those pairs from the total amount.
        var total = groupedItem.Sum(p => p.Key.Equals("d", System.StringComparison.InvariantCultureIgnoreCase)
            ? p.Value * unitPrice - (comboUnit * 5)
            : 0);

        return total;
    }
    catch (Exception)
    {
        // rethrow without losing the original stack trace
        throw;
    }
}
//public static double ObtainProbabilityOfSpam(IDictionary<string, uint> testWordsInEmail, Dictionary<string, WordCount> trainingEmails)
//{
//    double probabilitySpam = 0;
//    var totalWords = testWordsInEmail.Sum(w => w.Value);
//    foreach (var u in testWordsInEmail)
//    {
//        double wordWeight = 1.0 * u.Value / totalWords;
//
//        // Count all the times this word was spam
//        WordCount wordCount;
//        if (!trainingEmails.TryGetValue(u.Key, out wordCount))
//        {
//            // If we've never seen the word, then it's 50/50
//            wordCount = new WordCount();
//            wordCount.Add(false, 1);
//            wordCount.Add(true, 1);
//        }
//        double probabilityOfWordBeingSpam = 1.0 * wordCount.SpamCount / (wordCount.SpamCount + wordCount.HamCount);
//        probabilitySpam += wordWeight * probabilityOfWordBeingSpam;
//    }
//    return probabilitySpam;
//}

public static Tuple<double, double> ObtainProbabilityOfSpam(IDictionary<string, uint> testWordsInEmail, Dictionary<string, WordCount> trainingEmails, double totalProbabilitySpam, int totalNumberOfWords)
{
    double probabilitySpam = 0, probabilityHam = 0;
    double totalProbabilityHam = 1 - totalProbabilitySpam;
    var totalWords = testWordsInEmail.Sum(w => w.Value);

    foreach (var u in testWordsInEmail)
    {
        // retained from the earlier frequency-weighted version; unused in the log-probability sum below
        double wordWeight = 1.0 * u.Value / totalWords;

        // Count all the times this word was spam
        WordCount wordCount;
        if (!trainingEmails.TryGetValue(u.Key, out wordCount))
        {
            // Unseen word: start from zero counts; the additive smoothing below keeps both probabilities non-zero.
            wordCount = new WordCount();
        }

        const double smoothingNum = 100; // Feel free to change this #
        double probabilityOfWordBeingSpam = (1.0 * wordCount.SpamCount + smoothingNum) / (wordCount.SpamCount + wordCount.HamCount + smoothingNum);
        double probabilityOfWordBeingHam = (1.0 * wordCount.HamCount + smoothingNum) / (wordCount.SpamCount + wordCount.HamCount + smoothingNum);

        probabilitySpam += Math.Log(probabilityOfWordBeingSpam);
        probabilityHam += Math.Log(probabilityOfWordBeingHam);
    }

    probabilitySpam += Math.Log(totalProbabilitySpam);
    probabilityHam += Math.Log(totalProbabilityHam);

    return new Tuple<double, double>(probabilitySpam, probabilityHam);
}
public long PopulateForDays(int days)
{
    for (var i = 0; i < days; i++)
    {
        var nextG = GetNewDict();

        for (var j = 0; j <= 8; j++)
        {
            if (j == 0)
            {
                // an entry at counter 0 resets to 6 and also spawns a new entry at 8
                nextG[6] += _population[j];
                nextG[8] += _population[j];
            }
            else
            {
                nextG[j - 1] += _population[j];
            }
        }

        foreach (var g in nextG)
        {
            _population[g.Key] = g.Value;
        }
    }

    return _population.Sum(x => x.Value);
}
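// Hypothetical standalone sketch of one step of the recurrence above (assumptions: _population maps a
// countdown value 0..8 to the number of items currently at that value, GetNewDict() returns a fresh map
// with all nine keys set to 0, and System.Linq / System.Collections.Generic are in scope).
var population = new Dictionary<int, long> { [0] = 2, [1] = 1, [6] = 3 }; // keys 2..5, 7, 8 implicitly 0
var next = Enumerable.Range(0, 9).ToDictionary(k => k, _ => 0L);
for (var j = 0; j <= 8; j++)
{
    var count = population.TryGetValue(j, out var c) ? c : 0L;
    if (j == 0) { next[6] += count; next[8] += count; }
    else { next[j - 1] += count; }
}
// after one simulated day the two items at 0 move to 6 and produce two new items at 8;
// the total is next.Sum(x => x.Value), matching the method's return value.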
// Tolerances are needed due to a combination of false-positive failures and real product issues.
// - [Nested-Edge] [Broker-Enabled]: Sometimes we get an excessive amount of 501 and 504 status codes.
//   Fail the tests if we have > 0.1% of either code.
//   + (501) We don't receive some desired properties in module-registered twin desired property callback
//   + (504) Module cannot make reported property update
bool IsPassed(IDictionary<int, int> statusCodesToCount)
{
    bool isPassed = true;
    int totalResults = statusCodesToCount.Sum(x => x.Value);

    if (totalResults == 0)
    {
        return false;
    }

    if (this.Topology == Topology.Nested && this.MqttBrokerEnabled)
    {
        // See TwinTester/StatusCode.cs for reference.
        int[] bigToleranceStatusCodes = { };
        int[] littleToleranceStatusCodes = { 501, 504 };
        isPassed = this.GeneratePassResult(statusCodesToCount, bigToleranceStatusCodes, littleToleranceStatusCodes);
    }
    else
    {
        List<int> statusCodes = statusCodesToCount.Keys.ToList();
        IEnumerable<int> failingStatusCodes = statusCodes.Where(s =>
        {
            string statusCode = s.ToString();
            return !statusCode.StartsWith("2");
        });

        isPassed = failingStatusCodes.Count() == 0;
    }

    return isPassed;
}
private void InitializeDeviceClients()
{
    _logger.LogInformation("Initializing device clients...");

    var tenantConfiguration = new List<TenantConfiguration>();
    _configuration.Bind("SimulatedDevices:Tenants", tenantConfiguration);

    foreach (var tenant in tenantConfiguration)
    {
        var deviceClients = new List<DeviceInfo>();
        for (int i = 0; i < tenant.NumberOfDevices; i++)
        {
            deviceClients.Add(new DeviceInfo
            {
                DeviceId = TenantConfiguration.BuildDeviceName(tenant, i),
                DeviceClient = DeviceClient.CreateFromConnectionString(
                    _configuration["IotHubConnectionString"],
                    TenantConfiguration.BuildDeviceName(tenant, i))
            });
        }

        _tenantDeviceClients.Add(tenant, deviceClients);
    }

    _logger.LogInformation($"Registered {_tenantDeviceClients.Sum(t => t.Value.Count)} device clients");
}
public void ArchiveQueryTest(string pointMask, string startTime, string endTime, double minValue, double maxValue)
{
    var now = AFTime.Now;
    var st = new AFTime(startTime, now);
    var et = new AFTime(endTime, now);

    Output.WriteLine($"Start to execute PI Data Archive queries on PI Points matching [{pointMask}] " +
        $"between [{st}] and [{et}].");

    IList<IEnumerable<PIPointQuery>> queries = PIPointQuery.ParseQuery(Fixture.PIServer, pointMask);
    IEnumerable<PIPoint> pointList = PIPoint.FindPIPoints(Fixture.PIServer, queries).ToList();
    IDictionary<string, AFValues> events = Fixture.ReadPIEvents(pointList, st, et);

    // Verify all event values are in the expected range
    foreach (var ptvaluespair in events)
    {
        foreach (var val in ptvaluespair.Value.Where(val => val.IsGood))
        {
            var convertedValue = Convert.ToDouble(val.Value, CultureInfo.InvariantCulture);
            Assert.True(convertedValue >= minValue && convertedValue <= maxValue,
                $"[{ptvaluespair.Key}] has a value [{val.Value}] outside of expected data range of " +
                $"[{minValue} ~ {maxValue}]");
        }
    }

    Output.WriteLine($"Found {events.Sum(kvp => kvp.Value.Count)} PI events.");
}
/// <summary>
/// Picks a random element from a weighted sequence.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="src"></param>
/// <returns></returns>
public static T PickWeighted<T>(this IDictionary<T, double> src, PRNG prng = null)
{
    var total = src.Sum(kvp => kvp.Value);

    double num;
    if (prng == null)
    {
        num = RandomHelper.Next(total);
    }
    else
    {
        num = prng.Next(total);
    }

    double sofar = 0;
    foreach (var kvp in src)
    {
        sofar += kvp.Value;
        if (num < sofar)
        {
            return kvp.Key;
        }
    }

    return default(T); // nothing to pick...
}
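// Hypothetical usage sketch for PickWeighted (assumptions: the extension class above is in scope and
// RandomHelper.Next(total) returns a uniform double in [0, total)). The weights do not need to sum
// to 1; each key is picked with probability weight / totalWeight.
var lootTable = new Dictionary<string, double>
{
    ["common"] = 70.0,
    ["rare"] = 25.0,
    ["legendary"] = 5.0,
};
string drop = lootTable.PickWeighted(); // "common" about 70% of the time, "legendary" about 5%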
private string GetAllResultsTables(IDictionary<IProjectFile, IEnumerable<Result>> allResults)
{
    if (!allResults.Any())
    {
        return string.Empty;
    }

    int MaxLength(Func<Result, int> field) =>
        allResults.Max(results => results.Value.Any() ? results.Value.Max(field) : 0);

    var namePad = Math.Max("Package".Length, MaxLength(r => r.Name.Length));
    var installedPad = Math.Max("Installed".Length, MaxLength(r => r.Installed?.Version.ToString().Length ?? 0));
    var latestPad = Math.Max("Latest".Length, MaxLength(r => r.Latest?.Version.ToString().Length ?? 0));

    var msg = new StringBuilder();
    foreach (var results in allResults)
    {
        msg.AppendLine(GetResultsTable(results, namePad, installedPad, latestPad));
    }

    if (allResults.Count > 1)
    {
        var allTotal = allResults.Sum(ar => ar.Value.Sum(r => r.YearsBehind));
        msg.AppendLine($"Total is {allTotal:F1} libyears behind");
    }

    return msg.ToString();
}
public IHsmModelHolder Apply(IDictionary<IHsmModelHolder, double> probabilitiesByModels)
{
    double totalSum = probabilitiesByModels.Sum(x => x.Value);

    IDictionary<IHsmModelHolder, double> weightedProbabilitiesByModels =
        probabilitiesByModels.ToDictionary(entry => entry.Key, entry => Math.Log10(entry.Value / totalSum));

    return weightedProbabilitiesByModels.Aggregate((l, r) => l.Value > r.Value ? l : r).Key;
}
private int RecalculateLevelingActivityValue(IDictionary<Guid, int> linkedTasks, ISet<Guid> completedLinkedTasks)
{
    var totalStorypoints = linkedTasks.Sum(x => x.Value);
    if (totalStorypoints == 0)
    {
        // avoid dividing by zero when no linked tasks carry story points
        return 0;
    }

    var storypointsCompleted = linkedTasks.Where(lt => completedLinkedTasks.Contains(lt.Key)).Sum(x => x.Value);

    return storypointsCompleted * 100 / totalStorypoints;
}
private static bool DiceIsValid(IDictionary<int, DiceFace> diceFaces)
{
    const double tolerance = 0.00000000000001;
    var sum = diceFaces.Sum(f => f.Value.Probability);
    return Math.Abs(1.0 - sum) < tolerance;
}
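// Hypothetical usage sketch for DiceIsValid, shown as if the method were accessible (it is private above).
// Assumption: DiceFace has a parameterless constructor and a settable Probability property; the real type
// may differ. A fair six-sided die passes because its face probabilities sum to 1 within the tolerance:
var fairDie = Enumerable.Range(1, 6)
    .ToDictionary(face => face, face => new DiceFace { Probability = 1.0 / 6.0 });
bool isValid = DiceIsValid(fairDie); // true: 6 * (1/6) differs from 1.0 by far less than 1e-14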
private void OnNameValueListRead(IDictionary<string, string> list)
{
    NameValueListCount += 1;

    // strings are UTF-16, so each character costs 2 bytes
    var size = list.Sum(kvp => kvp.Key.Length * 2 + kvp.Value.Length * 2);
    NameValueListTotalSize += size;
    NameValueListLargest = Math.Max(NameValueListLargest, size);
}
/// <summary>
/// Writes total captured rowcounts to log and graphite
/// </summary>
private void RecordRowCounts(IDictionary<string, long> changesCaptured)
{
    long total = changesCaptured.Sum(x => x.Value);
    logger.Log("Total rowcount across all tables: " + total, LogLevel.Info);

    string key = string.Format("db.mssql_changetracking_counters.RowCountsMaster.{0}.{1}",
        Config.Master.Replace('.', '_'), Config.MasterDB);
    logger.Increment(key, total);
}
public static double CalculateStrategicEstimation(IDictionary<Aspect, double> theMostValuableGlobalPrioritiesOfAspects)
{
    double sum = theMostValuableGlobalPrioritiesOfAspects.Sum(x => x.Value);

    // weighted sum of each aspect's intensity, normalized to [0, 1] within its lower/upper bounds
    return theMostValuableGlobalPrioritiesOfAspects
        .Select(aspect => aspect.Value / sum
            * (aspect.Key.Intensity.NominalValue - aspect.Key.Intensity.LowerBound)
            / (aspect.Key.Intensity.UpperBound - aspect.Key.Intensity.LowerBound))
        .Sum();
}
private async Task Commit()
{
    Guid eventId;
    EventIds.TryRemove(CommitId, out eventId);

    var headers = new Dictionary<string, string>
    {
        [CommitHeader] = CommitId.ToString(),
        [TerminatingEventIdHeader] = eventId.ToString()
        // Todo: what else can we put in here?
    };

    var allRepos = _repositories.Values.Concat(_entityRepositories.Values).Concat(_pocoRepositories.Values).ToArray();

    var changedStreams = _repositories.Sum(x => x.Value.ChangedStreams) + _entityRepositories.Sum(x => x.Value.ChangedStreams);

    Logger.Write(LogLevel.Debug, () => $"Detected {changedStreams} changed streams in commit {CommitId}");

    if (changedStreams > 1)
    {
        Logger.Write(LogLevel.Info, () => $"Starting prepare for commit id {CommitId} with {_repositories.Count + _entityRepositories.Count + _pocoRepositories.Count} tracked repositories");

        using (PrepareTime.NewContext())
        {
            // First check all streams read but not modified - if the store has a different version a VersionException will be thrown
            await allRepos.SelectAsync(x => x.Prepare(CommitId)).ConfigureAwait(false);
        }
    }

    // this log message can be expensive as the list is computed for a check
    // so only warn users about multiple stream commits when debugging
    Logger.Write(LogLevel.Debug, () =>
    {
        var orderedRepos = _repositories.Select(x => new Tuple<int, IRepository>(x.Value.ChangedStreams, x.Value))
            .Concat(_entityRepositories.Select(x => new Tuple<int, IRepository>(x.Value.ChangedStreams, x.Value)));

        if (orderedRepos.Count(x => x.Item1 != 0) > 1)
        {
            return $"Starting commit id {CommitId} with {_repositories.Count + _entityRepositories.Count + _pocoRepositories.Count} tracked repositories. You changed {orderedRepos.Sum(x => x.Item1)} streams. We highly discourage this https://github.com/volak/Aggregates.NET/wiki/Changing-Multiple-Streams";
        }

        return $"Starting commit id {CommitId} with {_repositories.Count + _entityRepositories.Count + _pocoRepositories.Count} tracked repositories";
    });

    using (var ctx = CommitTime.NewContext())
    {
        await allRepos.SelectAsync(x => x.Commit(CommitId, headers)).ConfigureAwait(false);

        if (ctx.Elapsed > TimeSpan.FromSeconds(1))
        {
            SlowLogger.Write(LogLevel.Warn, () => $"Commit id {CommitId} took {ctx.Elapsed.TotalSeconds} seconds!");
        }
        Logger.Write(LogLevel.Info, () => $"Commit id {CommitId} took {ctx.Elapsed.TotalMilliseconds} ms");
    }
}
public RosterAssignmentAnalyzer(IDictionary<PositionAbbreviation, RosterPosition> rosterPositions, ICollection<Player> availablePlayers)
{
    if (availablePlayers.Count > rosterPositions.Sum(rp => rp.Value.Count))
    {
        throw new ArgumentException("There are more players than roster positions available.");
    }

    _rosterPositions = rosterPositions;
    _availablePlayers = availablePlayers;
}
public float ComputeCompletionPercent()
{
    IDictionary<string, float> status = this.ComputeCompletionStatus();
    if (status.Count == 0)
    {
        return 0f;
    }

    return status.Sum(kv => kv.Value) / (float)status.Count;
}
public static double GetAverageValue(this IDictionary<long, uint> data)
{
    if (data == null)
    {
        return 0;
    }

    ulong sampleCount = Convert.ToUInt64(data.Sum(sample => sample.Value));
    return GetAverageValue(data, sampleCount);
}
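// Hypothetical usage sketch for GetAverageValue (assumptions: the containing extension class is in scope,
// and the two-argument overload, not shown here, computes the weighted mean sum(key * count) / sampleCount
// over the histogram). The dictionary maps an observed value to how many times it was seen:
var latencyHistogramMs = new Dictionary<long, uint>
{
    [10] = 5,   // 10 ms observed 5 times
    [20] = 3,   // 20 ms observed 3 times
    [100] = 2,  // 100 ms observed 2 times
};
double average = latencyHistogramMs.GetAverageValue(); // (10*5 + 20*3 + 100*2) / 10 = 31.0 under that assumption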
private static void AnswerResultChanged(Guid answerId, QuestionViewModel shownQuestion, IDictionary<Guid, int> votesMutable)
{
    var changedAnswer = shownQuestion.Answers.Single(a => a.Id == answerId);
    var currentVotes = ++votesMutable[changedAnswer.Id];
    var allVotes = votesMutable.Sum(v => v.Value);

    var message = $"\rNew vote for: {changedAnswer.Text}. Now: {GetPercentage(currentVotes, allVotes)} ({currentVotes} votes)"
        .PadRight(Console.WindowWidth);
    Console.Write(message);
}
private TimeSpan CalculateTime()
{
    long duracao = 0;
    duracao += _durationHistory.Sum(_ => _.Value);

    if (_startTime.HasValue)
    {
        duracao += (DateTime.Now - _startTime.Value).Ticks;
    }

    return new TimeSpan(duracao);
}
public double Value(IDictionary<string, double> p1, IDictionary<string, double> p2)
{
    double dot = 0;
    string[] keys = p2.Keys.ToArray();

    foreach (string key in keys)
    {
        if (p1.ContainsKey(key))
        {
            dot += Math.Sqrt(p1[key] * p2[key]);
        }
    }

    // Normalize by both totals (the parentheses matter: without them the second square root
    // would multiply the result instead of dividing it).
    return -dot / (Math.Sqrt(p1.Sum(c => c.Value)) * Math.Sqrt(p2.Sum(c => c.Value)));
}
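// Hypothetical usage sketch for Value (assumptions: "kernel" is an instance of the class defining Value
// above, and the maps hold non-negative weights such as word counts). The loop accumulates
// sum(sqrt(p1[k] * p2[k])) over shared keys, so more similar inputs give a more negative result.
var docA = new Dictionary<string, double> { ["cat"] = 2, ["dog"] = 1 };
var docB = new Dictionary<string, double> { ["cat"] = 1, ["fish"] = 3 };
double score = kernel.Value(docA, docB);
// shared key "cat": sqrt(2 * 1) ~ 1.414; normalization: sqrt(3) * sqrt(4) ~ 3.464; score ~ -0.408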
private void UpdateProgress()
{
    int translatedCount = _translationItems.Sum(p => p.Value.Count(translateItem => !string.IsNullOrEmpty(translateItem.TranslatedValue)));
    int totalCount = _translationItems.Count;

    var progressMsg = string.Format(_translateProgressText.Text, translatedCount, totalCount);
    if (translateProgress.Text != progressMsg)
    {
        translateProgress.Text = progressMsg;
        toolStrip1.Refresh();
    }
}
private List<IDataItem> MergeUnsavedItems(IDictionary<string, IList<IDataItem>> unsavedItems)
{
    int totalCount = unsavedItems.Sum(p => p.Value.Count);
    List<IDataItem> retval = new List<IDataItem>(totalCount);

    foreach (var values in unsavedItems.Values)
    {
        retval.AddRange(values);
    }

    retval.Sort(new Comparison<IDataItem>((x, y) => x.DateTime.CompareTo(y.DateTime)));
    return retval;
}
static void GenerateReport(IDictionary<Uri, long> dump, IDictionary<string, long> errors)
{
    var totalCount = dump.Sum(pair => pair.Value);
    var distinctCount = dump.Count;
    var errorCount = errors.Sum(pair => pair.Value);

    var top10 = dump.OrderByDescending(pair => pair.Value).Take(10).ToList();
    var top10Errors = errors.OrderByDescending(pair => pair.Value).Take(10).ToList();

    Console.Clear();
    Console.WriteLine($"Total Count:\t\t{totalCount}");
    Console.WriteLine($"Distinct Count:\t\t{distinctCount}");
    Console.WriteLine($"Error Count:\t\t{errorCount}");
    Console.WriteLine();
    Console.WriteLine("Top 10:");
    top10.ForEach(pair => Console.WriteLine($"  ({pair.Value}) {pair.Key}"));
    Console.WriteLine();
    Console.WriteLine("Top 10 Errors:");
    top10Errors.ForEach(pair => Console.WriteLine($"  ({pair.Value}) {pair.Key}"));
}
// ReSharper restore UnusedMember.Local
public Genie(IDictionary<string, AnswerStatistic> answerStatistics, int answeringChoicesCount)
{
    this.answerStatistics = answerStatistics;
    this.answeringChoicesCount = answeringChoicesCount;

    answersGuessedCount = answerStatistics.Sum(s => s.Value.AnswerCount);

    questionStatistics = answerStatistics.SelectMany(s => s.Value.AnsweredQuestionsById)
        .GroupBy(p => p.Key)
        .ToDictionary(
            g => g.Key,
            g => new QuestionStatistic
            {
                ChoicesFrequencies = g.Aggregate(new int[answeringChoicesCount], (curr, p) =>
                {
                    for (int i = 0; i < answeringChoicesCount; i++)
                    {
                        curr[i] += p.Value.ChoicesFrequencies[i];
                    }

                    return curr;
                })
            });
}
private Dictionary<Allele, double> getAlleleFreqs(IDictionary<Genotype, int> genotypeCounts)
{
    // Get the total count of Alleles
    long totalPop = genotypeCounts.Sum(pair => (long)pair.Value);
    long totalAlleles = 2 * totalPop;

    // Get the count of each Allele (twice in homozygotes, once in heterozygotes)
    Dictionary<Allele, long> alleleCounts = new Dictionary<Allele, long>();
    Genotype[] genotypes = genotypeCounts.Keys.ToArray();

    foreach (Genotype g in genotypes)
    {
        int count = genotypeCounts[g];

        // Increment the first Allele's count
        if (alleleCounts.ContainsKey(g.Allele1))
            alleleCounts[g.Allele1] += count;
        else
            alleleCounts.Add(g.Allele1, count);

        // Increment the second Allele's count
        if (alleleCounts.ContainsKey(g.Allele2))
            alleleCounts[g.Allele2] += count;
        else
            alleleCounts.Add(g.Allele2, count);
    }

    // Divide by total to get Allele frequencies
    Dictionary<Allele, double> alleleFreqs = alleleCounts.ToDictionary(
        pair => pair.Key,
        pair => (double)pair.Value / (double)totalAlleles);

    return alleleFreqs;
}
private int GetWordCount(IDictionary<string, int> words)
{
    return words.Sum(pair => pair.Value);
}
private void WriteHeader(StreamWriter sw, IDictionary<File, string> files, TypeDefinition[] types)
{
    var total = files.Sum(x => x.Key.Annotations.Length);

    sw.WriteLine("<html>");
    sw.WriteLine("<head>");
    sw.WriteLine("<meta charset='utf-8' />");
    sw.WriteLine("<title>Warning Report</title>");
    sw.WriteLine("<link rel='stylesheet' type='text/css' href='report.css' />");
    sw.WriteLine("</head><body><div class='container'>");
    sw.WriteLine("<h1>Summary</h1>");
    sw.WriteLine("<table class='overview'>");
    sw.WriteLine("<colgroup>");
    sw.WriteLine("<col width='160' />");
    sw.WriteLine("<col />");
    sw.WriteLine("</colgroup>");
    sw.WriteLine("<tbody>");
    sw.WriteLine("<tr><th>Generated on:</th><td>" + DateTime.Now.ToShortDateString() + " " + DateTime.Now.ToShortTimeString() + "</td></tr>");

    WriteStats(sw, files, types, total);

    sw.WriteLine("</tbody>");
    sw.WriteLine("</table>");
    sw.WriteLine("<h1>Files</h1>");
}