/// <summary>
/// Builds a map marker for a group of stops: placed at the centroid of their
/// coordinates, using the given icon, titled after the first stop, and
/// anchored at its centre.
/// </summary>
private MarkerOptions CreateStopMarker(BitmapDescriptor bitmapDescriptor, params Stop[] stops)
{
    Stop first = stops[0];
    var centroid = new LatLng(
        stops.Average(s => s.Position.Latitude),
        stops.Average(s => s.Position.Longitude));
    return new MarkerOptions()
           .SetPosition(centroid)
           .SetIcon(bitmapDescriptor)
           .SetTitle(first.Name)
           .Anchor(0.5f, 0.5f);
}
// Persists destination statistics to the backing store, pruning entries whose
// LastSeen lags the average LastSeen by more than `twoweeks`.
// NOTE(review): the early return below skips the Stop()/Log at the bottom.
public void Save()
{
    var sw2 = new Stopwatch();
    sw2.Start();
    var deleted = 0;
    using (var s = GetStore())
    {
        lock ( Destinations )
        {
            // Nothing to persist.
            if (!Destinations.Any())
            {
                return;
            }
            // Mean LastSeen across all destinations, as a raw numeric value.
            // NOTE(review): units depend on LastSeen's representation; `twoweeks`
            // must be expressed in the same units — confirm.
            var avgageage = Destinations.Average(ds => (double)(ulong)ds.Value.LastSeen);
            // Snapshot via ToArray so entries can be removed while iterating.
            foreach (var one in Destinations.ToArray())
            {
                if (avgageage - (ulong)one.Value.LastSeen > twoweeks)
                {
                    // Stale destination: delete its stored record (if any) and drop it.
                    if (one.Value.StoreIx > 0)
                    {
                        s.Delete(one.Value.StoreIx);
                        one.Value.StoreIx = -1;
                    }
                    Destinations.Remove(one.Key);
                    ++deleted;
                    continue;
                }
                // Serialize; update in place when the entry already has a store
                // index, otherwise append and remember the new index.
                var rec = new BufLen[] { (BufLen)(int)StoreRecordId.DestinationStatistics, new BufLen(one.Value.ToByteArray()) };
                if (one.Value.StoreIx > 0)
                {
                    s.Write(rec, one.Value.StoreIx);
                }
                else
                {
                    one.Value.StoreIx = s.Write(rec);
                }
            }
        }
    }
    sw2.Stop();
    DebugUtils.Log("Statistics save: " + sw2.Elapsed.ToString() + ", " + deleted.ToString() + " deleted.");
}
/// <summary>
/// Birthday-paradox experiment: for 1000 trials, draws random 30-bit numbers
/// and counts how many unique Crockford-Base32 encodings occur before the
/// first duplicate, then prints average/min/max/median across trials.
/// </summary>
static void Main(string[] args)
{
    var r = RandomProvider.GetThreadRandom();
    var resultDictionary = new Dictionary <int, int>();
    // Hoisted out of the loops: the encoder is stateless per call.
    var encoding = new CrockfordBase32Encoding();
    for (int x = 1; x <= 1000; x++)
    {
        var dictionary = new Dictionary <string, int>();
        // Draw until the first duplicate encoding appears.
        // FIX: the original relied on Dictionary.Add throwing and a broad
        // catch for control flow; an explicit ContainsKey check is used instead.
        while (true)
        {
            int rand = r.Next(0, 1073741823);
            string encodedRand = encoding.Encode((ulong)rand, false);
            if (dictionary.ContainsKey(encodedRand))
            {
                break;
            }
            dictionary.Add(encodedRand, rand);
        }
        Console.WriteLine("{0} - {1}", x, dictionary.Count);
        resultDictionary.Add(x, dictionary.Count);
        // BUG FIX: the original had an extra `x++` here in addition to the
        // for-increment, so x advanced by 2 and only ~500 of the intended
        // 1000 trials ran (with gaps in the result keys).
    }
    Console.WriteLine();
    Console.WriteLine("Average Number Before Duplicate: " + resultDictionary.Average(x => x.Value));
    Console.WriteLine("Minimum Number Before Duplicate: " + resultDictionary.Min(x => x.Value));
    Console.WriteLine("Maximum Number Before Duplicate: " + resultDictionary.Max(x => x.Value));
    Console.WriteLine(" Median Number Before Duplicate: " + resultDictionary.Select(x => x.Value).Median());
    Console.ReadLine();
}
/// <summary>
/// Loads the corpus statistics via the overload and the Loader helpers, then
/// prints summary counts and per-user fid statistics to the console.
/// </summary>
private static void GetBasicStat()
{
    var userToFidCount = new Dictionary <string, int>();
    var fidToUserCount = new Dictionary <string, int>();
    GetBasicStat(userToFidCount, fidToUserCount);

    var wordToDocCount = Loader.LoadWord2Doccnt();
    var urlDepth0 = Loader.LoadUrl2factcnt00();
    var urlDepth1 = Loader.LoadUrl2factcnt01();
    var urlDepth2 = Loader.LoadUrl2factcnt02();
    var urlDepth3 = Loader.LoadUrl2factcnt03();

    Console.WriteLine("Total User Cnt: {0}", userToFidCount.Count);
    Console.WriteLine("Total Fid Cnt: {0}", fidToUserCount.Count);
    Console.WriteLine("Max fidcnt per user: {0}", userToFidCount.Max(a => a.Value));
    Console.WriteLine("Min fidcnt per user: {0}", userToFidCount.Min(a => a.Value));
    Console.WriteLine("Avg fidcnt per user: {0}", userToFidCount.Average(a => a.Value));
    Console.WriteLine("Word Cnt : {0}", wordToDocCount.Count);
    Console.WriteLine("URL dep0 cnt : {0}", urlDepth0.Count);
    Console.WriteLine("URL dep1 cnt : {0}", urlDepth1.Count);
    Console.WriteLine("URL dep2 cnt : {0}", urlDepth2.Count);
    Console.WriteLine("URL dep3 cnt : {0}", urlDepth3.Count);
}
// Starts an async, chunked download of `url` into the configured download
// directory, wiring per-chunk progress updates into the UI controls.
// NOTE(review): overwrites the instance fields fileDownloader and
// cancellationSource — concurrent calls would clobber each other; confirm
// this is only invoked for one download at a time.
private async Task DownloadFile(string url, ProgressBar prgx, string fileName, CookieContainer cookieContainer = null)
{
    fileDownloader = new AsyncFileDownloader();
    fileDownloader.Progress = new Progress <DownloadProgress>((progress) =>
    {
        // Remember the latest percentage reported for each chunk.
        if (ChunkProgress.ContainsKey(progress.ChunkIndex))
        {
            ChunkProgress[progress.ChunkIndex] = progress.Percentage;
        }
        else
        {
            ChunkProgress.Add(progress.ChunkIndex, progress.Percentage);
        }
        // Overall progress is the mean of all chunk percentages; the bar only
        // ever moves forward.
        int combinedProgress = (int)ChunkProgress.Average(x => x.Value);
        if (combinedProgress > prgx.Value)
        {
            prgx.Value = combinedProgress;
            lblProgressPercentage.Text = prgx.Value + " %";
            lblCurrentStatus.Text = "Downloading...";
        }
    });
    cancellationSource = new CancellationTokenSource();
    fileDownloader.CancellationToken = cancellationSource.Token;
    fileDownloader.CookieContainer = cookieContainer;
    fileDownloader.DownloadDirectory = ApplicationConfiguration.DownloadConfiguration.DownloadDirectory;
    fileDownloader.ParallelDownloads = ApplicationConfiguration.DownloadConfiguration.MaxThreads;
    // Sanitize the target file name before handing it to the downloader.
    fileDownloader.FileName = RemoveIllegalCharactersFromFilename(fileName);
    await fileDownloader.DownloadFileAsync(url);
    PostDownloadUI();
}
/// <summary>
/// Applies BM25 weighting to interaction rows: each row's confidence is
/// scaled by the item's inverse document frequency and normalised by the
/// user's interaction count relative to the average.
/// The returned sequence is lazily evaluated.
/// </summary>
/// <param name="data">Interaction rows; must not be null.</param>
/// <param name="K1">BM25 saturation parameter.</param>
/// <param name="B">BM25 length-normalisation parameter.</param>
public static IEnumerable <DataRow> BM25(IEnumerable <DataRow> data, int K1 = 100, double B = 0.5)
{
    if (data == null)
    {
        throw new ArgumentNullException(nameof(data));
    }

    // Tally how many rows each user and each item appears in.
    var userCounts = new Dictionary <string, int>();
    var itemCounts = new Dictionary <string, int>();
    foreach (var row in data)
    {
        userCounts.TryGetValue(row.UserId, out var byUser);
        userCounts[row.UserId] = byUser + 1;
        itemCounts.TryGetValue(row.ItemId, out var byItem);
        itemCounts[row.ItemId] = byItem + 1;
    }

    var totalUsers = userCounts.Count;
    var meanUserLength = userCounts.Average(kv => kv.Value);

    return data.Select(row =>
    {
        var idf = Math.Log(totalUsers) - Math.Log(1 + itemCounts[row.ItemId]);
        var lengthNorm = 1.0 - B + (B * userCounts[row.UserId] / meanUserLength);
        var weighted = row.Confidence * (K1 + 1.0) / ((K1 * lengthNorm) + row.Confidence) * idf;
        return new DataRow(row.UserId, row.ItemId, weighted);
    });
}
/// <summary>
/// Applies BM25 weighting to a user → (item → confidence) map. Each
/// confidence is scaled by the item's inverse document frequency and
/// normalised by the user's item count relative to the average.
/// </summary>
/// <param name="data">User to item-confidence map; must not be null.</param>
/// <param name="K1">BM25 saturation parameter.</param>
/// <param name="B">BM25 length-normalisation parameter.</param>
public static Dictionary <string, Dictionary <string, double> > BM25(Dictionary <string, Dictionary <string, double> > data, int K1 = 100, double B = 0.5)
{
    if (data == null)
    {
        throw new ArgumentNullException(nameof(data));
    }

    var totalUsers = data.Keys.Count;

    // Count, per item, how many users interacted with it, then derive IDF.
    var itemUserCounts = new Dictionary <string, int>();
    foreach (var user in data)
    {
        foreach (var item in user.Value)
        {
            itemUserCounts.TryGetValue(item.Key, out var seen);
            itemUserCounts[item.Key] = seen + 1;
        }
    }
    var idf = itemUserCounts.ToDictionary(
        kv => kv.Key,
        kv => Math.Log(totalUsers) - Math.Log(1 + kv.Value));

    var averageLength = data.Average(kv => kv.Value.Count);

    var weighted = new Dictionary <string, Dictionary <string, double> >();
    foreach (var user in data)
    {
        // Users with no items produce no rows (matches the original's
        // SelectMany behaviour, which omitted them from the result).
        if (user.Value.Count == 0)
        {
            continue;
        }
        var lengthNorm = 1.0 - B + (B * user.Value.Count / averageLength);
        var scores = new Dictionary <string, double>();
        foreach (var item in user.Value)
        {
            scores[item.Key] = item.Value * (K1 + 1.0) / ((K1 * lengthNorm) + item.Value) * idf[item.Key];
        }
        weighted[user.Key] = scores;
    }
    return weighted;
}
/// <summary>
/// Dumps the flower dictionary to the console: every entry, the average and
/// total of the values, then the entries whose key is at most 5 characters.
/// </summary>
public void GetDictItem()
{
    // Output all
    foreach (var entry in flowerDict)
    {
        Console.WriteLine("{0} = {1}", entry.Key, entry.Value);
    }
    Console.WriteLine();

    // Calculate the average
    Console.WriteLine(flowerDict.Average(x => x.Value));
    Console.WriteLine();

    // Calculate the total
    Console.WriteLine(flowerDict.Sum(x => x.Value));
    Console.WriteLine();

    // Outputs a Key of 5 characters or less
    var shortKeyed = flowerDict.Where(x => x.Key.Length <= 5);
    foreach (var entry in shortKeyed)
    {
        Console.WriteLine("{0} = {1}", entry.Key, entry.Value);
    }
    Console.WriteLine();
}
/// <summary>
/// Averages each team's season score results and returns both, rounded to
/// the nearest integer, as a named tuple.
/// </summary>
private (int team1Score, int team2Score) CalculateFinalScores()
{
    var team1Average = _team1SeasonScoreResults.Average(x => x.Value);
    var team2Average = _team2SeasonScoreResults.Average(x => x.Value);
    return (Convert.ToInt32(team1Average), Convert.ToInt32(team2Average));
}
// Aggregates document counts per IP via a "state" terms aggregation on the
// "relation" index, returning the full IP -> count distribution.
public Dictionary <String, long> GetIPTorrentDistribution()
{
    Dictionary <String, long> categories = new Dictionary <string, long>();
    // Terms aggregation over "IPs"; Size = int.MaxValue so no bucket is dropped.
    var request = new SearchRequest("relation", "doc")
    {
        Source = false,
        Aggregations = new TermsAggregation("state")
        {
            Size = 2147483647,
            Field = "IPs",
            Order = new List <TermsOrder> { TermsOrder.KeyAscending }
        }
    };
    var result = client.Search <RelationModel>(request);
    var aggs = result.Aggregations.Terms("state");
    foreach (var bucket in aggs.Buckets)
    {
        categories.Add(bucket.Key, bucket.DocCount.Value);
    }
    // Try to limit nr of points
    // NOTE(review): the above-average subset below is computed but never
    // returned — dead work until the TODO is decided.
    double avg = categories.Average(r => r.Value);
    IEnumerable <KeyValuePair <String, long> > x = categories.Where(r => r.Value > avg);
    Dictionary <String, long> simplifiedCategory = x.ToDictionary(aa => aa.Key, a => a.Value);
    return(categories); //or simplifiedCategory TODO: Decide here
}
/// <summary>
/// Picks a victim for <paramref name="rapist"/> on map <paramref name="m"/>:
/// gathers hostile, reachable, valid targets scoring above the minimum
/// fuckability, then returns a random one from those at or above the group
/// average. Returns null when preconditions fail or no suitable target exists.
/// </summary>
public virtual Pawn FindVictim(Pawn rapist, Map m)
{
    //Log.Message("[RJW]" + this.GetType().ToString() + "::TryGiveJob( " + xxx.get_pawnname(rapist) + " ) map " + m?.ToString());
    if (rapist == null || m == null)
    {
        return(null);
    }
    //Log.Message("[RJW]" + this.GetType().ToString() + "::TryGiveJob( " + xxx.get_pawnname(rapist) + " ) can rape " + xxx.can_rape(rapist));
    if (requireCanRape && !xxx.can_rape(rapist))
    {
        return(null);
    }
    // FIX: removed the unused local `List<Pawn> validTargets` (never read or written).
    float min_fuckability = 0.10f;                     // Don't rape pawns with <10% fuckability
    float avg_fuckability = 0f;                        // Average targets fuckability, choose target higher than that
    var valid_targets = new Dictionary <Pawn, float>(); // Valid pawns and their fuckability
    Pawn chosentarget = null;                          // Final target pawn

    // Hostile, unforbidden pawns that can be reserved/reached and pass IsValidTarget.
    IEnumerable <Pawn> targets = m.mapPawns.AllPawnsSpawned.Where(x =>
        !x.IsForbidden(rapist) &&
        x != rapist &&
        x.HostileTo(rapist) &&
        rapist.CanReserveAndReach(x, PathEndMode.Touch, Danger.Some, xxx.max_rapists_per_prisoner, 0) &&
        IsValidTarget(rapist, x))
        .ToList();

    // Abort entirely if any candidate is currently blocking the rapist.
    if (targets.Any(x => IsBlocking(rapist, x)))
    {
        return(null);
    }

    foreach (var target in targets)
    {
        if (!xxx.can_path_to_target(rapist, target.Position))
        {
            continue; // too far
        }
        float fuc = GetFuckability(rapist, target);
        if (fuc > min_fuckability)
        {
            valid_targets.Add(target, fuc);
        }
    }

    if (valid_targets.Any())
    {
        avg_fuckability = valid_targets.Average(x => x.Value);
        // choose pawns with above-average fuckability
        var valid_targetsFiltered = valid_targets.Where(x => x.Value >= avg_fuckability);
        if (valid_targetsFiltered.Any())
        {
            chosentarget = valid_targetsFiltered.RandomElement().Key;
        }
    }
    return(chosentarget);
}
/// <summary>
/// Recomputes the running averages from the history buffer (clearing the
/// buffer once it exceeds 1000 samples) and raises the show event.
/// </summary>
public void Show()
{
    lock (safeGard)
    {
        recordCount = history.Count;
        // Bound memory: drop the accumulated history once it grows too large.
        if (recordCount > 1000)
        {
            history.Clear();
        }
        // BUG FIX: the original called Average unconditionally; after the
        // Clear() above (or before any sample arrives) history is empty and
        // Enumerable.Average throws InvalidOperationException. Keep the
        // previous averages when there is nothing to average.
        if (history.Count > 0)
        {
            AvarageData.humidety = history.Average(x => x.Value.humidety);
            AvarageData.presur = history.Average(x => x.Value.presur);
            AvarageData.temp = history.Average(x => x.Value.temp);
        }
        ShowEvent.Invoke();
    }
}
/// <summary>
/// Returns the rounded mean of the members' player ranks.
/// </summary>
public int AverageSR()
{
    if (Members.Count == 0)
    {
        // NOTE(review): NotImplementedException is an odd choice for an empty
        // roster, but callers may catch it, so it is preserved.
        throw new NotImplementedException();
    }
    double meanRank = Members.Average(x => x.Key.Player_Rank);
    return Convert.ToInt32(meanRank);
}
/// <summary>
/// Calculates the predicted overload date-time.
/// </summary>
/// <param name="alignmentCoefficient">Coefficient for levelling client load.</param>
/// <returns>The predicted overload date-time.</returns>
private DateTime СalculationСriticalTimeRead(double alignmentCoefficient)
{
    var now = DateTime.UtcNow.ToLocalTime();
    // Mean read time across all clients, scaled by the alignment coefficient.
    double meanReadSeconds = ClientDictionary.Average(GetTimeReading);
    double secondsUntilOverload = meanReadSeconds * alignmentCoefficient;
    return now.AddSeconds(secondsUntilOverload);
}
/// <summary>
/// Refreshes the displayed data after a picture load, an average-button
/// press, or a column-header sort: updates the average label, rebinds the
/// grid, and restores header texts and the hex cell format.
/// </summary>
private void ShowDictionary()
{
    freqAvg = (int)myDicTable.Average(x => x.Value);
    Avgbutton.Text = $"Average : {freqAvg}";

    var rows = myDicTable.ToList();
    _bs.DataSource = rows;

    DGV.Columns[0].HeaderText = "Key";
    DGV.Columns[1].HeaderText = "Value";
    // Keys render as two-digit hex.
    DGV.Columns[0].DefaultCellStyle.Format = "X2";
}
/// <summary>
/// Builds aggregate statistics for all scoreboard rows whose player name
/// contains <paramref name="name"/> (SQL LIKE match). Returns null when the
/// player has no recorded matches.
/// </summary>
public async Task <PlayerStats> GetPlayerStats(string name)
{
    var result = new PlayerStats();
    var totalKills = 0;
    var totalDeath = 0;
    var totalRating = 0F;
    // Frequency tables for the "favourite X" aggregations (capacities are hints).
    var favServer = new Dictionary <string, int>(10000);
    var favGameModes = new Dictionary <string, int>(10);
    var favMaps = new Dictionary <string, int>(10000);
    var daysStats = new Dictionary <DateTime, int>(10000);
    // Join scoreboard rows to their matches, projecting only the fields used below.
    var query = db.Scoreboards.Where(x => EF.Functions.Like(x.Name, $"%{name}%"))
                .Join(db.Matches, x => x.Match.Id, y => y.Id,
                      (x, y) => new
    {
        MatchEndpoint = y.EndpointId,
        MatchTimestamp = y.Timestamp,
        MatchMap = y.Map,
        MatchGameMode = y.GameMode,
        PlayerRating = x.Rating,
        PlayersKills = x.Kills,
        PlayerDeaths = x.Deaths
    });
    await query.ForEachAsync(x =>
    {
        result.totalMatchesPlayed++;
        // A rating of exactly 1 marks a won match.
        if (x.PlayerRating == 1F)
        {
            result.totalMatchesWon++;
        }
        totalKills += x.PlayersKills;
        totalDeath += x.PlayerDeaths;
        totalRating += x.PlayerRating;
        if (x.MatchTimestamp > result.lastMatchPlayed)
        {
            result.lastMatchPlayed = x.MatchTimestamp;
        }
        favServer.CollectGroupStats(x.MatchEndpoint);
        favMaps.CollectGroupStats(x.MatchMap);
        favGameModes.CollectGroupStats(x.MatchGameMode);
        daysStats.CollectGroupStats(x.MatchTimestamp.Date);
    });
    if (result.totalMatchesPlayed == 0)
    {
        return(null);
    }
    // Most frequent entry wins; ties resolve to the earlier-seen key.
    // NOTE(review): favMaps is collected but never read — a favoriteMap field
    // may be missing from the result; confirm.
    result.favoriteServer = favServer.Aggregate((r, x) => x.Value > r.Value ? x : r).Key;
    result.uniqueServers = favServer.Count();
    result.favoriteGameMode = favGameModes.Aggregate((r, x) => x.Value > r.Value ? x : r).Key;
    result.maximumMatchesPerDay = daysStats.Max(x => x.Value);
    result.averageMatchesPerDay = (float)daysStats.Average(x => x.Value);
    result.averageScoreboardPercent = totalRating / result.totalMatchesPlayed;
    // NOTE(review): totalKills / totalDeath is INTEGER division — the fraction
    // is truncated; confirm whether killToDeathRatio should be fractional.
    result.killToDeathRatio = totalDeath > 0 ? totalKills / totalDeath : 1;
    return(result);
}
/// <summary>
/// Derives the final league table from the simulated matches: accumulates
/// won/drawn/lost and goal tallies per team, then orders by points, goal
/// difference, goals for, and finally team name (ascending) as tiebreakers.
/// </summary>
private IReadOnlyList <TablePlacing> CalculateTablePlacings()
{
    var tablePlacings = new Dictionary <string, SettableTablePlacing>();
    foreach (var simulatedMatch in this.Matches)
    {
        // Lazily create a row for each team on first sighting.
        if (!tablePlacings.ContainsKey(simulatedMatch.HomeTeamName))
        {
            tablePlacings[simulatedMatch.HomeTeamName] = new SettableTablePlacing();
        }
        if (!tablePlacings.ContainsKey(simulatedMatch.AwayTeamName))
        {
            tablePlacings[simulatedMatch.AwayTeamName] = new SettableTablePlacing();
        }
        var homePlacing = tablePlacings[simulatedMatch.HomeTeamName];
        var awayPlacing = tablePlacings[simulatedMatch.AwayTeamName];
        switch (simulatedMatch.Score.Result)
        {
        case Result.HomeWin:
            homePlacing.Won++;
            awayPlacing.Lost++;
            break;

        case Result.Draw:
            homePlacing.Drawn++;
            awayPlacing.Drawn++;
            break;

        case Result.AwayWin:
            homePlacing.Lost++;
            awayPlacing.Won++;
            break;

        default:
            // Any unmapped Result value is a programming error.
            throw new ArgumentOutOfRangeException();
        }
        homePlacing.GoalsFor += simulatedMatch.Score.Home;
        homePlacing.GoalsAgainst += simulatedMatch.Score.Away;
        awayPlacing.GoalsFor += simulatedMatch.Score.Away;
        awayPlacing.GoalsAgainst += simulatedMatch.Score.Home;
    }
    // League-wide mean of goals scored, handed to every placing row.
    var averageGoalsScored = tablePlacings.Average(tp => tp.Value.GoalsFor);
    return(tablePlacings
           .OrderByDescending(kvp => kvp.Value.Points)
           .ThenByDescending(kvp => kvp.Value.GoalDifference)
           .ThenByDescending(kvp => kvp.Value.GoalsFor)
           .ThenBy(kvp => kvp.Key)
           .Select((kvp, pos) => kvp.Value.TablePlacing(pos + 1, kvp.Key, averageGoalsScored))
           .ToArray());
}
/// <summary>
/// Returns a JSON snapshot of the overall startup progress: a clone of the
/// current progress object whose Progress is the rounded mean across all
/// tracked startup progresses.
/// </summary>
public string GetSerializedProgress()
{
    var snapshot = (StartupProgress)progress.Clone();
    double meanProgress = startupProgresses.Average(pair => pair.Value.Progress);
    snapshot.Progress = Convert.ToInt32(meanProgress);
    return new JavaScriptSerializer().Serialize(snapshot);
}
/// <summary>
/// Updates manipulator state on each game update.
/// </summary>
public void Update()
{
    if (!Activated)
    {
        // Not yet active: activate once any touch has moved more than
        // 5 units away from its origin.
        foreach (var touch in touches)
        {
            var delta = touch.Value.NewLocation - touch.Value.Origin;
            if (delta.Length() > 5)
            {
                Activate();
                break;
            }
        }
    }
    else
    {
        // Active: derive pan and zoom deltas from the centroid of all touches.
        Vector2 oldCenter;
        Vector2 newCenter;
        oldCenter.X = touches.Average(p => p.Value.OldLocation.X);
        oldCenter.Y = touches.Average(p => p.Value.OldLocation.Y);
        newCenter.X = touches.Average(p => p.Value.NewLocation.X);
        newCenter.Y = touches.Average(p => p.Value.NewLocation.Y);
        // Mean per-touch scaling relative to the two centroids.
        var deltaScaling = touches.Average(p => p.Value.GetScaling(oldCenter, newCenter));
        var deltaTranlsation = newCenter - oldCenter;
        totalTranslation += deltaTranlsation;
        totalScaling *= deltaScaling;
        // Only notify (and rebase) when something actually changed this frame.
        if (deltaTranlsation.Length() > 0 || deltaScaling != 1)
        {
            //Log.Message("Delta: {0} {1}", delta.X, delta.Y );
            targetFrame.OnManipulationUpdate(totalTranslation, totalScaling, deltaTranlsation, deltaScaling);
            // Rebase each touch so next frame's deltas are measured from here.
            foreach (var touch in touches)
            {
                touch.Value.OldLocation = touch.Value.NewLocation;
            }
        }
    }
}
//static void Main(string[] args)
/// <summary>
/// Demo of dictionary queries over a small product-to-price map; all results
/// are written to the console.
/// </summary>
public void MethodProductDb()
{
    var map = new Dictionary <string, int>();
    map.Add("Eggs", 200);
    map.Add("Milk", 200);
    map.Add("Fish", 400);
    map.Add("Apples", 200);
    map.Add("Bread", 50);
    map.Add("chicken", 550);

    // How much is the fish?
    Console.WriteLine(map["Fish"]);
    Console.WriteLine("How much is the fish? " + map["Fish"]);
    int valueOfTheKey;
    map.TryGetValue("Fish", out valueOfTheKey);
    // BUG FIX: the original concatenated the bool returned by TryGetValue
    // (printing "True") instead of the looked-up value.
    Console.WriteLine("Value of the key Fish is : " + valueOfTheKey);

    // What is the most expensive product?
    var keyOfMaxValue = map.Aggregate((l, r) => l.Value > r.Value ? l : r).Key;
    Console.WriteLine("Max value has :" + keyOfMaxValue);

    // What is the average price?
    var averageValue = map.Average(t => t.Value);
    Console.WriteLine("Average value is " + averageValue);

    // How many products' price is below 300? (Count(predicate) instead of Where().Count())
    var productsUnder300 = map.Count(v => v.Value < 300);
    Console.WriteLine("How many products have value lower then 300 : " + productsUnder300);

    // Is there anything we can buy for exactly 125?
    // FirstOrDefault yields a default KeyValuePair (null Key) when no match exists.
    var productFor125 = map.FirstOrDefault(v => v.Value == 125).Key;
    if (productFor125 != null)
    {
        Console.WriteLine("For 125 you can have : " + productFor125);
    }
    else
    {
        Console.WriteLine("There is nothing for 125...");
    }

    // What is the cheapest product?
    var keyOfMinValue = map.Aggregate((l, r) => l.Value < r.Value ? l : r).Key;
    Console.WriteLine("Min value has :" + keyOfMinValue);
}
/// <summary>
/// Renders the global piece-availability heat rect in the top-right quarter
/// of the client area, normalising each piece's availability by node count.
/// </summary>
private void DrawAvailability(Graphics g)
{
    var availability = new Dictionary<int, float> ();
    foreach (var pair in _NodeManager.GlobalAvailability ().PieceAvailability)
    {
        availability[pair.Key] = (float) pair.Value / _NodeManager.Nodes.Count ();
    }

    var rectWidth = ClientRectangle.Width * 0.25f;
    var rectHeight = rectWidth / 5 * (float) Math.Sqrt (_NodeManager.Nodes.Count ());
    var pieceRect = new RectangleF (ClientRectangle.Width - rectWidth, 0, rectWidth, rectHeight);

    var meanPercent = 100f * availability.Average (x => x.Value);
    Node.DrawPieceRect (g, pieceRect, availability, meanPercent, true);
}
/// <summary>
/// Appends a settlement report to the output file: each person's balance
/// versus the group's average charge, with negatives shown as "($x)".
/// </summary>
public void PrintTrip()
{
    using (StreamWriter writer = new StreamWriter(newPathFile, true))
    {
        var mean = chargesPerPerson.Average(c => c.Value);
        // Positive balance: person is owed money; negative: person owes.
        var balances = chargesPerPerson.ToDictionary(c => c.Key, c => mean - c.Value);
        foreach (var balance in balances)
        {
            var format = balance.Value < 0 ? "{0} - (${1})" : "{0} - ${1}";
            var amount = Math.Round(balance.Value, 2).ToString().Replace("-", "");
            writer.WriteLine(format, balance.Key, amount);
        }
        writer.WriteLine("");
    }
}
/// <summary>
/// Prints each algorithm's result row (status, time, swap and compare
/// counts) followed by an aggregate average row.
/// </summary>
public void PrintToConsole()
{
    Console.WriteLine($" {_dataSize} elements:");
    foreach (var entry in _results)
    {
        var status = entry.Value.CorrectlySorted ? " OK " : " FAIL ";
        Console.WriteLine($" {entry.Key, 10}: " + status +
                          $"{entry.Value.Time.TotalSeconds:F8} s., " +
                          $"{entry.Value.SwapCount} swaps, " +
                          $"{entry.Value.CompareCount} compares.");
    }
    var meanSeconds = _results.Average(entry => entry.Value.Time.TotalSeconds);
    var meanSwaps = _results.Average(entry => entry.Value.SwapCount);
    var meanCompares = _results.Average(entry => entry.Value.CompareCount);
    Console.WriteLine($" {"Average:",10}: " + " " +
                      $"{meanSeconds:F8} s., " +
                      $"{meanSwaps} swaps, " +
                      $"{meanCompares} compares.");
    Console.WriteLine();
}
// Registers a character (diacritics stripped) in the frequency table, seeding
// its value with the average of the existing entries.
// NOTE(review): if `characters` is empty, Enumerable.Average throws
// InvalidOperationException on the very first Add — confirm the table is
// seeded elsewhere before this is called.
// NOTE(review): Dictionary.Add throws ArgumentException if the stripped
// character already exists — confirm duplicates cannot reach this point.
public void Add(Char character)
{
    characters.Add(
        removeDiacritics(character),
        characters.Average(c => c.Value)
        );
    // Authors persist the table, sorted by key, after every insertion.
    if (Config.IsAuthor)
    {
        structPath.Write(
            characters.OrderBy(p => p.Key)
            .ToDictionary(p => p.Key, p => p.Value)
            );
    }
}
// Inserts a leaf keyed by its hash code, evicting low-attempt leaves when the
// map is "full".
// NOTE(review): the capacity check only fires at Int32.MaxValue entries, which
// is unreachable in practice (memory exhausts first) — confirm whether a much
// smaller threshold was intended.
public void AddLeaf(Leaf leaf)
{
    int key = leaf.GetHashCode();
    if (_leafs.Count == Int32.MaxValue)
    {
        // Evict every leaf with below-average attempts, sparing the incoming key.
        // ToList() materialises the victims so removal doesn't invalidate the query.
        double avg = _leafs.Average(x => x.Value.Attempts);
        foreach (var toDelete in _leafs.Where(x => x.Value.Attempts < avg && x.Value.GetHashCode() != key).Select(x => x.Value).ToList())
        {
            _leafs.Remove(toDelete.GetHashCode());
        }
    }
    // NOTE(review): Add throws ArgumentException when a leaf with the same
    // hash code is already stored — confirm callers guarantee uniqueness.
    _leafs.Add(key, leaf);
}
/// <summary>
/// Auto-classifies each colour as light/dark: colours at or above the mean
/// brightness map to true, the rest to false; then refreshes the grid and
/// the bitmap preview. No-op when the colour list is empty.
/// </summary>
private void buttonImageColorAuto_Click(object sender, EventArgs e)
{
    if (colorList.Count <= 0)
    {
        return;
    }
    float meanBrightness = colorList.Average(x => x.Key.GetBrightness());
    colorList = colorList.ToDictionary(p => p.Key, p => p.Key.GetBrightness() >= meanBrightness);
    dataGridViewBackgroundColor.Refresh();
    updateBitmapPreview();
}
/// <summary>
/// Finds a corpse target for the pawn: gathers reachable, unforbidden
/// corpses (fresh only, unless the pawn is a necrophile) scoring above the
/// minimum, then returns a random one from those at or above the average
/// score. Returns null when no suitable corpse exists.
/// </summary>
public static Corpse find_corpse(Pawn pawn, Map m)
{
    const float min_fuckability = 0.10f; // ignore corpses below 10% fuckability
    var candidates = new Dictionary <Corpse, float>(); // corpse -> score

    IEnumerable <Thing> things = m.spawnedThings.Where(x =>
        x is Corpse &&
        pawn.CanReserveAndReach(x, PathEndMode.OnCell, Danger.Some) &&
        !x.IsForbidden(pawn)
        );

    foreach (Corpse corpse in things)
    {
        if (!xxx.can_path_to_target(pawn, corpse.Position))
        {
            continue; // too far
        }
        // Filter out rotters if not necrophile.
        if (!xxx.is_necrophiliac(pawn) && corpse.CurRotDrawMode != RotDrawMode.Fresh)
        {
            continue;
        }
        float score = SexAppraiser.would_fuck(pawn, corpse, false, false);
        if (score > min_fuckability)
        {
            candidates.Add(corpse, score);
        }
    }

    if (!candidates.Any())
    {
        return null;
    }

    // Only pick among candidates at or above the average score.
    float threshold = candidates.Average(x => x.Value);
    var aboveAverage = candidates.Where(x => x.Value >= threshold);
    return aboveAverage.Any() ? aboveAverage.RandomElement().Key : null;
}
/// <summary>
/// Compresses a time series by dropping points that stay within a deviation
/// corridor (compressionWindow) of the line from the last kept point, while
/// never letting more than maximumMinutesBetweenPoints elapse between kept
/// points. Defaults: window = 0.5% of the mean value; max gap derived from
/// the series span. Returns an empty result for empty input.
/// </summary>
public static SortedDictionary <DateTime, double> Compress(Dictionary <DateTime, double> data, double?compressionWindow = null, int?maximumMinutesBetweenPoints = null)
{
    if (!data.Any())
    {
        return(new SortedDictionary <DateTime, double>());
    }
    maximumMinutesBetweenPoints = maximumMinutesBetweenPoints ?? GetMaximumMinutesForDataCompression(Math.Ceiling(data.Max(c => c.Key).Subtract(data.Min(c => c.Key)).TotalMinutes));
    var maximumTimeWindow = new TimeSpan(0, maximumMinutesBetweenPoints.Value, 0).Ticks;
    compressionWindow = compressionWindow ?? data.Average(c => c.Value) * 0.005;
    // PERF FIX: materialise the sort once. The original held an
    // IOrderedEnumerable and called ElementAt()/Count() on it inside the
    // loop, re-running the sort on every access (accidental O(n^2 log n)).
    var orderedData = data.OrderBy(c => c.Key).ToList();
    var compressedData = new SortedDictionary <DateTime, double>();
    var operationData = new SortedDictionary <DateTime, double>();
    // Always keep the first point.
    compressedData[orderedData[0].Key] = orderedData[0].Value;
    var lastProcessed = compressedData.Last();
    for (int i = 1; i < orderedData.Count; ++i)
    {
        var currentData = orderedData[i];
        if (currentData.Key.Ticks - lastProcessed.Key.Ticks > maximumTimeWindow)
        {
            // Gap too large: keep the previous point and restart the corridor.
            compressedData[orderedData[i - 1].Key] = orderedData[i - 1].Value;
            lastProcessed = orderedData[i - 1];
            operationData = new SortedDictionary <DateTime, double>();
        }
        else if (operationData.Any())
        {
            // If any pending point strays outside the corridor defined by the
            // last kept point and the current point, keep the previous point.
            foreach (KeyValuePair <DateTime, double> operationalPair in operationData)
            {
                if (GetDistance(lastProcessed, currentData, operationalPair) > compressionWindow.Value)
                {
                    compressedData[orderedData[i - 1].Key] = orderedData[i - 1].Value;
                    lastProcessed = orderedData[i - 1];
                    operationData = new SortedDictionary <DateTime, double>();
                    break;
                }
            }
        }
        operationData[currentData.Key] = currentData.Value;
    }
    return(compressedData);
}
/// <summary>
/// From a series of 10 m observations, selects the best-matching AMS
/// (automatic weather station) for lifting the speed to height, by comparing
/// relative monthly mean speeds.
/// </summary>
/// <param name="range">Observation series at the weather station (MS).</param>
/// <param name="coordinates">Coordinates of the series.</param>
/// <param name="MSMeteostations">AMS database.</param>
/// <param name="searchRadius">Distance for filtering AMS, in metres. If NaN, no distance filtering is performed.</param>
/// <param name="maximalRelativeSpeedDeviation">Maximum RMS deviation of relative speed; NaN to ignore.</param>
/// <returns>All candidate AMS with their deviations, or null when no AMS are available.</returns>
public static SuitAMSResult GetSuitAMS(RawRange range, PointLatLng coordinates, AMSMeteostationDatabase MSMeteostations, double searchRadius, double maximalRelativeSpeedDeviation)
{
    // Plan: compute the MS's relative monthly speeds; select candidate AMS;
    // compute each AMS's RMS deviation; track the minimum-deviation item.

    // Relative average monthly speeds at the MS; allMonth reports whether
    // every month is covered by the series.
    Dictionary <Months, double> msRelatives = getRelativeAverageMonthSpeeds(range, out bool allMonth);
    // All AMS when no radius is given, otherwise only those within searchRadius.
    List <AMSMeteostationInfo> amss = double.IsNaN(searchRadius) ? MSMeteostations.List : MSMeteostations.GetNearestMS(coordinates, searchRadius, true);
    if (amss == null)
    {
        return(null);
    }
    SuitAMSResult res = new SuitAMSResult();
    double minDev = double.MaxValue;
    SuitAMSResultItem minItem = null;
    foreach (AMSMeteostationInfo ams in amss)
    {
        SuitAMSResultItem item = new SuitAMSResultItem();
        // RMS over months: sqrt(mean((Kj_MS - Kj_AMS)^2)), j = month index.
        item.Deviation = Math.Sqrt(msRelatives.Average((kv) => { return(Math.Pow(kv.Value - ams.RelativeSpeeds[kv.Key], 2)); }));
        item.AMS = ams;
        item.AllMonthInRange = allMonth;
        // Flag (don't drop) candidates exceeding the allowed deviation.
        item.IsDeviationFailed = !double.IsNaN(maximalRelativeSpeedDeviation) && item.Deviation > maximalRelativeSpeedDeviation;
        item.Distance = EarthModel.CalculateDistance(coordinates, ams.Position);
        res.Add(item);
        // Track the minimum-deviation candidate.
        if (minDev > item.Deviation)
        {
            minDev = item.Deviation;
            minItem = item;
        }
    }
    res.AllMonthInRange = allMonth;
    res.RangeRelativeSpeeds = msRelatives;
    res.ItemWithMinimalDeviation = minItem;
    return(res);
}
// Returns the "dominant" suppliers: sorts descending by contract value and
// takes everything up to the last large value step after the cumulative-65%
// position; minPocet optionally enforces a minimum count.
public Souhrn[] TopDodavatele(int?minPocet = null)
{
    // A single supplier is trivially the whole top list.
    if (Dodavatele.Count() == 1)
    {
        return(Dodavatele);
    }
    // schody ("steps"): value drop between consecutive suppliers, keyed by position.
    Dictionary <int, decimal> schody = new Dictionary <int, decimal>();
    Souhrn[] sortedDodav = Dodavatele.OrderByDescending(o => o.HodnotaSmluv).ToArray();
    decimal avgHodnota = Dodavatele.Average(m => m.HodnotaSmluv);
    int posOver60Perc = -1;
    decimal tmpSum = 0;
    for (int i = 0; i < sortedDodav.Count() - 1; i++)
    {
        schody.Add(i, sortedDodav[i].HodnotaSmluv - sortedDodav[i + 1].HodnotaSmluv);
        tmpSum = tmpSum + sortedDodav[i].HodnotaSmluv;
        // First position where the cumulative value crosses 65% of the total.
        // NOTE(review): the name says 60% but the constant is 0.65m — confirm intent.
        if (posOver60Perc == -1 && tmpSum >= this.HodnotaSmluvProVypocet * 0.65m)
        {
            posOver60Perc = i;
        }
    }
    decimal avgDiff = schody.Average(m => m.Value);
    int lastPosOfDominant = posOver60Perc;
    // NOTE(review): if the 65% mark is never reached inside the loop (the last
    // supplier is excluded from tmpSum), posOver60Perc stays -1 and, absent
    // minPocet, Take(0) returns an empty array — confirm that is intended.
    // Find the next position after posOver60Perc whose step exceeds avgDiff.
    for (int i = posOver60Perc + 1; i < schody.Count(); i++)
    {
        if (schody[i] > avgDiff)
        {
            lastPosOfDominant = i;
        }
        else
        {
            break;
        }
    }
    // Enforce a minimum number of returned suppliers when requested.
    if (minPocet.HasValue && lastPosOfDominant + 1 < minPocet)
    {
        lastPosOfDominant = minPocet.Value - 1;
    }
    return(sortedDodav.Take(lastPosOfDominant + 1).ToArray());
}
// Estimates a completion date for the given project's tasks from historical
// creation-to-Done durations; falls back to now + 10 days without usable history.
public DateTime AvgTaskEstimated(int projectId)
{
    // Task id -> whole days between the task's creation and its latest "Done" track.
    Dictionary <int, int> days = new Dictionary <int, int>();
    // All state-change tracks mentioning ToDo / Done / Doing, newest first.
    var taskTracks = _taskTrackDal
                     .GetListWithTwoIncludes(track => track.TaskState, track => track.Task,
                                             track => track.EventDescription.ToLower().Contains(nameof(TaskStateBaseNames.ToDo).ToLower()) ||
                                             track.EventDescription.ToLower().Contains(nameof(TaskStateBaseNames.Done).ToLower()) ||
                                             track.EventDescription.ToLower().Contains(nameof(TaskStateBaseNames.Doing).ToLower()))
                     .OrderByDescending(track => track.CreateDate).ToList();
    if (taskTracks.Count > 0)
    {
        // NOTE(review): O(n^2) — re-filters the whole list for every track;
        // each task is only counted once thanks to the ContainsKey guard below.
        taskTracks.ForEach(track =>
        {
            var firstTaskTracks = taskTracks.Where(taskTrack => taskTrack.Task.Id == track.Task.Id && taskTrack.Task.ProjectId == projectId).OrderByDescending(taskTrack => taskTrack.CreateDate).ToList();
            if (firstTaskTracks.Count > 0)
            {
                // Newest track for this task; only tasks whose latest state is Done count.
                var firstTaskTrack = firstTaskTracks.First();
                if (firstTaskTrack.EventDescription.ToLower().Contains(nameof(TaskStateBaseNames.Done).ToLower()))
                {
                    TimeSpan dayDiff = firstTaskTrack.CreateDate - firstTaskTrack.Task.CreateDate;
                    if (!days.ContainsKey(firstTaskTrack.Task.Id))
                    {
                        days.Add(firstTaskTrack.Task.Id, dayDiff.Days);
                    }
                }
            }
        });
    }
    DateTime estDateTime;
    if (days.Count == 0 || days.Sum(pair => pair.Value) == 0)
    {
        // No usable history: default estimate of 10 days from now.
        estDateTime = DateTime.Now.AddDays(10);
    }
    else
    {
        // Round the mean duration up to whole days.
        var avg2 = Math.Ceiling(days.Average(pair => pair.Value));
        estDateTime = DateTime.Now.AddDays(avg2);
    }
    return(estDateTime);
}
/// <summary> /// The entry point of the program, where the program control starts and ends. /// </summary> /// <param name="args">The command-line arguments.</param> public static void Main(string[] args) { var fbEmailFile = @"../../facebook.email.dontcommit"; var fbPasswordFile = @"../../facebook.pw.dontcommit"; var fbAuthInfoFile = @"../../facebook.auth.dontcommit"; var tndrAuthInfoFile = @"../../tinder.auth.dontcommit"; var fbEmailAddress = File.ReadAllText (fbEmailFile); var fbPassword = File.ReadAllText (fbPasswordFile); var recommendationsFolderPath = "nyc_recommendations"; var nopeFolderPath = "nope"; var yesFolderPath = "yes"; var startTime = DateTime.Now; var runForTimeSpan = new TimeSpan (4, 0, 0); var hitCountDictionaryFile = DateTime.Now.ToString ("MM.dd.yyyy.hh.mm.ss") + "_hitcountdict.dontcommit.txt"; var recsIntervalRandomizer = new Random (); var recsIntervalTimeSpanMininum = new TimeSpan (0, 0, 1, 0); var recsIntervalTimeSpanMaximum = new TimeSpan (0, 0, 2, 0); var totalRecommendations = 0.0; var hitCountDictionary = new Dictionary<string, int> (); var disposables = new List<IDisposable> (); var jsonSerializer = new NewtonsoftJsonSerializer (); var webDriverFactory = new PhantomJSWebDriverFactory (); var webDriverForFacebookAuthenticator = webDriverFactory.CreateWebDriver (); disposables.Add (webDriverForFacebookAuthenticator); Console.Clear (); try { FacebookSession fbSession = null; if(File.Exists (fbAuthInfoFile)) { var fbAuthInfo = File.ReadAllLines (fbAuthInfoFile); if(fbAuthInfo.Any ()) { var fbAccessToken = fbAuthInfo[0]; var fbId = fbAuthInfo[1]; fbSession = new FacebookSession (fbAccessToken, fbId); } } if(fbSession != null) { Console.WriteLine ("Using previous Facebook session authentication."); } else { Console.WriteLine ("Reauthenticating with Facebook..."); var fbAuthenticator = new SeleniumFacebookAuthenticator (webDriverForFacebookAuthenticator, fbEmailAddress, fbPassword); fbSession = fbAuthenticator.Authenticate (); } if(fbSession 
!= null) { Console.WriteLine ("Authenticated with Facebook: '{0}'.", fbSession); File.WriteAllLines (fbAuthInfoFile, new string[] { fbSession.AccessToken, fbSession.Id }); } else { Console.WriteLine ("Authentication with Facebook failed."); if(File.Exists (fbAuthInfoFile)) { File.Delete (fbAuthInfoFile); } goto end; } string tndrAccessToken = null; TndrClient tndrClient = null; if(File.Exists (tndrAuthInfoFile)) { var tndrAuthInfo = File.ReadAllLines (tndrAuthInfoFile); if(tndrAuthInfoFile.Any ()) { tndrAccessToken = tndrAuthInfo[0]; } } if(tndrAccessToken != null) { Console.WriteLine ("Using previous Tinder session authentication."); tndrClient = new TndrClient (tndrAccessToken); try { var tndrUpdatesResponse = tndrClient.GetUpdates (); if(tndrUpdatesResponse == null || tndrUpdatesResponse.LastActiveDate == null) { tndrAccessToken = null; } } catch { tndrAccessToken = null; } } if(tndrAccessToken == null) { Console.WriteLine ("Reauthenticating with Tinder using current FacebookSession..."); var tndrAuthenticationResponse = TndrClient.Authenticate (fbSession); if(tndrAuthenticationResponse != null) { tndrAccessToken = tndrAuthenticationResponse.AccessToken; } } if(tndrAccessToken != null) { Console.WriteLine ("Authenticated with Tinder: '{0}'.", tndrAccessToken); File.WriteAllLines (tndrAuthInfoFile, new string[] { tndrAccessToken }); tndrClient = new TndrClient (tndrAccessToken); } else { Console.WriteLine ("Authentication with Tinder failed."); if(File.Exists (tndrAuthInfoFile)) { File.Delete (tndrAuthInfoFile); } if(File.Exists (fbAuthInfoFile)) { File.Delete (fbAuthInfoFile); } goto end; } var webClient = new WebClient (); disposables.Add (webClient); //IWebDriver photoWebDriver = null; while((DateTime.Now - startTime) < runForTimeSpan) { var tndrUpdatesResponse = tndrClient.GetUpdates (); if(tndrUpdatesResponse.Matches != null) { Console.WriteLine ("Tinder matches: {0}.", tndrUpdatesResponse.Matches.Count ()); } var tndrReccomendationsResponse = 
tndrClient.GetRecommendations (); if(tndrReccomendationsResponse.StatusCode != 200) { Console.WriteLine ("No Tinder recommendations available or requesting too fast."); } else { if(tndrReccomendationsResponse.Recommendations.Any (r => r.TinderId.StartsWith ("tinder_rate_limited_id"))) { Console.WriteLine ("Tinder Rate Limit Reached"); goto end; } totalRecommendations += tndrReccomendationsResponse.Recommendations.Count (); Console.WriteLine ("Tinder recommendations: {0}.", tndrReccomendationsResponse.Recommendations.Count ()); if(tndrReccomendationsResponse.Recommendations.Any ()) { //try //{ // var urlTest = photoWebDriver.Url; //} //catch //{ // photoWebDriver = new FirefoxDriver (); // webDrivers.Add (photoWebDriver); //} foreach(var tndrRecommendation in tndrReccomendationsResponse.Recommendations) { if(hitCountDictionary.ContainsKey (tndrRecommendation.TinderId)) { hitCountDictionary[tndrRecommendation.TinderId]++; } else { hitCountDictionary[tndrRecommendation.TinderId] = 1; } //photoWebDriver.Url = tndrRecommendation.Photos.First ().Url; //photoWebDriver.Navigate (); //photoWebDriver.FindElement (By.TagName ("body")).SendKeys (Keys.Command + "t"); var recommendationFolderPath = Path.Combine (recommendationsFolderPath, tndrRecommendation.TinderId); var nopeRecommendationFolderPath = Path.Combine (nopeFolderPath, tndrRecommendation.TinderId); var yesRecommendationFolderPath = Path.Combine (yesFolderPath, tndrRecommendation.TinderId); if(!Directory.Exists (recommendationFolderPath) && !Directory.Exists (nopeRecommendationFolderPath) && !Directory.Exists (yesRecommendationFolderPath)) { Console.WriteLine ("\tNEW=> Name: {0, -20} Age: {1, -10} Recommended {2} time(s).", tndrRecommendation.Name, DateTime.Now.Year - DateTime.Parse (tndrRecommendation.BirthDate).Year, hitCountDictionary[tndrRecommendation.TinderId]); Directory.CreateDirectory (recommendationFolderPath); Directory.CreateDirectory (Path.Combine (recommendationFolderPath, "photos")); var 
recommendationFile = Path.Combine (recommendationFolderPath, string.Format ("{0}_{1}_{2}.txt", tndrRecommendation.Name, DateTime.Now.Year - DateTime.Parse (tndrRecommendation.BirthDate).Year, tndrRecommendation.TinderId)); File.WriteAllText (recommendationFile, jsonSerializer.Serialize (tndrRecommendation, indented: true)); foreach(var photo in tndrRecommendation.Photos) { //Console.WriteLine ("\t\tPhoto: {0}", photo.Url); var photoUri = new Uri (photo.Url); var photoFileName = Path.GetFileName (photoUri.AbsoluteUri); var photoLocalFilePath = Path.Combine (recommendationFolderPath, "photos", photoFileName); { try { webClient.DownloadFile (photoUri.ToString (), photoLocalFilePath); } catch { } } } } else { Console.WriteLine ("\tOLD=> Name: {0, -20} Age: {1, -10} Recommended {2} time(s).", tndrRecommendation.Name, DateTime.Now.Year - DateTime.Parse (tndrRecommendation.BirthDate).Year, hitCountDictionary[tndrRecommendation.TinderId]); } } try { var jsonHitCountDictionary = jsonSerializer.Serialize (hitCountDictionary.OrderByDescending (h => h.Value), true); File.WriteAllText (Path.Combine (recommendationsFolderPath, hitCountDictionaryFile), jsonHitCountDictionary); } catch { } } else { Console.WriteLine ("No recommendations provided."); } } var average = hitCountDictionary.Average (x => x.Value); Console.WriteLine ("Top 20 Hits:"); foreach(var hitCountEntry in hitCountDictionary.OrderByDescending (h => h.Value).Take (20)) { Console.WriteLine ("\tId: {0}\tTotal Hits: {1} ({2:P2})\tLiked You: {3:P2}", hitCountEntry.Key, hitCountEntry.Value, (hitCountEntry.Value / totalRecommendations), (1 - (average / hitCountEntry.Value))); } Console.WriteLine ("Time left {0}...", runForTimeSpan - (DateTime.Now - startTime)); TimeSpan timeLapsed; var sleepForMs = recsIntervalRandomizer.Next (Convert.ToInt32 (recsIntervalTimeSpanMininum.TotalMilliseconds), Convert.ToInt32 (recsIntervalTimeSpanMaximum.TotalMilliseconds)); var sleepForTimeSpan = TimeSpan.FromMilliseconds (sleepForMs); var 
sleepStart = DateTime.Now; while((timeLapsed = DateTime.Now - sleepStart) < sleepForTimeSpan) { Console.WriteLine ("Sleeping for {0} {1}", (sleepForTimeSpan - timeLapsed), GenerateProgressBar (timeLapsed.TotalMilliseconds, sleepForTimeSpan.TotalMilliseconds)); if(Console.KeyAvailable) { if(Console.ReadKey (true).Key == ConsoleKey.Escape) { Console.WriteLine ("\nExiting Tndr tester..."); goto end; } } Console.CursorTop = Console.CursorTop - 1; } } } catch(Exception ex) { Console.WriteLine (ex); } finally { foreach(var webDriver in disposables) { webDriver.Dispose (); } } end: Console.WriteLine ("Ran for {0}.", (DateTime.Now - startTime)); Console.WriteLine ("Press enter to quit."); Console.ReadLine (); }
/// <summary>
/// Prints the average wall-clock time (milliseconds, per the message text) per Fansub
/// file parse, computed from the StartTimes/EndTimes lookup tables.
/// Entries with a start time but no recorded end time are ignored.
/// </summary>
private static void PrintStatistics()
{
    if (!StartTimes.Any()) return;

    // Re-key by start timestamp -> end timestamp, keeping only parses that finished
    // (i.e. whose key also exists in EndTimes).
    var remappedDictionary = new Dictionary<long, long>();
    foreach (var k in StartTimes)
    {
        long endTime;
        if (EndTimes.TryGetValue(k.Key, out endTime))
        {
            remappedDictionary[k.Value] = endTime;
        }
    }

    // BUG FIX: the original only guarded StartTimes. If no start entry had a matching
    // end entry, remappedDictionary was empty and Average() threw
    // InvalidOperationException ("Sequence contains no elements").
    if (!remappedDictionary.Any()) return;

    var averageTime = remappedDictionary.Average(kv => kv.Value - kv.Key);
    // Console.WriteLine has a format overload; the string.Format wrapper was redundant.
    Console.WriteLine("\n==Average time per Fansub File parse: {0} milliseconds==", averageTime);
}
/// <summary>
/// Prepares the train/test rating matrices and the similarity data needed for
/// numerical (rating-based) experiments. Idempotent: returns early once
/// ReadyForNumerical has been set.
/// </summary>
/// <param name="saveLoadedData">
/// When true, freshly computed similarity data is serialized to disk so later runs
/// can load it instead of recomputing.
/// </param>
/// <returns>A human-readable log of the preparation steps, or "Is ready." if already prepared.</returns>
public string GetReadyForNumerical(bool saveLoadedData = true)
{
    // Already prepared on a previous call — nothing to do.
    if (ReadyForNumerical) { return "Is ready."; }

    StringBuilder log = new StringBuilder();
    Utils.StartTimer();
    log.AppendLine(Utils.PrintHeading("Create R_train/R_test sets from " + DataSetFile));

    // Split the MovieLens data into train/test sets; results land in the
    // R_train/R_test fields via the out parameters.
    Utils.LoadMovieLensSplitByCount(DataSetFile, out R_train, out R_test,
        MinCountOfRatings, MaxCountOfRatings, CountOfRatingsForTrain, ShuffleData, Seed);

    // Print the split summary to the console and mirror it into the returned log.
    Console.WriteLine(R_train.DatasetBrief("Train set"));
    Console.WriteLine(R_test.DatasetBrief("Test set"));
    log.AppendLine(R_train.DatasetBrief("Train set"));
    log.AppendLine(R_test.DatasetBrief("Test set"));

    // Positions of the test set's known ratings; used as the "unknown" entries to predict.
    R_unknown = R_test.IndexesOfNonZeroElements();

    log.AppendLine(Utils.PrintValue("Relevant item criteria", RelevantItemCriteria.ToString("0.0")));
    // Items in the test set each user rated at/above the relevance criteria.
    RelevantItemsByUser = ItemRecommendationCore.GetRelevantItemsByUser(R_test, RelevantItemCriteria);
    log.AppendLine(Utils.PrintValue("Mean # of relevant items per user",
        RelevantItemsByUser.Average(k => k.Value.Count).ToString("0")));
    log.AppendLine(Utils.StopTimer());

    #region Prepare similarity data
    // Cache files: USR = user-user similarities, ISR = item-item similarities,
    // SSIIR = strong similarity indicators between items (all rating-based).
    if (File.Exists(GetDataFileName("USR"))
        && File.Exists(GetDataFileName("ISR"))
        && File.Exists(GetDataFileName("SSIIR")))
    {
        // All three cache files exist — deserialize instead of recomputing.
        Utils.StartTimer();
        Utils.PrintHeading("Load user-user similarities (rating based)");
        UserSimilaritiesOfRating = Utils.IO<SimilarityData>.LoadObject(GetDataFileName("USR"));
        Utils.StopTimer();

        Utils.StartTimer();
        Utils.PrintHeading("Load item-item similarities (rating based)");
        ItemSimilaritiesOfRating = Utils.IO<SimilarityData>.LoadObject(GetDataFileName("ISR"));
        Utils.StopTimer();

        Utils.StartTimer();
        Utils.PrintHeading("Load item-item strong similarity indicators (rating based)");
        StrongSimilarityIndicatorsByItemRating = Utils.IO<HashSet<Tuple<int, int>>>.LoadObject(GetDataFileName("SSIIR"));
        Utils.StopTimer();
    }
    else
    {
        // Cache miss — compute Pearson similarities from the training matrix.
        Utils.StartTimer();
        Utils.PrintHeading("Compute user-user similarities (rating based)");
        Metric.GetPearsonOfRows(R_train, MaxCountOfNeighbors, StrongSimilarityThreshold,
            out UserSimilaritiesOfRating);
        if (saveLoadedData)
        {
            Utils.IO<SimilarityData>.SaveObject(UserSimilaritiesOfRating, GetDataFileName("USR"));
        }
        Utils.StopTimer();

        Utils.StartTimer();
        Utils.PrintHeading("Compute item-item similarities (rating based)");
        // Also yields the strong-similarity indicator set in the same pass.
        Metric.GetPearsonOfColumns(R_train, MaxCountOfNeighbors, StrongSimilarityThreshold,
            out ItemSimilaritiesOfRating, out StrongSimilarityIndicatorsByItemRating);
        if (saveLoadedData)
        {
            Utils.IO<SimilarityData>.SaveObject(ItemSimilaritiesOfRating, GetDataFileName("ISR"));
            Utils.IO<HashSet<Tuple<int, int>>>
                .SaveObject(StrongSimilarityIndicatorsByItemRating, GetDataFileName("SSIIR"));
        }
        Utils.StopTimer();
    }
    #endregion

    ReadyForNumerical = true;
    return log.ToString();
}
/// <summary>
/// Populates the review-count label, the per-category rating summary grid, and the
/// review list for the product identified by <paramref name="UPC"/>.
/// </summary>
/// <param name="UPC">Product UPC used to filter the ItemReviews table.</param>
protected void SetReviews(string UPC)
{
    // LINQ-to-DataSet query; kept deferred (EnumerableRowCollection) so that
    // AsDataView() below remains available.
    var reviews = DBHelper.GetTable("ItemReviews").Table.AsEnumerable().Where(r => r.Field<string>("UPC") == UPC);

    // FIX: the original called reviews.Count() twice, re-enumerating the deferred
    // query. Count once and reuse the value.
    var reviewCount = reviews.Count();
    this.ReviewNum.Text = reviewCount.ToString();

    if (reviewCount != 0)
    {
        // Average each rating category across all reviews, rounded to a whole star.
        var rating = new Dictionary<string, double>();
        foreach (var type in new[] { "Quality", "Features", "Performance", "Appearance", "Durability" })
        {
            rating[type] = Math.Round(reviews.Average(r => r.Field<int>(type + "Rating")));
        }

        // Single anonymous summary row: overall score is the rounded mean of the
        // (already rounded) per-category averages.
        this.ItemRating.DataSource = new ArrayList
        {
            new
            {
                Overall = Math.Round(rating.Average(p => p.Value)),
                Quality = rating["Quality"],
                Features = rating["Features"],
                Performance = rating["Performance"],
                Appearance = rating["Appearance"],
                Durability = rating["Durability"]
            }
        };
        this.ItemRating.DataBind();
    }

    // Bind the full review list regardless of count (an empty view when no reviews).
    this.ItemReviews.DataSource = reviews.AsDataView();
    this.ItemReviews.DataBind();
}
/// <summary>
/// Runs the cost-analysis calculations for the cost analyzer chart: builds the
/// week-by-week before/after price series and updates the summary display fields
/// (regeneration, cleaning, replacement, total ops, savings, cost per gallon,
/// break-even and ROI) on costAnalyzerResultData.
/// </summary>
/// <param name="numberOfTrains">Number of trains. NOTE(review): unused in this method.</param>
/// <param name="numberRegensNormOps">Per-week tuples for normal operations; Item1 is the week number, Item2 is presumably the number of regenerations that week — confirm with caller.</param>
/// <param name="numberRegensClean">Per-week tuples for the post-cleaning scenario; same shape as numberRegensNormOps.</param>
/// <param name="throughputClean">Per-date tuples for the cleaned scenario; Item3 is an action string ("Clean"/"Replace"/other). Item2 is presumably throughput (used below as gallons/1000) — confirm.</param>
/// <param name="throughputNormOps">Per-date tuples for normal operations; same shape as throughputClean.</param>
/// <param name="trainList">Train table. NOTE(review): unused in this method.</param>
/// <returns>The combined before/after price series, one Price_Data entry per week.</returns>
public List<Price_Data> PriceCalculator(int numberOfTrains, List<Tuple<int, double>> numberRegensNormOps,
    List<Tuple<int, double>> numberRegensClean, Dictionary<DateTime, Tuple<int, double, string>> throughputClean,
    Dictionary<DateTime, Tuple<int, double, string>> throughputNormOps, System.Data.DataTable trainList)
{
    try
    {
        // Load calculation parameters
        double causticPrice = calculationParameters.CausticPrice;
        double acidPrice = calculationParameters.AcidPrice;
        // Concentrations arrive as percentages; convert to fractions.
        double percentCaustic = calculationParameters.CausticConcentration * 0.01;
        double percentAcid = calculationParameters.AcidConcentration * 0.01;
        double causticUsed = calculationParameters.AmountOfCaustic;
        double acidUsed = calculationParameters.AmountOfAcid;
        double acidPriceFactor = calculationParameters.AcidPriceConversion;
        double causticPriceFactor = calculationParameters.CausticPriceConversion;
        double amountAnion = calculationParameters.AnionAmount;
        double amountCation = calculationParameters.CationAmount;

        // Account for dividing by zero for chemical concentration by making value arbitrarily small
        if (percentCaustic == 0) { percentCaustic = 0.0001; }
        if (percentAcid == 0) { percentAcid = 0.0001; }

        // Variable to hold entire price timeseries data
        List<Price_Data> price = new List<Price_Data>();

        // Parallel accumulators for the before/after scenarios. Order matters: they
        // are appended in loop order and aggregated (Average/Sum/Max) afterwards.
        List<double> regenerationCostBefore = new List<double>();
        List<double> regenerationCostAfter = new List<double>();
        List<double> weeklyRegenerationsNormOps = new List<double>();
        List<double> weeklyRegenerationsClean = new List<double>();
        List<double> cleaningCostBefore = new List<double>();
        List<double> replacementCostBefore = new List<double>();
        List<double> cleaningCostAfter = new List<double>();
        List<double> replacementCostAfter = new List<double>();
        List<double> opsCostBefore = new List<double>();
        List<double> opsCostAfter = new List<double>();

        // Calculate the per regeneration before price
        int wk = 0;
        foreach (var week in numberRegensNormOps)
        {
            Price_Data eachPrice_before = new Price_Data();
            // Chemical cost per regeneration = (price-per-unit / concentration * amount used)
            // for caustic (anion) plus acid (cation), scaled by regenerations this week (Item2).
            double costToRegenerate = (((((causticPrice * causticPriceFactor) / percentCaustic) * (causticUsed)) * amountAnion)
                + ((((acidPrice * acidPriceFactor) / percentAcid) * (acidUsed)) * amountCation)) * week.Item2;
            regenerationCostBefore.Add(costToRegenerate);

            // Check if the resin was either cleaned or replaced
            string cleanOrReplace;
            if (wk <= throughputNormOps.Count - 1)
            {
                // NOTE(review): ElementAt(wk) on a Dictionary relies on insertion/enumeration
                // order aligning with week order — fragile; confirm the dictionaries are
                // built in week order.
                cleanOrReplace = throughputNormOps.ElementAt(wk).Value.Item3;
                if (cleanOrReplace == "Replace")
                {
                    // One-off replacement cost for both resin types, added to this week's cost.
                    double replacmentCost = (calculationParameters.ReplacementPriceAnion + calculationParameters.ReplacemtntPriceCation)
                        * (calculationParameters.AnionAmount + calculationParameters.CationAmount);
                    costToRegenerate = costToRegenerate + replacmentCost;
                    replacementCostBefore.Add(replacmentCost);
                }
                else if (cleanOrReplace == "Clean")
                {
                    // Cleaning cost with per-resin discounts applied.
                    double cleaningCost = ((calculationParameters.CationCleaningPrice - (calculationParameters.CationCleaningPrice * calculationParameters.CationDiscount))
                        + (calculationParameters.AnionCleaningPrice - (calculationParameters.AnionCleaningPrice * calculationParameters.AnionDiscount)))
                        * (calculationParameters.AnionAmount + calculationParameters.CationAmount);
                    costToRegenerate = costToRegenerate + cleaningCost;
                    cleaningCostBefore.Add(cleaningCost);
                }
                else
                {
                    // No action this week — keep the parallel lists aligned with zeros.
                    replacementCostBefore.Add(0);
                    cleaningCostBefore.Add(0);
                }
            }
            opsCostBefore.Add(costToRegenerate);
            eachPrice_before.BeforePrice = costToRegenerate;
            eachPrice_before.WeekNumber = week.Item1;
            price.Add(eachPrice_before);
            weeklyRegenerationsNormOps.Add(week.Item2);
            wk++;
        }

        int count = 0;
        wk = 0;
        // Calculate the per regeneration for the after price
        foreach (var week in numberRegensClean)
        {
            Price_Data eachPrice_after = new Price_Data();
            // Same chemical-cost formula as the "before" loop, applied to the cleaned scenario.
            double costToRegenerate = (((((causticPrice * causticPriceFactor) / percentCaustic) * (causticUsed)) * amountAnion)
                + ((((acidPrice * acidPriceFactor) / percentAcid) * (acidUsed)) * amountCation)) * week.Item2;
            regenerationCostAfter.Add(costToRegenerate);

            // Check if the resin was either cleaned or replaced
            if (wk <= throughputClean.Count - 1)
            {
                string cleanOrReplace = throughputClean.ElementAt(wk).Value.Item3;
                if (cleanOrReplace == "Replace")
                {
                    double replacmentCost = (calculationParameters.ReplacementPriceAnion + calculationParameters.ReplacemtntPriceCation)
                        * (calculationParameters.AnionAmount + calculationParameters.CationAmount);
                    costToRegenerate = costToRegenerate + replacmentCost;
                    replacementCostAfter.Add(replacmentCost);
                }
                else if (cleanOrReplace == "Clean")
                {
                    double cleaningCost = ((calculationParameters.CationCleaningPrice - (calculationParameters.CationCleaningPrice * calculationParameters.CationDiscount))
                        + (calculationParameters.AnionCleaningPrice - (calculationParameters.AnionCleaningPrice * calculationParameters.AnionDiscount)))
                        * (calculationParameters.AnionAmount + calculationParameters.CationAmount);
                    costToRegenerate = costToRegenerate + cleaningCost;
                    cleaningCostAfter.Add(cleaningCost);
                }
                else
                {
                    replacementCostAfter.Add(0);
                    cleaningCostAfter.Add(0);
                }
                // NOTE(review): wk++ sits INSIDE this if-block, unlike the "before" loop
                // where it runs every iteration. If numberRegensClean is longer than
                // throughputClean, wk stops advancing — confirm this is intentional.
                wk++;
            }
            opsCostAfter.Add(costToRegenerate);
            eachPrice_after.AfterPrice = costToRegenerate;
            eachPrice_after.WeekNumber = week.Item1;
            // Merge into the series built by the first loop; assumes both lists cover
            // the same weeks in the same order. Throws if numberRegensClean is longer
            // than numberRegensNormOps.
            price.ElementAt(count).AfterPrice = eachPrice_after.AfterPrice;
            count++;
        }

        // Average the cost to regenerate and update the display
        // NOTE(review): regenCostBefore is computed but never used — only the "after"
        // average is written to the display. Throws if either list is empty.
        double regenCostBefore = regenerationCostBefore.Average();
        double regenCostAfter = regenerationCostAfter.Average();
        costAnalyzerResultData.RegenWeeklyCostAfter = "$" + string.Format("{0:n}", Math.Round(regenCostAfter, 2));

        // Average the cost to clean or the cost to replace and update display
        // (despite the variable names these are totals, not averages — Sum, not Average).
        double cleanCostBefore = cleaningCostBefore.Sum();
        double cleanCostAfter = cleaningCostAfter.Sum();
        double replaceCostBefore = replacementCostBefore.Sum();
        double replaceCostAfter = replacementCostAfter.Sum();
        costAnalyzerResultData.CleaningCostBefore = "$" + string.Format("{0:n}", Math.Round(cleanCostBefore));
        costAnalyzerResultData.CleaningCostAfter = "$" + string.Format("{0:n}", Math.Round(cleanCostAfter));
        costAnalyzerResultData.ReplacementCostBefore = "$" + string.Format("{0:n}", Math.Round(replaceCostBefore));
        costAnalyzerResultData.ReplacementCostAfter = "$" + string.Format("{0:n}", Math.Round(replaceCostAfter));

        // Sum the operations cost to calculate total cost for given duration
        double absoluteTotalCostBefore = opsCostBefore.Sum();
        double absoluteTotalCostAfter = opsCostAfter.Sum();
        double averageOpsBefore = opsCostBefore.Average();
        double averageOpsAfter = opsCostAfter.Average();
        // Projected savings: weekly ops-cost delta extrapolated over the cleaned period.
        double regenSavings = (averageOpsBefore - averageOpsAfter) * throughputClean.Count;
        double cumSavings = (absoluteTotalCostBefore - absoluteTotalCostAfter) + regenSavings;
        // NOTE(review): cumInvestment is assigned but never used.
        double cumInvestment = cleanCostAfter;
        costAnalyzerResultData.TotalOpsCostBefore = "$" + string.Format("{0:n0}", Math.Round(absoluteTotalCostBefore, 0));
        costAnalyzerResultData.TotalOpsCostAfter = "$" + string.Format("{0:n0}", Math.Round(absoluteTotalCostAfter, 0));
        costAnalyzerResultData.TotalWeeklyCostBefore = "$" + string.Format("{0:n}", Math.Round(averageOpsBefore, 2));
        costAnalyzerResultData.TotalWeeklyCostAfter = "$" + string.Format("{0:n}", Math.Round(averageOpsAfter, 2));
        cumulativeSavings = "$" + string.Format("{0:n}", Math.Round(cumSavings, 2));

        // Calculate the average cost per gallon of water produced
        // (Item2 / 1000 treats throughput as thousands of gallons — confirm units.)
        double totalOpsAverageBefore = opsCostBefore.Average();
        double totalOpsAverageAfter = opsCostAfter.Average();
        double beforeTPAverage = throughputNormOps.Average(x => x.Value.Item2);
        double afterTPAverage = throughputClean.Average(x => x.Value.Item2);
        double costPerGAL_Before = totalOpsAverageBefore / (beforeTPAverage / 1000);
        double costPerGAL_After = totalOpsAverageAfter / (afterTPAverage / 1000);
        costAnalyzerResultData.AvgCostPerGalBefore = "$" + string.Format("{0:n}", Math.Round(costPerGAL_Before, 2));
        costAnalyzerResultData.AvgCostPerGalAfter = "$" + string.Format("{0:n}", Math.Round(costPerGAL_After, 2));

        // Calculate the average amount of time before the customer breaks even from cleanings
        double averageCleaningCost;
        // NOTE(review): the != null check is always true for a local list; the real
        // guards are Count > 0 and Max() != 0 (average over non-zero cleaning weeks).
        if (cleaningCostAfter != null && cleaningCostAfter.Count > 0 && cleaningCostAfter.Max() != 0)
        {
            averageCleaningCost = cleaningCostAfter.Where(x => x != 0).Average();
        }
        else
        {
            averageCleaningCost = 0;
        }
        double? weeksToRecoupe = WeeksUntillBreakEven(averageOpsBefore, averageOpsAfter, averageCleaningCost);

        double averageReplacingCost;
        // NOTE(review): Max() throws InvalidOperationException if replacementCostBefore
        // is empty (no Count guard here, unlike the cleaning branch above).
        if (replacementCostBefore.Max() != 0)
        {
            averageReplacingCost = replacementCostBefore.Where(x => x != 0).Average();
        }
        else
        {
            averageReplacingCost = 0;
        }
        // NOTE(review): this overwrites the cleaning-based weeksToRecoupe computed
        // above without ever reading it; the result itself is also never used.
        weeksToRecoupe = WeeksUntillBreakEven(averageOpsBefore, averageOpsAfter, averageReplacingCost);

        // ROI as a percentage: (net savings after investment) / investment.
        // NOTE(review): divides by cleanCostAfter — NaN/Infinity if no cleanings occurred.
        double ROI = Math.Round((((cumSavings) - cleanCostAfter) / cleanCostAfter) * 100);
        roiGlobal = string.Format("{0:n0}", ROI) + "%";

        return price;
    }
    catch
    {
        // NOTE(review): catch { throw; } is a no-op wrapper — callers see the original
        // exception either way; consider removing the try/catch entirely.
        throw;
    }
}