public void Heronian() {
    // given: a list spanning negative/zero/positive values and an all-zero list
    IList<double> mixedValues = new List<double> { -1, 0, 1, 2, 3 };
    IList<double> zeroValues = new List<double> { 0, 0 };
    IList<IList<double>> bothLists = new List<IList<double>> { mixedValues, zeroValues };
    const double EXPECTED_MIXED = 0.934195180782892;
    const double EXPECTED_ZEROS = 0.0;
    IList<double> expectedBatch = new List<double> { EXPECTED_MIXED, EXPECTED_ZEROS };
    // when: Heronian mean with the offset variant, for each list and for the batch
    double meanMixed = Averages.Heronian(mixedValues, GeometricMeanVariant.Offset).Value;
    double meanZeros = Averages.Heronian(zeroValues, GeometricMeanVariant.Offset).Value;
    IList<double> meanBatch = Averages.Heronian(bothLists, GeometricMeanVariant.Offset);
    // then
    Assert.AreEqual(EXPECTED_MIXED, meanMixed, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(EXPECTED_ZEROS, meanZeros, Assertions.IBM_FLOAT_SURROUNDING);
    Assertions.SameValues(expectedBatch, meanBatch, 1.0E-14);
}
/// <summary>
/// Update statistics with a new tweet that has been received from the Twitter api.
/// </summary>
/// <param name="jsonString">Raw JSON payload of a single tweet.</param>
public void ProcessTweet(string jsonString) {
    var tweet = JsonConvert.DeserializeObject<Tweet>(jsonString);
    // The API occasionally delivers empty tweets (cause unknown) - discard them.
    if (tweet == null) {
        return;
    }
    // Emoji scanning is compute intensive, so do it before taking the lock.
    var emojisInTweet = _emojiHelper.FindEmojis(tweet.data.text);
    // Update the shared statistics under the lock to stay threadsafe.
    lock (padlock) {
        Averages.Recompute();
        Emojis.AddProperties(emojisInTweet);
        Hashtags.AddProperties(tweet.data.entities?.hashtags?.Select(x => x.tag));
        Urls.AddProperties(tweet.data.entities?.urls);
    }
}
private void findAverages() {
    // Rebuild _sectionAverages: one entry per section plus a weighted overall entry.
    // The Averages class is local and stores the average together with the section name.
    double totalWeight = 0;
    double weightedSum = 0;
    _sectionAverages.Clear();
    foreach (Model section in _marks) {
        double sectionMean = section.sectionData.Average();
        _sectionAverages.Add(new Averages {
            sectionAverage = sectionMean,
            sectionName = section.sectionName
        });
        // Each section is weighted by its number of data points.
        double sectionCount = (double)section.sectionData.Count();
        totalWeight += sectionCount;
        weightedSum += sectionCount * sectionMean;
    }
    // Weighted-average formula across all sections.
    Averages overall = new Averages {
        sectionName = kOverallAverageLabel,
        sectionAverage = weightedSum / totalWeight
    };
    _sectionAverages.Add(overall);
    sectionAverages = _sectionAverages;
}
public static void Calculate(Averages average) {
    // Compute the requested kind of average over the user-supplied values,
    // store the result, and display it until the user presses Enter.
    string id = GetID();
    float[] values = GetValues();
    // Unknown enum values intentionally fall through with a result of 0.
    float computed = 0;
    if (average == Averages.Arithmetic) {
        computed = AverageFormulas.Arithmetic(values);
    } else if (average == Averages.Weighted) {
        computed = AverageFormulas.Weighted(values);
    } else if (average == Averages.Harmonic) {
        computed = AverageFormulas.Harmonic(values);
    } else if (average == Averages.Geometric) {
        computed = AverageFormulas.Geometric(values);
    }
    var userAverage = new Average(id, average, computed, values, DateTime.Now);
    averages.Add(userAverage);
    Console.Clear();
    Show(userAverage);
    Console.ReadLine();
}
public void AGM() {
    // given: an all-positive list and a list containing negatives and zero
    IList<double> positives = new List<double> { 2, 3, 5, 7, 9, 10, 11, 23 };
    IList<double> withNegatives = new List<double> { -1, 0, 2, -3, 4, -5, 8, 7 };
    IList<IList<double>> batch = new List<IList<double>> { positives, withNegatives };
    const double EXPECTED1 = 7.76470249483093;
    const double EXPECTED2 = 8.0652853391755;
    const double EXPECTED3 = 0.555265813041983;
    IList<double> expectedBatch = new List<double> { EXPECTED2, EXPECTED3 };
    // when: arithmetic-geometric mean under both geometric-mean variants
    double agmAbsolute = Averages.AGM(positives, GeometricMeanVariant.Absolute).Value;
    double agmOffset = Averages.AGM(positives, GeometricMeanVariant.Offset).Value;
    double agmNegOffset = Averages.AGM(withNegatives, GeometricMeanVariant.Offset).Value;
    IList<double> agmBatch = Averages.AGM(batch, GeometricMeanVariant.Offset);
    // then
    Assert.AreEqual(EXPECTED1, agmAbsolute, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(EXPECTED2, agmOffset, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(EXPECTED3, agmNegOffset, Assertions.IBM_FLOAT_SURROUNDING);
    Assertions.SameValues(expectedBatch, agmBatch);
}
public void Maximum() {
    // given: two small lists, plus a batch that adds a third inline list
    IList<double> firstList = new List<double> { -1.0, 0.0, 1.0 };
    IList<double> secondList = new List<double> { 2.0, -1.0, 0.0 };
    IList<IList<double>> batch = new List<IList<double>> {
        firstList,
        secondList,
        new List<double> { 1.0, 1.1, 1.2, 1.3, 1.4 }
    };
    double expectedFirst = 1.0;
    double expectedSecond = 2.0;
    IList<double> expectedBatch = new List<double> { expectedFirst, expectedSecond, 1.4 };
    // when
    double maxFirst = Averages.Maximum(firstList).Value;
    double maxSecond = Averages.Maximum(secondList).Value;
    IList<double> maxBatch = Averages.Maximum(batch);
    // then
    Assert.AreEqual(expectedFirst, maxFirst, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(expectedSecond, maxSecond, Assertions.IBM_FLOAT_SURROUNDING);
    Assertions.SameValues(maxBatch, expectedBatch);
}
// Immutable-style data carrier for one computed average: every argument is
// captured as-is; instances are created via Calculate only.
private Average(string ID, Averages type, float average, float[] values, DateTime creationDate) {
    this.creationDate = creationDate;
    this.values = values;
    this.average = average;
    this.type = type;
    this.ID = ID;
}
public void Median() {
    // given: odd-length, even-length, and unsorted even-length inputs,
    // plus batches including single-element and two-element lists
    IList<double> oddCount = new List<double> { 2.0, 9.0, 1.0, 5.0, 3.0, 8.0, 6.0, 7.0, 4.0 };
    IList<double> evenCount = new List<double> { 10.0, 9.0, 2.0, 8.0, 6.0, 1.0, 5.0, 3.0, 4.0, 7.0 };
    IList<double> unsorted = new List<double> { 7, 8, 3, 4, 9, 2 };
    IList<IList<double>> batch = new List<IList<double>> { oddCount, evenCount, unsorted };
    IList<IList<double>> tinyBatch = new List<IList<double>> {
        new List<double> { 1.0 },
        new List<double> { 2.0, 1.0 }
    };
    const double EXPECTED_1 = 5.0;
    const double EXPECTED_2 = (5.0 + 6.0) / 2.0;
    const double EXPECTED_3 = (4 + 7) / 2.0;
    IList<double> expectedBatch = new List<double> { EXPECTED_1, EXPECTED_2, EXPECTED_3 };
    IList<double> expectedTinyBatch = new List<double> { 1.0, (2.0 + 1.0) / 2.0 };
    // when
    double medianOdd = Averages.Median(oddCount).Value;
    double medianEven = Averages.Median(evenCount).Value;
    double medianUnsorted = Averages.Median(unsorted).Value;
    IList<double> medianBatch = Averages.Median(batch);
    IList<double> medianTinyBatch = Averages.Median(tinyBatch);
    // then
    Assert.AreEqual(EXPECTED_1, medianOdd, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(EXPECTED_2, medianEven, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(EXPECTED_3, medianUnsorted, Assertions.IBM_FLOAT_SURROUNDING);
    Assertions.SameValues(medianBatch, expectedBatch);
    Assertions.SameValues(medianTinyBatch, expectedTinyBatch);
}
private void buttonAverage_Click(object sender, EventArgs e) {
    // Compute a moving average over the trade data of the currently selected symbol.
    int symbolId = comboBoxCustomGroupMember.SelectedIndex;
    List<PointData> tradeData = _symbolService.GetSymbolTradeData(symbolId);
    List<PointData> movingAverage = new Averages().MovingAverage(tradeData, average);
    // NOTE(review): the chart wiring for this series is currently disabled -
    // addChartSeries("MA[" + average + "]" + stockName), configureChartSeries()
    // and addData(...) were commented out, so movingAverage is not plotted yet.
    // Widen the moving-average window for the next click.
    average += 10;
}
public void Generalized() {
    // given: inputs for ranks 2-4, including a list with negatives for the offset variant
    IList<double> smallValues = new List<double> { 1, 2, 3, 4, 5 };
    IList<double> largerValues = new List<double> { 3, 7, 11, 23, 111 };
    IList<double> withNegatives = new List<double> { -2, 0, 1, 5 };
    IList<IList<double>> batch = new List<IList<double>> {
        smallValues,
        new List<double> { 2, 1 }
    };
    int rank1 = 2;
    int rank2 = 3;
    int rank3 = 4;
    // Expected values are the closed-form power means; the offset variant shifts
    // by |min| + 1 before averaging and shifts back afterwards.
    double expected1 = Math.Sqrt(11.0);
    double expected2 = Mathematics.Root(1381499.0 / 5.0, rank2);
    double expected3 = Mathematics.Root(2217.0 / 2.0, rank3) - (Math.Abs(withNegatives.Min()) + 1.0);
    IList<double> expectedBatch = new List<double> { expected1, Math.Sqrt(5.0 / 2.0) };
    // when
    double mean1 = Averages.Generalized(smallValues, StandardMeanVariants.Straight, rank1).Value;
    double mean2 = Averages.Generalized(largerValues, StandardMeanVariants.Straight, rank2).Value;
    double mean3 = Averages.Generalized(withNegatives, StandardMeanVariants.Offset, rank3).Value;
    IList<double> meanBatch = Averages.Generalized(batch, StandardMeanVariants.Straight, rank1);
    // then
    Assert.AreEqual(expected1, mean1, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(expected2, mean2, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(expected3, mean3, Assertions.IBM_FLOAT_SURROUNDING);
    Assertions.SameValues(expectedBatch, meanBatch);
}
public void Tolerance() {
    // given: a clustered list plus a widely-spread list, with per-case classifiers
    IList<double> clustered = new List<double> { 1, 2, 3, 3, 1, 2, 5, 5, 5, 2, 1, 3, 3, 2, 1 };
    IList<double> spread = new List<double> { 10, 100, 1000 };
    IList<IList<double>> batch = new List<IList<double>> { clustered, spread };
    double classifierClustered = 2.0;
    double classifierSpread = 11.0;
    double toleranceBatch = 0.1;
    MeansParameters harmonicParams = new MeansParameters();
    harmonicParams.Harmonic.Variant = StandardMeanVariants.Straight;
    double expectedMax = 3.0;
    double expectedMin = 1.0;
    double expectedArithmetic = 2.0;
    double expectedHarmonic = spread[1];
    IList<double> expectedBatch = new List<double> { clustered.Max(), spread[1] };
    // when: tolerance filtering finished by different mean types
    double toleranceMax = Averages.Tolerance(clustered, classifierClustered, MeanType.Maximum).Value;
    double toleranceMin = Averages.Tolerance(clustered, classifierClustered, MeanType.Minimum).Value;
    double toleranceArithmetic = Averages.Tolerance(clustered, classifierClustered, MeanType.Arithmetic).Value;
    double toleranceHarmonic = Averages.Tolerance(spread, classifierSpread, MeanType.Harmonic, harmonicParams).Value;
    IList<double> toleranceMaxBatch = Averages.Tolerance(batch, toleranceBatch, MeanType.Maximum);
    // then
    Assert.AreEqual(expectedMax, toleranceMax, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(expectedMin, toleranceMin, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(expectedArithmetic, toleranceArithmetic, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(expectedHarmonic, toleranceHarmonic, Assertions.IBM_FLOAT_SURROUNDING);
    Assertions.SameValues(expectedBatch, toleranceMaxBatch);
}
public void Harmonic() {
    // given: positive inputs for the straight variant and a negative-containing
    // input exercising the offset variant
    IList<double> positives = new List<double> { 5, 20, 40, 80, 100 };
    IList<double> pair = new List<double> { 2, 3 };
    IList<double> withNegative = new List<double> { 2, -3 };
    IList<IList<double>> batch = new List<IList<double>> { positives, pair };
    const double EXPECTED1 = 16.8067226890756;
    const double EXPECTED2 = 2.4;
    const double EXPECTED3 = -16.0 / 7.0;
    IList<double> expectedBatch = new List<double> { EXPECTED1, EXPECTED2 };
    // when
    double harmonicPositives = Averages.Harmonic(positives, StandardMeanVariants.Straight).Value;
    double harmonicPair = Averages.Harmonic(pair, StandardMeanVariants.Straight).Value;
    double harmonicOffset = Averages.Harmonic(withNegative, StandardMeanVariants.Offset).Value;
    IList<double> harmonicBatch = Averages.Harmonic(batch, StandardMeanVariants.Straight);
    // then
    Assert.AreEqual(EXPECTED1, harmonicPositives, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(EXPECTED2, harmonicPair, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(EXPECTED3, harmonicOffset, Assertions.IBM_FLOAT_SURROUNDING);
    Assertions.SameValues(expectedBatch, harmonicBatch, 1.0E-13);
}
public void SMA() {
    // given: a long list, a minimal two-element list, and irrational values
    IList<double> longList = new List<double> { 1, 2, 1, 2, 3, 4, 5, 2, 2, 1, 1 };
    IList<double> pair = new List<double> { 3, 4 };
    IList<double> irrationals = new List<double> { Math.Sqrt(2.0), 7.0 / 3.0, Math.Log(2.0) };
    IList<IList<double>> batch = new List<IList<double>> { longList, pair, irrationals };
    const double EXPECTED1 = 2.0 + (13.0 / 66.0);
    const double EXPECTED2 = 3.5;
    // Hand-computed simple moving average of the three-element list:
    // mean of the three window averages (leading pair, full triple, trailing pair).
    double expectedIrrationals =
        (((irrationals[0] + irrationals[1]) / 2)
         + ((irrationals[0] + irrationals[1] + irrationals[2]) / 3)
         + ((irrationals[1] + irrationals[2]) / 2)) / 3.0;
    IList<double> expectedBatch = new List<double> { EXPECTED1, EXPECTED2, expectedIrrationals };
    // when
    double smaLong = Averages.SMA(longList).Value;
    double smaPair = Averages.SMA(pair).Value;
    double smaIrrationals = Averages.SMA(irrationals).Value;
    IList<double> smaBatch = Averages.SMA(batch);
    // then
    Assert.AreEqual(EXPECTED1, smaLong, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(EXPECTED2, smaPair, Assertions.IBM_FLOAT_SURROUNDING);
    Assert.AreEqual(expectedIrrationals, smaIrrationals, Assertions.IBM_FLOAT_SURROUNDING);
    Assertions.SameValues(expectedBatch, smaBatch, 1.0E-14);
}
internal StatisticalAnalysis(GeneratorSettings genSets) {
    InitializeComponent();
    Settings = genSets;
    // Noise levels examined by the analysis window.
    Noises = new List<double>() { 0.1, 0.5, 1.0, 11.0 };
    // The data cube is phenomena x noise levels x mean types.
    int phenomenaCount = Enum.GetValues(typeof(Phenomenon)).Length;
    int noisesCount = Noises.Count;
    int meanTypesCount = Enum.GetValues(typeof(MeanType)).Length;
    Data = Lists.GetCloneable(phenomenaCount, noisesCount, new CurvesDataManager());
    Averages = Lists.GetNew<Series>(phenomenaCount, noisesCount, meanTypesCount);
    // Apply the default chart settings to every freshly created series.
    foreach (var perPhenomenon in Averages) {
        foreach (var perNoise in perPhenomenon) {
            foreach (var series in perNoise) {
                SeriesAssist.SetDefaultSettings(series);
            }
        }
    }
    StdDeviations = Lists.GetNew<double>(phenomenaCount, noisesCount, meanTypesCount);
    UpdateUiByGridPresentation();
    UpdateUiBySettings();
    CalculateStandardDeviations();
    UpdateUiByPopulatingStandardDeviationsOnGrid();
    UpdateUiByColoringGridsExtremums();
    LocalizeWindow();
}
public void Array_Average_Less_Than_Fifty() {
    // Exercise 1 with an upper bound of 50 is expected to average to 85.
    var calculator = new Averages();
    Assert.AreEqual(85, calculator.AverageExercise1(50));
}
/// <summary>Delegates the key-existence check to the wrapped Averages dictionary.</summary>
public bool ContainsKey(int key) => Averages.ContainsKey(key);
public void Array_Average_Less_Than_TwoHundredSeventyFive() {
    // Exercise 4 with an upper bound of 275 is expected to average to 458.
    var calculator = new Averages();
    Assert.AreEqual(458, calculator.AverageExercise4(275));
}
public void Array_Average_Less_Than_FiveHundred() {
    // Exercise 5 with an upper bound of 500 is expected to average to 261.
    var calculator = new Averages();
    Assert.AreEqual(261, calculator.AverageExercise5(500));
}
public void Array_Average_Less_Than_OneHundred() {
    // Exercise 2 with an upper bound of 100 is expected to average to 206.
    var calculator = new Averages();
    Assert.AreEqual(206, calculator.AverageExercise2(100));
}
public void Array_Average_Less_Than_OneHundredFifty() {
    // Exercise 3 with an upper bound of 150 is expected to average to 317.
    var calculator = new Averages();
    Assert.AreEqual(317, calculator.AverageExercise3(150));
}
/// <summary>
/// Folds one batch of test results into the accumulated per-group statistics and
/// rewrites the per-map CSV report from scratch.
/// </summary>
/// <param name="testResults">Results of a single test run (one map / cluster size / layer count).</param>
public static void WriteResults(TestResults testResults) {
    var groupedNumberRes = testResults.results.GroupBy((TestResult tr) => tr.GroupingNumber).ToList();
    GroupedTestResult groupTests;
    Averages avgs;
    int count;
    foreach (IGrouping<int, TestResult> group in groupedNumberRes) {
        avgs = new Averages();
        count = 0;
        // Accumulate the sums for this group
        foreach (TestResult t in group) {
            avgs.AvgAStarPathLength += t.AStarResult.PathLength;
            avgs.AvgAStarRunningTime += t.AStarResult.RunningTime;
            avgs.AvgHPAPathLength += t.HPAStarResult.PathLength;
            avgs.AvgHPARunningTime += t.HPAStarResult.RunningTime;
            count++;
        }
        // Turn the sums into averages (GroupBy never yields an empty group, so count > 0)
        avgs.AvgAStarPathLength /= count;
        avgs.AvgAStarRunningTime /= count;
        avgs.AvgHPAPathLength /= count;
        avgs.AvgHPARunningTime /= count;
        // Merge into the existing grouped result if one matches, weighting the old and
        // new averages by their respective sample counts
        groupTests = accumulatedResults.Find((GroupedTestResult g) => g.GroupingNumber == group.Key && g.ClusterSize == testResults.ClusterSize && g.Layers == testResults.Layers);
        if (groupTests != null) {
            int newCount = groupTests.Count + count;
            float oldRatio = (float)groupTests.Count / newCount;
            float newRatio = (float)count / newCount;
            groupTests.AvgClusterGenerateTime = (oldRatio * groupTests.AvgClusterGenerateTime) + (newRatio * testResults.GenerateClusterTime);
            groupTests.AvgAStarPathLength = (oldRatio * groupTests.AvgAStarPathLength) + (newRatio * avgs.AvgAStarPathLength);
            groupTests.AvgAStarRunningTime = (oldRatio * groupTests.AvgAStarRunningTime) + (newRatio * avgs.AvgAStarRunningTime);
            groupTests.AvgHPAPathLength = (oldRatio * groupTests.AvgHPAPathLength) + (newRatio * avgs.AvgHPAPathLength);
            groupTests.AvgHPARunningTime = (oldRatio * groupTests.AvgHPARunningTime) + (newRatio * avgs.AvgHPARunningTime);
            groupTests.Count = newCount;
        } else {
            // No matching entry yet - add a new one
            groupTests = new GroupedTestResult() {
                GroupingNumber = group.Key,
                ClusterSize = testResults.ClusterSize,
                Layers = testResults.Layers,
                AvgClusterGenerateTime = testResults.GenerateClusterTime,
                Count = count,
                AvgAStarPathLength = avgs.AvgAStarPathLength,
                AvgAStarRunningTime = avgs.AvgAStarRunningTime,
                AvgHPAPathLength = avgs.AvgHPAPathLength,
                AvgHPARunningTime = avgs.AvgHPARunningTime
            };
            accumulatedResults.Add(groupTests);
        }
    }
    // Rewrite the whole file.
    // BUG FIX: FileMode.OpenOrCreate does not truncate an existing file, so a rewrite
    // shorter than the previous contents left stale trailing bytes and corrupted the
    // CSV. FileMode.Create creates the file or truncates it to zero length first.
    string path = Path.Combine(GetBaseResultsDirectory(), string.Format("{0}.csv", testResults.MapName));
    using (FileStream fs = new FileStream(path, FileMode.Create))
    using (StreamWriter sw = new StreamWriter(fs)) {
        // Write header
        sw.WriteLine("Group No., Cluster Size, Layers, Count, Avg Cluster Gen. Time, " + "Avg A* Run. Time, Avg A* Path Length, Avg HPA* Run. Time, Avg HPA* Path Length");
        foreach (GroupedTestResult g in accumulatedResults) {
            sw.WriteLine(String.Format("{0},{1},{2},{3},{4},{5},{6},{7},{8}", g.GroupingNumber, g.ClusterSize, g.Layers, g.Count, g.AvgClusterGenerateTime, g.AvgAStarRunningTime, g.AvgAStarPathLength, g.AvgHPARunningTime, g.AvgHPAPathLength));
        }
    }
}
/// <summary>
/// Builds the average curve from the first <paramref name="curvesNo"/> modified curves
/// using the requested mean/smoother and copies it onto the chart.
/// </summary>
/// <param name="method">Which mean or smoother to apply.</param>
/// <param name="curvesNo">How many curves to include; negative is rejected outright.</param>
/// <returns>null for a negative curve count, false when computation failed (logged),
/// otherwise whether the resulting chart is acceptable.</returns>
public bool? TryMakeAverageCurve(MeanType method, int curvesNo) {
    if (curvesNo < 0) {
        return null;
    }
    string callContext = string.Empty;
    IList<double> averaged = new List<double>();
    try {
        // Captured up front so every catch block can log the same call signature.
        callContext = MethodBase.GetCurrentMethod().Name + '(' + method + ',' + curvesNo + ')';
        IList<IList<double>> orderedSetOfCurves = SeriesAssist.GetOrderedCopy(ModifiedCurves, curvesNo);
        // Dispatch to the selected averaging method; parameters come from MeansParams.
        switch (method) {
            case MeanType.Median:
                averaged = Averages.Median(orderedSetOfCurves);
                break;
            case MeanType.Maximum:
                averaged = Averages.Maximum(orderedSetOfCurves);
                break;
            case MeanType.Minimum:
                averaged = Averages.Minimum(orderedSetOfCurves);
                break;
            case MeanType.Arithmetic:
                averaged = Averages.Arithmetic(orderedSetOfCurves);
                break;
            case MeanType.Geometric:
                averaged = Averages.Geometric(orderedSetOfCurves, MeansParams.Geometric.Variant);
                break;
            case MeanType.AGM:
                averaged = Averages.AGM(orderedSetOfCurves, MeansParams.AGM.Variant);
                break;
            case MeanType.Heronian:
                averaged = Averages.Heronian(orderedSetOfCurves, MeansParams.Heronian.Variant);
                break;
            case MeanType.Harmonic:
                averaged = Averages.Harmonic(orderedSetOfCurves, MeansParams.Harmonic.Variant);
                break;
            case MeanType.Generalized:
                averaged = Averages.Generalized(orderedSetOfCurves, MeansParams.Generalized.Variant, MeansParams.Generalized.Rank);
                break;
            case MeanType.SMA:
                averaged = Averages.SMA(orderedSetOfCurves);
                break;
            case MeanType.Tolerance:
                averaged = Averages.Tolerance(orderedSetOfCurves, MeansParams.Tolerance.Tolerance, MeansParams.Tolerance.Finisher);
                break;
            case MeanType.Central:
                averaged = Averages.Central(orderedSetOfCurves, MeansParams.Central.MassPercent);
                break;
            case MeanType.NN:
                averaged = Smoothers.NearestNeighbors(orderedSetOfCurves, MeansParams.NN.Amount);
                break;
            case MeanType.NadarayaWatson:
                averaged = Smoothers.NadarayaWatson(orderedSetOfCurves, MeansParams.NadarayaWatson.Variant, MeansParams.NadarayaWatson.KernelType, MeansParams.NadarayaWatson.KernelSize);
                break;
        }
    }
    // Expected computation failures are logged as errors; anything else is fatal.
    catch (ArgumentOutOfRangeException ex) {
        log.Error(callContext, ex);
        return false;
    }
    catch (OverflowException ex) {
        log.Error(callContext, ex);
        return false;
    }
    catch (InvalidOperationException ex) {
        log.Error(callContext, ex);
        return false;
    }
    catch (Exception ex) {
        log.Fatal(callContext, ex);
        return false;
    }
    AverageCurve.Points.Clear();
    SeriesAssist.CopyPoints(AverageCurve, IdealCurve, averaged);
    return SeriesAssist.IsChartAcceptable(AverageCurve);
}
/// <summary>Delegates the lookup to the wrapped Averages dictionary.</summary>
public bool TryGetValue(int key, out PopularityAverages value) => Averages.TryGetValue(key, out value);
/// <summary>Enumerates the wrapped Averages dictionary's key/value pairs.</summary>
public IEnumerator<KeyValuePair<int, PopularityAverages>> GetEnumerator() => Averages.GetEnumerator();
/// <summary>Non-generic enumeration; forwards to the wrapped Averages dictionary.</summary>
IEnumerator IEnumerable.GetEnumerator() => Averages.GetEnumerator();