// Returns the first "sorted" result collection for the given entity that still has
// room (fewer than MaxEntitiesPerCollectionCount items); creates a new one otherwise.
public ResultCollection GetOrCreateVoteResultCollection(string entityId) {
    var openCollection = documentSession.Query<ResultCollection>()
        .FirstOrDefault(x => x.ResultCollectionItems.Count < MaxEntitiesPerCollectionCount
            && x.EntityId.Equals(entityId)
            && x.ResultCollectionType == "sorted");
    if (openCollection == null) {
        // No open collection exists: find a full one so its items can be migrated
        // into the replacement collection created below.
        var existingFullCollection = documentSession.Query<ResultCollection>()
            .FirstOrDefault(x => x.ResultCollectionItems.Count >= MaxEntitiesPerCollectionCount
                && x.EntityId.Equals(entityId)
                && x.ResultCollectionType == "sorted");
        var newCollection = new ResultCollection(entityId, "sorted");
        if (existingFullCollection != null) {
            // removed for brevity
            // some more code for moving items between an existing collection and the new one.
        }
        // NOTE(review): newCollection is never stored via documentSession in the visible
        // code — presumably the elided block or the caller persists it; confirm.
        return newCollection;
    }
    return openCollection;
}
// Packages the defining components of a Gaussian process model
// (mean function, covariance function and noise sigma) into a result collection.
private ResultCollection CreateResultCollection(IGaussianProcessModel gaussianProcessModel) {
    var collection = new ResultCollection();
    collection.Add(new Result("Mean Function", gaussianProcessModel.MeanFunction));
    collection.Add(new Result("Covariance Function", gaussianProcessModel.CovarianceFunction));
    // Noise sigma is a plain double, so wrap it in a DoubleValue item.
    collection.Add(new Result("Noise sigma", new DoubleValue(gaussianProcessModel.SigmaNoise)));
    return collection;
}
// Template hook for user-defined analysis of a generation; the default body is
// intentionally empty and only documents how to access the best individual.
public void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
    // Use vars.yourVariable to access variables in the variable store i.e. yourVariable
    // Write or update results given the range of vectors and resulting qualities
    // Uncomment the following lines if you want to retrieve the best individual
    // Maximization:
    // var bestIndex = qualities.Select((v, i) => Tuple.Create(i, v)).OrderByDescending(x => x.Item2).First().Item1;
    // Minimization:
    // var bestIndex = qualities.Select((v, i) => Tuple.Create(i, v)).OrderBy(x => x.Item2).First().Item1;
    // var best = individuals[bestIndex];
}
// Recursively dumps every result to the console as
// "<indent><type> <name> : <value>", indenting one tab per nesting level.
static void WriteResults(ResultCollection r, int tab = 0) {
    // The indent is constant for this call, so build it once outside the loop.
    var indent = new string('\t', tab);
    foreach (var result in r) {
        Console.WriteLine($"{indent}{result.TypeName} {result.Name} : {result.Value}");
        // Structs, bitfields and arrays carry children; descend one level deeper.
        if (result.HasChildren)
            WriteResults(result.Children, tab + 1);
    }
}
// Tracks the best lawn-mower solution found so far in the result collection.
// For maximization the best quality is the maximum, for minimization the minimum.
public override void Analyze(ISymbolicExpressionTree[] trees, double[] qualities, ResultCollection results, IRandom random) {
    const string bestSolutionResultName = "Best Solution";
    var bestQuality = Maximization ? qualities.Max() : qualities.Min();
    var bestIdx = Array.IndexOf(qualities, bestQuality);
    if (!results.ContainsKey(bestSolutionResultName)) {
        results.Add(new Result(bestSolutionResultName, new Solution(trees[bestIdx], LawnLengthParameter.Value.Value, LawnWidthParameter.Value.Value, bestQuality)));
    } else {
        var currentBest = (Solution)results[bestSolutionResultName].Value;
        // BUG FIX: the previous condition (currentBest.Quality < qualities[bestIdx])
        // only handled maximization; when Maximization is false an improved (smaller)
        // quality never replaced the stored solution. Direction now follows Maximization.
        bool improved = Maximization
            ? currentBest.Quality < bestQuality
            : currentBest.Quality > bestQuality;
        if (improved) {
            results[bestSolutionResultName].Value = new Solution(trees[bestIdx], LawnLengthParameter.Value.Value, LawnWidthParameter.Value.Value, bestQuality);
        }
    }
}
// Passed must be false as soon as the collection contains at least one failure,
// even when successes and pending results are also present.
public void PassedIsFalseWhenAnyFailureResultsExists() {
    // Arrange: one of each result flavor, including a failure.
    var sut = new ResultCollection();
    sut.Add(new SuccessResult("good"));
    sut.Add(new PendingResult("pending"));
    sut.Add(new FailureResult("bad", new Exception("djsfdsf")));

    // Assert: the failure dominates.
    Assert.IsFalse(sut.Passed.Value);
}
// Template hook for user-defined analysis of a generation; the default body is
// intentionally empty and only documents how to record the best real-vector solution.
public void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
    // Use vars.yourVariable to access variables in the variable store i.e. yourVariable
    // Write or update results given the range of vectors and resulting qualities
    // Uncomment the following lines if you want to retrieve the best individual
    //var orderedIndividuals = individuals.Zip(qualities, (i, q) => new { Individual = i, Quality = q }).OrderBy(z => z.Quality);
    //var best = Maximization ? orderedIndividuals.Last().Individual : orderedIndividuals.First().Individual;
    //if (!results.ContainsKey("Best Solution")) {
    //  results.Add(new Result("Best Solution", typeof(RealVector)));
    //}
    //results["Best Solution"].Value = (IItem)best.RealVector("r").Clone();
}
// Passed must be true when the collection contains only successful results.
public void PassedIsTrueNullWhenNoFailureOrPending() {
    // Arrange: three successes, nothing pending, nothing failed.
    var sut = new ResultCollection();
    for (var i = 0; i < 3; i++) {
        sut.Add(new SuccessResult("good"));
    }

    // Assert: all-success collections pass.
    Assert.IsTrue(sut.Passed.Value);
}
// Clears recorded history in the given results and all nested result collections:
// every data-table cell is overwritten with NaN while the row structure is kept.
private void ClearHistoryRecursively(ResultCollection results) {
    // Materialize once so the two passes below enumerate the same snapshot.
    var values = results.Select(r => r.Value).ToList();
    // First descend into nested result collections...
    foreach (var nested in values.OfType<ResultCollection>())
        ClearHistoryRecursively(nested);
    // ...then blank out every data table in this collection.
    foreach (var table in values.OfType<DataTable>()) {
        foreach (var row in table.Rows)
            for (int cell = 0; cell < row.Values.Count; cell++)
                row.Values[cell] = double.NaN;
    }
}
// Passed must be null (undecided) when pending results exist but no failures do.
public void PassedIsFalsNullWhenPendingResultExistsWithNoFailures() {
    // Arrange: successes surrounding a single pending result.
    var sut = new ResultCollection();
    sut.Add(new SuccessResult("good"));
    sut.Add(new SuccessResult("good"));
    sut.Add(new PendingResult("pending"));
    sut.Add(new SuccessResult("good"));

    // Assert: a pending result leaves the verdict open.
    Assert.IsNull(sut.Passed);
}
// implementation based on Java version: www.aicas.com/download/Whetstone.java
// Runs the Whetstone benchmark until either the configured TimeLimit elapses or
// (when no limit is set) a default number of runs completes, then reports the
// mean rating as MWIPS (millions of Whetstone instructions per second).
public override void Run(CancellationToken cancellationToken, ResultCollection results) {
    bool stopBenchmark = false;
    ITERATIONS = 100; // ITERATIONS / 10 = Millions Whetstone instructions
    numberOfCycles = 100;
    int defaultNumberOfRuns = 10;
    float elapsedTime = 0;
    float meanTime = 0;
    float rating = 0;
    float meanRating = 0;
    int intRating = 0;
    long runNumber = 1;
    Stopwatch sw = new Stopwatch();
    sw.Start();
    while (!stopBenchmark) {
        // MainCalc presumably returns the elapsed time of one pass in milliseconds
        // (converted to seconds here) — TODO confirm against its definition.
        elapsedTime = (float)(MainCalc() / 1000);
        meanTime = meanTime + (elapsedTime * 1000 / numberOfCycles);
        rating = (1000 * numberOfCycles) / elapsedTime;
        meanRating = meanRating + rating;
        intRating = (int)rating;
        // Workload grows each pass so successive ratings sample larger problems.
        numberOfCycles += 10;
        if (cancellationToken.IsCancellationRequested) {
            throw new OperationCanceledException(cancellationToken);
        }
        // No time limit configured: stop after the default number of runs;
        // otherwise keep running until the stopwatch exceeds the limit.
        if ((TimeLimit == null) || (TimeLimit.TotalMilliseconds == 0)) {
            if (runNumber > defaultNumberOfRuns) { stopBenchmark = true; }
        } else if (sw.Elapsed > TimeLimit) { stopBenchmark = true; }
        runNumber++;
    }
    sw.Stop();
    // NOTE(review): runNumber is incremented once more after the final pass, so the
    // divisions below use (passes + 1) — looks like an off-by-one inherited from the
    // Java original; confirm before changing, as it alters reported MWIPS.
    meanTime = meanTime / runNumber;
    meanRating = meanRating / runNumber;
    intRating = (int)meanRating;
    results.Add(new Result("MWIPS", new IntValue(intRating / 1000)));
}
// Extracts the current Pareto front (rank 0 individuals) into a quality matrix and,
// when operating at population level, archives clones of the corresponding scopes.
protected override void Analyze(ItemArray<DoubleArray> qualities, ResultCollection results) {
    var ranks = RankParameter.ActualValue;
    bool populationLevel = RankParameter.Depth == 1;
    int objectiveCount = qualities[0].Length;
    int frontSize = ranks.Count(r => r.Value == 0);
    // The archive is only maintained when ranks are stored at population depth.
    ItemArray<IScope> paretoArchive = populationLevel ? new ItemArray<IScope>(frontSize) : null;
    var front = new DoubleMatrix(frontSize, objectiveCount);
    int row = 0;
    for (int i = 0; i < ranks.Length; i++) {
        if (ranks[i].Value != 0) continue; // only rank-0 (non-dominated) individuals
        for (int k = 0; k < objectiveCount; k++)
            front[row, k] = qualities[i][k];
        if (populationLevel)
            paretoArchive[row] = (IScope)ExecutionContext.Scope.SubScopes[i].Clone();
        row++;
    }
    front.RowNames = GetRowNames(front);
    front.ColumnNames = GetColumnNames(front);
    // Upsert the front (and archive, if any) into the result collection.
    if (results.ContainsKey("Pareto Front"))
        results["Pareto Front"].Value = front;
    else
        results.Add(new Result("Pareto Front", front));
    if (populationLevel) {
        if (results.ContainsKey("Pareto Archive"))
            results["Pareto Archive"].Value = paretoArchive;
        else
            results.Add(new Result("Pareto Archive", paretoArchive));
    }
}
// Default constructor: starts with an empty result collection.
public EmptyAlgorithm() : base() { results = new ResultCollection(); }
// Records the best tank program of the current generation in the result collection
// (quality is maximized). The stored result is replaced unconditionally each call.
public override void Analyze(ISymbolicExpressionTree[] trees, double[] qualities, ResultCollection results, IRandom random) {
    // Scan for the tree with the highest quality. If no quality exceeds
    // negative infinity the champion stays null, matching the original behavior.
    ISymbolicExpressionTree champion = null;
    var championQuality = double.NegativeInfinity;
    for (var i = 0; i < qualities.Length; i++) {
        if (qualities[i] <= championQuality) continue;
        championQuality = qualities[i];
        champion = trees[i];
    }
    // Wrap the winner in a solution instance and upsert it into the results.
    var bestSolution = new Solution(champion, RobocodePath, NrOfRounds, Enemies);
    if (results.ContainsKey("BestSolution")) {
        results["BestSolution"].Value = bestSolution;
    } else {
        results.Add(new Result("BestSolution", "The best tank program", bestSolution));
    }
}
// Builds the solution's result entries (model size metrics plus a nested collection
// of estimation-limit statistics) and computes their initial values.
public SymbolicDiscriminantFunctionClassificationSolution(ISymbolicDiscriminantFunctionClassificationModel model, IClassificationProblemData problemData)
    : base(model, problemData) {
    // Detach the grammar from every top-level node of the model's expression tree.
    foreach (var node in model.SymbolicExpressionTree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTopLevelNode>())
        node.SetGrammar(null);
    Add(new Result(ModelLengthResultName, "Length of the symbolic classification model.", new IntValue()));
    Add(new Result(ModelDepthResultName, "Depth of the symbolic classification model.", new IntValue()));
    // Nested collection holding the estimation-limit bookkeeping results.
    var estimationLimitResults = new ResultCollection();
    estimationLimitResults.Add(new Result(EstimationLimitsResultName, "", new DoubleLimit()));
    // All remaining entries are plain integer counters; add them in a fixed order.
    foreach (var counterName in new[] {
        TrainingUpperEstimationLimitHitsResultName, TestUpperEstimationLimitHitsResultName,
        TrainingLowerEstimationLimitHitsResultName, TestLowerEstimationLimitHitsResultName,
        TrainingNaNEvaluationsResultName, TestNaNEvaluationsResultName }) {
        estimationLimitResults.Add(new Result(counterName, "", new IntValue()));
    }
    Add(new Result(EstimationLimitsResultsResultName, "Results concerning the estimation limits of symbolic regression solution", estimationLimitResults));
    CalculateResults();
}
// Backward-compatibility hook: solutions persisted before the estimation-limit
// results existed get the missing nested collection added after deserialization.
private void AfterDeserialization() {
    if (!ContainsKey(EstimationLimitsResultsResultName)) {
        // Mirror the structure built in the constructor: one DoubleLimit entry
        // followed by six integer counters.
        ResultCollection estimationLimitResults = new ResultCollection();
        estimationLimitResults.Add(new Result(EstimationLimitsResultName, "", new DoubleLimit()));
        estimationLimitResults.Add(new Result(TrainingUpperEstimationLimitHitsResultName, "", new IntValue()));
        estimationLimitResults.Add(new Result(TestUpperEstimationLimitHitsResultName, "", new IntValue()));
        estimationLimitResults.Add(new Result(TrainingLowerEstimationLimitHitsResultName, "", new IntValue()));
        estimationLimitResults.Add(new Result(TestLowerEstimationLimitHitsResultName, "", new IntValue()));
        estimationLimitResults.Add(new Result(TrainingNaNEvaluationsResultName, "", new IntValue()));
        estimationLimitResults.Add(new Result(TestNaNEvaluationsResultName, "", new IntValue()));
        Add(new Result(EstimationLimitsResultsResultName, "Results concerning the estimation limits of symbolic regression solution", estimationLimitResults));
        // Populate the freshly added entries.
        CalculateResults();
    }
}
/// <summary>
/// Analyzes the given qualities and writes/updates entries in <paramref name="results"/>.
/// Implemented by concrete analyzers.
/// </summary>
/// <param name="qualities">Quality vectors, one per individual.</param>
/// <param name="results">Result collection to update.</param>
protected abstract void Analyze(ItemArray<DoubleArray> qualities, ResultCollection results);
// Executes the Linpack benchmark once (without cancellation) and
// returns the measured Mflops/s value.
public static double RunBenchmark() {
    var benchmark = new Linpack();
    var resultCollection = new ResultCollection();
    benchmark.Run(new System.Threading.CancellationToken(), resultCollection);
    // The benchmark stores its throughput under the "Mflops/s" key.
    var mflops = (DoubleValue)resultCollection["Mflops/s"].Value;
    return mflops.Value;
}
/// <summary>
/// Initializes a new instance of the Google.CustomSearch.SearchResult
/// class, using the given ResultCollection to populate this SearchResult.
/// </summary>
/// <param name="results">The ResultCollection to populate this SearchResult.</param>
public SearchResult(ResultCollection results) : this() { this.Results = results; }
// Explicit interface implementation: forwards analysis to the compiled
// (user-scripted) problem definition.
void ISingleObjectiveProblemDefinition.Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) { CompiledProblemDefinition.Analyze(individuals, qualities, results, random); }
// Cloning constructor: deep-copies the original algorithm's state via the cloner.
// Cloning a running algorithm is not supported.
private BenchmarkAlgorithm(BenchmarkAlgorithm original, Cloner cloner) {
    if (original.ExecutionState == ExecutionState.Started)
        // BUG FIX: the message previously formatted this (not yet initialized)
        // instance's ExecutionState instead of the state of the algorithm being
        // cloned, so the exception always reported the default state.
        throw new InvalidOperationException(string.Format("Clone not allowed in execution state \"{0}\".", original.ExecutionState));
    cloner.RegisterClonedObject(original, this);
    name = original.name;
    description = original.description;
    parameters = cloner.Clone(original.parameters);
    readOnlyParameters = null; // recreated lazily
    executionState = original.executionState;
    executionTime = original.executionTime;
    storeAlgorithmInEachRun = original.storeAlgorithmInEachRun;
    runsCounter = original.runsCounter;
    Runs = cloner.Clone(original.runs);
    results = cloner.Clone(original.results);
}
// Cloning constructor: base handles common state; the result collection is deep-copied.
protected BasicAlgorithm(BasicAlgorithm original, Cloner cloner) : base(original, cloner) { results = cloner.Clone(original.Results); }
// Flattens a (possibly nested) result collection into 'values', keying each entry
// by its dot-separated path, e.g. "Outer.Inner.Name".
private void CollectResultsRecursively(string path, ResultCollection results, IDictionary<string, IItem> values) {
    foreach (IResult result in results) {
        var key = path + result.Name;
        values.Add(key, result.Value);
        // Nested collections are descended into with the extended path prefix.
        var nested = result.Value as ResultCollection;
        if (nested != null)
            CollectResultsRecursively(key + ".", nested, values);
    }
}
// implementation based on Java version: http://www.netlib.org/benchmark/linpackjava/
// Repeatedly solves a DEFAULT_PSIZE linear system (LU factorization + back
// substitution), measures the time, and reports throughput as Mflops/s.
// Only the first iteration's numbers are added to the result collection; further
// iterations merely keep the benchmark busy until the TimeLimit expires.
public override void Run(CancellationToken token, ResultCollection results) {
    cancellationToken = token;
    bool stopBenchmark = false;
    TimeSpan executionTime = new TimeSpan();
    bool resultAchieved = false;
    do {
        int n = DEFAULT_PSIZE;
        int ldaa = DEFAULT_PSIZE;
        int lda = DEFAULT_PSIZE + 1; // leading dimension, one larger than n as in the Java original
        double[][] a = new double[ldaa][];
        double[] b = new double[ldaa];
        double[] x = new double[ldaa];
        double ops;
        double norma;
        double normx;
        double resid;
        int i;
        int info;
        int[] ipvt = new int[ldaa];
        for (i = 0; i < ldaa; i++) { a[i] = new double[lda]; }
        // Theoretical floating-point operation count for LU solve: 2/3 n^3 + 2 n^2.
        ops = (2.0e0 * (((double)n) * n * n)) / 3.0 + 2.0 * (n * n);
        // Fill the matrix/vector with the benchmark's generator; returns the matrix norm.
        norma = mathGen(a, lda, n, b);
        if (cancellationToken.IsCancellationRequested) { throw new OperationCanceledException(cancellationToken); }
        // Time only the factorization and the solve.
        sw.Reset();
        sw.Start();
        info = dgefa(a, lda, n, ipvt);
        if (cancellationToken.IsCancellationRequested) { throw new OperationCanceledException(cancellationToken); }
        dgesl(a, lda, n, ipvt, b, 0);
        sw.Stop();
        total = sw.Elapsed.TotalMilliseconds / 1000; // elapsed seconds
        if (cancellationToken.IsCancellationRequested) { throw new OperationCanceledException(cancellationToken); }
        // Residual check: regenerate the system and compute ||A*x - b|| style norms.
        for (i = 0; i < n; i++) { x[i] = b[i]; }
        norma = mathGen(a, lda, n, b);
        for (i = 0; i < n; i++) { b[i] = -b[i]; }
        dmxpy(n, b, n, lda, x, a);
        resid = 0.0;
        normx = 0.0;
        for (i = 0; i < n; i++) {
            resid = (resid > abs(b[i])) ? resid : abs(b[i]);
            normx = (normx > abs(x[i])) ? normx : abs(x[i]);
        }
        eps_result = epslon((double)1.0); // machine epsilon
        // Normalized residual, time and Mflops, each rounded to fixed decimals
        // exactly as the Java original does.
        residn_result = resid / (n * norma * normx * eps_result);
        residn_result += 0.005; // for rounding
        residn_result = (int)(residn_result * 100);
        residn_result /= 100;
        time_result = total;
        time_result += 0.005; // for rounding
        time_result = (int)(time_result * 100);
        time_result /= 100;
        mflops_result = ops / (1.0e6 * total);
        mflops_result += 0.0005; // for rounding
        mflops_result = (int)(mflops_result * 1000);
        mflops_result /= 1000;
        // Record results only once, from the first completed iteration.
        if (!resultAchieved) {
            results.Add(new Result("Mflops/s", new DoubleValue(mflops_result)));
            // "Total" extrapolates single-core throughput to all logical processors.
            results.Add(new Result("Total Mflops/s", new DoubleValue(mflops_result * Environment.ProcessorCount)));
            resultAchieved = true;
        }
        executionTime += sw.Elapsed;
        // No time limit (null or zero) means a single iteration suffices.
        if ((TimeLimit == null) || (TimeLimit.TotalMilliseconds == 0)) stopBenchmark = true;
        else if (executionTime > TimeLimit) stopBenchmark = true;
    } while (!stopBenchmark);
}
// implementation based on Java version: http://www.okayan.jp/DhrystoneApplet/dhry_src.jar
// Runs the classic Dhrystone integer benchmark loop until either the configured
// TimeLimit elapses or (when no limit is set) the default number of runs completes,
// then reports DIPS (Dhrystone iterations per second).
public override void Run(CancellationToken cancellationToken, ResultCollection results) {
    bool stopBenchmark = false;
    int Int_Loc_1;
    int Int_Loc_2;
    int Int_Loc_3;
    // Single-element arrays emulate the Java version's pass-by-reference parameters.
    int[] Int_Loc_1_Ref = new int[1];
    int[] Int_Loc_3_Ref = new int[1];
    char Char_Index;
    int[] Enum_Loc = new int[1];
    string String_Loc_1;
    string String_Loc_2;
    long total_time;
    long Run_Index;
    // Set up the global record chain exactly as the reference implementation does.
    Next_Record_Glob = Second_Record;
    Record_Glob = First_Record;
    Record_Glob.Record_Comp = Next_Record_Glob;
    Record_Glob.Discr = Ident_1;
    Record_Glob.Enum_Comp = Ident_3;
    Record_Glob.Int_Comp = 40;
    Record_Glob.String_Comp = "DHRYSTONE PROGRAM, SOME STRING";
    String_Loc_1 = "DHRYSTONE PROGRAM, 1'ST STRING";
    for (int i = 0; i < 128; i++) { Array_Glob_2[i] = new int[128]; }
    Stopwatch sw = new Stopwatch();
    sw.Start();
    Run_Index = 1;
    while (!stopBenchmark) {
        // One Dhrystone iteration: the fixed sequence of Proc_*/Func_* calls
        // prescribed by the benchmark; do not reorder.
        Proc_5();
        Proc_4();
        Int_Loc_1 = 2;
        Int_Loc_2 = 3;
        String_Loc_2 = "DHRYSTONE PROGRAM, 2'ND STRING";
        Enum_Loc[0] = Ident_2;
        Bool_Glob = !Func_2(String_Loc_1, String_Loc_2);
        while (Int_Loc_1 < Int_Loc_2) {
            Int_Loc_3_Ref[0] = 5 * Int_Loc_1 - Int_Loc_2;
            Proc_7(Int_Loc_1, Int_Loc_2, Int_Loc_3_Ref);
            Int_Loc_1 += 1;
        }
        Int_Loc_3 = Int_Loc_3_Ref[0];
        Proc_8(Array_Glob_1, Array_Glob_2, Int_Loc_1, Int_Loc_3);
        Proc_1(Record_Glob);
        for (Char_Index = 'A'; Char_Index <= Char_Glob_2; ++Char_Index) {
            if (Enum_Loc[0] == Func_1(Char_Index, 'C')) Proc_6(Ident_1, Enum_Loc);
        }
        Int_Loc_3 = Int_Loc_2 * Int_Loc_1;
        Int_Loc_2 = Int_Loc_3 / Int_Loc_1;
        Int_Loc_2 = 7 * (Int_Loc_3 - Int_Loc_2) - Int_Loc_1;
        Int_Loc_1_Ref[0] = Int_Loc_1;
        Proc_2(Int_Loc_1_Ref);
        Int_Loc_1 = Int_Loc_1_Ref[0];
        if (cancellationToken.IsCancellationRequested) {
            throw new OperationCanceledException(cancellationToken);
        }
        // No time limit configured: stop after the default number of runs;
        // otherwise keep iterating until the stopwatch exceeds the limit.
        if ((TimeLimit == null) || (TimeLimit.TotalMilliseconds == 0)) {
            if (Run_Index > Default_Number_Of_Runs) { stopBenchmark = true; }
        } else if (sw.Elapsed > TimeLimit) { stopBenchmark = true; }
        Run_Index++;
    }
    sw.Stop();
    total_time = sw.ElapsedMilliseconds;
    // DIPS = iterations per second (integer division; Run_Index includes the final
    // post-loop increment, mirroring the Java original).
    results.Add(new Result("DIPS", new DoubleValue(Run_Index * 1000 / total_time)));
}
// Default constructor: initializes all state to "stopped, nothing run yet",
// then discovers the available benchmarks and prepares the algorithm.
// The call order (CreateParameters → DiscoverBenchmarks → Prepare) matters:
// parameters must exist before benchmarks are registered and the run is prepared.
public BenchmarkAlgorithm() {
    name = ItemName;
    description = ItemDescription;
    parameters = new ParameterCollection();
    readOnlyParameters = null; // created lazily from 'parameters'
    executionState = ExecutionState.Stopped;
    executionTime = TimeSpan.Zero;
    storeAlgorithmInEachRun = false;
    runsCounter = 0;
    Runs = new RunCollection() { OptimizerName = name };
    results = new ResultCollection();
    CreateParameters();
    DiscoverBenchmarks();
    Prepare();
}
// Binds the result collection to the structure tree view, then applies the
// user's auto-expand preference from the settings.
void SetStructureObjects(ResultCollection collection) {
    tlvStructure.SetObjects(collection);
    var expandMode = Settings.I.AutoExpand;
    if (expandMode == Settings.AutoExpandType.All) {
        tlvStructure.ExpandAll();
    } else if (expandMode == Settings.AutoExpandType.Bitfields) {
        // Expand only the bitfield nodes; everything else stays collapsed.
        foreach (var node in collection) {
            if (node.Type == NodeType.Bitfield)
                tlvStructure.Expand(node);
        }
    }
    // AutoExpandType.None: leave the tree fully collapsed.
}
/// <summary>
/// Executes the benchmark, adding its measurements to <paramref name="results"/>.
/// Implementations observe <paramref name="token"/> for cancellation.
/// </summary>
/// <param name="token">Token used to cancel the benchmark run.</param>
/// <param name="results">Collection the benchmark writes its results into.</param>
public abstract void Run(CancellationToken token, ResultCollection results);
// Every 'updateInterval' applications, computes pairwise solution similarities for
// the current population and records them (heat map, min/avg/max statistics, data
// tables and optional histories) in a dedicated sub-collection of the results.
public override IOperation Apply() {
    int updateInterval = UpdateIntervalParameter.Value.Value;
    IntValue updateCounter = UpdateCounterParameter.ActualValue;
    // if counter does not yet exist then initialize it with update interval
    // to make sure the solutions are analyzed on the first application of this operator
    if (updateCounter == null) {
        updateCounter = new IntValue(updateInterval);
        UpdateCounterParameter.ActualValue = updateCounter;
    } else updateCounter.Value++;
    //analyze solutions only every 'updateInterval' times
    if (updateCounter.Value == updateInterval) {
        updateCounter.Value = 0;
        bool max = MaximizationParameter.ActualValue.Value;
        ItemArray<DoubleValue> qualities = QualityParameter.ActualValue;
        bool storeHistory = StoreHistoryParameter.Value.Value;
        int count = CurrentScopeParameter.ActualValue.SubScopes.Count;
        // Similarity analysis only makes sense for two or more solutions.
        if (count > 1) {
            // calculate solution similarities
            var similarityMatrix = SimilarityCalculator.CalculateSolutionCrowdSimilarity(CurrentScopeParameter.ActualValue);
            // sort similarities by quality
            // NOTE(review): sortedSimilarityMatrix is computed here but never read
            // afterwards (the code below uses the unsorted similarityMatrix) —
            // looks like dead work; confirm before removing.
            double[][] sortedSimilarityMatrix = null;
            if (max)
                sortedSimilarityMatrix = similarityMatrix
                    .Select((x, index) => new { Solutions = x, Quality = qualities[index] })
                    .OrderByDescending(x => x.Quality)
                    .Select(x => x.Solutions)
                    .ToArray();
            else
                sortedSimilarityMatrix = similarityMatrix
                    .Select((x, index) => new { Solutions = x, Quality = qualities[index] })
                    .OrderBy(x => x.Quality)
                    .Select(x => x.Solutions)
                    .ToArray();
            // Copy the jagged similarity matrix into a rectangular array for the heat map.
            double[,] similarities = new double[similarityMatrix.Length, similarityMatrix[0].Length];
            for (int i = 0; i < similarityMatrix.Length; i++)
                for (int j = 0; j < similarityMatrix[0].Length; j++)
                    similarities[i, j] = similarityMatrix[i][j];
            // calculate minimum, average and maximum similarities
            // (per solution, excluding self-similarity on the diagonal)
            double similarity;
            double[] minSimilarities = new double[count];
            double[] avgSimilarities = new double[count];
            double[] maxSimilarities = new double[count];
            for (int i = 0; i < count; i++) {
                minSimilarities[i] = 1;
                avgSimilarities[i] = 0;
                maxSimilarities[i] = 0;
                for (int j = 0; j < count; j++) {
                    if (i != j) {
                        similarity = similarities[i, j];
                        // Similarities outside [0;1] indicate a broken calculator.
                        if ((similarity < 0) || (similarity > 1))
                            throw new InvalidOperationException("Solution similarities have to be in the interval [0;1].");
                        if (minSimilarities[i] > similarity) minSimilarities[i] = similarity;
                        avgSimilarities[i] += similarity;
                        if (maxSimilarities[i] < similarity) maxSimilarities[i] = similarity;
                    }
                }
                avgSimilarities[i] = avgSimilarities[i] / (count - 1);
            }
            double avgMinSimilarity = minSimilarities.Average();
            double avgAvgSimilarity = avgSimilarities.Average();
            double avgMaxSimilarity = maxSimilarities.Average();
            // fetch results collection (a nested collection named after this operator)
            ResultCollection results;
            if (!ResultsParameter.ActualValue.ContainsKey(Name + " Results")) {
                results = new ResultCollection();
                ResultsParameter.ActualValue.Add(new Result(Name + " Results", results));
            } else {
                results = (ResultCollection)ResultsParameter.ActualValue[Name + " Results"].Value;
            }
            // store similarities
            HeatMap similaritiesHeatMap = new HeatMap(similarities, "Solution Similarities", 0.0, 1.0);
            if (!results.ContainsKey("Solution Similarities"))
                results.Add(new Result("Solution Similarities", similaritiesHeatMap));
            else
                results["Solution Similarities"].Value = similaritiesHeatMap;
            // store similarities history
            if (storeHistory) {
                if (!results.ContainsKey("Solution Similarities History")) {
                    HeatMapHistory history = new HeatMapHistory();
                    history.Add(similaritiesHeatMap);
                    results.Add(new Result("Solution Similarities History", history));
                } else {
                    ((HeatMapHistory)results["Solution Similarities History"].Value).Add(similaritiesHeatMap);
                }
            }
            // store average minimum, average and maximum similarity
            if (!results.ContainsKey("Average Minimum Solution Similarity"))
                results.Add(new Result("Average Minimum Solution Similarity", new DoubleValue(avgMinSimilarity)));
            else
                ((DoubleValue)results["Average Minimum Solution Similarity"].Value).Value = avgMinSimilarity;
            if (!results.ContainsKey("Average Average Solution Similarity"))
                results.Add(new Result("Average Average Solution Similarity", new DoubleValue(avgAvgSimilarity)));
            else
                ((DoubleValue)results["Average Average Solution Similarity"].Value).Value = avgAvgSimilarity;
            if (!results.ContainsKey("Average Maximum Solution Similarity"))
                results.Add(new Result("Average Maximum Solution Similarity", new DoubleValue(avgMaxSimilarity)));
            else
                ((DoubleValue)results["Average Maximum Solution Similarity"].Value).Value = avgMaxSimilarity;
            // store average minimum, average and maximum solution similarity data table
            // (one point appended per analysis; rows are created on first use)
            DataTable minAvgMaxSimilarityDataTable;
            if (!results.ContainsKey("Average Minimum/Average/Maximum Solution Similarity")) {
                minAvgMaxSimilarityDataTable = new DataTable("Average Minimum/Average/Maximum Solution Similarity");
                minAvgMaxSimilarityDataTable.VisualProperties.XAxisTitle = "Iteration";
                minAvgMaxSimilarityDataTable.VisualProperties.YAxisTitle = "Solution Similarity";
                minAvgMaxSimilarityDataTable.Rows.Add(new DataRow("Average Minimum Solution Similarity", null));
                minAvgMaxSimilarityDataTable.Rows["Average Minimum Solution Similarity"].VisualProperties.StartIndexZero = true;
                minAvgMaxSimilarityDataTable.Rows.Add(new DataRow("Average Average Solution Similarity", null));
                minAvgMaxSimilarityDataTable.Rows["Average Average Solution Similarity"].VisualProperties.StartIndexZero = true;
                minAvgMaxSimilarityDataTable.Rows.Add(new DataRow("Average Maximum Solution Similarity", null));
                minAvgMaxSimilarityDataTable.Rows["Average Maximum Solution Similarity"].VisualProperties.StartIndexZero = true;
                results.Add(new Result("Average Minimum/Average/Maximum Solution Similarity", minAvgMaxSimilarityDataTable));
            } else {
                minAvgMaxSimilarityDataTable = (DataTable)results["Average Minimum/Average/Maximum Solution Similarity"].Value;
            }
            minAvgMaxSimilarityDataTable.Rows["Average Minimum Solution Similarity"].Values.Add(avgMinSimilarity);
            minAvgMaxSimilarityDataTable.Rows["Average Average Solution Similarity"].Values.Add(avgAvgSimilarity);
            minAvgMaxSimilarityDataTable.Rows["Average Maximum Solution Similarity"].Values.Add(avgMaxSimilarity);
            // store minimum, average, maximum similarities data table
            // (per-solution scatter; rebuilt from scratch every analysis)
            DataTable minAvgMaxSimilaritiesDataTable = new DataTable("Minimum/Average/Maximum Solution Similarities");
            minAvgMaxSimilaritiesDataTable.VisualProperties.XAxisTitle = "Solution Index";
            minAvgMaxSimilaritiesDataTable.VisualProperties.YAxisTitle = "Solution Similarity";
            minAvgMaxSimilaritiesDataTable.Rows.Add(new DataRow("Minimum Solution Similarity", null, minSimilarities));
            minAvgMaxSimilaritiesDataTable.Rows["Minimum Solution Similarity"].VisualProperties.ChartType = DataRowVisualProperties.DataRowChartType.Points;
            minAvgMaxSimilaritiesDataTable.Rows.Add(new DataRow("Average Solution Similarity", null, avgSimilarities));
            minAvgMaxSimilaritiesDataTable.Rows["Average Solution Similarity"].VisualProperties.ChartType = DataRowVisualProperties.DataRowChartType.Points;
            minAvgMaxSimilaritiesDataTable.Rows.Add(new DataRow("Maximum Solution Similarity", null, maxSimilarities));
            minAvgMaxSimilaritiesDataTable.Rows["Maximum Solution Similarity"].VisualProperties.ChartType = DataRowVisualProperties.DataRowChartType.Points;
            if (!results.ContainsKey("Minimum/Average/Maximum Solution Similarities")) {
                results.Add(new Result("Minimum/Average/Maximum Solution Similarities", minAvgMaxSimilaritiesDataTable));
            } else {
                results["Minimum/Average/Maximum Solution Similarities"].Value = minAvgMaxSimilaritiesDataTable;
            }
            // store minimum, average, maximum similarities history
            if (storeHistory) {
                if (!results.ContainsKey("Minimum/Average/Maximum Solution Similarities History")) {
                    DataTableHistory history = new DataTableHistory();
                    history.Add(minAvgMaxSimilaritiesDataTable);
                    results.Add(new Result("Minimum/Average/Maximum Solution Similarities History", history));
                } else {
                    ((DataTableHistory)results["Minimum/Average/Maximum Solution Similarities History"].Value).Add(minAvgMaxSimilaritiesDataTable);
                }
            }
        }
    }
    return base.Apply();
}
// Default constructor: starts with an empty result collection.
protected BasicAlgorithm() : base() { results = new ResultCollection(); }
// Renders one HTML grid table per linked site, listing the rows of 'dataSet'
// that belong to that site in the given link 'direction'. Only "Issues" and
// "Results" reference types are rendered; other types produce no table body.
private static HtmlBuilder LinkTables(
    this HtmlBuilder hb,
    IEnumerable<SiteSettings> ssList,
    DataSet dataSet,
    string direction,
    string caption)
{
    ssList.ForEach(ss => hb.Table(css: "grid", action: () =>
    {
        // Tables in the data set are keyed "<ReferenceType>_<direction>";
        // keep only the rows for this site.
        var dataRows = dataSet.Tables[ss.ReferenceType + "_" + direction]?
            .AsEnumerable()
            .Where(o => o["SiteId"].ToLong() == ss.SiteId);
        var siteMenu = SiteInfo.TenantCaches[Sessions.TenantId()].SiteMenu;
        if (dataRows != null && dataRows.Any())
        {
            // Apply per-column access control before choosing visible columns.
            ss.SetColumnAccessControls();
            var columns = ss.GetLinkColumns(checkPermission: true);
            switch (ss.ReferenceType)
            {
                case "Issues":
                    var issueCollection = new IssueCollection(ss, dataRows);
                    issueCollection.SetLinks(ss);
                    // Caption: "<caption> : <breadcrumb> - <quantity label> <count>".
                    hb
                        .Caption(caption: "{0} : {1} - {2} {3}".Params(
                            caption,
                            siteMenu.Breadcrumb(ss.SiteId)
                                .Select(o => o.Title)
                                .Join(" > "),
                            Displays.Quantity(),
                            dataRows.Count()))
                        .THead(action: () => hb
                            .GridHeader(columns: columns, sort: false, checkRow: false))
                        .TBody(action: () => issueCollection
                            .ForEach(issueModel =>
                            {
                                // Column access may depend on record ownership.
                                ss.SetColumnAccessControls(issueModel.Mine());
                                hb.Tr(
                                    attributes: new HtmlAttributes()
                                        .Class("grid-row")
                                        .DataId(issueModel.IssueId.ToString()),
                                    action: () => columns
                                        .ForEach(column => hb
                                            .TdValue(
                                                ss: ss,
                                                column: column,
                                                issueModel: issueModel)));
                            }));
                    break;
                case "Results":
                    // Mirrors the "Issues" branch for result records.
                    var resultCollection = new ResultCollection(ss, dataRows);
                    resultCollection.SetLinks(ss);
                    hb
                        .Caption(caption: "{0} : {1} - {2} {3}".Params(
                            caption,
                            siteMenu.Breadcrumb(ss.SiteId)
                                .Select(o => o.Title)
                                .Join(" > "),
                            Displays.Quantity(),
                            dataRows.Count()))
                        .THead(action: () => hb
                            .GridHeader(columns: columns, sort: false, checkRow: false))
                        .TBody(action: () => resultCollection
                            .ForEach(resultModel =>
                            {
                                ss.SetColumnAccessControls(resultModel.Mine());
                                hb.Tr(
                                    attributes: new HtmlAttributes()
                                        .Class("grid-row")
                                        .DataId(resultModel.ResultId.ToString()),
                                    action: () => columns
                                        .ForEach(column => hb
                                            .TdValue(
                                                ss: ss,
                                                column: column,
                                                resultModel: resultModel)));
                            }));
                    break;
            }
        }
    }));
    return(hb);
}