//
// Compares all generated problems:
// If a problem has a subset of givens (compared to another problem) then the problem with the subset is chosen.
// If a problem has the same givens and goal, the shorter (edge-based) problem is chosen.
//
// Returns the filtered list; an empty input list is returned unchanged.
// FIX: debug output was inconsistent (some guards commented out leaving WriteLine calls
// unconditional, some guards left with empty bodies); all debug output is now uniformly
// guarded by the existing debug flags. Filtering logic is unchanged.
//
public List <Problem <Hypergraph.EdgeAnnotation> > FilterForMinimalAndRedundantProblems(List <Problem <Hypergraph.EdgeAnnotation> > problems)
{
    List <Problem <Hypergraph.EdgeAnnotation> > filtered = new List <Problem <Hypergraph.EdgeAnnotation> >();

    // It is possible for no problems to be generated
    if (!problems.Any()) { return(problems); }

    // For each problem, break the givens into actual vs. suppressed given information
    problems.ForEach(problem => problem.DetermineSuppressedGivens(graph));

    //
    // Filter the problems based on same set of source nodes and goal node.
    // All of these problems have exactly the same goal node.
    // Now, if we have multiple problems with the exact same (non-suppressed) source nodes, choose the one with shortest path.
    //
    bool[] marked = new bool[problems.Count];
    for (int p1 = 0; p1 < problems.Count - 1; p1++)
    {
        // We may have marked this earlier
        if (!marked[p1])
        {
            // Save the minimal problem seen so far for this goal/givens group.
            Problem <Hypergraph.EdgeAnnotation> minimalProblem = problems[p1];

            for (int p2 = p1 + 1; p2 < problems.Count; p2++)
            {
                // If we have not yet compared to a problem
                if (!marked[p2])
                {
                    // Both problems need the same goal node
                    if (minimalProblem.goal == problems[p2].goal)
                    {
                        // Check if the givens from the minimal problem and this candidate problem equate exactly
                        if (Utilities.EqualSets <int>(minimalProblem.givens, problems[p2].givens))
                        {
                            // We have now analyzed this problem
                            marked[p2] = true;

                            // Choose the shorter problem (fewer edges wins)
                            if (problems[p2].edges.Count < minimalProblem.edges.Count)
                            {
                                if (Utilities.PROBLEM_GEN_DEBUG) Debug.WriteLine("Outer Filtering: " + minimalProblem.ToString() + " for " + problems[p2].ToString());
                                minimalProblem = problems[p2];
                            }
                            else
                            {
                                if (Utilities.PROBLEM_GEN_DEBUG) Debug.WriteLine("Outer Filtering: " + problems[p2].ToString() + " for " + minimalProblem.ToString());
                            }
                        }
                        // Check if the givens from new problem are a subset of the givens of the minimal problem.
                        // NOTE(review): assumes Utilities.Subset<T>(set, candidate) tests candidate ⊆ set, so
                        // problems[p2] (the subset) becomes the new minimal problem here — confirm against Utilities.
                        else if (Utilities.Subset <int>(minimalProblem.givens, problems[p2].givens))
                        {
                            marked[p2] = true;
                            if (Utilities.PROBLEM_GEN_DEBUG || Utilities.BACKWARD_PROBLEM_GEN_DEBUG)
                            {
                                Debug.WriteLine("Filtering for Minimal Givens: " + minimalProblem.ToString() + " for " + problems[p2].ToString());
                            }
                            minimalProblem = problems[p2];
                        }
                        // Symmetric case: the minimal problem's givens are a subset of the candidate's; keep the minimal problem.
                        else if (Utilities.Subset <int>(problems[p2].givens, minimalProblem.givens))
                        {
                            marked[p2] = true;
                            if (Utilities.PROBLEM_GEN_DEBUG || Utilities.BACKWARD_PROBLEM_GEN_DEBUG)
                            {
                                Debug.WriteLine("Filtering for Minimal Givens: " + problems[p2].ToString() + " for " + minimalProblem.ToString());
                            }
                        }
                    }
                }
            }

            // Add the minimal problem to the list to be returned
            filtered.Add(minimalProblem);
        }
    }

    // Pick up last problem in the list; the outer loop stops one short of the end,
    // so an unmarked final problem was never compared as p1 and must be kept.
    if (!marked[problems.Count - 1])
    {
        filtered.Add(problems[problems.Count - 1]);
    }

    if (Utilities.PROBLEM_GEN_DEBUG)
    {
        Debug.WriteLine("Generated Problems: " + problems.Count);
        Debug.WriteLine("Filtered Problems: " + (problems.Count - filtered.Count));
        Debug.WriteLine("Problems Remaining: " + filtered.Count);
    }

    // Sanity check: filtering can never grow the list.
    if (problems.Count < filtered.Count)
    {
        Debug.WriteLine("Filtered list is larger than original list!");
    }

    return(filtered);
}
public void Add(Problem <Hypergraph.EdgeAnnotation> p) { elements.Add(p); }
// // Given a problem and query vector determine strict isomorphism between this problem and this partition of problems // public bool IsStrictlyIsomorphic(Problem <Hypergraph.EdgeAnnotation> newProblem, QueryFeatureVector query) { // // GOAL // if (query.goalIsomorphism) { if (!AreNodesIsomorphic(elements[0].goal, newProblem.goal)) { return(false); } } // // LENGTH // if (query.lengthPartitioning) { if (query.rangedLengthPartitioning) { if (!AreRangedEqualLength(query, elements[0], newProblem)) { return(false); } } else { if (!AreEqualLength(elements[0], newProblem)) { return(false); } } } // // WIDTH // if (query.widthPartitioning) { if (query.rangedWidthPartitioning) { if (!AreRangedEqualWidth(query, elements[0], newProblem)) { return(false); } } else { if (!AreEqualWidth(elements[0], newProblem)) { return(false); } } } // // DEDUCTIVE STEPS // if (query.deductiveStepsPartitioning) { if (query.rangedDeductiveStepsPartitioning) { if (!AreRangedEqualDeductiveSteps(query, elements[0], newProblem)) { return(false); } } else { if (!AreEqualDeductiveSteps(elements[0], newProblem)) { return(false); } } } // // Add other query checks here.... // // // Interestingness query (% of givens covered) // if (query.interestingPartitioning) { if (!AreRangedEqualInteresting(query, elements[0], newProblem)) { return(false); } } // // SOURCE NODE // if (query.sourceIsomorphism) { if (!AreSourceNodesIsomorphic(elements[0].givens, newProblem.givens)) { return(false); } } return(true); }
private bool AreRangedEqualDeductiveSteps(QueryFeatureVector query, Problem <Hypergraph.EdgeAnnotation> thisProblem, Problem <Hypergraph.EdgeAnnotation> thatProblem) { return(query.stepsPartitions.GetPartitionIndex(thisProblem.GetNumDeductiveSteps()) == query.stepsPartitions.GetPartitionIndex(thatProblem.GetNumDeductiveSteps())); }
private bool AreRangedEqualInteresting(QueryFeatureVector query, Problem <Hypergraph.EdgeAnnotation> thisProblem, Problem <Hypergraph.EdgeAnnotation> thatProblem) { return(query.interestingPartitions.GetPartitionIndex(thisProblem.interestingPercentage) == query.interestingPartitions.GetPartitionIndex(thatProblem.interestingPercentage)); }
private bool AreRangedEqualWidth(QueryFeatureVector query, Problem <Hypergraph.EdgeAnnotation> thisProblem, Problem <Hypergraph.EdgeAnnotation> thatProblem) { return(query.widthPartitions.GetPartitionIndex(thisProblem.GetWidth()) == query.widthPartitions.GetPartitionIndex(thatProblem.GetWidth())); }
private bool AreEqualDeductiveSteps(Problem <Hypergraph.EdgeAnnotation> thisProblem, Problem <Hypergraph.EdgeAnnotation> thatProblem) { return(thisProblem.GetNumDeductiveSteps() == thatProblem.GetNumDeductiveSteps()); }
private bool AreEqualWidth(Problem <Hypergraph.EdgeAnnotation> thisProblem, Problem <Hypergraph.EdgeAnnotation> thatProblem) { return(thisProblem.GetWidth() == thatProblem.GetWidth()); }
//
// Create a new problem based on thisProblem and thatProblem in accordance with the above comments (repeated here)
//
// This problem { This Givens } { This Path } -> This Goal
// The new problem is of the form: { That Givens } { That Path } -> Goal
// Combined: { New Givens U This Givens \minus This Goal} {This Path U This Goal } -> Goal
//
// Mutates this problem in place: degenerates this problem's givens by thatProblem's goal,
// merges paths / givens / suppressed givens uniquely, and absorbs thatProblem's edges.
// Throws ArgumentException if thatProblem is empty or the two problems do not connect
// goal -> given; throws Exception if the merge introduces a cycle.
//
public void Append(Hypergraph.Hypergraph <ConcreteAST.GroundedClause, Hypergraph.EdgeAnnotation> graph, HyperEdgeMultiMap <A> forwardEdges, Problem <A> thatProblem)
{
    // A goal of -1 marks an empty / uninitialized problem; appending one is a caller error.
    if (thatProblem.goal == -1)
    {
        throw new ArgumentException("Attempt to append with an empty problem " + this + " " + thatProblem);
    }

    //
    // If this is an empty problem, populate it like a copy constructor and return
    //
    if (this.goal == -1)
    {
        givens = new List <int>(thatProblem.givens);
        goal = thatProblem.goal;
        path = new List <int>(thatProblem.path);
        edges = new List <PebblerHyperEdge <A> >(thatProblem.edges);
        suppressedGivens = new List <int>(thatProblem.suppressedGivens);
        // NOTE(review): edges were just copied above and are also re-registered here via
        // AddEdge — presumably AddEdge maintains the internal problem graph and handles
        // duplicates; confirm no double-counting.
        thatProblem.edges.ForEach(edge => this.AddEdge(edge));
        return;
    }

    //
    // Standard appending of an existent problem to another existent problem
    //
    // thatProblem's goal must be one of this problem's givens for the two to chain.
    if (!this.givens.Contains(thatProblem.goal))
    {
        throw new ArgumentException("Attempt to append problems that do not connect goal->given" + this + " " + thatProblem);
    }

    // Degenerate by removing the new problem goal from THIS source node.
    this.givens.Remove(thatProblem.goal);

    // Add the 'new problem' goal node to the path of the new Problem (uniquely)
    Utilities.AddUnique <int>(this.path, thatProblem.goal);

    // Add the path nodes to THIS path
    Utilities.AddUniqueList <int>(this.path, thatProblem.path);

    // Add all the new sources to the degenerated old sources; do so uniquely
    Utilities.AddUniqueList <int>(this.givens, thatProblem.givens);
    Utilities.AddUniqueList <int>(this.suppressedGivens, thatProblem.suppressedGivens);

    // Add all of the edges of that problem to this problem; this also adds to the problem graph
    thatProblem.edges.ForEach(edge => this.AddEdge(edge));

    // The merged edge set must remain acyclic for the problem to be meaningful.
    if (this.ContainsCycle())
    {
        throw new Exception("Problem contains a cycle" + this.graph.GetStronglyConnectedComponentDump());
        // Remove an edge from this problem?
    }

    // Now, if there exists a node in the path AND in the givens, remove it from the givens.
    foreach (int p in this.path)
    {
        if (this.givens.Remove(p))
        {
            // if (Utilities.PROBLEM_GEN_DEBUG)
            System.Diagnostics.Debug.WriteLine("A node existed in the path AND givens (" + p + "); removing from givens");
        }
    }

    // Final validation that the merged problem is still deducible via the forward edges.
    PerformDeducibilityCheck(forwardEdges);
}
//
// A problem is defined as interesting if:
//   1. It is minimal in its given information
//   2. The problem implies all of the facts of the given figure; that is, if the set of all
//      the facts of a figure are not in the source of the problem, then reject
//
// Returns a vector (indexed by intrinsic category: points, segments, angles, intersections,
// triangles, in-middles) of the fraction of the figure's components of that category covered
// by the problem's givens; a category with no components at all counts as fully covered (1).
//
private double[] InterestingProblemCoverage(Problem <Hypergraph.EdgeAnnotation> problem)
{
    //
    // Collect all of the figure intrinsic covered nodes
    //
    List <int> intrinsicCollection = new List <int>();
    foreach (int src in problem.givens)
    {
        Utilities.AddUniqueList <int>(intrinsicCollection, graph.vertices[src].data.figureComponents);
    }

    // Sort is not required, but for debug is easier to digest
    intrinsicCollection.Sort();

    //
    // Tally covered / uncovered figure components per intrinsic category.
    //
    int[] numCoveredNodes = new int[NUM_INTRINSIC];
    int[] numUncoveredNodes = new int[NUM_INTRINSIC];
    int totalCovered = 0;
    int totalUncovered = 0;
    foreach (GroundedClause gc in figure)
    {
        bool covered = intrinsicCollection.Contains(gc.clauseId);

        if (!covered && INTERESTING_DEBUG)
        {
            System.Diagnostics.Debug.WriteLine("Uncovered: " + gc.ToString());
        }

        // Map the clause to its intrinsic category once; -1 means an untracked clause type
        // (still counted in the totals, as before).
        int category = IntrinsicCategoryOf(gc);
        if (covered)
        {
            if (category >= 0) numCoveredNodes[category]++;
            totalCovered++;
        }
        else
        {
            if (category >= 0) numUncoveredNodes[category]++;
            totalUncovered++;
        }
    }

    if (INTERESTING_DEBUG)
    {
        System.Diagnostics.Debug.WriteLine("Covered: ");
        System.Diagnostics.Debug.WriteLine("\tPoints\t\t\t" + numCoveredNodes[POINTS]);
        System.Diagnostics.Debug.WriteLine("\tSegments\t\t" + numCoveredNodes[SEGMENTS]);
        System.Diagnostics.Debug.WriteLine("\tAngles\t\t\t" + numCoveredNodes[ANGLES]);
        System.Diagnostics.Debug.WriteLine("\tIntersection\t" + numCoveredNodes[INTERSECTION]);
        System.Diagnostics.Debug.WriteLine("\tTriangles\t\t" + numCoveredNodes[TRIANGLES]);
        System.Diagnostics.Debug.WriteLine("\tInMiddles\t\t" + numCoveredNodes[IN_MIDDLES]);
        System.Diagnostics.Debug.WriteLine("\t\t\t\t\t" + totalCovered);
        System.Diagnostics.Debug.WriteLine("Uncovered: ");
        System.Diagnostics.Debug.WriteLine("\tPoints\t\t\t" + numUncoveredNodes[POINTS]);
        System.Diagnostics.Debug.WriteLine("\tSegments\t\t" + numUncoveredNodes[SEGMENTS]);
        System.Diagnostics.Debug.WriteLine("\tAngles\t\t\t" + numUncoveredNodes[ANGLES]);
        System.Diagnostics.Debug.WriteLine("\tIntersection\t" + numUncoveredNodes[INTERSECTION]);
        System.Diagnostics.Debug.WriteLine("\tTriangles\t\t" + numUncoveredNodes[TRIANGLES]);
        System.Diagnostics.Debug.WriteLine("\tInMiddles\t\t" + numUncoveredNodes[IN_MIDDLES]);
        System.Diagnostics.Debug.WriteLine("\t\t\t\t\t" + totalUncovered);
    }

    //
    // Calculate the coverage percentages
    //
    double[] percentageCovered = new double[NUM_INTRINSIC];
    for (int w = 0; w < NUM_INTRINSIC; w++)
    {
        int categoryTotal = numCoveredNodes[w] + numUncoveredNodes[w];

        // If there are none of the particular node we have 'covered' them all
        percentageCovered[w] = categoryTotal == 0 ? 1 : (double)(numCoveredNodes[w]) / categoryTotal;
    }

    return(percentageCovered);
}

// Maps a clause to its intrinsic-category index (POINTS, SEGMENTS, ...), or -1 for clause
// types that are not tracked. Test order is preserved from the original duplicated chains
// so subtype relationships (e.g. specialized triangles) classify identically.
private int IntrinsicCategoryOf(GroundedClause gc)
{
    if (gc is Point) return POINTS;
    if (gc is Segment) return SEGMENTS;
    if (gc is Angle) return ANGLES;
    if (gc is Intersection) return INTERSECTION;
    if (gc is Triangle) return TRIANGLES;
    if (gc is InMiddle) return IN_MIDDLES;
    return -1;
}