/// <summary>
/// Records the just-applied option in this candidate's recipe. This is invoked
/// (currently only) from the RCA loop immediately after a rule is APPLIED: a rule
/// application updates the currentState, so this correspondingly appends the
/// option that produced it.
/// </summary>
/// <param name="currentOpt">The option (rule choice) that was just applied.</param>
public virtual void addToRecipe(option currentOpt)
{
    // Store a copy rather than the live option so later mutations of the
    // option do not retroactively alter the recorded recipe.
    var recipeEntry = currentOpt.copy();
    recipe.Add(recipeEntry);
}
/// <summary>
/// Chooses the parameters for applying the given option to the candidate.
/// This implementation is non-parametric: no numeric values are required by
/// the rule, so null is always returned.
/// </summary>
/// <param name="RC">The chosen rule/option (unused).</param>
/// <param name="cand">The candidate being modified (unused).</param>
/// <returns>Always null - no parameters are supplied.</returns>
public override double[] choose(option RC, candidate cand)
{
    double[] noParametersNeeded = null;
    return noParametersNeeded;
}
/// <summary>
/// Returns a copy of this candidate. Very similar to designGraph copy: we
/// deliberately avoid a shallow copy (a la Clone) since we are unsure how each
/// candidate may be changed in the future. The current state, all previous
/// states, and the recipe entries are duplicated; note that within each recipe
/// entry the rule and location references are shared, not cloned.
/// </summary>
/// <returns>A new candidate duplicating this one.</returns>
public candidate copy()
{
    var duplicate = new candidate();
    duplicate.currentState = currentState.copy();
    // Deep-copy every previous state graph.
    foreach (designGraph priorState in prevStates)
        duplicate.prevStates.Add(priorState.copy());
    // Re-create each recipe step; rule and location remain shared references.
    foreach (option step in recipe)
    {
        var stepCopy = new option
        {
            ruleSetIndex = step.ruleSetIndex,
            ruleNumber = step.ruleNumber,
            rule = step.rule,
            location = step.location
        };
        duplicate.recipe.Add(stepCopy);
    }
    // Value-type lists: element-wise copy is sufficient.
    foreach (double perf in performanceParams)
        duplicate.performanceParams.Add(perf);
    foreach (GenerationStatuses status in GenerationStatus)
        duplicate.GenerationStatus.Add(status);
    return duplicate;
}
/// <summary>
/// Predicts whether applying option p invalidates option q (i.e. whether the two
/// rule applications fail to be confluent). This invalidation is a tricky thing.
/// For the most part, this function has been carefully coded to handle all cases
/// statically. The only exceptions are effects of additional recognize and apply
/// functions, which can only be determined by actually testing
/// (see fullInvalidationCheck).
/// </summary>
/// <param name="p">The option whose application may invalidate q.</param>
/// <param name="q">The option whose later recognition may be invalidated.</param>
/// <param name="cand">The candidate both options were recognized on.</param>
/// <param name="confluenceAnalysis">When Full, an empirical check is run for the undecidable cases.</param>
/// <returns>1 if p invalidates q; 0 if undetermined ("I don't know"); -1 if no invalidation (confluence, maybe).</returns>
private static int doesPInvalidateQ(option p, option q, candidate cand, ConfluenceAnalysis confluenceAnalysis)
{
    #region Global Labels
    var pIntersectLabels = p.rule.L.globalLabels.Intersect(p.rule.R.globalLabels);
    var pRemovedLabels = new List<string>(p.rule.L.globalLabels);
    pRemovedLabels.RemoveAll(s => pIntersectLabels.Contains(s));
    var pAddedLabels = new List<string>(p.rule.R.globalLabels);
    pAddedLabels.RemoveAll(s => pIntersectLabels.Contains(s));
    if ( /* first check that there are no labels deleted that the other depends on */
        (q.rule.L.globalLabels.Intersect(pRemovedLabels).Any())
        /* adding labels is problematic if the other rule was recognized under
         * the condition of containsAllGlobalLabels. */
        || ((q.rule.containsAllGlobalLabels) && (pAddedLabels.Any()))
        /* adding labels is also problematic if you add a label that negates the
         * other rule. */
        || (pAddedLabels.Intersect(q.rule.negateLabels).Any()))
        return 1;
    #endregion

    #region Nodes
    /* If the two options share no nodes the whole block below is skipped - this is
     * to save time when comparing many options on a large graph. The following three
     * variables are declared here (not inside the block) because they are reused in
     * the induced and shape-restriction checks at the bottom - why calculate twice? */
    var Num_pKNodes = 0;
    string[] pNodesKNames = null;
    node[] pKNodes = null;
    var commonNodes = q.nodes.Intersect(p.nodes);
    if (commonNodes.Any())
    {
        #region Check whether there are nodes that p will delete that q depends upon.
        /* K = L intersect R (by name): the nodes p keeps. Anything in L but not in K
         * is deleted by p. The arrays below refer to rule elements, not host elements. */
        var pNodesLNames = from n in p.rule.L.nodes where ((ruleNode)n).MustExist select n.name;
        var pNodesRNames = from n in p.rule.R.nodes select n.name;
        pNodesKNames = pNodesRNames.Intersect(pNodesLNames).ToArray();
        Num_pKNodes = pNodesKNames.GetLength(0);
        pKNodes = new node[Num_pKNodes];
        for (var i = 0; i < p.rule.L.nodes.Count; i++)
        {
            var index = Array.IndexOf(pNodesKNames, p.rule.L.nodes[i].name);
            if (index >= 0)
                pKNodes[index] = p.nodes[i]; /* host node matched to a kept (K) node */
            else if (commonNodes.Contains(p.nodes[i]))
                return 1; /* p deletes a node that q depends upon */
        }
        #endregion

        #region NodesModified
        /* Now check the nodes common to both L and R of p (potentially modified).
         * A node can be modified in several ways:
         * 1. labels are removed
         * 2. labels are added (and potentially in the negateLabels of the other rule)
         * 3. number of arcs connected, which affects strictDegreeMatch
         * 4. variables are added/removed/changed
         * The first 3 conditions are checked all at once below. The last is impossible
         * to tell without executing user-defined functions, so it is deferred to the
         * empirical check at the end of this method. */
        foreach (var commonNode in commonNodes)
        {
            var qNodeL = (ruleNode)q.rule.L.nodes[q.nodes.IndexOf(commonNode)];
            var pNodeL = (ruleNode)p.rule.L.nodes[p.nodes.IndexOf(commonNode)];
            var pNodeR = (ruleNode)p.rule.R[pNodeL.name];
            pIntersectLabels = pNodeL.localLabels.Intersect(pNodeR.localLabels);
            pRemovedLabels = new List<string>(pNodeL.localLabels);
            pRemovedLabels.RemoveAll(s => pIntersectLabels.Contains(s));
            pAddedLabels = new List<string>(pNodeR.localLabels);
            pAddedLabels.RemoveAll(s => pIntersectLabels.Contains(s));
            if ( /* p deletes a local label that q depends on */
                (qNodeL.localLabels.Intersect(pRemovedLabels).Any())
                /* q was recognized under containsAllLocalLabels, and p adds labels */
                || ((qNodeL.containsAllLocalLabels) && (pAddedLabels.Any()))
                /* p adds a label that q negates */
                || (pAddedLabels.Intersect(qNodeL.negateLabels).Any())
                /* q uses strictDegreeMatch and p changes the degree */
                || (qNodeL.strictDegreeMatch && (pNodeL.degree != pNodeR.degree))
                /* the degree can also change from free-arc embedding rules */
                || (qNodeL.strictDegreeMatch
                    && (p.rule.embeddingRules.FindAll(e => (e.RNodeName.Equals(pNodeR.name))).Count > 0)))
                return 1;
        }
        #endregion
    }
    #endregion

    #region Arcs
    var commonArcs = q.arcs.Intersect(p.arcs);
    if (commonArcs.Any()) /* if there are no common arcs, no need to check the details */
    {
        #region Check whether there are arcs that p will delete that q depends upon.
        /* as above, K (by name) is the set of arcs p keeps; an arc of L outside K is
         * deleted by p, which invalidates q if q depends on it. */
        var pArcsLNames = from n in p.rule.L.arcs where ((ruleArc)n).MustExist select n.name;
        var pArcsRNames = from n in p.rule.R.arcs select n.name;
        var pArcsKNames = new List<string>(pArcsRNames.Intersect(pArcsLNames));
        for (var i = 0; i < p.rule.L.arcs.Count; i++)
        {
            /* not kept by p and depended on by q => invalidated.
             * (A write-only pKArcs array that was never read has been removed.) */
            if (!pArcsKNames.Contains(p.rule.L.arcs[i].name) && commonArcs.Contains(p.arcs[i]))
                return 1;
        }
        #endregion

        #region ArcsModified
        foreach (var commonArc in commonArcs)
        {
            var qArcL = (ruleArc)q.rule.L.arcs[q.arcs.IndexOf(commonArc)];
            var pArcL = (ruleArc)p.rule.L.arcs[p.arcs.IndexOf(commonArc)];
            var pArcR = (ruleArc)p.rule.R[pArcL.name];
            pIntersectLabels = pArcL.localLabels.Intersect(pArcR.localLabels);
            pRemovedLabels = new List<string>(pArcL.localLabels);
            pRemovedLabels.RemoveAll(s => pIntersectLabels.Contains(s));
            pAddedLabels = new List<string>(pArcR.localLabels);
            pAddedLabels.RemoveAll(s => pIntersectLabels.Contains(s));
            if ( /* p deletes a local label that q depends on */
                (qArcL.localLabels.Intersect(pRemovedLabels).Any())
                /* q was recognized under containsAllLocalLabels, and p adds labels */
                || ((qArcL.containsAllLocalLabels) && (pAddedLabels.Any()))
                /* p adds a label that q negates */
                || (pAddedLabels.Intersect(qArcL.negateLabels).Any())
                /* if q requires a dangling end for recognition (nullMeansNull), p must
                 * not attach a node to that end. */
                || ((qArcL.nullMeansNull)
                    && (((qArcL.From == null) && (pArcR.From != null))
                        || ((qArcL.To == null) && (pArcR.To != null))))
                /* even without nullMeansNull, p must not put a node on an empty end that
                 * q expects to belong to some other node - From direction... */
                || ((pArcL.From == null) && (pArcR.From != null) && (qArcL.From != null))
                /* ...and the To direction as well */
                || ((pArcL.To == null) && (pArcR.To != null) && (qArcL.To != null))
                /* p must not remove or re-connect the arc to something else within K such
                 * that q's recognition is invalidated. This may be conservative (there
                 * could still be confluence despite the change in connectivity), but the
                 * assumption here is that changes in connectivity prevent confluence. */
                || ((pArcL.From != null)
                    && (pNodesKNames != null && pNodesKNames.Contains(pArcL.From.name))
                    && ((pArcR.From == null) || (pArcL.From.name != pArcR.From.name)))
                || ((pArcL.To != null)
                    && (pNodesKNames != null && pNodesKNames.Contains(pArcL.To.name))
                    && ((pArcR.To == null) || (pArcL.To.name != pArcR.To.name)))
                /* Changes in arc direction (making directed, doubly-directed, undirected)
                 * must not affect q. directionIsEqual requires an exact match... */
                || ((qArcL.directionIsEqual)
                    && ((!qArcL.directed.Equals(pArcR.directed))
                        || (!qArcL.doublyDirected.Equals(pArcR.doublyDirected))))
                /* ...otherwise directed matches doubly-directed and undirected matches
                 * directed, so only losing directionality is a problem. */
                || ((qArcL.directed && !pArcR.directed)
                    || (qArcL.doublyDirected && !pArcR.doublyDirected)))
                return 1;
        }
        #endregion
    }
    #endregion

    #region HyperArcs
    /* Onto hyperarcs! This is similar to nodes - more so than arcs. */
    var commonHyperArcs = q.hyperarcs.Intersect(p.hyperarcs);
    if (commonHyperArcs.Any())
    {
        #region Check whether there are hyperarcs that p will delete that q depends upon.
        var pHyperArcsLNames = from n in p.rule.L.hyperarcs where ((ruleHyperarc)n).MustExist select n.name;
        var pHyperArcsRNames = from n in p.rule.R.hyperarcs select n.name;
        var pHyperArcsKNames = new List<string>(pHyperArcsRNames.Intersect(pHyperArcsLNames));
        for (var i = 0; i < p.rule.L.hyperarcs.Count; i++)
        {
            /* not kept by p and depended on by q => invalidated.
             * (A write-only pKHyperarcs array that was never read has been removed.) */
            if (!pHyperArcsKNames.Contains(p.rule.L.hyperarcs[i].name)
                && commonHyperArcs.Contains(p.hyperarcs[i]))
                return 1;
        }
        #endregion

        #region HyperArcsModified
        foreach (var commonHyperArc in commonHyperArcs)
        {
            var qHyperArcL = (ruleHyperarc)q.rule.L.hyperarcs[q.hyperarcs.IndexOf(commonHyperArc)];
            var pHyperArcL = (ruleHyperarc)p.rule.L.hyperarcs[p.hyperarcs.IndexOf(commonHyperArc)];
            var pHyperArcR = (ruleHyperarc)p.rule.R[pHyperArcL.name];
            pIntersectLabels = pHyperArcL.localLabels.Intersect(pHyperArcR.localLabels);
            pRemovedLabels = new List<string>(pHyperArcL.localLabels);
            pRemovedLabels.RemoveAll(s => pIntersectLabels.Contains(s));
            pAddedLabels = new List<string>(pHyperArcR.localLabels);
            pAddedLabels.RemoveAll(s => pIntersectLabels.Contains(s));
            if ( /* p deletes a local label that q depends on */
                (qHyperArcL.localLabels.Intersect(pRemovedLabels).Any())
                /* q was recognized under containsAllLocalLabels, and p adds labels */
                || ((qHyperArcL.containsAllLocalLabels) && (pAddedLabels.Any()))
                /* p adds a label that q negates */
                || (pAddedLabels.Intersect(qHyperArcL.negateLabels).Any())
                /* q uses strictNodeCountMatch and p changes the node count */
                || (qHyperArcL.strictNodeCountMatch && (pHyperArcL.degree != pHyperArcR.degree)))
                return 1;
        }
        #endregion
    }
    #endregion

    #region now we're left with some tricky checks...
    if (commonNodes.Any())
    {
        #region if q is induced
        /* if q is induced then p will invalidate it if p adds arcs between the
         * common nodes. */
        if (q.rule.induced)
        {
            var pArcsLNames = from a in p.rule.L.arcs select a.name;
            /* BUG FIX: fromName previously copied newArc.To.name (copy-paste error),
             * so the From end of each new arc was never actually checked. */
            if ((from newArc in p.rule.R.arcs.Where(a => !pArcsLNames.Contains(a.name))
                 where newArc.To != null && newArc.From != null
                 let toName = newArc.To.name
                 let fromName = newArc.From.name
                 where pNodesKNames.Contains(toName) && pNodesKNames.Contains(fromName)
                 where commonNodes.Contains(pKNodes[Array.IndexOf(pNodesKNames, toName)])
                       && commonNodes.Contains(pKNodes[Array.IndexOf(pNodesKNames, fromName)])
                 select toName).Any())
                return 1;
            /* is there another situation in which an embedding rule in p may work against
             * q being an induced rule? It doesn't seem like it, since embedding rules
             * reattach free-arcs. What about arc duplication in embedding rules? nah. */
        }
        #endregion

        #region shape restrictions
        for (var i = 0; i < Num_pKNodes; i++)
        {
            var pNode = pKNodes[i];
            /* NOTE(review): this skips the K-nodes that ARE common to both options,
             * checking only the non-shared ones - confirm this is the intent. */
            if (commonNodes.Contains(pNode))
                continue;
            var pname = pNodesKNames[i];
            var lNode = (node)p.rule.L[pname];
            var rNode = (node)p.rule.R[pname];
            /* if q restricts shapes, p must not move nodes around */
            if (q.rule.UseShapeRestrictions && p.rule.TransformNodePositions
                && !(MatrixMath.sameCloseZero(lNode.X, rNode.X)
                     && MatrixMath.sameCloseZero(lNode.Y, rNode.Y)
                     && MatrixMath.sameCloseZero(lNode.Z, rNode.Z)))
                return 1;
            /* if q requires node-shape matches, p must not scale or skew the shapes */
            if ((q.rule.RestrictToNodeShapeMatch && p.rule.TransformNodeShapes
                 && lNode.DisplayShape != null && rNode.DisplayShape != null)
                && !(MatrixMath.sameCloseZero(lNode.DisplayShape.Height, rNode.DisplayShape.Height)
                     && MatrixMath.sameCloseZero(lNode.DisplayShape.Width, rNode.DisplayShape.Width)
                     && MatrixMath.sameCloseZero(p.positionTransform[0, 0], 1)
                     && MatrixMath.sameCloseZero(p.positionTransform[1, 1], 1)
                     && MatrixMath.sameCloseZero(p.positionTransform[1, 0])
                     && MatrixMath.sameCloseZero(p.positionTransform[0, 1])))
                return 1;
        }
        #endregion
    }
    /* you've run the gauntlet of easy checks; now check (1) whether something is caught
     * by additional recognition functions, or (2) whether NOTExist elements now exist.
     * These can only be solved by an empirical test, which is expensive. So we switch
     * from conditions that return 1 (p does invalidate q) to conditions that return
     * 0 or -1. */
    if (q.rule.ContainsNegativeElements || q.rule.recognizeFuncs.Any() || p.rule.applyFuncs.Any())
    {
        if (confluenceAnalysis == ConfluenceAnalysis.Full)
            return fullInvalidationCheck(p, q, cand);
        return 0; /* 0 is like "I don't know" */
    }
    return -1; /* like false: no invalidation - in other words confluence (maybe)! */
    #endregion
}
/// <summary>
/// Adds the just-applied rule choice to the recipe. This is called (currently
/// only) from the RCA loop, directly after the rule is APPLIED: a rule
/// application updates the currentState, so this correspondingly records the
/// option in the recipe.
/// </summary>
/// <param name="currentrule">The option that was just applied.</param>
public void addToRecipe(option currentrule)
{
    // Copy the identifying fields into a fresh option; rule and location
    // remain shared references, matching the original behavior.
    var recipeEntry = new option
    {
        ruleSetIndex = currentrule.ruleSetIndex,
        rule = currentrule.rule,
        ruleNumber = currentrule.ruleNumber,
        location = currentrule.location
    };
    recipe.Add(recipeEntry);
}
/// <summary>
/// Given that the rule (the option, RC) has now been chosen, determine the values
/// needed by the rule to properly apply it to the candidate, cand. The array of
/// doubles is to be determined by parametric apply rules written in complement
/// C# files for the ruleSet being used.
/// </summary>
/// <param name="RC">The chosen rule/option that is about to be applied.</param>
/// <param name="cand">The candidate the rule will be applied to.</param>
/// <returns>The parameters to pass to the rule's parametric apply functions.</returns>
public abstract double[] choose(option RC, candidate cand);
/// <summary>
/// Recursive LHS-to-host subgraph matching for rules that contain nonexistence
/// (negative) graph elements. Each call either completes a location (Case 1) or
/// picks one unfulfilled positive (MustExist) element, tries every compatible
/// host element for it, and recurses (Cases 2-6). Completed locations that do
/// not trigger the negative elements are appended to the shared options list.
/// </summary>
/// <param name="location">The partially-filled mapping of rule elements to host elements.</param>
private void FindPositiveStartElementAvoidNegatives(option location)
{
    #region Case #1: Location found! No empty slots left in the location
    /* this is the only way to properly exit the recursive loop: every MustExist
     * node, arc, and hyperarc of L has been mapped to a host element. */
    if (!L.nodes.Any(n => ((ruleNode)n).MustExist && location.findLMappedNode(n) == null)
        && !L.arcs.Any(a => ((ruleArc)a).MustExist && location.findLMappedArc(a) == null)
        && !L.hyperarcs.Any(n => ((ruleHyperarc)n).MustExist && location.findLMappedHyperarc(n) == null))
    {
        /* as a recursive function, we first check how the recognition process
         * terminates. If all nodes, hyperarcs and arcs within location have been
         * filled with references to elements in the host, then we've found a
         * location... well, maybe: the rule-level checks and the negative-element
         * search below still have to pass. */
        if (!FinalRuleChecks(location) && !FinalRuleCheckRelaxed(location))
        {
            return;
        }
        Boolean resultNegativeNotFulfilled;
        /* NOTE(review): this locks on AllNegativeElementsFound, a Boolean that is
         * reassigned inside the lock - successive lockers may synchronize on
         * different boxed objects. A dedicated readonly lock object would be
         * safer; confirm against the other users of this field. */
        lock (AllNegativeElementsFound)
        {
            AllNegativeElementsFound = false;
            negativeRelaxation = location.Relaxations.copy();
            /* search for a match of the negative elements; if one is found the
             * location must be rejected. */
            findNegativeStartElement(location);
            resultNegativeNotFulfilled = !(bool)AllNegativeElementsFound;
        }
        if (resultNegativeNotFulfilled)
        {
            /* negative elements could not be matched: the location is valid. */
            var locCopy = location.copy();
            locCopy.Relaxations = negativeRelaxation;
            lock (options)
            {
                options.Add(locCopy);
            }
        }
        return;
    }
    #endregion

    #region Case #2: build off of a hyperarc found so far - by looking for unfulfilled nodes
    /* the quickest approach to finding a new element in the LHS to host subgraph
     * matching is to build directly off of elements found so far. This is because
     * we don't need to check amongst ALL elements in the host (as in the last three
     * cases below). Here we start with any hyperarc already matched to one in the
     * host, and see if it connects to any nodes that have yet to be matched. */
    var startHyperArc = (ruleHyperarc)L.hyperarcs.FirstOrDefault(ha => ((ruleHyperarc)ha).MustExist
                                                                       && ((location.findLMappedHyperarc(ha) != null)
                                                                           && (ha.nodes.Any(n => ((ruleNode)n).MustExist
                                                                                                 && (location.findLMappedNode(n) == null)))));
    if (startHyperArc != null)
    {
        var hostHyperArc = location.findLMappedHyperarc(startHyperArc);
        var newLNode = (ruleNode)startHyperArc.nodes.FirstOrDefault(n => ((ruleNode)n).MustExist
                                                                         && (location.findLMappedNode(n) == null));
        /* try every not-yet-used node of the host hyperarc as a match for newLNode. */
        foreach (var n in hostHyperArc.nodes.Where(n => !location.nodes.Contains(n)))
        {
            checkNodeAvoidNegatives(location.copy(), newLNode, n);
        }
        return;
    }
    #endregion

    #region Case #3: build off of a node found so far - by looking for unfulfilled arcs
    /* as stated above, the quickest approach is to build from elements that have
     * already been found. Therefore, we see if there are any nodes already matched
     * to a node in L that have an arc in L yet to be matched with a host arc. This
     * is more efficient than the last 3 cases because those look through the entire
     * host, which is potentially large. */
    var startNode = (ruleNode)L.nodes.FirstOrDefault(n => ((ruleNode)n).MustExist
                                                          && ((location.findLMappedNode(n) != null)
                                                              && (n.arcs.Any(a => (((a is ruleHyperarc) && ((ruleHyperarc)a).MustExist)
                                                                                   || ((a is ruleArc) && ((ruleArc)a).MustExist))
                                                                                  && (location.findLMappedElement(a) == null)))));
    /* is there a node already matched (which would only occur if you recursed to get
     * here) that has an unrecognized arc attached to it? If yes, try all possible
     * arcs in the host with the one that needs to be fulfilled in L. */
    if (startNode != null)
    {
        var newLArc = startNode.arcs.FirstOrDefault(a => (((a is ruleHyperarc) && ((ruleHyperarc)a).MustExist)
                                                          || ((a is ruleArc) && ((ruleArc)a).MustExist))
                                                         && (location.findLMappedElement(a) == null));
        if (newLArc is ruleHyperarc)
        {
            checkHyperArcAvoidNegatives(location, startNode, location.findLMappedNode(startNode),
                                        (ruleHyperarc)newLArc);
        }
        else if (newLArc is ruleArc)
        {
            checkArcAvoidNegatives(location, startNode, location.findLMappedNode(startNode),
                                   (ruleArc)newLArc);
        }
        return;
    }
    #endregion

    #region Case 4: Check entire host for a matching hyperarc
    /* if the above cases didn't match, we try to match a hyperarc in L to any in the
     * host. Since the prior three cases have conditions which require some non-nulls
     * in the location, this is likely where the process starts when invoked from
     * recognize. Hyperarcs are most efficient to start from since there are likely
     * fewer hyperarcs in the host than nodes or arcs. */
    startHyperArc = (ruleHyperarc)L.hyperarcs.FirstOrDefault(ha => ((ruleHyperarc)ha).MustExist
                                                                   && (location.findLMappedHyperarc(ha) == null));
    if (startHyperArc != null)
    {
        if (_in_parallel_)
        {
            /* each candidate host hyperarc is explored on its own copy of location. */
            Parallel.ForEach(host.hyperarcs, hostHyperArc =>
            {
                if (!location.hyperarcs.Contains(hostHyperArc))
                {
                    checkHyperArcAvoidNegatives(location.copy(), startHyperArc, hostHyperArc);
                }
            });
        }
        else
        {
            foreach (var hostHyperArc in host.hyperarcs.Where(hostHyperArc => !location.hyperarcs.Contains(hostHyperArc)))
            {
                checkHyperArcAvoidNegatives(location.copy(), startHyperArc, hostHyperArc);
            }
        }
        return;
    }
    #endregion

    #region Case 5: Check entire host for a matching node
    /* If no other hyperarcs to recognize, look to an unlocated node. Getting here
     * implies none of the above conditions were met, so there are multiple
     * components in the LHS and we are now jumping to a new one. This is potentially
     * time intensive if there are a lot of nodes in the host, so recognition may be
     * done in parallel. */
    startNode = (ruleNode)L.nodes.FirstOrDefault(n => ((ruleNode)n).MustExist
                                                      && (location.findLMappedNode(n) == null));
    if (startNode != null)
    {
        if (_in_parallel_)
        {
            Parallel.ForEach(host.nodes, hostNode =>
            {
                if (!location.nodes.Contains(hostNode))
                {
                    checkNodeAvoidNegatives(location.copy(), startNode, hostNode);
                }
            });
        }
        else
        {
            foreach (var hostNode in host.nodes.Where(hostNode => !location.nodes.Contains(hostNode)))
            {
                checkNodeAvoidNegatives(location.copy(), startNode, hostNode);
            }
        }
        return;
    }
    #endregion

    #region Case 6: Check entire host for a matching arc
    var looseArc = (ruleArc)L.arcs.FirstOrDefault(a => ((ruleArc)a).MustExist
                                                       && (location.findLMappedArc(a) == null));
    /* the only way one can get here is if there are one or more arcs NOT connected
     * to any nodes in L - a floating arc, dangling on both sides, like an eyelash. */
    if (looseArc != null)
    {
        if (_in_parallel_)
        {
            Parallel.ForEach(host.arcs, hostArc =>
            {
                if ((!location.arcs.Contains(hostArc)) && (!location.nodes.Contains(hostArc.From))
                    && (!location.nodes.Contains(hostArc.To))
                    && (arcMatches(looseArc, hostArc) || arcMatchRelaxed(looseArc, hostArc, location)))
                {
                    var newLocation = location.copy();
                    newLocation.arcs[L.arcs.IndexOf(looseArc)] = hostArc;
                    FindPositiveStartElementAvoidNegatives(newLocation);
                }
            });
        }
        else
        {
            foreach (var hostArc in host.arcs)
            {
                if ((!location.arcs.Contains(hostArc)) && (!location.nodes.Contains(hostArc.From))
                    && (!location.nodes.Contains(hostArc.To))
                    && (arcMatches(looseArc, hostArc) || arcMatchRelaxed(looseArc, hostArc, location)))
                {
                    var newLocation = location.copy();
                    newLocation.arcs[L.arcs.IndexOf(looseArc)] = hostArc;
                    FindPositiveStartElementAvoidNegatives(newLocation);
                }
            }
        }
    }
    #endregion
}
/// <summary>
/// Determines whether a fully-matched set of negative (NotExist) elements can be
/// explained away by the remaining relaxation budget. Each recognized negative
/// element is tested against the available RelaxItems; the first applicable
/// relaxation is consumed (decrementing both the global NumberAllowable and the
/// item's NumberAllowed, and recording a FulfilledItem) and true is returned.
/// As a last resort, the whole set of negative elements may be "made positive"
/// if enough Element_Made_Positive relaxations remain. Returns false when no
/// relaxation applies, meaning the negative match stands.
/// NOTE: this method mutates the shared negativeRelaxation state.
/// </summary>
/// <param name="location">The completed mapping including the negative elements.</param>
/// <returns>True if the negative match was neutralized via relaxation; otherwise false.</returns>
private Boolean InvalidateWithRelaxation(option location)
{
    /* no budget left: cannot relax anything. */
    if (negativeRelaxation.NumberAllowable == 0)
    {
        return (false);
    }
    /* gather all NotExist (negative) elements of L: nodes, arcs, and hyperarcs. */
    var ruleNegElts = new List<graphElement>();
    ruleNegElts.AddRange(L.nodes.FindAll(n => ((ruleNode)n).NotExist));
    ruleNegElts.AddRange(L.arcs.FindAll(a => ((ruleArc)a).NotExist));
    ruleNegElts.AddRange(L.hyperarcs.FindAll(h => ((ruleHyperarc)h).NotExist));
    foreach (var ruleElt in ruleNegElts)
    {
        var hostElt = location.findLMappedElement(ruleElt);
        /* relaxation 1: impose containsAllLocalLabels - the host element carries
         * more labels than the rule element specifies. */
        if (ruleElt.localLabels.Count < hostElt.localLabels.Count)
        {
            var rContainsAll = negativeRelaxation.FirstOrDefault(
                r => r.Matches(Relaxations.Contains_All_Local_Labels_Imposed, ruleElt));
            if (rContainsAll != null)
            {
                negativeRelaxation.NumberAllowable--;
                rContainsAll.NumberAllowed--;
                negativeRelaxation.FulfilledItems.Add(
                    new RelaxItem(Relaxations.Contains_All_Local_Labels_Imposed, 1, ruleElt,
                                  hostElt.localLabels.Count.ToString(CultureInfo.InvariantCulture)));
                return (true);
            }
        }
        /* relaxation 2: impose strictDegreeMatch on a node whose host degree differs. */
        if ((ruleElt is ruleNode) && (!((ruleNode)ruleElt).strictDegreeMatch)
            && ((node)ruleElt).degree != ((node)hostElt).degree)
        {
            var rStrictDegree = negativeRelaxation.FirstOrDefault(r =>
                r.Matches(Relaxations.Strict_Degree_Match_Imposed, ruleElt));
            if (rStrictDegree != null)
            {
                negativeRelaxation.NumberAllowable--;
                rStrictDegree.NumberAllowed--;
                negativeRelaxation.FulfilledItems.Add(new RelaxItem(Relaxations.Strict_Degree_Match_Imposed,
                                                                    1, ruleElt,
                                                                    ((node)hostElt).degree.ToString(
                                                                        CultureInfo.InvariantCulture)));
                return (true);
            }
        }
        /* relaxation 3: impose strictNodeCountMatch on a hyperarc whose host
         * node-count (degree) differs. */
        if ((ruleElt is ruleHyperarc) && (!((ruleHyperarc)ruleElt).strictNodeCountMatch)
            && ((hyperarc)ruleElt).degree != ((hyperarc)hostElt).degree)
        {
            var rStrictDegree = negativeRelaxation.FirstOrDefault(r =>
                r.Matches(Relaxations.Strict_Node_Count_Imposed, ruleElt));
            if (rStrictDegree != null)
            {
                negativeRelaxation.NumberAllowable--;
                rStrictDegree.NumberAllowed--;
                negativeRelaxation.FulfilledItems.Add(new RelaxItem(Relaxations.Strict_Node_Count_Imposed,
                                                                    1, ruleElt,
                                                                    ((hyperarc)hostElt).degree.ToString(
                                                                        CultureInfo.InvariantCulture)));
                return (true);
            }
        }
        if (ruleElt is arc)
        {
            var rulearc = (ruleArc)ruleElt;
            var hostarc = (arc)hostElt;
            /* relaxation 4: impose nullMeansNull - the rule arc dangles where the
             * host arc is attached. */
            if (!rulearc.nullMeansNull
                && ((rulearc.To == null && hostarc.To != null)
                    || (rulearc.From == null && hostarc.From != null)))
            {
                var rNullMeansNull = negativeRelaxation.FirstOrDefault(r =>
                    r.Matches(Relaxations.Null_Means_Null_Imposed, ruleElt));
                if (rNullMeansNull != null)
                {
                    negativeRelaxation.NumberAllowable--;
                    rNullMeansNull.NumberAllowed--;
                    negativeRelaxation.FulfilledItems.Add(new RelaxItem(Relaxations.Null_Means_Null_Imposed, 1,
                                                                        ruleElt,
                                                                        (rulearc.To == null)
                                                                            ? hostarc.To.name
                                                                            : hostarc.From.name));
                    return (true);
                }
            }
            /* relaxation 5: impose directionIsEqual - directionality flags differ. */
            if (!rulearc.directionIsEqual
                && ((rulearc.doublyDirected != hostarc.doublyDirected)
                    || (rulearc.directed != hostarc.directed)))
            {
                var rDir = negativeRelaxation.FirstOrDefault(
                    r => r.Matches(Relaxations.Direction_Is_Equal_Imposed, ruleElt));
                if (rDir != null)
                {
                    negativeRelaxation.NumberAllowable--;
                    rDir.NumberAllowed--;
                    negativeRelaxation.FulfilledItems.Add(new RelaxItem(Relaxations.Direction_Is_Equal_Imposed,
                                                                        1, ruleElt));
                    return (true);
                }
            }
        }
        /* relaxation 6: impose one of the element's negating labels. */
        var ruleNegLabels = (ruleElt is ruleNode)
                                ? ((ruleNode)ruleElt).negateLabels
                                : (ruleElt is ruleArc)
                                      ? ((ruleArc)ruleElt).negateLabels
                                      : ((ruleHyperarc)ruleElt).negateLabels;
        foreach (var negLabel in ruleNegLabels)
        {
            var rLabel = negativeRelaxation.FirstOrDefault(
                r => r.Matches(Relaxations.Label_Imposed, ruleElt, negLabel));
            if (rLabel != null)
            {
                negativeRelaxation.NumberAllowable--;
                rLabel.NumberAllowed--;
                negativeRelaxation.FulfilledItems.Add(new RelaxItem(Relaxations.Label_Imposed, 1, ruleElt,
                                                                    negLabel));
                return (true);
            }
        }
        /* relaxation 7: negate one of the element's own local labels. */
        foreach (var lab in ruleElt.localLabels)
        {
            var rNegLabel = negativeRelaxation.FirstOrDefault(
                r => r.Matches(Relaxations.Negate_Label_Imposed, ruleElt, lab));
            if (rNegLabel != null)
            {
                negativeRelaxation.NumberAllowable--;
                rNegLabel.NumberAllowed--;
                negativeRelaxation.FulfilledItems.Add(new RelaxItem(Relaxations.Negate_Label_Imposed, 1,
                                                                    ruleElt, lab));
                return (true);
            }
        }
    }
    /* final attempt: convert ALL negative elements to positive ones, but only if the
     * budget covers every one of them. The bookkeeping is done on locals first so
     * nothing is consumed unless the whole set fits. */
    var localNumAllowable = negativeRelaxation.NumberAllowable;
    var usedRelaxItems = new List<RelaxItem>();
    var usedFulfilledRelaxItems = new List<RelaxItem>();
    foreach (var elt in ruleNegElts)
    {
        var rNotExist = negativeRelaxation.FirstOrDefault(r =>
            r.Matches(Relaxations.Element_Made_Positive, elt)
            && usedRelaxItems.Count(ur => ur == r) < r.NumberAllowed);
        if (rNotExist == null)
        {
            break;
        }
        localNumAllowable--;
        usedRelaxItems.Add(rNotExist);
        usedFulfilledRelaxItems.Add(new RelaxItem(Relaxations.Element_Made_Positive, 1, elt));
    }
    if ((localNumAllowable >= 0) && usedFulfilledRelaxItems.Count == ruleNegElts.Count)
    {
        /* every negative element could be made positive within budget: commit. */
        negativeRelaxation.NumberAllowable = localNumAllowable;
        foreach (var r in usedRelaxItems)
        {
            r.NumberAllowed--;
        }
        negativeRelaxation.FulfilledItems.AddRange(usedFulfilledRelaxItems);
        return (true);
    }
    return (false);
}
/// <summary>
/// Recursive search that tries to complete the negative (NotExist) elements of a
/// location against the host. If a complete match of the negative elements is
/// found (and cannot be neutralized by relaxation), the shared flag
/// AllNegativeElementsFound is set to true, which tells the caller the location
/// must be rejected. Only one such match is needed, so every branch aborts as
/// soon as the flag is set.
/// </summary>
/// <param name="location">The partially-filled mapping, with positive elements already matched.</param>
private void findNegativeStartElement(option location)
{
    if ((bool)AllNegativeElementsFound)
    {
        return; /* another sub-branch found a match to the negative elements.
                 * There's no point in finding more than one, so this statement
                 * aborts the search down this branch. */
    }

    #region Case #1: Location found! No empty slots left in the location
    /* this is the only way to properly exit the recursive loop. */
    if (!location.nodes.Contains(null) && !location.arcs.Contains(null)
        && !location.hyperarcs.Contains(null))
    {
        if (FinalRuleChecks(location))
        {
            /* the negative elements matched; unless a relaxation explains them
             * away, flag the match so the positive-side caller rejects it. */
            if (!InvalidateWithRelaxation(location))
            {
                AllNegativeElementsFound = true;
            }
        }
        return;
    }
    #endregion

    #region Case #2: build off of a hyperarc found so far - by looking for unfulfilled nodes
    /* the quickest approach to finding a new element in the LHS to host subgraph
     * matching is to build directly off of elements found so far. This is because
     * we don't need to check amongst ALL elements in the host (as in the last three
     * cases below). Here we start with any hyperarc already matched to one in the
     * host, and see if it connects to any nodes that have yet to be matched. */
    var startHyperArc = (ruleHyperarc)L.hyperarcs.FirstOrDefault(ha =>
        ((location.findLMappedHyperarc(ha) != null)
         && (ha.nodes.Any(n => (location.findLMappedNode(n) == null)))));
    if (startHyperArc != null)
    {
        var hostHyperArc = location.findLMappedHyperarc(startHyperArc);
        var newLNode = (ruleNode)startHyperArc.nodes.FirstOrDefault(n =>
            (location.findLMappedNode(n) == null));
        foreach (var n in hostHyperArc.nodes.Where(n => !location.nodes.Contains(n)))
        {
            checkForNegativeNode(location.copy(), newLNode, n);
            /* a single negative match suffices - stop immediately. */
            if ((bool)AllNegativeElementsFound)
            {
                return;
            }
        }
        return;
    }
    #endregion

    #region Case #2.5: build off of a partially matched arc
    /* unlike the other renditions of this function (findNewStartElement,
     * FindPositiveStartElementAvoidNegatives) this has a situation in which an arc
     * has only been partially matched, because its connected nodes were not touched:
     * they were negative elements. First the To end... */
    var startArc = (ruleArc)L.arcs.FirstOrDefault(a => (location.findLMappedArc(a) != null)
                                                       && a.To != null
                                                       && location.findLMappedNode(a.To) == null);
    if (startArc != null)
    {
        var hostArc = location.findLMappedArc(startArc);
        if ((hostArc.To != null) && !location.nodes.Contains(hostArc.To))
        {
            checkForNegativeNode(location.copy(), (ruleNode)startArc.To, hostArc.To);
        }
        /* when direction need not match exactly, the opposite host end may serve. */
        else if (!startArc.directionIsEqual && hostArc.From != null
                 && !location.nodes.Contains(hostArc.From))
        {
            checkForNegativeNode(location.copy(), (ruleNode)startArc.To, hostArc.From);
        }
        return;
    }
    /* ...then the From end, symmetrically. */
    startArc = (ruleArc)L.arcs.FirstOrDefault(a => (location.findLMappedArc(a) != null)
                                                   && a.From != null
                                                   && location.findLMappedNode(a.From) == null);
    if (startArc != null)
    {
        var hostArc = location.findLMappedArc(startArc);
        if ((hostArc.From != null) && !location.nodes.Contains(hostArc.From))
        {
            checkForNegativeNode(location.copy(), (ruleNode)startArc.From, hostArc.From);
        }
        else if (!startArc.directionIsEqual && hostArc.To != null
                 && !location.nodes.Contains(hostArc.To))
        {
            checkForNegativeNode(location.copy(), (ruleNode)startArc.From, hostArc.To);
        }
        return;
    }
    #endregion

    #region Case #3: build off of a node found so far - by looking for unfulfilled arcs
    /* as stated above, the quickest approach is to build from elements that have
     * already been found. Therefore, we see if there are any nodes already matched
     * to a node in L that have an arc in L yet to be matched with a host arc. This
     * is more efficient than the last 3 cases because those look through the entire
     * host, which is potentially large. */
    var startNode = (ruleNode)L.nodes.FirstOrDefault(n =>
        ((location.findLMappedNode(n) != null)
         && (n.arcs.Any(a => (location.findLMappedElement(a) == null)))));
    /* is there a node already matched (which would only occur if you recursed to get
     * here) that has an unrecognized arc attached to it? If yes, try all possible
     * arcs in the host with the one that needs to be fulfilled in L. */
    if (startNode != null)
    {
        var newLArc = startNode.arcs.FirstOrDefault(a => (location.findLMappedElement(a) == null));
        if (newLArc is ruleHyperarc)
        {
            checkForNegativeHyperArc(location, startNode, location.findLMappedNode(startNode),
                                     (ruleHyperarc)newLArc);
        }
        else if (newLArc is ruleArc)
        {
            checkForNegativeArc(location, startNode, location.findLMappedNode(startNode),
                                (ruleArc)newLArc);
        }
        return;
    }
    #endregion

    #region Case 4: Check entire host for a matching hyperarc
    /* if the above cases didn't match, we try to match a hyperarc in L to any in the
     * host. Since the prior cases require some non-nulls in the location, this is
     * likely where the process starts when invoked from recognize. Hyperarcs are
     * most efficient to start from since there are likely fewer hyperarcs in the
     * host than nodes or arcs. */
    startHyperArc = (ruleHyperarc)L.hyperarcs.FirstOrDefault(ha =>
        (location.findLMappedHyperarc(ha) == null));
    if (startHyperArc != null)
    {
        foreach (var hostHyperArc in host.hyperarcs.Where(hostHyperArc =>
            !location.hyperarcs.Contains(hostHyperArc)))
        {
            checkForNegativeHyperArc(location.copy(), startHyperArc, hostHyperArc);
            if ((bool)AllNegativeElementsFound)
            {
                return;
            }
        }
        return;
    }
    #endregion

    #region Case 5: Check entire host for a matching node
    /* If no other hyperarcs can be recognized, then look to an unlocated node. If
     * one gets here then none of the above conditions were met, which implies there
     * are multiple components in the LHS and we are now jumping to a new one. This
     * is potentially time intensive if there are a lot of nodes in the host. */
    startNode = (ruleNode)L.nodes.FirstOrDefault(n => (location.findLMappedNode(n) == null));
    if (startNode != null)
    {
        foreach (var hostNode in host.nodes.Where(hostNode => !location.nodes.Contains(hostNode)))
        {
            checkForNegativeNode(location.copy(), startNode, hostNode);
            if ((bool)AllNegativeElementsFound)
            {
                return;
            }
        }
        return;
    }
    #endregion

    #region Case 6: Check entire host for a matching arc
    var looseArc = (ruleArc)L.arcs.FirstOrDefault(a => (location.findLMappedArc(a) == null));
    /* the only way one can get here is if there are one or more arcs NOT connected
     * to any nodes in L - a floating arc, dangling on both sides, like an eyelash. */
    if (looseArc != null)
    {
        foreach (var hostArc in host.arcs)
        {
            if (!location.arcs.Contains(hostArc) && !location.nodes.Contains(hostArc.From)
                && !location.nodes.Contains(hostArc.To) && arcMatches(looseArc, hostArc))
            {
                var newLocation = location.copy();
                newLocation.arcs[L.arcs.IndexOf(looseArc)] = hostArc;
                findNegativeStartElement(newLocation);
            }
            if ((bool)AllNegativeElementsFound)
            {
                return;
            }
        }
    }
    #endregion
}