/// <summary>
/// Spatially partitions the strokes of <paramref name="lbt"/> around the segment pair
/// (S_l, S_r) into the regions of a PartitionResult: ABOVE, BELOW, CONTAINS, SUPER
/// (superscript), SUBSC (subscript), and TLEFT/BLEFT (top-/bottom-left of the right
/// symbol). The caller recurses into each non-empty region to parse sub-expressions.
/// </summary>
/// <param name="lbt">Stroke tree whose strokes are the candidates for partitioning.</param>
/// <param name="S_l">Symbol segment at the left; may be null (S_r then becomes the reference symbol).</param>
/// <param name="S_r">Symbol segment at the right; may be null (end of baseline).</param>
/// <param name="P_l">Layout class of the left symbol (SquareRoot / HorLine / Regular / ...).</param>
/// <param name="P_r">Layout class of the right symbol. NOTE(review): never read in this method.</param>
/// <param name="TLEFT">Previously accumulated top-left relation node; its strokes are carried into the result. May be null.</param>
/// <param name="BLEFT">Previously accumulated bottom-left relation node; its strokes are carried into the result. May be null.</param>
/// <returns>The populated PartitionResult, paired with the LBT produced by UpdateLBT over the partitioned strokes.</returns>
/// <exception cref="ArgumentException">If both <paramref name="S_l"/> and <paramref name="S_r"/> are null.</exception>
public PartitionResultWrapper Partition(LBT lbt, Segment S_l, Segment S_r, PartitionClass P_l, PartitionClass P_r, RelationTreeNode TLEFT, RelationTreeNode BLEFT) {
    PartitionResult R = new PartitionResult();

    // Seed the result's TLEFT/BLEFT regions with any strokes already assigned to them.
    if (TLEFT != null) {
        foreach (Stroke s in TLEFT.strokes) {
            R.TLEFT.strokes.Add(s);
        }
    }
    if (BLEFT != null) {
        foreach (Stroke s in BLEFT.strokes) {
            R.BLEFT.strokes.Add(s);
        }
    }

    // Handle no right/left symbol: at least one reference segment is required.
    if (S_l == null && S_r == null) {
        throw new ArgumentException("Partition:: both passed segments are null.");
    }

    // y-writing-line hack: symbols with descenders/ascenders ("y", "\log", "\tan") get
    // their bounding box replaced by centered writing lines (GetCenteredLines) so the
    // super/subscript split below uses the symbol's core, not its full ink extent.
    // NOTE(review): this mutates the caller's S_l.bb in place — confirm that is intended.
    if (S_l != null && (S_l.classification[0].symbol == "y" || S_l.classification[0].symbol == "\\log" || S_l.classification[0].symbol == "\\tan")) {
        P_l = PartitionClass.Regular; // if not already?
        int[] lines = GetCenteredLines(S_l);
        S_l.bb.Top = lines[0];
        S_l.bb.Bottom = lines[1];
    }

    // Collect SP, the strokes to partition: everything when there is no right symbol,
    // otherwise only the strokes wholly left of S_r.
    //List< Stroke > SP = S_r == null ? lbt.strokes : lbt.strokes.Where( _s => { if ( _s.aabb.Left < S_r.bb.Left ) { Console.WriteLine( "{0} < {1} (!)", _s.aabb.Left, S_r.bb.Left ); return true; } else { return false; } } ).ToList();
    List <Stroke> SP = new List <Stroke>();
    if (S_r == null) {
        // NOTE(review): SP aliases lbt.strokes here (no copy), unlike the filtered branches.
        SP = lbt.strokes;
    } else {
        // foreach ( Stroke _s in lbt.strokes ) {
        for (int i = 0; i < lbt.strokes.Count; i++) {
            Stroke _s = lbt.strokes[i];
            if (_s.aabb.Right /*Left?*/ < S_r.bb.Left) {
                //Console.WriteLine( "{0} < {1} (!)", _s.aabb.Left, S_r.bb.Left );
                SP.Add(_s);
            }
        }
    }

    // Case for no left symbol; switch right to "left" symbol. The noLeft flag routes
    // strokes into the TLEFT/BLEFT branch of the loop below instead of the regular regions.
    bool noLeft = false;
    if (S_l == null) {
        S_l = S_r;
        SP = lbt.strokes.Where(_s => _s.aabb.Right < S_r.bb.Left).ToList();
        noLeft = true;
    }

    // HACK. Big-operator symbols whose SUPER/SUBSC strokes are later folded into
    // ABOVE/BELOW (limits written above/below the operator).
    bool indexed = S_l != null && (S_l.classification[0].symbol == "\\sum" || S_l.classification[0].symbol == "\\int" || S_l.classification[0].symbol == "\\lim");

    // Classify every candidate stroke into exactly one region relative to S_l
    // (or relative to S_r's vertical midpoint when there was no left symbol).
    foreach (Stroke _s in SP) {
        if (S_l != null && !noLeft) {
            // Horizontal overlap between the stroke and the left symbol's bbox.
            bool horOverlap = !(S_l.bb.Right < _s.aabb.Left || _s.aabb.Right < S_l.bb.Left);
            // Stroke bbox strictly inside the left symbol's bbox (square-root contents).
            bool fullContained = S_l.bb.Left < _s.aabb.Left && S_l.bb.Right > _s.aabb.Right && S_l.bb.Top < _s.aabb.Top && S_l.bb.Bottom > _s.aabb.Bottom;
            // Vertical center of the left symbol; dtop/dbottom measure how much of the
            // stroke lies above vs. below it (assumes y grows downward — TODO confirm).
            float midRight = S_l.bb.Top + (S_l.bb.Height / 2);
            float dtop = midRight - _s.aabb.Top;
            float dbottom = _s.aabb.Bottom - midRight;
            //float topSpan = Math.Min( Math.Abs( S_l.bb.Top - _s.aabb.Bottom ), Math.Abs( midRight - _s.aabb.Top ) );
            //float bottomSpan = Math.Min( Math.Abs( S_l.bb.Bottom - _s.aabb.Top ), Math.Abs( midRight - _s.aabb.Bottom ) );
            // Be lenient here; invalid partitions will be pruned
            bool added = false;
            if (P_l == PartitionClass.SquareRoot && fullContained) {
                R.CONTAINS.strokes.Add(_s);
                added = true;
            } else if (/*P_l == PartitionClass.HorLine &&*/ horOverlap) {
                /*
                 * if ( _s.aabb.Bottom <= S_l.bb.Top ) {
                 * R.ABOVE.strokes.Add( _s );
                 * added = true;
                 * } else if ( _s.aabb.Top > S_l.bb.Bottom ) {
                 * R.BELOW.strokes.Add( _s );
                 * added = true;
                 * }
                 */
                // "less strict" constraints on top/bottom: the stroke only needs to start
                // above (resp. end below) the symbol, not clear it entirely.
                if (_s.aabb.Top < S_l.bb.Top && _s.aabb.Bottom <= S_l.bb.Bottom) {
                    R.ABOVE.strokes.Add(_s);
                    added = true;
                } else if (_s.aabb.Bottom > S_l.bb.Bottom && _s.aabb.Top >= S_l.bb.Top) {
                    R.BELOW.strokes.Add(_s);
                    added = true;
                }
            } else if (dtop > dbottom) {
                // No horizontal overlap: more ink above the symbol's center → superscript.
                R.SUPER.strokes.Add(_s);
                added = true;
            } else {
                R.SUBSC.strokes.Add(_s);
                added = true;
            }
#if DEBUG
            if (!added) {
                Console.Error.WriteLine("Stroke ({0}) did not get partitioned in {1}.", _s, S_l);
            }
#endif
        } else {
            // HACK** There is nothing at left. Create TLEFT/BLEFT regions by splitting
            // at the right symbol's vertical midpoint.
            // NOTE(review): a stroke whose bbox straddles midPoint (Top <= midPoint < Bottom)
            // falls into NEITHER region and is silently dropped — confirm intended.
            double midPoint = (S_r.bb.Top + S_r.bb.Bottom) / 2.0;
            if (_s.aabb.Bottom <= midPoint) {
                R.TLEFT.strokes.Add(_s);
            } else if (_s.aabb.Top > midPoint) {
                R.BLEFT.strokes.Add(_s);
            }
        }
    }

    // split SUPER/SUBSC by finding maximum gap location: strokes right of the widest
    // horizontal gap are re-assigned to TLEFT/BLEFT (they belong to the next symbol).
    if (S_l != null && S_r != null) {
        float super_gap = MaximumGapLocation(R.SUPER.strokes, S_l, S_r);
        float subsc_gap = MaximumGapLocation(R.SUBSC.strokes, S_l, S_r);
        // super: copy past-the-gap strokes into TLEFT, then remove them from SUPER
        // (two passes so neither list is mutated while being enumerated).
        foreach (Stroke s in R.SUPER.strokes) {
            if (s.aabb.Left > super_gap) {
                R.TLEFT.strokes.Add(s);
            }
        }
        foreach (Stroke s in R.TLEFT.strokes) {
            if (R.SUPER.strokes.Contains(s)) {
                R.SUPER.strokes.Remove(s);
            }
        }
        // subsc: same two-pass move into BLEFT.
        foreach (Stroke s in R.SUBSC.strokes) {
            if (s.aabb.Left > subsc_gap) {
                R.BLEFT.strokes.Add(s);
            }
        }
        foreach (Stroke s in R.BLEFT.strokes) {
            if (R.SUBSC.strokes.Contains(s)) {
                R.SUBSC.strokes.Remove(s);
            }
        }
    }

    // if nosupersub (horizontal-line symbols such as fraction bars have no scripts),
    // add super/sub to TLEFT/BLEFT.
    if (P_l == PartitionClass.HorLine) {
        foreach (Stroke s in R.SUPER.strokes) {
            R.TLEFT.strokes.Add(s);
        }
        R.SUPER.strokes.Clear();
        foreach (Stroke s in R.SUBSC.strokes) {
            R.BLEFT.strokes.Add(s);
        }
        R.SUBSC.strokes.Clear();
    }

    // Create LBTs for every region so each can be parsed independently.
    R.ABOVE.lbt = new LBT(R.ABOVE.strokes, LBT.DefaultAdjacentCriterion);
    R.BELOW.lbt = new LBT(R.BELOW.strokes, LBT.DefaultAdjacentCriterion);
    R.CONTAINS.lbt = new LBT(R.CONTAINS.strokes, LBT.DefaultAdjacentCriterion);
    R.SUPER.lbt = new LBT(R.SUPER.strokes, LBT.DefaultAdjacentCriterion);
    R.SUBSC.lbt = new LBT(R.SUBSC.strokes, LBT.DefaultAdjacentCriterion);
    R.TLEFT.lbt = new LBT(R.TLEFT.strokes, LBT.DefaultAdjacentCriterion);
    R.BLEFT.lbt = new LBT(R.BLEFT.strokes, LBT.DefaultAdjacentCriterion);

    // For indexed (big-operator) symbols, combine above/super/tleft and
    // below/subsc/bleft: limits of \sum, \int, \lim all belong to ABOVE/BELOW.
    if (indexed) {
        if (R.ABOVE.strokes.Count > 0 && (R.SUPER.strokes.Count > 0 || R.TLEFT.strokes.Count > 0)) {
            foreach (Stroke s in R.SUPER.strokes) {
                R.ABOVE.strokes.Add(s);
            }
            R.SUPER.strokes.Clear();
            R.SUPER.lbt = new LBT(R.SUPER.strokes, LBT.DefaultAdjacentCriterion);
            foreach (Stroke s in R.TLEFT.strokes) {
                R.ABOVE.strokes.Add(s);
            }
            R.TLEFT.strokes.Clear();
            R.TLEFT.lbt = new LBT(R.TLEFT.strokes, LBT.DefaultAdjacentCriterion);
            // Restore input order (stroke_id is a numeric string) before rebuilding the LBT.
            R.ABOVE.strokes.Sort((s1, s2) => int.Parse(s1.stroke_id) - int.Parse(s2.stroke_id));
            R.ABOVE.lbt = new LBT(R.ABOVE.strokes, LBT.DefaultAdjacentCriterion);
        }
        if (R.BELOW.strokes.Count > 0 && (R.SUBSC.strokes.Count > 0 || R.BLEFT.strokes.Count > 0)) {
            foreach (Stroke s in R.SUBSC.strokes) {
                R.BELOW.strokes.Add(s);
            }
            R.SUBSC.strokes.Clear();
            R.SUBSC.lbt = new LBT(R.SUBSC.strokes, LBT.DefaultAdjacentCriterion);
            foreach (Stroke s in R.BLEFT.strokes) {
                R.BELOW.strokes.Add(s);
            }
            R.BLEFT.strokes.Clear();
            R.BLEFT.lbt = new LBT(R.BLEFT.strokes, LBT.DefaultAdjacentCriterion);
            R.BELOW.strokes.Sort((s1, s2) => int.Parse(s1.stroke_id) - int.Parse(s2.stroke_id));
            R.BELOW.lbt = new LBT(R.BELOW.strokes, LBT.DefaultAdjacentCriterion);
        }
    }

    //Console.WriteLine("SUPER STROKES: {0}",R.SUPER.lbt.strokes.Count);
    return(new PartitionResultWrapper(R, UpdateLBT(new Segment { strokes = SP }, lbt)));
    //lbt = UpdateLBT( new Segment { strokes = SP }, lbt ) };
}
/// <summary>
/// One step of the recursive, backtracking parse. Takes a copy of
/// <paramref name="arg_nlist"/>, pops its head node n, and dispatches on its kind:
///   - EndOfBaseline: close the current baseline via attachSymbol(..., null); accept
///     the parse tree when all strokes are consumed; append the non-empty partition
///     regions as relation nodes and recurse.
///   - RelationTreeNode: wrap the region in a fresh ParseTreeNode and restart parsing
///     inside that region via parse().
///   - "*"-prefixed node types (terminal generators): try each candidate symbol,
///     pushing/popping parser state around each attempt; run the lexer for follow-up
///     candidates, prune them against the grammar, and recurse.
///   - otherwise (nonterminal): expand each grammar production, pruning on the
///     minimum-required-stroke count and on whether any candidate symbol can be
///     generated by the production's first token.
/// Mutates parser-level state (apply_rule_counter, minRequiredStrokes, unusedStrokes,
/// unusedInputStrokes, currentSymbol, candidateSymbols, n.children) and relies on
/// pushCurrentState/popCurrentState plus symmetric counter updates for backtracking.
/// </summary>
/// <param name="arg_nlist">Remaining parse-tree nodes to expand; not modified (a local copy is taken).</param>
public void applyRules(List <ParseTreeNode> arg_nlist) {
#if DEBUG
    Console.WriteLine("[applyRules] entered.");
    treeRoot.ShowTree(4, null);
    Console.WriteLine("Min Required Strokes: " + minRequiredStrokes);
    Console.WriteLine("Call: " + apply_rule_counter);
#else
    //Console.Write(apply_rule_counter);
    //Console.Write('\r');
#endif
    // increment counter
    apply_rule_counter++;
    if (arg_nlist.Count == 0) {
        return;
    }
    // Prune: no strokes left to consume, and the head node is neither an
    // end-of-baseline marker nor a relation region (both of which consume no strokes).
    if (unusedStrokes < 1 && arg_nlist[0] != ParseTreeNode.EndOfBaseline && !(arg_nlist[0] is RelationTreeNode)) {
        return;
    }
    // if ( unusedStrokes > 0 && !( arg_nlist[ 0 ].GetType().Equals( typeof( ParseTreeNode ) ) ) ) return;

    // Work on a copy so the caller's list is untouched across backtracking.
    List <ParseTreeNode> nlist = new List <ParseTreeNode>(arg_nlist);
    ParseTreeNode n = nlist[0];
    nlist.RemoveAt(0);

    if (n == ParseTreeNode.EndOfBaseline) {
        // END-OF-BASELINE CASE: partition the remaining strokes around the last symbol.
        PartitionResultWrapper pr = attachSymbol(currentSymbol.symbol.lbt, null);
        // continue only if valid partition made
        if (pr != null) {
            // Full parse: nothing left to expand and every stroke consumed.
            if (nlist.Count == 0 && unusedStrokes == 0 && unusedInputStrokes == 0) {
#if DEBUG
                Console.WriteLine("***ACCEPT***");
#endif
                acceptCurrentParseTree();
            }
            // Queue each non-empty partition region for parsing; bump the
            // minimum-required-stroke count by the number of nodes added.
            int nodes_added = nlist.Count;
            if (pr.result != null && pr.result.ABOVE.lbt.strokes.Count != 0) {
                nlist.Add(pr.result.ABOVE);
            }
            if (pr.result != null && pr.result.BELOW.lbt.strokes.Count != 0) {
                nlist.Add(pr.result.BELOW);
            }
            if (pr.result != null && pr.result.CONTAINS.lbt.strokes.Count != 0) {
                nlist.Add(pr.result.CONTAINS);
            }
            if (pr.result != null && pr.result.SUBSC.lbt.strokes.Count != 0) {
                nlist.Add(pr.result.SUBSC);
            }
            if (pr.result != null && pr.result.SUPER.lbt.strokes.Count != 0) {
                nlist.Add(pr.result.SUPER);
            }
            // add BLEFT/TLEFT
            //if ( pr.result != null && pr.result.BLEFT.lbt.strokes.Count != 0 ) nlist.Add( pr.result.BLEFT );
            //if ( pr.result != null && pr.result.TLEFT.lbt.strokes.Count != 0 ) nlist.Add( pr.result.TLEFT );
            nodes_added = nlist.Count - nodes_added;
            minRequiredStrokes += nodes_added;
            applyRules(nlist);
        } else {
#if DEBUG
            Console.WriteLine("**BACKTRACK: end-of-baseline, invalid partition");
#endif
        }
    } else if (n is RelationTreeNode) {
        // handle relation nodes by adding new parse tree node that inherits the
        // region's strokes/LBT, then restart parsing inside the region.
        RelationTreeNode rtn = n as RelationTreeNode;
        ParseTreeNode ptn = new ParseTreeNode();
        ptn.strokes = rtn.strokes;
        ptn.nodeType = rtn.nodeType; // increment here for new production
        ptn.lbt = rtn.lbt;
        rtn.children.Clear(); // remove all before
        rtn.children.Add(ptn);
        parse(ptn.lbt, ptn, nlist);
    } else if (n.nodeType.StartsWith("*")) {
        // if n generates terminal symbols: try every candidate the classifier offers.
        List <LexerResult> C = SelectCandidateSymbols(n.nodeType);
        //candidateSymbols = C;
        foreach (LexerResult c in C) {
            PartitionResultWrapper pr = attachSymbol(currentSymbol == null ? initLBT : currentSymbol.symbol.lbt, c);
            if (pr != null) {
                // Save state so this candidate can be undone on backtrack.
                pushCurrentState();
                // update current state
                currentSymbol = new PreviousSymbol(c, n, null, null, true);
                unusedStrokes -= currentSymbol.symbol.segment.strokes.Count;
                unusedInputStrokes -= currentSymbol.symbol.segment.strokes.Count;
                // remove one of the min required strokes for the current token
                minRequiredStrokes -= 1;
                // prune by number of strokes left
                // NOTE(review): returns instead of moving to the next candidate — the
                // commented-out 'continue' suggests this was deliberate; confirm.
                if (unusedInputStrokes < minRequiredStrokes) {
                    popCurrentState();
                    return; // continue;
                }
                List <string> layoutClasses = grammar.GetLayoutClassesFromTerminal(c.segment.classification[0].symbol);
                if (layoutClasses.Count == 0) {
                    continue;
                }
                // Gather the next round of candidate symbols from the lexer, deduplicated.
                candidateSymbols = new List <LexerResult>();
                foreach (string layoutClass in layoutClasses) {
                    List <LexerResult> res = lexer.Next(c.lbt, c.segment, layoutClass, MAX_NEIGHBORS);
                    foreach (LexerResult r in res) {
                        if (!candidateSymbols.Contains(r)) {
                            candidateSymbols.Add(r);
                        }
                    }
                }
                // No further candidates: mark the baseline as finished.
                if (candidateSymbols.Count == 0 && !nlist.Contains(ParseTreeNode.EndOfBaseline)) {
                    nlist.Insert(0, ParseTreeNode.EndOfBaseline);
                }
                // not end of baseline, so prune and backtrack if necessary
                else if (nlist.Count > 0 && (nlist[0] is RelationTreeNode == false)) {
                    // prune candidates based on the current node type (reverse-iteration
                    // via k-- after RemoveAt keeps the index valid)
                    foreach (LexerResult lr in candidateSymbols) {
                        for (int k = 0; k < lr.segment.classification.Count; k++) {
                            if (grammar.NonTerminalCanGenerateTerminal(nlist[0].nodeType, lr.segment.classification[k].symbol) == false) {
                                lr.segment.classification.RemoveAt(k--);
                            }
                        }
                    }
                    // remove candidate symbols which contain no symbol alternatives
                    for (int k = 0; k < candidateSymbols.Count; k++) {
                        if (candidateSymbols[k].segment.classification.Count == 0) {
                            candidateSymbols.RemoveAt(k--);
                        }
                    }
                    // no valid symbols given the grammar, so break out early
                    // NOTE(review): same return-vs-continue question as above.
                    if (candidateSymbols.Count == 0) {
                        popCurrentState();
                        return; // continue;
                    }
                }
                // Attach the accepted symbol to the parse tree and recurse.
                SymbolTreeNode nc = new SymbolTreeNode(c);
                n.children.Add(nc);
                /*
                 * if ( nlist.Count == 0 && unusedStrokes == 0 ) {
                 * if ( unusedInputStrokes == 0 ) acceptCurrentParseTree();
                 * } else {
                 */
                // append relation nodes to the END
                List <RelationTreeNode> nlist_rels = new List <RelationTreeNode>();
                if (pr.result != null && pr.result.ABOVE.lbt.strokes.Count != 0) {
                    nlist_rels.Add(pr.result.ABOVE);
                }
                if (pr.result != null && pr.result.BELOW.lbt.strokes.Count != 0) {
                    nlist_rels.Add(pr.result.BELOW);
                }
                if (pr.result != null && pr.result.CONTAINS.lbt.strokes.Count != 0) {
                    nlist_rels.Add(pr.result.CONTAINS);
                }
                if (pr.result != null && pr.result.SUBSC.lbt.strokes.Count != 0) {
                    nlist_rels.Add(pr.result.SUBSC);
                }
                if (pr.result != null && pr.result.SUPER.lbt.strokes.Count != 0) {
                    nlist_rels.Add(pr.result.SUPER);
                }
                //if ( pr.result != null && pr.result.BLEFT.lbt.strokes.Count != 0 ) nlist_rels.Add( pr.result.BLEFT );
                //if ( pr.result != null && pr.result.TLEFT.lbt.strokes.Count != 0 ) nlist_rels.Add( pr.result.TLEFT );
                foreach (RelationTreeNode rtn in nlist_rels) {
                    nlist.Add(rtn);
                }
                minRequiredStrokes += nlist_rels.Count;
                applyRules(nlist);
                //}
                // Undo everything this candidate added, in reverse order.
                if (nlist.Count > 0 && nlist[0] == ParseTreeNode.EndOfBaseline) {
                    nlist.RemoveAt(0); // !
                }
                // remove any leftover relation nodes
                n.children.Remove(nc);
                List <ParseTreeNode> n_children_tmp = new List <ParseTreeNode>(n.children);
                for (int i = 0; i < n_children_tmp.Count; i++) {
                    if (n_children_tmp[i] is RelationTreeNode) {
                        n.children.Remove(n_children_tmp[i]);
                    }
                }
                foreach (RelationTreeNode rtn in nlist_rels) {
                    nlist.Remove(rtn);
                }
                popCurrentState();
            }
        }
    } else {
        // NONTERMINALS: expand n using every production of its node type.
        //n.lexResult.segment.classification[0].symbol;
        List <string[]> productions = grammar.GetProductions(n.nodeType);
        if (productions == null) {
#if DEBUG
            Console.Error.WriteLine("Error: invalid nonterminal ({0}).", n.nodeType);
#endif
            return;
        }
        // remove one for the token we are replacing with productions
        // (restored by the matching minRequiredStrokes++ at the end).
        minRequiredStrokes--;
        foreach (string[] production in productions) {
            minRequiredStrokes += production.Length;
            // prune by number of strokes left
            if (unusedInputStrokes < minRequiredStrokes) {
                minRequiredStrokes -= production.Length;
                continue;
            }
            // prune based on the candidate symbols and the first rule in production:
            // skip productions whose first token cannot generate any candidate symbol.
            bool candidate_can_be_generated = false;
            foreach (LexerResult lr in candidateSymbols) {
                foreach (Classification csf in lr.segment.classification) {
                    if (grammar.NonTerminalCanGenerateTerminal(production[0], csf.symbol)) {
                        candidate_can_be_generated = true;
                        break;
                    }
                }
                if (candidate_can_be_generated) {
                    break;
                }
            }
            if (!candidate_can_be_generated) {
                minRequiredStrokes -= production.Length;
                continue;
            }
            // Materialize the production's tokens as child nodes and push them onto
            // the front of nlist in order (reverse insertion keeps production order).
            List <ParseTreeNode> nodes = new List <ParseTreeNode>();
            foreach (string p in production) {
                ParseTreeNode n0 = new ParseTreeNode();
                n0.nodeType = p;
                n0.lexResult = null;
                nodes.Add(n0);
                n.children.Add(n0);
            }
            for (int i = nodes.Count - 1; i >= 0; i--) {
                nlist.Insert(0, nodes[i]);
            }
            applyRules(nlist);
            // restore min required strokes and detach this production's nodes (backtrack).
            minRequiredStrokes -= production.Length;
            foreach (ParseTreeNode node in nodes) {
                n.children.Remove(node);
                nlist.Remove(node);
            }
        }
        minRequiredStrokes++;
    }
}