// Decides whether this matcher accepts the given token.
// The rules are tried in priority order: exact lexeme, then lemma id list,
// then part-of-speech filter, then the conjunction of coordinate/state pairs.
// A matcher with no applicable constraints accepts every token.
public bool Match(WordData token, SolarixGrammarEngineNET.GrammarEngine2 gren)
{
    // Exact-lexeme rule: compare the surface form case-insensitively.
    if (lexeme != null)
    {
        return token.GetWord().Equals(lexeme, StringComparison.InvariantCultureIgnoreCase);
    }

    // Lemma rule: the token's dictionary entry must be one of the listed ids.
    if (id_lemma != null)
    {
        return id_lemma.Contains(token.GetEntryID());
    }

    // Part-of-speech filter: reject tokens whose class is not in the allowed set.
    if (pos != null && !pos.Contains(token.GetClassID()))
    {
        return false;
    }

    // Coordinate/state pairs: every listed tag must be present on the token.
    if (pairs != null && pairs.Count > 0)
    {
        return pairs.All(p => token.ContainsTag(p.CoordID, p.StateID));
    }

    // No constraint rejected the token.
    return true;
}
// Returns the id of the first registered matcher that accepts the token.
// If none matches, throws with a diagnostic naming the word, its
// part of speech, and its coordinate tags.
public int MatchTags(WordData token, SolarixGrammarEngineNET.GrammarEngine2 gren)
{
    foreach (TagMatcher matcher in matchers)
    {
        if (matcher.Match(token, gren))
        {
            return matcher.GetId();
        }
    }

    // No matcher fit: assemble a human-readable description of the token for the error.
    int entryId = token.GetEntryID();
    int classId = gren.GetEntryClass(entryId);
    string partOfSpeech = gren.GetClassName(classId);
    var tagTexts = token.GetTags()
                        .Select(t => string.Format("{0}={1}", gren.GetCoordName(t.Item1), gren.GetCoordStateName(t.Item1, t.Item2)))
                        .ToArray();
    string tagList = string.Join(" ", tagTexts);
    string msg = string.Format("Can not find tag for {0} {{ {1} {2} }}", token.GetWord(), partOfSpeech, tagList);
    throw new ApplicationException(msg);
}