public List<Token> GetTokens(SimpleNetNlp.Sentence stanfordSentence)
{
    var tokens = new List<Token>();

    // The SimpleNetNlp annotation lists (Lemmas, PosTags, NerTags, ...) are
    // index-aligned with Words, so the i-th entry of each list describes the i-th token.
    for (int i = 0; i < stanfordSentence.Words.Count; i++)
    {
        var token = new Token
        {
            Word = stanfordSentence.Words.ElementAt(i),
            Lemmas = stanfordSentence.Lemmas.ElementAt(i),
            PosTag = stanfordSentence.PosTags.ElementAt(i),
            NerTag = stanfordSentence.NerTags.ElementAt(i),
            IncomingDependencyLabel = stanfordSentence.IncomingDependencyLabels.ElementAt(i),
            Governor = stanfordSentence.Governors.ElementAt(i)
        };

        tokens.Add(token);
    }

    return tokens;
}
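// A minimal sketch of the Token class referenced above, inferred from the properties
// that GetTokens assigns; the real type in the project may differ. In particular,
// the type of Governor is an assumption here (a nullable token index), since it
// depends on what SimpleNetNlp's Sentence.Governors actually exposes.
public class Token
{
    public string Word { get; set; }
    public string Lemmas { get; set; }
    public string PosTag { get; set; }
    public string NerTag { get; set; }
    public string IncomingDependencyLabel { get; set; }
    public int? Governor { get; set; } // assumed: index of the governing token, if any
}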
public void CorrectTokens()
{
    // Point the working directory at the local CoreNLP model files so SimpleNetNlp can find them.
    Directory.SetCurrentDirectory(@"C:\sharkbot\stanford-corenlp-3.9.1-models");

    // Note: this Properties instance is configured but never passed to anything below;
    // the SimpleNetNlp.Sentence created later runs with the library's default pipeline settings.
    var properties = new Properties();
    properties.setProperty("annotators", "tokenize, ssplit, pos, lemma, ner, parse, dcoref, sentiment");
    properties.setProperty("ner.useSUTime", "0");

    var service = new TokenService();
    var stanfordSentence = new SimpleNetNlp.Sentence("hello world");
    var tokens = service.GetTokens(stanfordSentence);

    Assert.Equal(2, tokens.Count);

    var token = tokens.FirstOrDefault();
    Assert.Equal("hello", token.Word);
    Assert.Equal("hello", token.Lemmas);
    Assert.Equal("O", token.NerTag);
    Assert.Equal("UH", token.PosTag);
}
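// A minimal usage sketch outside the test, showing TokenService annotating an arbitrary
// sentence and printing one line per token. The TokenServiceDemo/Main scaffolding and the
// example sentence are illustrative assumptions; the model directory path is taken from the
// test above and must point at CoreNLP models that exist on the machine. Requires the
// System, System.IO, and System.Linq namespaces in scope, as the code above already assumes.
public static class TokenServiceDemo
{
    public static void Main()
    {
        Directory.SetCurrentDirectory(@"C:\sharkbot\stanford-corenlp-3.9.1-models");

        var service = new TokenService();
        var sentence = new SimpleNetNlp.Sentence("Stanford CoreNLP tags every word.");

        foreach (var token in service.GetTokens(sentence))
        {
            Console.WriteLine($"{token.Word}\t{token.Lemmas}\t{token.PosTag}\t{token.NerTag}");
        }
    }
}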