/// <summary>
/// Parses a raw dictionary <paramref name="description"/> into a structured
/// <see cref="WordArticle"/>: word-form groups, each holding translation
/// groups, each holding translations and usage examples.
/// </summary>
/// <param name="word">The headword; seeds the first form group.</param>
/// <param name="description">Raw article text to tokenize and parse.</param>
/// <returns>The populated article; never null.</returns>
public WordArticle Parse(string word, string description)
{
    List<Token> tokens = lexer.Parse(description);

    WordArticle article = new WordArticle();

    // The headword itself always opens the first form group.
    WordFormGroup formGroup = new WordFormGroup();
    article.FormGroups.Add(formGroup);
    formGroup.Forms.Add(word);

    WordTranslationGroup translationGroup = null;

    // Track the two most recent token types: two consecutive NewLine
    // tokens (a blank line) separate translation groups.
    TokenType lastTokenType1 = TokenType.WordForm;
    TokenType lastTokenType2 = TokenType.NewLine;

    foreach (Token token in tokens)
    {
        switch (token.Type)
        {
            case TokenType.Example:
                if (translationGroup == null)
                {
                    translationGroup = new WordTranslationGroup();
                    formGroup.TranslationGroups.Add(translationGroup);
                }
                // FIX: previously the example text was only stored when a
                // translation group already existed; an example arriving
                // first created the group but silently dropped its value.
                translationGroup.Examples.Add(token.Value);
                break;

            case TokenType.Translation:
                // Start a new group when none exists, when the current one
                // already has examples (examples close a group), or after
                // a blank line (two consecutive NewLine tokens).
                if (translationGroup == null ||
                    translationGroup.Examples.Any() ||
                    (lastTokenType1 == TokenType.NewLine && lastTokenType2 == TokenType.NewLine))
                {
                    translationGroup = new WordTranslationGroup();
                    formGroup.TranslationGroups.Add(translationGroup);
                }
                translationGroup.Translations.Add(token.Value);
                break;

            case TokenType.WordForm:
                // A form token after translations have started opens a new
                // form group; consecutive forms accumulate in the same one.
                if (formGroup.TranslationGroups.Any())
                {
                    formGroup = new WordFormGroup();
                    article.FormGroups.Add(formGroup);
                    translationGroup = null;
                }
                formGroup.Forms.Add(token.Value);
                break;
        }

        lastTokenType2 = lastTokenType1;
        lastTokenType1 = token.Type;
    }

    return article;
}
/// <summary>
/// Parses a raw dictionary <paramref name="description"/> into a structured
/// <see cref="WordArticle"/> of form groups, translation groups,
/// translations, and usage examples.
/// </summary>
/// <param name="word">The headword; seeds the first form group.</param>
/// <param name="description">Raw article text to tokenize and parse.</param>
/// <returns>The populated article; never null.</returns>
public WordArticle Parse(string word, string description)
{
    List<Token> tokens = lexer.Parse(description);

    WordArticle article = new WordArticle();

    // The headword itself always opens the first form group.
    WordFormGroup formGroup = new WordFormGroup();
    article.FormGroups.Add(formGroup);
    formGroup.Forms.Add(word);

    WordTranslationGroup translationGroup = null;

    // Track the two most recent token types: two consecutive NewLine
    // tokens (a blank line) separate translation groups.
    TokenType lastTokenType1 = TokenType.WordForm;
    TokenType lastTokenType2 = TokenType.NewLine;

    foreach (Token token in tokens)
    {
        switch (token.Type)
        {
            case TokenType.Example:
                if (translationGroup == null)
                {
                    translationGroup = new WordTranslationGroup();
                    formGroup.TranslationGroups.Add(translationGroup);
                }
                // FIX: previously the example text was only stored when a
                // translation group already existed; an example arriving
                // first created the group but silently dropped its value.
                translationGroup.Examples.Add(token.Value);
                break;

            case TokenType.Translation:
                // Start a new group when none exists, when the current one
                // already has examples (examples close a group), or after
                // a blank line (two consecutive NewLine tokens).
                if (translationGroup == null ||
                    translationGroup.Examples.Any() ||
                    (lastTokenType1 == TokenType.NewLine && lastTokenType2 == TokenType.NewLine))
                {
                    translationGroup = new WordTranslationGroup();
                    formGroup.TranslationGroups.Add(translationGroup);
                }
                translationGroup.Translations.Add(token.Value);
                break;

            case TokenType.WordForm:
                // A form token after translations have started opens a new
                // form group; consecutive forms accumulate in the same one.
                if (formGroup.TranslationGroups.Any())
                {
                    formGroup = new WordFormGroup();
                    article.FormGroups.Add(formGroup);
                    translationGroup = null;
                }
                formGroup.Forms.Add(token.Value);
                break;
        }

        lastTokenType2 = lastTokenType1;
        lastTokenType1 = token.Type;
    }

    return article;
}
/// <summary>
/// Re-parses this word's article and removes every event whose translation
/// no longer appears in it. Events with a null translation are always kept.
/// </summary>
public void FilterTranslationEvents()
{
    ArticleParser parser = new ArticleParser();
    WordArticle article = parser.Parse(name, description);

    // HashSet gives O(1) membership checks while scanning the events.
    HashSet<string> translations = new HashSet<string>(article.GetAllTranslations());

    // FIX: the loop bound previously read `Events.Count` while the body
    // indexed and mutated `events`; both now reference the same collection.
    // (Presumably `Events` wrapped `events`, but mixing them in a
    // remove-while-scanning loop is fragile at best.)
    for (int i = 0; i < events.Count;)
    {
        WordEvent wordEvent = events[i];
        if (wordEvent.Translation == null || translations.Contains(wordEvent.Translation))
        {
            // Keep this event; advance only when nothing was removed so the
            // element shifted into slot i after a removal is not skipped.
            i++;
        }
        else
        {
            events.RemoveAt(i);
        }
    }
}
/// <summary>
/// Re-derives a word's forms and translations from its description text,
/// replacing whatever <paramref name="info"/> currently holds.
/// </summary>
/// <param name="info">The word record to refresh; mutated in place.</param>
private static void ParseWord(WordInfo info)
{
    ArticleParser parser = new ArticleParser();
    WordArticle article = parser.Parse(info.Word.Name, info.Word.Description);

    // Collect every distinct form across all form groups.
    List<string> uniqueForms = article.FormGroups
        .SelectMany(group => group.Forms)
        .Distinct()
        .ToList();

    info.Forms.Clear();
    foreach (string formText in uniqueForms)
    {
        info.Forms.Add(new WordForm(info, formText));
    }

    // Collect every distinct translation across all translation groups.
    List<string> uniqueTranslations = article.FormGroups
        .SelectMany(group => group.TranslationGroups)
        .SelectMany(group => group.Translations)
        .Distinct()
        .ToList();

    info.Translations.Clear();
    foreach (string translationText in uniqueTranslations)
    {
        info.Translations.Add(translationText);
    }
}