/// <summary>
/// Converts raw text tokens into filter-criterion tokens, one parser priority at a time.
/// For each priority, the token list is rescanned from the start until no raw token
/// yields a further criterion; each extraction replaces the raw token with up to three
/// tokens: the raw text before the criterion (if any), the criterion itself, and the
/// raw text after it (if any). The list is mutated in place.
/// </summary>
/// <param name="parserPriorities">Parser priorities to apply, processed in list order.</param>
/// <param name="tokens">Token stream to refine; modified in place.</param>
private static void Tokenize(List<int> parserPriorities, List<Token> tokens)
{
    foreach (int priority in parserPriorities)
    {
        // Keep rescanning at this priority until a full pass extracts nothing.
        bool extracted = true;
        while (extracted)
        {
            extracted = false;
            for (int pos = 0; pos < tokens.Count; pos++)
            {
                // Only raw (not-yet-parsed) tokens can yield a new criterion.
                RawToken raw = tokens[pos] as RawToken;
                if (raw == null)
                {
                    continue;
                }

                // The extractor may consult the token preceding the raw one.
                Token previous = (pos > 0) ? tokens[pos - 1] : null;

                FilterCriterionToken criterion;
                int start;
                int length;
                if (!FilterParser.ExtractFirstCriterion(previous, raw, priority, out criterion, out start, out length))
                {
                    continue;
                }

                // Replace the raw token with: [leading raw text], criterion, [trailing raw text].
                tokens.RemoveAt(pos);
                if (start > 0)
                {
                    tokens.Insert(pos, new RawToken(raw.Value.Substring(0, start)));
                    pos++;
                }
                tokens.Insert(pos, criterion);
                pos++;
                if (start + length < raw.Value.Length)
                {
                    tokens.Insert(pos, new RawToken(raw.Value.Substring(start + length)));
                }

                // The list changed: abandon this pass and rescan from the beginning.
                extracted = true;
                break;
            }
        }
    }
}