/// <summary>
/// Resolves <paramref name="value"/> to its operator token type, consulting the
/// simple-operator table first and the block-opening-operator table second.
/// </summary>
/// <param name="value">Operator text to look up.</param>
/// <returns>The matching token type, or <c>TokenTypes.UNKNOWN</c> when neither table contains it.</returns>
public static TokenTypes GetOperatorTokenType(string value)
{
    if (SimpleOperators.TryGetValue(value, out var simple))
    {
        return simple;
    }

    return BlockOpeningOperators.TryGetValue(value, out var blockOpening)
        ? blockOpening
        : TokenTypes.UNKNOWN;
}
/// <summary>
/// Classifies an identifier-like word: reserved identifiers win, then simple
/// operators, then block-opening operators; anything else is a plain ID.
/// </summary>
/// <param name="value">Word to classify.</param>
/// <returns>The matching token type, or <c>TokenTypes.ID</c> when no table contains the word.</returns>
public static TokenTypes GetIDTokenType(string value)
{
    if (ReservedIDs.TryGetValue(value, out var reserved))
    {
        return reserved;
    }

    if (SimpleOperators.TryGetValue(value, out var simple))
    {
        return simple;
    }

    return BlockOpeningOperators.TryGetValue(value, out var blockOpening)
        ? blockOpening
        : TokenTypes.ID;
}
/// <summary>
/// Translates a Kendo filter operator into its SQL equivalent.
/// Direct mappings come from <c>SimpleOperators</c>; textual operators
/// (e.g. contains/startswith-style) all map to <c>LIKE</c>.
/// </summary>
/// <param name="kendoOperator">Kendo operator name to translate.</param>
/// <returns>The SQL operator text.</returns>
/// <exception cref="InvalidCastException">Thrown when the operator is unknown or unsupported.</exception>
public string ToSqlOperator(string kendoOperator)
{
    // Single TryGetValue instead of ContainsKey + indexer: one hash lookup, not two (CA1854).
    if (SimpleOperators.TryGetValue(kendoOperator, out var sqlOperator))
    {
        return sqlOperator;
    }

    if (TextualOperators.Contains(kendoOperator))
    {
        return "LIKE";
    }

    // NOTE(review): InvalidCastException is an odd choice here (ArgumentException would be
    // conventional), but it is kept because callers may already catch this exact type.
    throw new InvalidCastException($"invalid or not supported operator {kendoOperator}");
}
/// <summary>
/// Tokenizes <paramref name="input"/> line by line, storing one token list per
/// input line into <c>Results</c>. Implemented as a small state machine
/// (normal / quoted / comment) whose state lives in locals captured by the
/// local functions below — statement order inside the handlers is significant.
/// </summary>
/// <param name="input">Source text, one entry per line.</param>
public void Execute(List<string> input)
{
    var mode = TokenizerMode.normal;
    int lineNumber = 0;
    int charNumber = 0;
    Results = new List<List<ExoToken>>();
    var tokenLine = new List<ExoToken>();
    var isLastWordOperator = false;
    var exitMode = false;   // set when a closing quote is seen; flushed one char later
    string currentWord = string.Empty;

    // Line Parsing Section
    for (lineNumber = 0; lineNumber < input.Count; lineNumber++)
    {
        // start of new line
        var currentLine = input[lineNumber];
        tokenLine = new List<ExoToken>();

        // Character Parsing Section
        for (charNumber = 0; charNumber < currentLine.Length; charNumber++)
        {
            char c = currentLine[charNumber];
            DecideCharacterHandling(c);
        }

        if (mode == TokenizerMode.quoted)
        {
            // ERROR: forgot to close quote!;
            // Only logged — the partial quoted word is still emitted by FinishLastWord below.
            Debug.WriteLine($"Missing Quotes: {lineNumber}");
        }

        // Flush any trailing word, then reset all per-line state.
        FinishLastWord();
        mode = TokenizerMode.normal;
        isLastWordOperator = false;
        exitMode = false;
        Results.Add(tokenLine);
    }

    // Character handling logic, abstracted to keep internal loop clean
    void DecideCharacterHandling(char c)
    {
        CheckForModeSwitch(c);
        if (Separators.Contains(c) && mode == TokenizerMode.normal)
        {
            // Separator outside quotes/comments: terminate the current word.
            // NOTE(review): isLastWordOperator is NOT reset here, so an operator
            // followed by a separator and another operator char may still attempt
            // TryExpandOperator across the gap — confirm this is intended.
            FinishLastWord();
        }
        else
        {
            HandleNormalCharacters(c);
        }
    }

    // Detects comment/quote boundaries and switches mode; once in comment mode
    // the rest of the line is ignored (mode is reset per line in the outer loop).
    void CheckForModeSwitch(char c)
    {
        if (mode == TokenizerMode.comment)
        {
            return;
        }
        if (c == CommentCharacter && mode != TokenizerMode.quoted)
        {
            FinishLastWord();
            mode = TokenizerMode.comment;
            return;
        }
        if (c == QuoteCharacter)
        {
            if (mode == TokenizerMode.quoted)
            {
                // Closing quote: defer the mode switch so HandleNormalCharacters
                // still appends this char and then flushes the quoted word.
                exitMode = true;
                return;
            }
            FinishLastWord();
            mode = TokenizerMode.quoted;
            return;
        }
    }

    // handling spaces and tabulators
    // NOTE(review): quote characters reach this handler on both entry and exit of
    // quoted mode, so the quote chars themselves end up inside the token's value —
    // confirm downstream consumers expect that.
    void HandleNormalCharacters(char c)
    {
        if (SimpleOperators.Contains(c) && mode == TokenizerMode.normal)
        {
            // expand existing operators
            if (isLastWordOperator)
            {
                if (tokenLine.Last().TryExpandOperator(c))
                {
                    // work performed in TryExpand
                    return;
                }
            }
            // split and identify new operators
            FinishLastWord();
            currentWord += c;
            FinishLastWord();
            tokenLine.Last().IsOperator = true;
            isLastWordOperator = true;
            return;
        }
        isLastWordOperator = false;
        currentWord += c;
        if (exitMode)
        {
            // Closing quote was just appended: emit the quoted token and return to normal mode.
            exitMode = false;
            FinishLastWord();
            mode = TokenizerMode.normal;
        }
    }

    // finish the current word and add it to the CURRENT token handling list
    void FinishLastWord()
    {
        // Empty words (e.g. consecutive separators) produce no token.
        if (currentWord == string.Empty)
        {
            return;
        }
        var token = new ExoToken() { value = currentWord, line = lineNumber, type = mode };
        tokenLine.Add(token);
        currentWord = string.Empty;
    }
}