/// <summary>
/// Returns the display text for a token kind, e.g. the keyword or operator
/// spelling. Falls back to "&lt;Kind&gt;" for kinds with no textual form.
/// </summary>
private string Stringize(TokKind tid)
{
    if (_mapTidStr == null)
    {
        // Lazily build the inverse keyword table: token kind -> source text.
        var map = new Dictionary<TokKind, string>();
        foreach (var kvp in _kwt.KeyWords)
        {
            // Context keywords double as identifiers, so they are excluded.
            if (!kvp.Value.IsContextKeyWord)
            {
                map[kvp.Value.Kind] = kvp.Key.Value.ToString();
            }
        }
        foreach (var kvp in _kwt.Punctuators)
        {
            map[kvp.Value] = kvp.Key.Value.ToString();
        }
        _mapTidStr = map;
    }

    string text;
    if (_mapTidStr.TryGetValue(tid, out text))
    {
        return text;
    }
    return string.Format("<{0}>", tid);
}
/// <summary>
/// Maps a comparison token kind to its padded display text (e.g. " == ").
/// Asserts and returns a placeholder for non-comparison kinds.
/// </summary>
private string GetString(TokKind tidCompare)
{
    switch (tidCompare)
    {
        case TokKind.Equ:
            return " = ";
        case TokKind.EquEqu:
            return " == ";
        case TokKind.LssGrt:
            return " <> ";
        case TokKind.BngEqu:
            return " != ";
        case TokKind.Lss:
            return " < ";
        case TokKind.LssEqu:
            return " <= ";
        case TokKind.GrtEqu:
            return " >= ";
        case TokKind.Grt:
            return " > ";
        default:
            // Callers should only pass comparison operators.
            Contracts.Assert(false);
            return " <bad> ";
    }
}
/// <summary>
/// Registers a punctuator spelling, asserting it is not already mapped
/// to a different token kind.
/// </summary>
public void AddPunctuator(string str, TokKind tid)
{
    Contracts.AssertNonEmpty(str);
    bool added = TryAddPunctuator(str, tid);
    Contracts.Assert(added, "duplicate punctuator!");
}
/// <summary>
/// Creates the token for a keyword. Context keywords lex as identifiers
/// that carry the keyword kind as their context kind; plain keywords get
/// a dedicated key token.
/// </summary>
public static Token CreateKeyWord(TextSpan span, string str, TokKind tid, bool isContextKeyWord)
{
    return isContextKeyWord
        ? (Token)new IdentToken(span, str, tid)
        : new KeyToken(span, tid);
}
/// <summary>
/// Initializes a token with its span, kind, and contextual kind.
/// </summary>
protected Token(TextSpan span, TokKind tid, TokKind tidContext)
{
    // Currently the only contextual variability is that an identifier
    // might double as a keyword.
    Contracts.Assert(tid == tidContext || tid == TokKind.Ident);
    Kind = tid;
    KindContext = tidContext;
    Span = span;
}
/// <summary>
/// Jumps the cursor directly to the token at the given index.
/// </summary>
public void MoveTo(int tokenIndex)
{
    AssertValid();
    // Fix: validate the destination index (the parameter) rather than the
    // current index — it is tokenIndex that is used to index _tokens below,
    // and the current index is already covered by AssertValid().
    Contracts.AssertIndex(tokenIndex, _tokenCount);
    _currentTokenIndex = tokenIndex;
    _currentToken = _tokens[_currentTokenIndex];
    _currentTokenId = _currentToken.Kind;
    AssertValid();
}
/// <summary>
/// Wraps a pre-lexed token array. The array must be non-empty and end
/// with an Eof token; the cursor starts on the first token.
/// </summary>
public TokenCursor(Token[] rgtok)
{
    Contracts.AssertValue(rgtok);
    Contracts.Assert(rgtok.Length > 0 && rgtok[rgtok.Length - 1].Kind == TokKind.Eof);
    _tokens = rgtok;
    _tokenCount = rgtok.Length;
    _currentToken = rgtok[0];
    _currentTokenId = _currentToken.Kind;
}
// Returns the current token if it's of the given kind and moves to the next token.
// If the token is not the right kind, reports an error, leaves the token, and returns null.
private Token TokEat(TokKind tid)
{
    if (_curs.TidCur != tid)
    {
        ErrorTid(_curs.TokCur, tid);
        return null;
    }
    return _curs.TokMove();
}
// Eats a token of the given kind.
// If the token is not the right kind, reports an error and leaves it.
private bool EatTid(TokKind tid)
{
    if (_curs.TidCur != tid)
    {
        ErrorTid(_curs.TokCur, tid);
        return false;
    }
    _curs.TokMove();
    return true;
}
// Eats a token of the given kind (matching either the plain or contextual kind).
// If the token is not the right kind, leaves the current token and
// reports and returns an error.
private bool EatTid(TokKind tid)
{
    bool matches = TidCur == tid || CtxCur == tid;
    if (!matches)
    {
        PostTidError(TokCur, tid);
        return false;
    }
    TidNext();
    return true;
}
// Returns the current token if it's of the given kind and moves to the next token.
// If the token is not the right kind, reports an error, leaves the token, and returns null.
private Token TokEat(TokKind tid)
{
    if (TidCur != tid)
    {
        PostTidError(TokCur, tid);
        return null;
    }
    return TokMove();
}
/// <summary>
/// Streaming cursor: pulls tokens from an enumerator into a buffer on demand.
/// </summary>
public TokenCursor(IEnumerable <Token> tokens)
{
    Contracts.AssertValue(tokens);
    _tokens = tokens.GetEnumerator();
    // Buffer of fetched tokens (0x0400 = 1024 entries).
    _buffer = new Token[0x0400];
    // NOTE(review): -1 presumably means "no position pinned" — confirm
    // against the pinning logic elsewhere in this class.
    _itokPin = -1;
    // Get the first token.
    FetchCore();
    _tokCur = _buffer[_itokCur];
    _tidCur = _tokCur.Kind;
    AssertValid();
}
// This expects that _itokCur + ditok is either within the buffered token range or
// just at the end of it. In other words, it does not support skipping tokens.
private void MoveBy(int ditok)
{
    AssertValid();
    // Destination must lie within [0, _itokLim] relative to the current position.
    Contracts.Assert(-_itokCur <= ditok && ditok <= _itokLim - _itokCur);
    // Landing exactly on the limit requires that more tokens can be fetched.
    Contracts.Assert(ditok < _itokLim - _itokCur || _tokens != null);
    // Fetch until the buffered limit passes the destination. This loop only
    // terminates if FetchToken advances _itokLim (or shifts _itokCur) —
    // guaranteed by the asserts above when _tokens is non-null.
    while (ditok >= _itokLim - _itokCur)
    {
        FetchToken();
    }
    _itokCur += ditok;
    _tokCur = _buffer[_itokCur];
    _tidCur = _tokCur.Kind;
    AssertValid();
}
/// <summary>
/// Parses a (possibly chained) comparison expression, e.g. a &lt; b &lt;= c.
/// The already-parsed left operand is passed in; tidLax/tidStrict are the
/// two token spellings allowed for this comparison direction.
/// </summary>
private CompareNode ParseCompareExpr(ExprNode node, CompareOp op, TokKind tidLax, TokKind tidStrict)
{
    Contracts.AssertValue(node);
    Contracts.Assert(TidCur == tidLax || TidCur == tidStrict);
    Token tok = TokCur;
    List <Node> list = new List <Node>();
    List <Token> ops = new List <Token>();
    list.Add(node);
    // Consume as many same-direction comparison operators as appear,
    // collecting operands and the operator tokens (delimiters).
    for (; ;)
    {
        if (TidCur != tidLax && TidCur != tidStrict)
        {
            break;
        }
        ops.Add(TokMove());
        // +1 keeps parsing of the right operand from re-entering comparison.
        list.Add(ParseExpr(Precedence.Compare + 1));
    }
    Contracts.Assert(list.Count >= 2);
    // The grammar disallows mixed direction expressions like:
    //   a < b > c <= 4
    // After posting an error, we continue parsing to produce: ((a < b) > c) <= 4.
    // Note that this will also produce a type checking error.
    Contracts.Assert(TidCur != tidLax);
    Contracts.Assert(TidCur != tidStrict);
    // If the next token is any other comparison operator, it must be a
    // direction change (same-direction tokens were consumed above).
    switch (TidCur)
    {
        case TokKind.LssGrt:
        case TokKind.BngEqu:
        case TokKind.Equ:
        case TokKind.EquEqu:
        case TokKind.Lss:
        case TokKind.LssEqu:
        case TokKind.Grt:
        case TokKind.GrtEqu:
            PostError(TokCur, "Mixed direction not allowed");
            break;
    }
    return(new CompareNode(tok, op, new ListNode(tok, list.ToArray(), ops.ToArray())));
}
/// <summary>
/// Pretty-prints a comparison chain, choosing the lax or strict operator
/// spelling per delimiter token and parenthesizing operands whose
/// precedence would otherwise bind incorrectly.
/// </summary>
public override void Visit(CompareNode node)
{
    Contracts.AssertValue(node);
    if (TryShowValue(node))
    {
        return;
    }
    TokKind tidLax = node.TidLax;
    TokKind tidStrict = node.TidStrict;
    string strLax = GetString(tidLax);
    string strStrict = GetString(tidStrict);
    string str = string.Empty;
    // strOp is empty before the first operand, then set from the delimiter
    // that precedes each subsequent operand.
    string strOp = string.Empty;
    for (int i = 0; ;)
    {
        _wrt.Write(strOp);
        var arg = node.Operands.Items[i];
        var prec = GetPrec(arg);
        // Parenthesize operands at or below comparison precedence.
        if (prec <= Precedence.Compare)
        {
            _wrt.Write('(');
        }
        arg.Accept(this);
        if (prec <= Precedence.Compare)
        {
            _wrt.Write(')');
        }
        if (++i >= node.Operands.Items.Length)
        {
            break;
        }
        // Delimiter i-1 sits between operand i-1 and operand i.
        var tid = node.Operands.Delimiters[i - 1].Kind;
        Contracts.Assert(tid == tidLax || tid == tidStrict);
        strOp = tid == tidLax ? strLax : strStrict;
    }
    ShowType(node);
}
/// <summary>
/// Parses a comma-separated expression list. tidEmpty is the token kind
/// that immediately closes an empty list (e.g. the matching bracket).
/// </summary>
private ListNode ParseList(Token tok, TokKind tidEmpty)
{
    // Empty list: the closing token immediately follows the opener.
    if (TidCur == tidEmpty)
    {
        return new ListNode(tok, new Node[0], null);
    }

    var items = new List<Node>();
    List<Token> commas = null;
    items.Add(ParseExpr());
    while (TidCur == TokKind.Comma)
    {
        Utils.Add(ref commas, TokMove());
        items.Add(ParseExpr());
    }
    return new ListNode(tok, items.ToArray(), Utils.ToArray(commas));
}
/// <summary>
/// Called to lex a punctuator (operator). Asserts the current character lex type
/// is LexCharType.Punc. Performs longest-match: keeps extending the candidate
/// while it remains a known prefix, remembering the longest complete punctuator.
/// </summary>
private Token LexPunc()
{
    // Length and kind of the longest complete punctuator found so far.
    int cchPunc = 0;
    TokKind tidPunc = TokKind.None;
    _sb.Length = 0;
    _sb.Append(ChCur);
    for (; ;)
    {
        TokKind tidCur;
        NormStr nstr = _lex._pool.Add(_sb);
        if (!_lex._kwt.IsPunctuator(nstr, out tidCur))
        {
            break;
        }
        if (tidCur != TokKind.None)
        {
            // This is a real punctuator, not just a prefix.
            tidPunc = tidCur;
            cchPunc = _sb.Length;
        }
        // Peek ahead; stop extending once the next char can't be part of a punctuator.
        char ch = ChPeek(_sb.Length);
        if (!LexCharUtils.IsPunc(ch))
        {
            break;
        }
        _sb.Append(ch);
    }
    if (cchPunc == 0)
    {
        return(LexError());
    }
    // Consume exactly the characters of the accepted punctuator.
    while (--cchPunc >= 0)
    {
        ChNext();
    }
    return(KeyToken.Create(GetSpan(), tidPunc));
}
/// <summary>
/// Registers a punctuator spelling, returning false if the spelling is
/// already mapped to a different (non-prefix) token kind.
/// </summary>
public bool TryAddPunctuator(string str, TokKind tid)
{
    Contracts.AssertNonEmpty(str);

    // Note: this assumes that once a prefix is found, that all shorter
    // prefixes are mapped to something (TokKind.None to indicate that
    // it is only a prefix and not itself a token).
    TokKind tidCur;
    NormStr nstr = _pool.Add(str);
    // Fix: reuse the pooled NormStr instead of calling _pool.Add(str) a
    // second time for the lookup.
    if (_mpnstrtidPunc.TryGetValue(nstr, out tidCur))
    {
        if (tidCur == tid)
        {
            return true;  // Already registered with the same kind.
        }
        if (tidCur != TokKind.None)
        {
            return false; // Conflicts with a different punctuator.
        }
        // Previously only a prefix; promoted to a real punctuator below.
    }
    else
    {
        // Map all prefixes (that aren't already mapped) to TokKind.None.
        for (int cch = str.Length; --cch > 0;)
        {
            NormStr nstrTmp = _pool.Add(str.Substring(0, cch));
            TokKind tidTmp;
            // Fix: look up the NormStr we already hold instead of
            // re-interning its value through the pool.
            if (_mpnstrtidPunc.TryGetValue(nstrTmp, out tidTmp))
            {
                break;
            }
            _mpnstrtidPunc.Add(nstrTmp, TokKind.None);
        }
    }
    _mpnstrtidPunc[nstr] = tid;
    return true;
}
/// <summary>
/// Node for a chained comparison. Records, per comparison direction, the
/// "lax" and "strict" token spellings used when validating delimiters.
/// </summary>
public CompareNode(Token tok, CompareOp op, ListNode operands)
    : base(tok)
{
    Contracts.AssertValue(operands);
    Contracts.Assert(operands.Items.Length >= 2);
    Contracts.AssertValue(operands.Delimiters);
    Contracts.Assert(operands.Delimiters.Length == operands.Items.Length - 1);

    Op = op;
    Operands = operands;

    switch (op)
    {
        case CompareOp.NotEqual:
            TidLax = TokKind.LssGrt;
            TidStrict = TokKind.BngEqu;
            break;
        case CompareOp.IncrChain:
            TidLax = TokKind.LssEqu;
            TidStrict = TokKind.Lss;
            break;
        case CompareOp.DecrChain:
            TidLax = TokKind.GrtEqu;
            TidStrict = TokKind.Grt;
            break;
        default:
            // Unknown op: assert in debug, treat as Equal in release.
            Contracts.Assert(false);
            goto case CompareOp.Equal;
        case CompareOp.Equal:
            TidLax = TokKind.Equ;
            TidStrict = TokKind.EquEqu;
            break;
    }
}
/// <summary>
/// Posts an "expected X, found Y" error for a token-kind mismatch.
/// Callers must only invoke this on an actual mismatch.
/// </summary>
private void ErrorTid(Token tok, TokKind tidWanted)
{
    Contracts.Assert(tok.Kind != tidWanted);
    PostError(tok, TexlStrings.ErrExpectedFound_Ex_Fnd, tidWanted, tok);
}
// Keyword tokens carry no payload beyond their kind. The constructor is
// private; instances are created through the factory on this type.
private KeyToken(TextSpan span, TokKind tid)
    : base(span, tid)
{
}
// An identifier always lexes with kind Ident; tidContext records the
// keyword kind it may double as (or Ident when it is a plain identifier).
public IdentToken(TextSpan span, string val, TokKind tidContext)
    : base(span, TokKind.Ident, tidContext)
{
    Value = val;
}
// Looks up a punctuator (or punctuator prefix) by its pooled string.
// Note: pure prefixes are present with tid == TokKind.None, so a true
// return does not necessarily mean a complete operator.
public bool IsPunctuator(NormStr nstr, out TokKind tid)
{
    Contracts.Assert(!nstr.Value.IsEmpty);
    return(_mpnstrtidPunc.TryGetValue(nstr, out tid));
}
/// <summary>
/// Pairs a keyword's token kind with whether it is a context keyword
/// (i.e. also usable as an identifier).
/// </summary>
public KeyWordKind(TokKind kind, bool isContextKeyWord)
{
    IsContextKeyWord = isContextKeyWord;
    Kind = kind;
}
/// <summary>
/// Registers a non-context keyword. Duplicate registration throws
/// (Dictionary.Add), which is the intended failure mode.
/// </summary>
public void AddKeyWord(string str, TokKind tid)
{
    Contracts.AssertNonEmpty(str);
    NormStr key = _pool.Add(str);
    _mpnstrtidWord.Add(key, new KeyWordKind(tid, false));
}
/// <summary>
/// Posts an "expected X, found Y" error for a token-kind mismatch,
/// rendering both kinds through Stringize. Callers must only invoke this
/// when neither the plain nor the contextual kind matches.
/// </summary>
private void PostTidError(Token tok, TokKind tidWanted)
{
    Contracts.Assert(tok.Kind != tidWanted);
    Contracts.Assert(tok.KindContext != tidWanted);
    string strWanted = Stringize(tidWanted);
    string strFound = Stringize(tok);
    PostError(tok, "Expected: '{0}', Found: '{1}'", strWanted, strFound);
}
// Base constructor for tokens that carry a replaceable string payload.
protected ReplaceableToken(string value, TokKind kind, Span span)
    : base(kind, span)
{
    Contracts.AssertValue(value);
    _val = value;
}
// Keyword/punctuator token: no payload beyond the kind and span.
public KeyToken(TokKind tid, Span span)
    : base(tid, span)
{
}
/// <summary>
/// Initializes a token with its kind and source span.
/// </summary>
public Token(TokKind tid, Span span)
{
    Span = span;
    Kind = tid;
}
// Gets the string corresponding to token kinds used in binary or unary nodes.
// Returns string.Empty for kinds with no fixed spelling.
internal static string GetTokString(TokKind kind)
{
    switch (kind)
    {
        // Logical / unary operators.
        case TokKind.And:
            return TexlLexer.PunctuatorAnd;
        case TokKind.Or:
            return TexlLexer.PunctuatorOr;
        case TokKind.Bang:
            return TexlLexer.PunctuatorBang;

        // Arithmetic and string operators.
        case TokKind.Add:
            return TexlLexer.PunctuatorAdd;
        case TokKind.Sub:
            return TexlLexer.PunctuatorSub;
        case TokKind.Mul:
            return TexlLexer.PunctuatorMul;
        case TokKind.Div:
            return TexlLexer.PunctuatorDiv;
        case TokKind.Caret:
            return TexlLexer.PunctuatorCaret;
        case TokKind.Ampersand:
            return TexlLexer.PunctuatorAmpersand;
        case TokKind.PercentSign:
            return TexlLexer.PunctuatorPercent;

        // Comparison operators.
        case TokKind.Equ:
            return TexlLexer.PunctuatorEqual;
        case TokKind.Lss:
            return TexlLexer.PunctuatorLess;
        case TokKind.LssEqu:
            return TexlLexer.PunctuatorLessOrEqual;
        case TokKind.Grt:
            return TexlLexer.PunctuatorGreater;
        case TokKind.GrtEqu:
            return TexlLexer.PunctuatorGreaterOrEqual;
        case TokKind.LssGrt:
            return TexlLexer.PunctuatorNotEqual;

        // Member access and indexing.
        case TokKind.Dot:
            return TexlLexer.PunctuatorDot;
        case TokKind.BracketOpen:
            return TexlLexer.PunctuatorBracketOpen;

        // Keyword-spelled operators.
        case TokKind.In:
            return TexlLexer.KeywordIn;
        case TokKind.Exactin:
            return TexlLexer.KeywordExactin;
        case TokKind.KeyOr:
            return TexlLexer.KeywordOr;
        case TokKind.KeyAnd:
            return TexlLexer.KeywordAnd;
        case TokKind.KeyNot:
            return TexlLexer.KeywordNot;
        case TokKind.As:
            return TexlLexer.KeywordAs;

        default:
            return string.Empty;
    }
}