// Consumes `delta` characters at the current offset `i` and returns them as a
// token tagged `tag`, stamped with the position *before* the cursor advances.
Token Accept(TokenTag tag, int delta = 1)
{
    var lexeme = code.Substring(i, delta);
    var accepted = new Token(lexeme, tag, position);
    Next(delta);
    return accepted;
}
/// <summary>
/// Consumes the node at the cursor, requiring it to be a terminal with the
/// given tag; returns this collection for chaining.
/// </summary>
/// <exception cref="SyntaxException">The node is not a matching terminal.</exception>
public ParseNodeCollection Skip(TokenTag tag)
{
    // NOTE: the cursor advances before validation, so a mismatch still
    // consumes the node — original behavior, preserved here.
    ParseNode consumed = Items[_index++];
    bool matches = consumed.IsTerminal && consumed.Token.Tag == tag;
    if (!matches)
    {
        throw new SyntaxException("Unexpected parse node: {0} (expected: {1})", consumed.Tag, tag);
    }
    return this;
}
/// <summary>
/// Tags every token in <paramref name="group"/> with the configured exports,
/// applies configured tag priorities, then either passes all tokens through
/// (no handlers) or emits only the tokens accepted by at least one registered
/// processor, preserving the group's original order.
/// </summary>
public IEnumerable<LangToken> ProcessGroup(IEnumerable<LangToken> group, MLang lang)
{
    // FIX: the original enumerated `group` up to four times; a lazy or
    // one-shot sequence would be re-evaluated (or exhausted). Materialize once.
    IList<LangToken> tokens = group as IList<LangToken> ?? new List<LangToken>(group);

    // Attach every exported tag to every token. One shared TokenTag instance
    // per export, parented to each token (as in the original).
    foreach (var tagExport in TagExport)
    {
        var tag = new TokenTag { Key = tagExport.Key, Value = tagExport.Value };
        foreach (var token in tokens)
        {
            token.Tags[tag.Key] = tag;
            tag.AddParent(token);
        }
    }

    // Apply configured priorities to tags the tokens already carry.
    foreach (var tagPriority in TagPriority)
    {
        foreach (var token in tokens)
        {
            if (token.Tags.TryGetValue(tagPriority.Key, out TokenTag tag))
            {
                tag.Priority = tagPriority.Value;
            }
        }
    }

    if (Handlers.Count == 0)
    {
        // No handlers registered: every token passes through unchanged.
        foreach (var token in tokens)
        {
            yield return token;
        }
        yield break;
    }

    // Collect the set of tokens accepted by any registered processor...
    var used = new HashSet<LangToken>();
    foreach (var handler in Handlers)
    {
        if (lang.TextProcessors.TryGetValue(handler, out ITokenProcessor processor))
        {
            foreach (var r in processor.Process(tokens))
            {
                used.Add(r);
            }
        }
    }

    // ...and emit them in the group's original order.
    foreach (var result in tokens)
    {
        if (used.Contains(result))
        {
            yield return result;
        }
    }
}
/// <summary>
/// Builds a <see cref="TagSpan{TokenTag}"/> for <paramref name="item"/> over
/// the buffer's current snapshot and appends it to <paramref name="list"/>.
/// </summary>
private void ConvertItemToTag(List<ITagSpan<TokenTag>> list, ParseItem item)
{
    // Invalid items get a tooltip; entries that have properties support outlining.
    bool hasTooltip = !item.IsValid;
    bool supportsOutlining = item is Entry entry && entry.Properties.Any();

    IEnumerable<ErrorListItem> errors = CreateErrorListItems(item);
    TokenTag tag = CreateToken(item.Type, hasTooltip, supportsOutlining, errors);

    var span = new SnapshotSpan(Buffer.CurrentSnapshot, item);
    list.Add(new TagSpan<TokenTag>(span, tag));
}
/// <summary>
/// If the current token carries <paramref name="tag"/>, consumes it and
/// returns it; otherwise returns null and leaves the stream untouched.
/// </summary>
public Token? Match(TokenTag tag)
{
    if (!CurrentIs(tag))
    {
        return null;
    }
    var matched = Current;
    NextToken();
    return matched;
}
/// <summary>
/// Consumes and returns the current token, which must carry
/// <paramref name="tag"/>.
/// </summary>
/// <exception cref="SyntaxErrorException">The current token does not match.</exception>
public Token Expect(TokenTag tag)
{
    return Match(tag) is Token token
        ? token
        : throw new SyntaxErrorException(
            $"expected, but didn't match, token {tag}", Current.Position);
}
// True when `tag` is one of the compound/simple assignment operators.
// Note: "Assing" is a pre-existing typo in the method name, kept so any
// callers elsewhere in this file keep compiling.
// NOTE(review): there is no DIVIDE_ASSIGN case — confirm whether the token
// set defines one and whether its omission here is intentional.
private bool IsAssingOperator(TokenTag tag)
{
    return tag is TokenTag.ASSIGN
        or TokenTag.PLUS_ASSIGN
        or TokenTag.MINUS_ASSIGN
        or TokenTag.MULTIPLY_ASSIGN
        or TokenTag.MOD_ASSIGN
        or TokenTag.BIT_AND_ASSIGN
        or TokenTag.BIT_OR_ASSIGN
        or TokenTag.BIT_XOR_ASSIGN
        or TokenTag.LEFT_SHIFT_ASSIGN
        or TokenTag.RIGHT_SHIFT_ASSIGN;
}
/// <summary>
/// Emits only the tokens whose text parses as a 64-bit integer, tagging each
/// with an "integer" tag holding the parsed value's string form.
/// </summary>
public IEnumerable<LangToken> Process(IEnumerable<LangToken> tokens)
{
    foreach (var token in tokens)
    {
        if (!long.TryParse(token.Value, out long parsed))
        {
            continue; // non-numeric tokens are dropped, not passed through
        }
        var tag = new TokenTag
        {
            Key = "integer",
            Value = parsed.ToString(),
        };
        token.Tags[tag.Key] = tag;
        tag.AddParent(token);
        yield return token;
    }
}
/// <summary>
/// Advances the token stream and returns the next token, which must carry
/// <paramref name="reqTokenTag"/>.
/// </summary>
/// <exception cref="SyntaxException">The stream is exhausted or the tag differs.</exception>
private Token RequireToken(TokenTag reqTokenTag)
{
    if (!_enumerator.MoveNext())
    {
        throw new SyntaxException("Input stream finished");
    }
    var token = _enumerator.Current;
    if (token.Tag != reqTokenTag)
    {
        throw new SyntaxException("Expected {0} tokens. But actual is {1}", reqTokenTag, token);
    }
    return token;
}
// Tokenizes _lineText by repeatedly matching it against the RText regex map,
// consuming each match from the front of the buffer. Only tokens whose type
// is in `typesToKeep` are yielded (an empty list keeps everything).
// NOTE(review): if no regex in REGEX_MAP matches the remaining text, the
// inner foreach completes without shrinking _lineText and the while loop
// never terminates — confirm the regex set is total over valid input.
internal IEnumerable <TokenTag> Tokenize(params RTextTokenTypes[] typesToKeep) {
    bool aFirstToken = true;
    //column in RText protocol starts at 1
    int aColumn = 0;
    while (_lineText.Length > 0) {
        // First regex in map order that matches wins; map order is significant.
        foreach (var type in RTextRegexMap.REGEX_MAP.Keys) {
            Match aMatch = RTextRegexMap.REGEX_MAP[type].Match(_lineText.ToString());
            if (aMatch.Success) {
                if (typesToKeep.Count() == 0 || typesToKeep.Contains(type)) {
                    // NOTE(review): EndColumn uses aMatch.Length, not
                    // aMatch.Index + aMatch.Length — correct only if every
                    // regex is anchored at the start (Index == 0); confirm.
                    TokenTag aCurrentTag = new TokenTag {
                        Line = _lineNumber,
                        Context = aMatch.Value,
                        StartColumn = aColumn + aMatch.Index,
                        EndColumn = aColumn + aMatch.Length,
                        BufferPosition = _startPosition + aColumn,
                        Type = type
                    };
                    //special case for identifier
                    if (type == RTextTokenTypes.Label) {
                        // A label ends the "first token" window.
                        aFirstToken = false;
                    }
                    else if (type == RTextTokenTypes.Identifier) {
                        // The first identifier on a non-extended line is
                        // reclassified as a command.
                        if (aFirstToken && !_isLineExtended) {
                            aCurrentTag.Type = RTextTokenTypes.Command;
                            aFirstToken = false;
                        }
                    }
                    yield return(aCurrentTag);
                }
                // Consume the match even when its type is filtered out, so
                // the scan always advances past matched text.
                aColumn += aMatch.Length;
                _lineText.Remove(0, aMatch.Length);
                break;
            }
        }
    }
    yield break;
}
// Advances the lookahead cursor (_shift) until a token with `tag` is seen.
// No-op when a previous prediction already failed.
// NOTE(review): the loop body re-reads Lookahead(_shift) at the same shift
// as the value already tested, and the loop condition tests the token fetched
// *before* the last _shift++ — so on exit _shift sits one past the shift of
// the matching token found mid-stream, yet stays unchanged when the very
// first lookahead already matches. Confirm this asymmetry is intended before
// restructuring.
public TokenPredictor SkipUntil(TokenTag tag) {
    if (!Result) {
        return(this);
    }
    for (Token token = _enumerator.Lookahead(_shift); token.Tag != tag; _shift++) {
        token = _enumerator.Lookahead(_shift);
        // A default token marks the end of the stream; we never found `tag`.
        if (token == default(Token)) {
            throw new SyntaxException("Input stream finished");
        }
    }
    return(this);
}
// Matches exactly five ASCII letters/digits (a ticket id).
// FIX: cached and compiled once — the original constructed a new Regex on
// every Process() call, re-parsing the pattern each time.
private static readonly Regex TicketRegex = new Regex(@"^[a-zA-Z0-9]{5}$", RegexOptions.Compiled);

/// <summary>
/// Emits only the tokens whose full text matches the five-character ticket
/// pattern, tagging each with a "ticket" tag holding the token text.
/// </summary>
public IEnumerable<LangToken> Process(IEnumerable<LangToken> tokens)
{
    foreach (var token in tokens)
    {
        if (!TicketRegex.IsMatch(token.Value))
        {
            continue; // non-matching tokens are dropped, not passed through
        }
        var tag = new TokenTag { Key = "ticket", Value = token.Value };
        token.Tags[tag.Key] = tag;
        tag.AddParent(token);
        yield return token;
    }
}
// True when `tag` names one of the built-in primitive/special types
// (including string, object and void).
private bool IsPrimitiveType(TokenTag tag)
{
    return tag is TokenTag.OBJECT
        or TokenTag.BOOL
        or TokenTag.CHAR
        or TokenTag.SBYTE
        or TokenTag.BYTE
        or TokenTag.USHORT
        or TokenTag.SHORT
        or TokenTag.UINT
        or TokenTag.INT
        or TokenTag.ULONG
        or TokenTag.LONG
        or TokenTag.FLOAT
        or TokenTag.DOUBLE
        or TokenTag.DECIMAL
        or TokenTag.STRING
        or TokenTag.VOID;
}
/// <summary>
/// Maps a primitive-type token tag to its <see cref="KnownTypeCode"/>;
/// returns <see cref="KnownTypeCode.None"/> for anything else.
/// </summary>
public static KnownTypeCode GetTypeCode(TokenTag tokenTag) => tokenTag switch
{
    TokenTag.OBJECT => KnownTypeCode.Object,
    TokenTag.BOOL => KnownTypeCode.Boolean,
    TokenTag.CHAR => KnownTypeCode.Char,
    TokenTag.SBYTE => KnownTypeCode.SByte,
    TokenTag.BYTE => KnownTypeCode.Byte,
    TokenTag.USHORT => KnownTypeCode.UInt16,
    TokenTag.SHORT => KnownTypeCode.Int16,
    TokenTag.UINT => KnownTypeCode.UInt32,
    TokenTag.INT => KnownTypeCode.Int32,
    TokenTag.ULONG => KnownTypeCode.UInt64,
    TokenTag.LONG => KnownTypeCode.Int64,
    TokenTag.FLOAT => KnownTypeCode.Single,
    TokenTag.DOUBLE => KnownTypeCode.Double,
    TokenTag.DECIMAL => KnownTypeCode.Decimal,
    TokenTag.STRING => KnownTypeCode.String,
    TokenTag.VOID => KnownTypeCode.Void,
    _ => KnownTypeCode.None,
};
/// <summary>
/// Emits only the tokens found in ModifiedTable, copying each table entry
/// (except the reserved "value" key) onto the token as a tag.
/// </summary>
public IEnumerable<LangToken> Process(IEnumerable<LangToken> tokens)
{
    foreach (var token in tokens)
    {
        if (!ModifiedTable.TryGetValue(token.Value, out Dictionary<string, string> entries))
        {
            continue; // tokens without a table entry are dropped
        }
        foreach (var pair in entries)
        {
            // The "value" entry is skipped and never becomes a tag.
            if (pair.Key == "value")
            {
                continue;
            }
            var tag = new TokenTag { Key = pair.Key, Value = pair.Value };
            token.Tags[tag.Key] = tag;
            tag.AddParent(token);
        }
        yield return token;
    }
}
/// <summary>
/// Tests the next lookahead token against <paramref name="tag"/>, folding the
/// outcome into <c>Result</c>; returns this predictor for chaining.
/// </summary>
public TokenPredictor Expect(TokenTag tag)
{
    // Short-circuit keeps the original semantics: once a prior expectation
    // has failed, the lookahead cursor (_shift) is not advanced.
    Result = Result && _enumerator.Lookahead(_shift++).Tag == tag;
    return this;
}
/// <summary>Creates a token holding the given lexeme and tag.</summary>
public Token(string lexeme, TokenTag tag)
{
    Tag = tag;
    Lexeme = lexeme;
}
// Returns a token spanning from the saved mark up to (not including) the
// current offset, stamped with the position recorded at the mark.
Token AcceptMark(TokenTag tag = TokenTag.Unknown)
{
    var lexeme = code.Substring(mark, i - mark);
    return new Token(lexeme, tag, markPosition);
}
/// <summary>
/// True when the cursor is inside the collection and the node there carries
/// a token with the given tag. Does not advance the cursor.
/// </summary>
public bool Check(TokenTag tag)
{
    bool inRange = _index >= 0 && _index < Items.Count;
    return inRange && Items[_index].Token.Tag == tag;
}
// Convenience overload: the terminal child is produced by expecting a token
// with the required tag.
private bool TryAddChild(ParseNode node, Func<ParseNode> production, TokenTag reqTokenTag)
    => TryAddChild(node, production, () => Expect(reqTokenTag));
/// <summary>True when the current token carries the given tag.</summary>
public bool CurrentIs(TokenTag tag)
{
    return Current.Tag == tag;
}
// Expects `tag` at the lookahead position, then — only on success —
// advances the underlying stream past it.
private bool ExpectAndSkip(TokenTag tag)
{
    if (!Expect(tag))
    {
        return false;
    }
    return _enumerator.MoveNext();
}
// One-shot prediction: builds a fresh predictor over the shared enumerator
// and reports whether the next lookahead token carries `tag`.
private bool Expect(TokenTag tag)
{
    var predictor = new TokenPredictor(_enumerator);
    return predictor.Expect(tag).Result;
}
/// <summary>Creates a lexeme-less token carrying only a tag.</summary>
public Token(TokenTag tag)
{
    Tag = tag;
    Lexeme = null;
}
// Wraps the next required token (which must carry `reqTokenTag`) in a
// terminal parse node.
private ParseNode Terminal(TokenTag reqTokenTag)
    => new ParseNode(RequireToken(reqTokenTag));