static bool IsSpecialIdentifier(Symbol name, out bool backquote)
{
	// Decides how an identifier must be escaped when printed: returns true
	// if it cannot be printed plainly, and sets `backquote` when the
	// strongest escaping (backquotes) is required rather than the milder
	// "special identifier" form.
	string text = name.Name;
	backquote = text.Length == 0;  // an empty name can only be written as ``
	bool special = false;
	for (int i = 0; i < text.Length; i++) {
		char c = text[i];
		if (LesLexer.IsIdContChar(c)) {
			// A continuation char is acceptable anywhere, except that a
			// char that cannot START an identifier forces "special" mode
			// when it appears in the first position.
			if (i == 0 && !LesLexer.IsIdStartChar(c))
				special = true;
		} else if (LesLexer.IsSpecialIdChar(c)) {
			special = true;
		} else {
			backquote = true;
		}
	}
	// Watch out for @`-inf_d` and @`-inf_f`, because they will be
	// interpreted as named literals if we don't backquote them.
	if (special && !backquote && (text == "-inf_d" || text == "-inf_f"))
		backquote = true;
	return special || backquote;
}
void Case(string input, TokenType[] tokenTypes, params object[] values)
{
	// Lexes `input` and verifies that the resulting token types (and, where
	// provided, token values) match expectations. A value equal to the ERROR
	// sentinel means "the lexer should report an error for this token".
	Debug.Assert(values.Length <= tokenTypes.Length);
	bool error = false;
	var sink = new MessageSinkFromDelegate((type, ctx, msg, args) => {
		MessageSink.Trace.Write(type, ctx, msg, args);
		error = true;  // the lexer complained while reading the current token
	});
	var lexer = new LesLexer(input, sink);

	int prevEndIndex = 0;
	for (int i = 0; i < tokenTypes.Length; i++) {
		error = false;  // reset BEFORE lexing, so the delegate can set it
		Token token = lexer.NextToken().Value;
		// Tokens must appear in order, without overlapping
		Assert.LessOrEqual(prevEndIndex, token.StartIndex);
		Assert.AreEqual(tokenTypes[i], token.Type());
		if (i < values.Length) {
			// Reference comparison against the ERROR sentinel is intentional
			Assert.AreEqual(values[i] == (object)ERROR, error);
			if (!error)
				Assert.AreEqual(values[i], token.Value);
		}
		prevEndIndex = token.EndIndex;
	}
	// The lexer must be exhausted after the expected tokens
	Assert.That(!lexer.NextToken().HasValue);
}
protected override void StartToken(char nextCh)
{
	// Called just before a token is written. Flushes any pending newline,
	// then inserts a space whenever the last character already written
	// would otherwise fuse with the next token's first character when the
	// output is re-lexed (identifier-identifier or operator-operator).
	if (_newlinePending)
		Newline();
	bool wouldMerge =
		   (LesLexer.IsIdContChar(_lastCh) && LesLexer.IsIdContChar(nextCh))
		|| (LesLexer.IsOpContChar(_lastCh) && LesLexer.IsOpContChar(nextCh));
	if (wouldMerge)
		_out.Write(' ');
}
protected override void StartToken(char nextCh)
{
	// Invoked before each token is emitted. Writes a pending newline if one
	// was requested, then a separating space in any situation where the
	// previous character and the next one would merge into a single token
	// if the printed text were lexed again.
	if (_newlinePending)
		Newline();
	bool needSpace =
		   (LesLexer.IsIdContChar(_lastCh) && LesLexer.IsIdContChar(nextCh))
		|| (LesLexer.IsOpContChar(_lastCh) && LesLexer.IsOpContChar(nextCh))
		// "- 2" is different from "-2" (negation of 2 vs. an integer literal)
		|| (_lastCh == '-' && nextCh >= '0' && nextCh <= '9');
	if (needSpace)
		_out.Write(' ');
}
private void PrintIdOrSymbol(Symbol name, bool isSymbol)
{
	// Prints an identifier (@name if escaping is needed) or a symbol
	// (@@name), backquoting the name when it contains characters that
	// cannot appear in a special identifier.
	// Figure out what style we need to use: plain, @special, or @`backquoted`
	bool special = isSymbol, backquote = name.Name.Length == 0, first = true;
	foreach (char c in name.Name) {
		if (!LesLexer.IsIdContChar(c)) {
			if (LesLexer.IsSpecialIdChar(c))
				special = true;
			else
				backquote = true;
		} else if (first && !LesLexer.IsIdStartChar(c))
			special = true;
		first = false;
	}
	// FIX: watch out for names like -inf_d and -inf_f; without backquotes
	// they would be re-parsed as named literals instead of identifiers.
	// This mirrors the identical guard in IsSpecialIdentifier. (Backquoting
	// is harmless when unnecessary: the text still lexes to the same name.)
	if (special && !backquote && (name.Name == "-inf_d" || name.Name == "-inf_f"))
		backquote = true;
	if (special || backquote)
		_out.Write(isSymbol ? "@@" : "@", false);
	if (backquote)
		PrintStringCore('`', false, name.Name);
	else
		_out.Write(name.Name, true);
}
private void TestLes(string input, string expectOutput, int expectMessages = 0, Severity expectSev = 0)
{
	// Runs LesIndentTokenGenerator over `input` and checks that its output
	// matches the token stream of `expectOutput`, in which `[` and `]`
	// stand in for the synthesized Indent and Dedent tokens. Also verifies
	// how many messages were produced and, if any, their maximum severity.
	using (G.PushTLV(Token.ToStringStrategyTLV, TokenExt.ToString)) {
		var errorList = new MessageHolder();
		var lexer = new LesLexer(StripInitialNewline(input), errorList);
		var generator = new LesIndentTokenGenerator(lexer);
		var output = new DList<Token>();
		Maybe<Token> t;
		while ((t = generator.NextToken()).HasValue)
			output.Add(t.Value);

		// Lex the expected text, translating brackets into Indent/Dedent
		var expectTokens = new LesLexer(expectOutput, MessageSink.Console)
			.Buffered()
			.Select(tok => tok.Type() == TokenType.LBrack ? tok.WithType((int)TokenType.Indent)
			             : tok.Type() == TokenType.RBrack ? tok.WithType((int)TokenType.Dedent)
			             : tok)
			.ToList();

		AreEqual(expectMessages, errorList.List.Count);
		if (expectMessages > 0)
			AreEqual(expectSev, errorList.List.Max(m => m.Severity));
		ExpectList(output, expectTokens, false);
	}
}
public ILexer<Token> Tokenize(ICharSource text, string fileName, IMessageSink msgs)
{
	// Builds the standard LES tokenization pipeline: lex, drop whitespace
	// tokens, then synthesize Indent/Dedent tokens from indentation changes.
	return new LesIndentTokenGenerator(
		new WhitespaceFilter(new LesLexer(text, fileName, msgs)));
}
List<Token> Lex(string input, bool skipWS = true)
{
	// Lexes `input` and collects every token into a list, running the
	// stream through TokensToTree (which can optionally drop whitespace).
	var source = new TokensToTree(new LesLexer(input, MessageSink.Trace), skipWS);
	var results = new List<Token>();
	for (Maybe<Token> t = source.NextToken(); t.HasValue; t = source.NextToken())
		results.Add(t.Value);
	return results;
}
public ILexer<Token> Tokenize(ICharSource text, string fileName, IMessageSink msgs)
{
	// Produce the LES token stream: raw lexer output with whitespace
	// filtered out and indentation turned into Indent/Dedent tokens.
	var rawLexer = new LesLexer(text, fileName, msgs);
	var filtered = new WhitespaceFilter(rawLexer);
	return new LesIndentTokenGenerator(filtered);
}