/// <summary>
/// Attempts to produce the next token from the input. On success the token is
/// exposed through <c>Current</c> and the position/line/column counters are
/// advanced past it; on failure <c>Current</c> becomes <c>Token.Empty</c>.
/// </summary>
/// <returns>true when a token was recognised; false otherwise.</returns>
public bool Advance()
{
    RegexTokeniser bestTokeniser;
    Match match;
    FindMatchingTokeniser(out bestTokeniser, out match);

    // Guard clause: nothing matched at the current position.
    if (bestTokeniser == null)
    {
        Current = Token.Empty;
        return false;
    }

    // Build the token with the position it STARTS at, then move past it.
    Current = new Token(bestTokeniser.Type, match.Value, _currentPosition, _charNumber, _lineNumber);
    _currentPosition += match.Value.Length;

    int charsAfterBreak;
    int newlineCount;
    if (CurrentTokenSpansLineBreak(out charsAfterBreak, out newlineCount))
    {
        // The token contained newline(s): bump the line counter and restart
        // the column count at the characters following the last break.
        _lineNumber += newlineCount;
        _charNumber += charsAfterBreak;
    }
    else
    {
        _charNumber += match.Value.Length;
    }

    return true;
}
/// <summary>Verifies that a single-character token renders itself as that character.</summary>
public void TestTokenToString()
{
    var token = new Token( '+' );

    var rendered = token.ToString();

    Assert.AreEqual( "+", rendered );
}
/// <summary>
/// Parses the remainder of a conditional ("?:"-style) expression:
/// the branch used when <paramref name="lhs"/> holds, a COLON separator,
/// and the else-branch parsed one precedence level lower (right-associative).
/// </summary>
public Expression Parse(Parser parser, Expression lhs, Token current)
{
    Expression whenTrue = parser.ParseNext();
    parser.Consume("COLON");
    Expression whenFalse = parser.ParseExpression(Precedence - 1);
    return new ConditionalExpr(lhs, whenTrue, whenFalse);
}
/// <summary>
/// Parses a function-call expression. <paramref name="lhs"/> is the identifier
/// being invoked; the parser is positioned on the LEFTPAREN token. Consumes
/// the argument list and the closing RIGHTPAREN.
/// </summary>
public Expression Parse(Parser parser, Expression lhs, Token current)
{
    var callee = (IdentifierExpr) lhs;
    // Already on the left-paren: gather arguments, then require the closer.
    var args = ParseArguments(parser).ToArray();
    parser.Consume("RIGHTPAREN");
    return new FunctionCallExpr(callee, args);
}
/// <summary>
/// Maps a leading token to the statement it introduces. All plain
/// string-literal flavours become a <c>StringLiteral</c>; the "pipe" flavour
/// drops its leading marker character; anything else (including a null
/// identifier) becomes a generic <c>Statement</c>.
/// </summary>
private Statement GetStatementFromToken(Token identifier, StatementTail tail)
{
    var value = identifier != null ? identifier.Content : "";
    if (identifier != null)
    {
        switch (identifier.Type)
        {
            // The three literal flavours had identical duplicated bodies;
            // merged into a single case group.
            case TokenType.StringLiteral:
            case TokenType.QuotedStringLiteral:
            case TokenType.MultiLineStringLiteral:
                return new StringLiteral(_host, value);
            case TokenType.StringLiteralPipe:
                // Strip the leading pipe marker from the content.
                return new StringLiteralPipe(_host, value.Substring(1));
        }
    }
    return new Statement(_host, value, tail);
}
// Driver for the Pascal lexer: loads a comma-separated keyword list, runs the
// lexer over J:\Lexer12\text11.pas, and writes keyword/identifier tables.
// NOTE(review): every path is hard-coded to drive J: — confirm intended.
static void Main()
{
    File.Delete("J:\\Lexer12\\result.txt");

    // Read the comma-separated keyword list.
    FileStream file_key = new FileStream("J:\\Lexer12\\keys.txt", FileMode.Open, FileAccess.Read);
    StreamReader reader_key = new StreamReader(file_key);
    string k = reader_key.ReadToEnd();

    // NOTE(review): File.Create returns an OPEN FileStream that is never
    // disposed here; the write streams opened on these same files below may
    // hit a sharing violation until the GC finalises it — confirm.
    if (!File.Exists("J:\\Lexer12\\keywords.txt")) File.Create("J:\\Lexer12\\keywords.txt");
    if (!File.Exists("J:\\Lexer12\\identify.txt")) File.Create("J:\\Lexer12\\identify.txt");
    reader_key.Close();

    // Split the keyword list on ','.
    // NOTE(review): the inner scan assumes EVERY entry, including the last,
    // is followed by a comma; otherwise k[i] throws IndexOutOfRangeException.
    while (!String.IsNullOrEmpty(k))
    {
        string str = "";
        int i = 0;
        while (k[i] != ',')
        {
            str = str + k[i];
            i++;
        }
        if (!String.IsNullOrEmpty(k))
            k = k.Substring(i + 1);
        Token.keys.Add(str);
    }

    List<Token> ending = new List<Token>();
    FileStream file1 = new FileStream("J:\\Lexer12\\keywords.txt", FileMode.Open, FileAccess.Write);
    FileStream file2 = new FileStream("J:\\Lexer12\\identify.txt", FileMode.Open, FileAccess.Write);
    StreamWriter writer = new StreamWriter(file1);   // keyword table output
    StreamWriter writer1 = new StreamWriter(file2);  // identifier table output
    try
    {
        FileStream file = new FileStream("J:\\Lexer12\\text11.pas", FileMode.Open, FileAccess.Read);
        StreamReader reader = new StreamReader(file);
        string g = reader.ReadToEnd();
        string zap = g;   // copy of the (validated) source, handed to Token.define by ref
        string b = "";
        // control() validates the characters; b receives the offending text.
        bool l = control(ref g, ref b);
        zap = g;
        if (l)
        {
            g = erase_space(g);
            int i = 0;
            // Outer loop: one chunk at a time via get_str.
            // Inner loop: one lexeme at a time via get_lex, classified by define.
            while (!String.IsNullOrEmpty(g))
            {
                string f = get_str(ref g);
                i++;
                Console.WriteLine(f);
                bool log = true;
                while (!String.IsNullOrEmpty(f))
                {
                    Token r = new Token();
                    int a = 0;
                    string q = get_lex(ref f, ref log, ref a);
                    r.define(q, a, ref zap);
                    ending.Add(r);
                }
            }
            // Finalise every collected token.
            for (int j = 0; j < ending.Count; j++)
            {
                ending[j].conclude();
            }
            // Dump the keyword table as "<index> <keyword>".
            for (int j = 0; j < Token.keys.Count; j++)
            {
                writer.Write(j);
                writer.Write(' ');
                writer.WriteLine(Token.keys[j]);
            }
            // Dump the identifier table as "<index> <identifier>".
            for (int j = 0; j < Token.iden.Count; j++)
            {
                writer1.Write(j);
                writer1.Write(' ');
                writer1.WriteLine(Token.iden[j]);
            }
        }
        else
        {
            // "Invalid character(s)" — report with the offending text.
            Console.WriteLine("Недопустимый(ые) символ(ы)");
            Console.WriteLine(b);
        }
        reader.Close();
        writer.Close();
        writer1.Close();
    }
    catch
    {
        // NOTE(review): swallows every exception type and detail ("Ошибка" =
        // "Error") — also leaves writer/writer1 unclosed on failure; consider
        // catching specific exceptions and using 'using' blocks.
        Console.WriteLine("Ошибка");
    }
    Console.ReadKey();
}
/// <summary>Builds a constant expression from the value parsed out of the token's lexeme.</summary>
public Expression Parse(Parser parser, Token token)
{
    var value = Parse(token.Lexeme);
    return new ConstantExpr(value);
}
// Demonstration driver for the semi-expression (ITokenCollection) package:
// reads semis from a source file, then exercises add/insert/find/predecessor.
static void Main(string[] args)
{
    Console.Write("\n testing Semi");
    Console.Write("\n ==============");
    // Access Semi through interface and object factory.
    // That isolates client from any changes that may occur to Semi
    // as long as ITokenCollection doesn't change.
    ITokenCollection semi = Factory.create();
    string source = "../../semi.cs";
    if (!semi.open(source))
    {
        Console.Write("\n Can't open {0}\n", source);
        return;
    }
    // Read and display every semi-expression in the file.
    while (!semi.isDone())
    {
        semi.get();
        semi.show();
    }
    Console.Write("\n");
    Console.Write("\n demonstrating semi operations");
    Console.Write("\n -------------------------------");
    ITokenCollection test = Factory.create();
    // Chained adds — add() returns the collection.
    test.add("one").add("two").add("three");
    test.show();
    if (test.hasSequence("one", "three"))
        Console.Write("\n semi has token \"one\" followed by token \"three\"");
    if (!test.hasSequence("foo", "two"))
        Console.Write("\n semi does not have token \"foo\" followed by token \"two\"");
    if (!test.hasTerminator())
        Console.Write("\n semi does not have terminator");
    Console.Write("\n demonstrate changing semi with insert and add");
    test.insert(0, "#");
    test.add("\n");
    test.show();
    Console.Write("\n demonstrate semi tests");
    if (test.hasTerminator())
        Console.Write("\n semi has terminator");
    else
        Console.Write("\n semi does not have terminator");
    int index;
    // NOTE(review): assigning a string to Token implies an implicit
    // conversion operator on Token — confirm against its declaration.
    Token tok = "two";
    if (test.find(tok, out index))
        Console.Write("\n found token \"{0}\" at position {1}", tok, index);
    else
        Console.Write("\n did not find token \"{0}\"", tok);
    tok = "foo";
    if (test.find(tok, out index))
        Console.Write("\n found token \"{0}\" at position {1}", tok, index);
    else
        Console.Write("\n did not find token \"{0}\"", tok);
    tok = "one";
    Token tok2 = test.predecessor(tok);
    Console.Write("\n predecessor of \"{0}\" is \"{1}\"", tok, tok2);
    tok = "bar";   // not present — shows the not-found behaviour of predecessor
    tok2 = test.predecessor(tok);
    Console.Write("\n predecessor of \"{0}\" is \"{1}\"", tok, tok2);
    Console.Write("\n indexing semi\n ");
    for (int i = 0; i < test.size(); ++i)
        Console.Write("{0} ", test[i]);
    // NOTE(review): this literal was split across physical lines in the
    // extracted source; reconstructed here as a single string literal.
    Console.Write("\n using foreach:\n ");
    foreach (var tk in test)
        Console.Write("{0} ", tk);
    Console.Write("\n\n");
}
/// <summary>Parses a parenthesised sub-expression and consumes the closing RIGHTPAREN.</summary>
public Expression Parse(Parser parser, Token token)
{
    Expression inner = parser.ParseNext();
    parser.Consume("RIGHTPAREN");
    return inner;
}
/// <summary>Callback invoked when a Background section is parsed; intentionally empty here.</summary>
public void Background(Token keyword, Token name) { }
/// <summary>Verifies that constructing a token from a character stores it as the Tag.</summary>
public void TestCreateToken()
{
    var token = new Token( '+' );

    Assert.AreEqual( '+', token.Tag );
}
/// <summary>Callback invoked when a Scenario Outline is parsed; intentionally empty here.</summary>
public void ScenarioOutline(Token keyword, Token name) { }
/// <summary>Callback invoked when a Scenario is parsed; intentionally empty here.</summary>
public void Scenario(Token keyword, Token name) { }
/// <summary>Callback invoked when a py-string (doc string) is parsed; intentionally empty here.</summary>
public void PythonString(Token pyString) { }
/// <summary>Callback invoked when a Feature header is parsed; intentionally empty here.</summary>
public void Feature(Token keyword, Token name) { }
/// <summary>Callback invoked when an Examples section is parsed; intentionally empty here.</summary>
public void Examples(Token keyword, Token name) { }
/// <summary>Callback invoked when a comment is parsed; intentionally empty here.</summary>
public void Comment(Token content) { }
/// <summary>Builds an identifier expression from the current token's lexeme.</summary>
public Expression Parse(Parser parser, Token current) => new IdentifierExpr(current.Lexeme);
/// <summary>Callback invoked when a Step (Given/When/Then/...) is parsed; intentionally empty here.</summary>
public void Step(Token keyword, Token name, StepKind stepKind) { }
/// <summary>Callback invoked when a tag is parsed; intentionally empty here.</summary>
public void Tag(Token name) { }
/// <summary>
/// Reads and returns the next token from <paramref name="reader"/>.
/// Skips blanks/tabs, counts newlines into <c>Line</c>, and recognises
/// integers and floats, identifiers/keywords, // and /* */ comments, and
/// the operators &lt; &lt;= &gt; &gt;= == != plus single-character tokens.
/// The one-character lookahead lives in <c>_peek</c> across calls.
/// </summary>
public Token Scan( StringReader reader )
{
    // NOTE(review): reader.Read() returns -1 at end of input, which the
    // (char) casts turn into '\uffff'; an unterminated comment or missing
    // trailing newline can therefore loop forever — confirm inputs are
    // always well-terminated.

    // Skip spaces and tabs; each newline advances the line counter.
    for ( ;; _peek = (char) reader.Read() )
    {
        if ( _peek == ' ' || _peek == '\t' )
            continue;
        if ( _peek == '\n' )
            Line += 1;
        else
            break;
    }

    // Numeric literal starting with a digit: integer, or float if '.' follows.
    if ( Char.IsDigit( _peek ) )
    {
        var buf = new StringBuilder();
        do
        {
            buf.Append( _peek );
            _peek = (char) reader.Read();
        } while ( Char.IsDigit( _peek ) );
        if ( _peek == '.' )
        {
            buf.Append( _peek );
            _peek = (char) reader.Read();
            while ( Char.IsDigit( _peek ) )
            {
                buf.Append( _peek );
                _peek = (char) reader.Read();
            }
            // Force '.' as the decimal separator regardless of locale.
            CultureInfo ci = (CultureInfo) CultureInfo.CurrentCulture.Clone();
            ci.NumberFormat.CurrencyDecimalSeparator = ".";
            double d = double.Parse( buf.ToString(), NumberStyles.Any, ci );
            return new Float( Tag.Float, d );
        }
        return new Num( Tag.Num, int.Parse( buf.ToString() ) );
    }

    // Float written with a leading '.', e.g. ".5".
    if ( _peek == '.' )
    {
        var buf = new StringBuilder();
        buf.Append( _peek );
        _peek = (char) reader.Read();
        while ( Char.IsDigit( _peek ) )
        {
            buf.Append( _peek );
            _peek = (char) reader.Read();
        }
        CultureInfo ci = (CultureInfo) CultureInfo.CurrentCulture.Clone();
        ci.NumberFormat.CurrencyDecimalSeparator = ".";
        double d = double.Parse( buf.ToString(), NumberStyles.Any, ci );
        return new Float( Tag.Float, d );
    }

    // Identifier or keyword: letter followed by letters/digits; interned in _words.
    if ( Char.IsLetter( _peek ) )
    {
        var buf = new StringBuilder();
        do
        {
            buf.Append( _peek );
            _peek = (char) reader.Read();
        } while ( Char.IsLetterOrDigit( _peek ) );
        string s = buf.ToString();
        var w = (Word) _words[s];
        if ( w != null )
            return w;
        w = new Word( Tag.Id, s );
        _words.Add( s, w );
        return w;
    }

    // '/': line comment, block comment, or a bare '/' token.
    if ( _peek == '/' )
    {
        _peek = (char) reader.Read();
        if ( _peek == '/' )
        {
            // Line comment: everything up to (not including) the newline.
            _peek = (char) reader.Read();
            var buf = new StringBuilder();
            do
            {
                buf.Append( _peek );
                _peek = (char) reader.Read();
            } while ( _peek != '\n' );
            return new Comment( Tag.Comment, buf.ToString() );
        }
        if ( _peek == '*' )
        {
            // Block comment: scan to the closing "*/".
            var buf = new StringBuilder();
            _peek = (char) reader.Read();
            while ( true )
            {
                if ( _peek == '*' )
                {
                    _peek = (char) reader.Read();
                    if ( _peek == '/' )
                    {
                        // FIX: consume the closing '/'; the original left it
                        // in _peek, so the NEXT Scan call misread it as the
                        // start of another comment or division.
                        _peek = ' ';
                        return new Comment( Tag.Comment, buf.ToString() );
                    }
                    // Lone '*' inside the comment body.
                    buf.Append( '*' );
                }
                else
                {
                    buf.Append( _peek );
                    _peek = (char) reader.Read();
                }
            }
        }
        // FIX: a bare '/' (division) was previously dropped — the original
        // fell through and tokenised the character AFTER it. Return '/' and
        // keep the already-read lookahead in _peek.
        return new Token( '/' );
    }

    if ( _peek == '<' )
    {
        var buf = new StringBuilder();
        buf.Append( _peek );
        _peek = (char) reader.Read();
        if ( _peek == '=' )
        {
            buf.Append( _peek );
            _peek = (char) reader.Read();
            return new Word( Tag.LessOrEqual, buf.ToString() );
        }
        return new Word( Tag.Less, buf.ToString() );
    }

    if ( _peek == '>' )
    {
        var buf = new StringBuilder();
        buf.Append( _peek );
        _peek = (char) reader.Read();
        if ( _peek == '=' )
        {
            buf.Append( _peek );
            _peek = (char) reader.Read();
            return new Word( Tag.BetterOrEqual, buf.ToString() );
        }
        return new Word( Tag.Better, buf.ToString() );
    }

    if ( _peek == '=' )
    {
        var buf = new StringBuilder();
        buf.Append( _peek );
        _peek = (char) reader.Read();
        if ( _peek == '=' )
        {
            buf.Append( _peek );
            _peek = (char) reader.Read();
            return new Word( Tag.Equal, buf.ToString() );
        }
        // FIX: a single '=' was previously swallowed (the method fell through
        // and tokenised the following character instead). Return '=' and keep
        // the lookahead in _peek.
        return new Token( '=' );
    }

    if ( _peek == '!' )
    {
        var buf = new StringBuilder();
        buf.Append( _peek );
        _peek = (char) reader.Read();
        if ( _peek == '=' )
        {
            buf.Append( _peek );
            _peek = (char) reader.Read();
            return new Word( Tag.NotEqual, buf.ToString() );
        }
        // FIX: same lost-token bug as '=' above; return the bare '!'.
        return new Token( '!' );
    }

    // Anything else is a single-character token; blank out the lookahead so
    // the next call reads fresh input.
    var t = new Token( _peek );
    _peek = ' ';
    return t;
}
//----< does semi contain specific token? >----------------------
/// <summary>Returns true when this semi-expression contains <paramref name="tok"/>.</summary>
public bool contains(Token tok)
{
    // Return the predicate directly instead of if/return true/return false.
    return toks.Contains(tok);
}
/// <summary>
/// Parses the statement introduced by <paramref name="current"/>.
/// Implemented by each concrete statement parselet.
/// </summary>
public abstract Statement Parse(Parser parser, Token current);
//----< add a token to the end of this semi-expression >---------
/// <summary>
/// Appends <paramref name="token"/> and returns this collection so that
/// calls can be chained, e.g. <c>semi.add("a").add("b")</c>.
/// </summary>
public ITokenCollection add(Token token)
{
    toks.Add(token);
    return this;
}
/// <summary>
/// Creates a scanner test double that replays a fixed token sequence.
/// </summary>
/// <param name="tokens">The tokens to replay; must not be null.</param>
/// <exception cref="ArgumentNullException">When <paramref name="tokens"/> is null.</exception>
public FakeScanner(Token [] tokens)
{
    // Name the offending parameter so callers get a useful exception message.
    if (tokens == null)
        throw new ArgumentNullException(nameof(tokens));
    _tokens = tokens;
}
/// <summary>
/// Runs the table-driven DFA over the input and returns the next token,
/// tracking line/column positions. '~' is used as an end-of-input sentinel;
/// backtracking states rewind one character and undo the position change.
/// </summary>
public Token GetNextToken()
{
    // Tentatively record the start position; corrected below once the first
    // non-whitespace character of the lexeme is seen.
    var token = new Token() { StartColumn = _currentColumn + 1, StartLine = _currentLine };
    TokenType lastErrorToken = TokenType.Error;
    if (_currentInputIndex >= _inputCode.Length)
    {
        token.TokenType = TokenType.EOF;
        return(token);
    }
    TokenType type = TokenType.Intermediate;
    int stateID = 0;
    int previousCol = 0;
    int previousRow = 0;
    bool startPosSet = false;
    _dfa.Begin();
    do
    {
        // Insert EOF tag if past end of input.
        char currentChar = _currentInputIndex < _inputCode.Length ? _inputCode.ElementAt(_currentInputIndex) : '~';
        // Characters outside the DFA alphabet are skipped entirely.
        // NOTE(review): skipping does not update line/column counters — a
        // non-alphabet '\n' would desynchronise positions; confirm intended.
        if (!_dfa.IsCharacterInAlphabet(currentChar))
        {
            _currentInputIndex++;
            continue;
        }
        // Pin the token's real start at the first non-whitespace character.
        if (!startPosSet && !char.IsWhiteSpace(currentChar))
        {
            token.StartColumn = _currentColumn + 1;
            token.StartLine = _currentLine;
            startPosSet = true;
        }
        // Remember position before advancing, in case we must backtrack.
        previousCol = _currentColumn;
        previousRow = _currentLine;
        if (currentChar == '\n')
        {
            _currentColumn = 0;
            _currentLine++;
        }
        else
        {
            _currentColumn++;
        }
        // Sentinel '~' in the start state means clean end of input; the
        // EOF type terminates the do/while via its condition.
        if (stateID == 1 && currentChar == '~')
        {
            type = TokenType.EOF;
            continue;
        }
        // Record the error classification of the state we are LEAVING, so a
        // final Error state can be reported with the right specific type.
        lastErrorToken = _dfa.GetErrorType(stateID);
        stateID = _dfa.GetNextStateID(currentChar);
        type = _dfa.GetStateType(stateID);
        token.Lexeme += currentChar;
        _currentInputIndex++;
    }while (type == TokenType.Intermediate);
    // Backtracking state: the last character belongs to the NEXT token —
    // push it back and undo col/row position change.
    if (_dfa.IsBacktrackingState(stateID))
    {
        _currentInputIndex--;
        _currentColumn = previousCol;
        _currentLine = previousRow;
        token.Lexeme = token.Lexeme.Remove(token.Lexeme.Length - 1, 1); // undo col row position change
    }
    // Strip surrounding whitespace and any EOF sentinel from the lexeme.
    token.Lexeme = token.Lexeme.Trim().Replace("~", "");
    //token.StartLine = _currentLine;
    //token.StartColumn = _currentColumn;
    token.TokenType = type;
    // Reclassify identifiers that are actually reserved keywords.
    if (token.TokenType == TokenType.Identifier)
    {
        var keywordMapping = _dfa.GetKeywordMapping();
        TokenType keywordToken = TokenType.Intermediate;
        if (keywordMapping.TryGetValue(token.Lexeme, out keywordToken))
        {
            token.TokenType = keywordToken;
        }
    }
    // Generic Error is replaced with the more specific error recorded above.
    if (token.TokenType == TokenType.Error)
    {
        token.TokenType = lastErrorToken;
    }
    return(token);
}