/* Consumes a token and aborts the parser if there is no match */
private void NeedToken(LexToken token)
{
    if (MatchToken(token))
    {
        return;
    }
    // Hard failure: the grammar requires this exact token here.
    throw new ParseException("Expected token: " + token);
}
// Builds a function-declaration node and re-parents all of its children.
public FunctionDeclaration(
    SyntaxTree syntaxTree,
    LexToken functionKeyword,
    LexToken identifier,
    LexToken openParentheses,
    SeparatedSyntaxList<ParameterSyntax> parameters,
    LexToken closeParentheses,
    TypeClauseSyntax optionalTypeClause,
    Statement body)
    : base(syntaxTree)
{
    // Capture every syntactic piece of the declaration.
    FunctionKeyword = functionKeyword;
    Identifier = identifier;
    OpenParentheses = openParentheses;
    Parameters = parameters;
    CloseParentheses = closeParentheses;
    OptionalTypeClause = optionalTypeClause;
    Body = body;

    // Hook each child node back to this declaration so upward tree
    // navigation works.
    foreach (ParameterSyntax parameter in Parameters)
    {
        parameter.Parent = this;
    }
    if (OptionalTypeClause is not null)
    {
        OptionalTypeClause.Parent = this;
    }
    Body.Parent = this;
}
// Pushes a previously-read token back so the next read returns it again.
// Null tokens are ignored.
public void UnGet(LexToken lexToken)
{
    if (lexToken == null)
    {
        return;
    }
    _lexTokenStack.Push(lexToken);
}
// Wraps a tag expression together with its surrounding bracket tokens.
public ExpBracketTagNew(ContextExp expContext, LexToken leftBracketToken, LexToken rightBracketToken, ExpTagNew tagExp)
    : base(expContext)
{
    LeftBracketToken = leftBracketToken;
    RightBracketToken = rightBracketToken;
    TagExp = tagExp;
}
/*
 * Parses a JSON array whose opening '[' has already been consumed into
 * <paramref name="token"/>. On return, token holds the closing ']' token.
 * Throws on badly formed or unterminated arrays.
 *
 * Fixes over the original: removed a stray empty statement (';') after the
 * loop and flattened the redundant else-if chain of terminal checks.
 */
private static JArray ParseArray(StreamReader sr, ref LexToken token)
{
    Debug.Assert(token.TType == TokenType.LArray || token.TType == TokenType.RArray);
    ArrayList list = new ArrayList();
    while (token.TType != TokenType.End
        && token.TType != TokenType.Error
        && token.TType != TokenType.RArray)
    {
        var value = ParseValue(sr, ref token);
        if (value != null)
        {
            list.Add(value);
            // After each element only a separator or the terminator is legal.
            token = GetNextToken(sr);
            if (token.TType != TokenType.Comma && token.TType != TokenType.RArray)
            {
                throw new Exception("badly formed array");
            }
        }
    }
    if (token.TType == TokenType.Error)
    {
        throw new Exception("unexpected token in array");
    }
    if (token.TType != TokenType.RArray)
    {
        throw new Exception("unterminated json array");
    }
    return new JArray((JToken[])list.ToArray(typeof(JToken)));
}
// True when the next token (without consuming it) is a string or double literal.
private bool IsNextLiteral()
{
    LexToken upcoming = _lexer.Peek();
    if (upcoming == null)
    {
        return false;
    }
    return upcoming.Type == LexTokenType.StringLiteral
        || upcoming.Type == LexTokenType.DoubleLiteral;
}
// Convenience overload: materializes the member sequence into an immutable
// array before delegating to the primary constructor.
public CompilationUnit(SyntaxTree syntaxTree, IEnumerable<MemberSyntax> members, LexToken endOfFileToken)
    : this(syntaxTree, members.ToImmutableArray(), endOfFileToken)
{
}
// Builds an else-clause node and re-parents its child statement.
public ElseClauseSyntax(SyntaxTree syntaxTree, LexToken elseKeyword, Statement statement)
    : base(syntaxTree)
{
    ElseKeyword = elseKeyword;
    Statement = statement;
    // Link the child back to this clause for upward tree navigation.
    Statement.Parent = this;
}
// Builds a parameter node (identifier plus its type clause) and re-parents
// the type clause.
public ParameterSyntax(SyntaxTree syntaxTree, LexToken identifier, TypeClauseSyntax type)
    : base(syntaxTree)
{
    Identifier = identifier;
    Type = type;
    // Link the type clause back to this parameter.
    Type.Parent = this;
}
// Fuses the text of every collected type token into one synthetic text token,
// anchored at the line/column of the first token in the run.
public LexToken ToSingleToken()
{
    LexToken anchor = TypeTokens[0];
    string mergedText = string.Concat(TypeTokens.Select(t => t.Text));
    return new LexTokenText(anchor.Line, anchor.Col, mergedText);
}
// Returns the next token, draining the one-token peek cache first; falls
// back to reading from the underlying input.
public LexToken Next()
{
    LexToken token = _peekCache;
    if (token == null)
    {
        token = ReadNext();
    }
    _peekCache = null;
    return token;
}
// True when the next token (without consuming it) is the punctuation
// character <paramref name="ch"/>.
private bool IsNextPunctuation(char ch)
{
    LexToken upcoming = _lexer.Peek();
    if (upcoming == null || upcoming.Type != LexTokenType.Punctuation)
    {
        return false;
    }
    string expected = ch.ToString();
    return upcoming.Value.ToString() == expected;
}
// If-without-else convenience overload; forwards a null else-clause to the
// full constructor.
public ConditionalStatement(SyntaxTree syntaxTree, LexToken ifToken, Expression condition, Statement thenStatement)
    : this(syntaxTree, ifToken, condition, thenStatement, null)
{
}
// Interprets a TypeDeclaration production (skipping whitespace/newline
// tokens) into a TypeDeclaration model object via a token stepper.
// NOTE(review): this block appears truncated — the Expect.../Then chain is
// not terminated with ';' and no TypeDeclaration is returned, so the method
// cannot compile as shown. Confirm against the original file.
// NOTE(review): the exception message misspells "Unexpected" as "Unepxected".
public static TypeDeclaration Interpret(LexToken<CharToken, ProductionKind> production) { if (production.Kind != ProductionKind.TypeDeclaration) throw new ArgumentException("Unepxected production kind"); var nonEmptyTokens = production.Contents.Where(c => c.Kind != DjinniLexTokenKind.Whitespace && c.Kind != DjinniLexTokenKind.Newline); var stepper = new Stepper<CharToken>(nonEmptyTokens); ExpectIdentifier(stepper.NextOrDefault()) .Then(i => ExpectEquals(i, stepper.NextOrDefault())) }
// Consumes the current token as an enum-item reference and analyses it.
private Exp ParseEnumItemUse()
{
    var nameToken = (LexToken)Tape.Current;
    MoveNext();
    var enumItemExp = new ExpUseEnumItem(this.ExpContext, nameToken);
    return enumItemExp.Analy();
}
// Consumes the current token as a base-class field reference and analyses it.
private Exp ParseFieldSuper()
{
    var fieldToken = (LexToken)Tape.Current;
    MoveNext();
    var superFieldExp = new ExpFieldSuper(this.ExpContext, fieldToken);
    return superFieldExp.Analy();
}
// Consumes the current token as a property-of-this reference and analyses it.
private Exp ParsePropertyThis()
{
    var propertyToken = (LexToken)Tape.Current;
    MoveNext();
    var propertyExp = new ExpPropertyDef(this.ExpContext, propertyToken);
    return propertyExp.Analy();
}
// Consumes the current token as a procedure-argument reference and analyses it.
private Exp ParseArg()
{
    var argToken = (LexToken)Tape.Current;
    MoveNext();
    var argumentExp = new ExpArg(this.ExpContext, argToken);
    return argumentExp.Analy();
}
// Consumes the current token as a local-variable reference and analyses it.
private Exp ParseLocalVar()
{
    var varToken = (LexToken)Tape.Current;
    MoveNext();
    var localVarExp = new ExpLocalVar(this.ExpContext, varToken);
    return localVarExp.Analy();
}
// Builds a two-token type expression; the combined (created) type doubles as
// the expression's result type.
public ExpTypeTwo(ContextExp expContext, LexToken varToken1, LexToken varToken2, ZType varZtype1, ZType varZtype2, ZType varZtypeCreated)
    : base(expContext)
{
    VarToken1 = varToken1;
    VarToken2 = varToken2;
    VarZtype1 = varZtype1;
    VarZtype2 = varZtype2;
    VarZtypeCreated = varZtypeCreated;
    // The created type is what this expression evaluates to.
    this.RetType = varZtypeCreated;
}
// Tracks quoted-string state while lexing: a double-quote operator toggles
// it, and a newline always terminates it.
protected override void OnTokenProduced(LexToken<char, DjinniLexTokenKind> token)
{
    if (token.IsOperatorOf("\""))
    {
        isInQuotedString = !isInQuotedString;
    }
    if (token.Kind == DjinniLexTokenKind.Newline)
    {
        // Strings never span lines; reset unconditionally.
        isInQuotedString = false;
    }
}
// Consumes the current token as one part of a procedure name. Unlike the
// other ParseXxx helpers this returns the node without calling Analy().
private Exp ParseProcNamePart()
{
    var namePartToken = (LexToken)Tape.Current;
    MoveNext();
    var namePartExp = new ExpProcNamePart(this.ExpContext, namePartToken);
    return namePartExp;
}
// Builds a return-statement node; the expression is optional (bare "return")
// and is re-parented when present.
public ReturnStatement(SyntaxTree syntaxTree, LexToken returnKeyword, Expression optionalExpression)
    : base(syntaxTree)
{
    ReturnKeyword = returnKeyword;
    OptionalExpression = optionalExpression;
    if (OptionalExpression is not null)
    {
        OptionalExpression.Parent = this;
    }
}
// Builds a unary expression node. Both the operator token and the operand
// are required; the operand is re-parented to this node.
public UnaryExpression(SyntaxTree syntaxTree, LexToken operatorToken, Expression operand)
    : base(syntaxTree)
{
    if (operatorToken == null)
    {
        throw new ArgumentNullException(nameof(operatorToken));
    }
    if (operand == null)
    {
        throw new ArgumentNullException(nameof(operand));
    }
    OperatorToken = operatorToken;
    Operand = operand;
    Operand.Parent = this;
}
// Tracks whether the lexer is currently inside a block: an open-block token
// enters the block, a close-block token leaves it.
protected override void OnTokenProduced(LexToken<Token, ProductionKind> token)
{
    if (token.Kind == ProductionKind.CloseBlock)
    {
        isInBlock = false;
    }
    else if (token.Kind == ProductionKind.OpenBlock)
    {
        isInBlock = true;
    }
}
/* Consumes a token iff it matches */
private bool MatchToken(LexToken token)
{
    LexToken candidate = lex();
    if (candidate == token)
    {
        return true;
    }
    // No match: push the token back so the caller can try something else.
    lexpush();
    return false;
}
/*
 * Resolves a type-name token against the imported types of the current file.
 * Reports an error at the token's position and returns null when the name
 * cannot be resolved to a ZType.
 *
 * Fix over the original: the original indexed ztypes[0] unconditionally,
 * which throws when SearchImportType returns no matches; we now take the
 * first element only if one exists.
 */
private ZType SearchZType(LexToken token)
{
    string typeName = token.Text;
    var ztypes = this.ExpContext.FileContext.ImportUseContext.SearchImportType(typeName);
    ZType ztype = null;
    foreach (var candidate in ztypes)
    {
        // Only the first match is considered, as before.
        ztype = candidate as ZType;
        break;
    }
    if (ztype == null)
    {
        Errorf(token.Position, "类型'{0}'不存在", token.Text);
    }
    return ztype;
}
// Returns the display text for a tape element: tokens expose their raw
// text, anything else falls back to ToString().
public string GetText(object Data)
{
    if (IsToken(Data))
    {
        LexToken token = (LexToken)Data;
        return token.Text;
    }
    return Data.ToString();
}
/*
 * Reads the next token and translates it into the corresponding JToken:
 * scalars become JValues, '{' and '[' recurse into object/array parsers,
 * and a ']' here signals an empty array (returns null).
 */
private static JToken ParseValue(StreamReader sr, ref LexToken token)
{
    token = GetNextToken(sr);
    switch (token.TType)
    {
        case TokenType.RArray:
            // we were expecting a value in an array, and came across the
            // end-of-array marker, so this is an empty array
            return null;
        case TokenType.String:
            return new JValue(token.TValue);
        case TokenType.Number:
            // A decimal point or exponent marker means floating point.
            if (token.TValue.IndexOfAny(new char[] { '.', 'e', 'E' }) != -1)
            {
                return new JValue(double.Parse(token.TValue));
            }
            return new JValue(int.Parse(token.TValue));
        case TokenType.True:
            return new JValue(true);
        case TokenType.False:
            return new JValue(false);
        case TokenType.Null:
            return new JValue(null);
        case TokenType.Date:
            throw new NotSupportedException("datetime parsing not supported");
        case TokenType.LBrace:
            return ParseObject(sr, ref token);
        case TokenType.LArray:
            return ParseArray(sr, ref token);
        default:
            throw new Exception("invalid value found during json parse");
    }
}
// Parses an (optionally named) argument expression: "expr" or "name : expr".
// If a colon follows the first expression, the left side must name the
// parameter — either a single-element ExpChain whose element is a text token,
// or an ExpVarBase whose VarToken supplies the name. Invalid left sides
// report an error via tape.error (messages are in Chinese) and fall back to
// returning just the right-hand expression.
// NOTE(review): tape.error appears to record and continue rather than throw —
// confirm, since every error branch still returns rightExp.
private Exp ParseNameValueExp() { Exp leftExp = ParseBinaryLogicExp(); if (tape.HasCurrent && tape.Current.IsKind(TokenKindSymbol.Colon)) { tape.MoveNext(); Exp rightExp = ParseBinaryLogicExp(); if (leftExp is ExpChain) { ExpChain chainExp = leftExp as ExpChain; if (chainExp.SubCount == 1) { object varobj = chainExp.RawElements[0]; if (varobj is LexTokenText) { LexTokenText textToken = (varobj as LexTokenText); ExpNameValue expNameValue = new ExpNameValue(this.expContext, textToken, rightExp); return(expNameValue); } else { tape.error("参数名称错误"); return(rightExp); } } else { tape.error("参数名称的长度不是1"); return(rightExp); } } else if (leftExp is ExpVarBase) { ExpVarBase leftVarExp = (leftExp as ExpVarBase); LexToken varToken = leftVarExp.VarToken; ExpNameValue expNameValue = new ExpNameValue(this.expContext, varToken, rightExp); return(expNameValue); } else { tape.error("调用过程时指定的参数名称只能是标识符"); return(rightExp); } } else { return(leftExp); } }
// Constructs a parser for the given include file, resetting all lexer state
// before the first token is read.
public IncParser(string file)
{
    FileName = file;

    /* Clear out lexer stuff */
    LexPushed = false;
    _LastToken = LexToken.TOKEN_NONE;
    _LexString = null;
    _LexChar = '\0';
    LineNo = 1;

    /* Initialize; this can throw an exception */
    Initialize();
}
/* Consumes a token iff it matches */
private bool MatchToken(LexToken token)
{
    bool matched = lex() == token;
    if (!matched)
    {
        // Not the expected token; return it to the stream for re-reading.
        lexpush();
    }
    return matched;
}
// Associates a keyword's spelling with the lexer token it produces.
public KeywordToken(string k, LexToken t)
{
    keyword = k;
    token = t;
}
// Registers a keyword/token pair in the next free slot of the token table.
private static void AddToken(string lex, LexToken tok)
{
    s_tokens[s_position++] = new KeywordToken(lex, tok);
}
// Core tokenizer: consumes one lexical token from Contents and records it in
// _LastToken / _LexString / _LexChar, stripping the consumed characters from
// the input. Resolution order: a previously pushed-back token; EOF on empty
// input; keyword-table matches (whole-word only); symbols and labels
// (trailing ':'); quoted characters with escape handling; "/**" doc-block
// comments (tracking LineNo across embedded newlines); finally a single raw
// character. A string-literal branch exists but is commented out.
// NOTE(review): this method spans two source lines that break mid-way
// through the commented-out string branch; kept byte-identical.
/* Consumes one lexical token and gathers information about it */ private LexToken lex() { if (LexPushed) { LexPushed = false; return _LastToken; } /* Number of chars we will be deleting from the input */ int stripchars = 0; /* Clear our state */ _LastToken = LexToken.TOKEN_NONE; /* Remove stuff we don't want from the line */ preprocess_line(); if (Contents.Length < 1) { _LastToken = LexToken.TOKEN_EOF; return _LastToken; } /* Get the token list */ KeywordToken[] tokens = Tokenizer.Tokens; for (int i = 0; i < tokens.Length; i++) { if (!CheckString(0, tokens[i].keyword)) { continue; } int len = tokens[i].keyword.Length; /* Now check to see what the next token is */ if (Contents.Length == len || IsNotKeywordChar(Contents[len])) { /* We have a token match! */ _LastToken = tokens[i].token; _LexString = null; _LexChar = '\0'; stripchars = len; break; } } if (_LastToken == LexToken.TOKEN_NONE) { /* See if we can try to read a symbol */ if (IsSymbolChar(Contents[0], true)) { int characters = 1; while (Contents[characters] != '\0' && IsSymbolChar(Contents[characters], false)) { characters++; } stripchars = characters; /* We're done! See what's next.. */ if (Contents[characters] == ':') { _LastToken = LexToken.TOKEN_LABEL; stripchars++; } else { _LastToken = LexToken.TOKEN_SYMBOL; } _LexString = Contents.ToString().Substring(0, characters); _LexChar = _LexString[0]; }/* else if (Contents[0] == '"') { bool reached_end = false; StringBuilder builder = new StringBuilder(); for (int i = 1; i < Contents.Length; i++) { if (i < Contents.Length - 1 && Contents[i] == '\\' && IsEscapedChar(Contents[i+1])) { i++; } else if (Contents[i] == '"') { reached_end = true; break; } builder.Append(Contents[i]); } if (!reached_end) { throw new ParseException("Expected end of string; none found"); } _LexString = builder.ToString(); _LexChar = _LexString.Length > 0 ? 
_LexString[0] : '\0'; }*/ else if (Contents[0] == '\'') { char c = '\0'; if (Contents.Length < 3) { throw new ParseException("Expected end of character; none found"); } else if (Contents[1] == '\\') { if (Contents.Length < 4 || !IsEscapedChar(Contents[2])) { throw new ParseException("Expected end of character; none found"); } if (Contents[2] == 'r') { c = '\r'; } else if (Contents[2] == 'n') { c = '\n'; } else if (Contents[2] == 't') { c = '\t'; } else if (Contents[2] == 'v') { c = '\v'; } else if (Contents[2] == '\\') { c = '\\'; } else if (Contents[2] == '"') { c = '"'; } stripchars = 4; } else { c = Contents[1]; stripchars = 3; } _LexString = c.ToString(); _LexChar = c; _LastToken = LexToken.TOKEN_QUOTCHAR; } else if (CheckString(0, "/**")) { int endpoint = 0; for (int i=3; i<Contents.Length-1; i++) { if (Contents[i] == '\n') { LineNo++; } else if (Contents[i] == '*' && Contents[i+1] == '/') { endpoint = i+1; break; } } if (endpoint == 0) { throw new ParseException("Expected end of multi-line comment; found end of file"); } /* We have to factor one more character in because we're zero based */ endpoint++; _LastToken = LexToken.TOKEN_DOCBLOCK; _LexString = Contents.ToString().Substring(0, endpoint); _LexChar = _LexString[0]; stripchars = endpoint; } else { _LastToken = LexToken.TOKEN_CHARACTER; _LexString = Contents[0].ToString(); _LexChar = Contents[0]; stripchars = 1; } } /* Strip N chars */ if (stripchars > 0) { Contents.Remove(0, stripchars); } return _LastToken; }
// Parses a function declaration header: optional return tag, then either an
// operator (invalid/operator function — parameters are consumed and the body
// or terminator is discarded) or a symbol naming the function, which is
// handed to the 4-argument overload.
private void PARSE_Function(LexToken tok, ParseWriter w)
{
    /* An optional label before the name is the return tag. */
    string tag = "";
    if (MatchToken(LexToken.TOKEN_LABEL))
    {
        tag = LexString;
    }

    if (MatchToken(LexToken.TOKEN_OPERATOR))
    {
        /* Check if we're some sort of invalid function */
        DiscardUntilCharOrComment('(', false);
        ParseParameters(null);
        if (tok == LexToken.TOKEN_STOCK)
        {
            ignore_block();
        }
        else if (tok == LexToken.TOKEN_NATIVE || tok == LexToken.TOKEN_FORWARD)
        {
            if (MatchChar('='))
            {
                DiscardUntilCharOrComment(';', false);
            }
            NeedChar(';');
        }
        return;
    }

    /* Get the name */
    NeedToken(LexToken.TOKEN_SYMBOL);
    string name = LexString;
    PARSE_Function(tok, w, tag, name);
}
// Emits a named function declaration: opens the section matching the
// declaring keyword, writes name/return pairs and parameters, then consumes
// either the stock body or the trailing semicolon.
private void PARSE_Function(LexToken tok, ParseWriter w, string tag, string name)
{
    switch (tok)
    {
        case LexToken.TOKEN_FORWARD:
            w.BeginSection("forward");
            break;
        case LexToken.TOKEN_NATIVE:
            w.BeginSection("native");
            break;
        case LexToken.TOKEN_STOCK:
            w.BeginSection("stock");
            break;
    }

    w.WritePair("name", name);
    if (tag.Length > 0)
    {
        w.WritePair("return", tag);
    }
    ParseParameters(w);

    if (tok == LexToken.TOKEN_STOCK)
    {
        // Stocks carry a body; skip it wholesale.
        ignore_block();
    }
    else
    {
        /* Make sure there's a semicolon */
        if (MatchChar('='))
        {
            DiscardUntilCharOrComment(';', false);
        }
        NeedChar(';');
    }

    w.EndSection();
}
/*
 * Returns the binding precedence for a binary-operator token, or -1 for any
 * token that is not a known binary operator.
 *
 * Fix over the original: ContainsKey followed by the indexer performed two
 * dictionary lookups and called Value() twice; TryGetValue does one of each.
 */
private int GetTokenPrecedence(LexToken current)
{
    if (current is BinaryOperatorLexToken
        && BinOpPrecedence.TryGetValue(current.Value(), out int precedence))
    {
        return precedence;
    }
    return -1;
}