/// <summary>Creates an LL(k) parser over <paramref name="lexer"/> with lookahead depth <paramref name="k_"/>.</summary>
public LLkParser(TokenStream lexer, int k_)
{
    k = k_;
    setTokenBuffer(new TokenBuffer(lexer));
}
/// <summary>
/// Parses an inline table array when the buffer starts with '[' followed by
/// '{'; otherwise returns null without consuming anything.
/// </summary>
public static TomlTableArray TryApply(ITomlRoot root, TokenBuffer tokens)
    => tokens.TryExpectAt(0, TokenType.LBrac) && tokens.TryExpectAt(1, TokenType.LCurly)
        ? Apply(root, tokens)
        : null;
// Atom : Value | LParen Expression RParen
/// <summary>
/// Produces an Atom node: either a plain Value, or a parenthesized
/// Expression. Returns null when neither alternative matches; throws when
/// '(' is seen but the closing ')' is missing.
/// </summary>
internal static INode Produce(TokenBuffer tokens)
{
    var value = Value.Produce(tokens);
    if (value != null) return value;
    // HACK: position is saved only when the lookahead is '(' so a failed
    // Expression parse can rewind past the consumed parenthesis.
    // NOTE(review): if GetTerminal<LeftParenthesis>() fails after the save,
    // or the ParseException below is thrown, the saved position is never
    // restored/discarded — confirm TokenBuffer tolerates an unbalanced save.
    if (tokens.Current is LeftParenthesis) tokens.SavePosition();
    var lParen = tokens.GetTerminal<LeftParenthesis>();
    if (lParen == null) return null;
    var expression = Expression.Produce(tokens);
    if (expression == null)
    {
        tokens.RestorePosition();
        return null;
    }
    var rParen = tokens.GetTerminal<RightParenthesis>();
    if (rParen == null)
    {
        throw new ParseException("Missing closing parenthesis");
    }
    return expression;
}
/// <summary>
/// Consumes and returns the next significant token, remembering it in
/// PreviewsToken. Returns null when the stream is disposed or fully drained.
/// </summary>
public Token Read()
{
    var token = GetHead();
    // GetHead() returns null once the buffer is disposed or empty-and-ended;
    // calling RemoveAt(0) in that state would throw (or mutate a disposed
    // buffer), so bail out first.
    if (token == null)
    {
        return null;
    }
    TokenBuffer.RemoveAt(0);
    return (PreviewsToken = token);
}
/// <summary>
/// Parses '[' (inline-table (',' inline-table)*)? ']' into a TomlTableArray.
/// Newlines around elements are skipped.
/// </summary>
private static TomlTableArray Apply(ITomlRoot root, TokenBuffer tokens)
{
    tokens.ExpectAndConsume(TokenType.LBrac);
    tokens.ConsumeAllNewlines();
    var arr = new TomlTableArray(root);
    TomlTable tbl = null;
    while ((tbl = InlineTableProduction.TryApply(root, tokens)) != null)
    {
        arr.Add(tbl);
        if (tokens.TryExpect(TokenType.Comma))
        {
            tokens.Consume();
            tokens.ConsumeAllNewlines();
        }
        else
        {
            tokens.ConsumeAllNewlines();
            // Expect() validates without consuming: a non-']' token here is a
            // parse error; otherwise the next TryApply returns null and the
            // ']' is consumed after the loop.
            tokens.Expect(TokenType.RBrac);
        }
    }
    tokens.ConsumeAllNewlines();
    tokens.ExpectAndConsume(TokenType.RBrac);
    return(arr);
}
/// <summary>
/// Parses a single TOML key. Bare keys and integer tokens become bare keys;
/// basic and literal strings keep their respective kinds. When no key token
/// is present: throws if <paramref name="required"/>, otherwise yields an
/// empty key.
/// </summary>
private static TomlKey ApplyInternal(TokenBuffer tokens, bool required)
{
    if (tokens.TryExpect(TokenType.BareKey) || tokens.TryExpect(TokenType.Integer))
    {
        return new TomlKey(tokens.Consume().value, TomlKey.KeyType.Bare);
    }

    if (tokens.TryExpect(TokenType.String))
    {
        return new TomlKey(tokens.Consume().value, TomlKey.KeyType.Basic);
    }

    if (tokens.TryExpect(TokenType.LiteralString))
    {
        return new TomlKey(tokens.Consume().value, TomlKey.KeyType.Literal);
    }

    if (!required)
    {
        return new TomlKey(string.Empty);
    }

    var token = tokens.Peek();
    if (token.value == "=")
    {
        throw Parser.CreateParseError(token, "Key is missing.");
    }

    throw Parser.CreateParseError(token, $"Failed to parse key because unexpected token '{token.value}' was found.");
}
// Object names can be:
//   name
//   prefix.name
// Property names can be:
//   name
//   attachedPropertyObject.name
//   prefix.attachedPropertyObject.name
/// <summary>
/// Detects an identifier optionally followed by up to two ".identifier"
/// segments, reporting the token count of the longest match (1, 3, or 5).
/// </summary>
public static bool LooksLikeCompoundName(this TokenBuffer<LexerTokenType> tokenBuffer, out int length)
{
    length = 0;

    if (tokenBuffer.LookAhead(0, LexerTokenType.Unknown) != LexerTokenType.Identifier)
    {
        return false;
    }

    length = 1;

    // Extend across ".identifier" pairs, capped at two extra segments.
    if (tokenBuffer.LookAhead(1, LexerTokenType.Unknown) == LexerTokenType.Period &&
        tokenBuffer.LookAhead(2, LexerTokenType.Unknown) == LexerTokenType.Identifier)
    {
        length = 3;

        if (tokenBuffer.LookAhead(3, LexerTokenType.Unknown) == LexerTokenType.Period &&
            tokenBuffer.LookAhead(4, LexerTokenType.Unknown) == LexerTokenType.Identifier)
        {
            length = 5;
        }
    }

    return true;
}
/// <summary>User can override to do their own debugging</summary>
/// <remarks>
/// Default debug setup loads the ParseView GUI via reflection. This is a
/// direct port of the ANTLR Java runtime, so the "Swing" probe is only
/// meaningful against Java-style type names.
/// </remarks>
protected internal virtual void setupDebugging(TokenStream lexer, TokenBuffer tokenBuf)
{
    setDebugMode(true);
    // default parser debug setup is ParseView
    try
    {
        // BUGFIX: System.Type.GetType returns null (it does not throw) when
        // the type cannot be found, so the original Java-ported try/catch
        // around it could never fire. Probe the result explicitly instead.
        if (System.Type.GetType("javax.swing.JButton") == null)
        {
            System.Console.Error.WriteLine("Swing is required to use ParseView, but is not present in your CLASSPATH");
            System.Environment.Exit(1);
        }
        System.Type c = System.Type.GetType("antlr.parseview.ParseView");
        System.Reflection.ConstructorInfo constructor = c.GetConstructor(new System.Type[] { typeof(LLkDebuggingParser), typeof(TokenStream), typeof(TokenBuffer) });
        constructor.Invoke(new object[] { this, lexer, tokenBuf });
    }
    catch (System.Exception e)
    {
        System.Console.Error.WriteLine("Error initializing ParseView: " + e);
        System.Console.Error.WriteLine("Please report this to Scott Stanchfield, [email protected]");
        System.Environment.Exit(1);
    }
}
/// <summary>
/// Parses '[' (inline-table (',' inline-table)*)? ']' into a TomlTableArray.
/// Newlines around elements are skipped.
/// </summary>
private static TomlTableArray Apply(ITomlRoot root, TokenBuffer tokens)
{
    tokens.ExpectAndConsume(TokenType.LBrac);
    tokens.ConsumeAllNewlines();
    var arr = new TomlTableArray(root);
    TomlTable tbl = null;
    while ((tbl = InlineTableProduction.TryApply(root, tokens)) != null)
    {
        arr.Add(tbl);
        if (tokens.TryExpect(TokenType.Comma))
        {
            tokens.Consume();
            tokens.ConsumeAllNewlines();
        }
        else
        {
            tokens.ConsumeAllNewlines();
            // Expect() validates without consuming: a non-']' token here is a
            // parse error; otherwise the next TryApply returns null and the
            // ']' is consumed after the loop.
            tokens.Expect(TokenType.RBrac);
        }
    }
    tokens.ConsumeAllNewlines();
    tokens.ExpectAndConsume(TokenType.RBrac);
    return arr;
}
/// <summary>
/// Parses a "[[" table-array header when present at the head of the buffer;
/// otherwise returns null without consuming anything.
/// </summary>
public static IList<string> TryApply(TokenBuffer tokens)
    => tokens.TryExpectAt(0, TokenType.LBrac) && tokens.TryExpectAt(1, TokenType.LBrac)
        ? Apply(tokens)
        : null;
// Number : Digits
/// <summary>Wraps a run of digits in a Number node, or null when none is present.</summary>
internal static Number Produce(TokenBuffer tokens)
{
    var digits = Digits.Produce(tokens);
    return digits == null ? null : new Number(digits);
}
// AddSub : MulDiv (AddSubOp MulDiv)*
/// <summary>
/// Parses a left-associative addition/subtraction chain. Returns null when no
/// leading MulDiv is present.
/// </summary>
internal static INode Produce(TokenBuffer tokens)
{
    var mulDiv = MulDiv.Produce(tokens);
    if (mulDiv == null) return null;
    // The position is saved so BuildSubNodes can rewind a dangling operator
    // with no right-hand side. NOTE(review): when the chain ends cleanly the
    // saved position is never popped — confirm the TokenBuffer's
    // SavePosition/RestorePosition semantics tolerate that.
    tokens.SavePosition();
    return BuildSubNodes(mulDiv, tokens);
}
/// <summary>
/// Collects trailing comments that sit on the same source line as the last
/// token of the expression they belong to.
/// </summary>
public static IList<TomlComment> TryParseAppendExpressionComments(Token lastExpressionToken, TokenBuffer tokens)
{
    var appendComments = new List<TomlComment>();

    while (tokens.TryExpect(TokenType.Comment))
    {
        if (tokens.Peek().line != lastExpressionToken.line)
        {
            break;
        }

        appendComments.Add(new TomlComment(tokens.Consume().value, CommentLocation.Append));
    }

    return appendComments;
}
/// <summary>
/// Sets up the PSI parser: wraps the incoming lexer in a buffered, filtering
/// lexer and positions it at the first token.
/// </summary>
public PsiParser(ILexer lexer, CommonIdentifierIntern commonIdentifierIntern)
{
    myCheckForInterrupt = new SeldomInterruptChecker();
    myOriginalLexer = lexer;
    myCommonIdentifierIntern = commonIdentifierIntern;
    myLexer = new FilteringPsiLexer(new TokenBuffer(lexer).CreateLexer());
    myLexer.Start();
}
/// <summary>
/// True when the next two tokens form "identifier delimiter", i.e. the start
/// of a qualified name.
/// </summary>
public static bool LooksLikeQualifiedName(this TokenBuffer<LexerTokenType> tokenBuffer, LexerTokenType qualifierDelimeter)
{
    if (tokenBuffer.LookAhead(0, LexerTokenType.Unknown) != LexerTokenType.Identifier)
    {
        return false;
    }

    return tokenBuffer.LookAhead(1, LexerTokenType.Unknown) == qualifierDelimeter;
}
// ExpRoot (MulDivOp ExpRoot)*
/// <summary>
/// Parses a left-associative multiplication/division chain. Returns null when
/// no leading ExpRoot is present.
/// </summary>
internal static INode Produce(TokenBuffer tokens)
{
    var expRoot = ExpRoot.Produce(tokens);
    if (expRoot == null) return null;
    // Saved so BuildSubNodes can rewind a dangling operator with no
    // right-hand side. NOTE(review): not popped when the chain ends without a
    // trailing operator — confirm the TokenBuffer save stack tolerates this.
    tokens.SavePosition();
    return BuildSubNodes(expRoot, tokens);
}
/// <summary>
/// Parses a primary expression: this, number, null, bool, identifier, string,
/// or a parenthesized expression. The current token is consumed up front; a
/// token matching no case raises a runtime error.
/// </summary>
private ExpresionData PrimaryExpresion(EcmaTokenizer token)
{
    TokenBuffer current = token.Current();
    token.Next();

    if (current.Is(TokenType.Keyword, "this"))
    {
        return new ExpresionData(ExpresionType.This);
    }

    if (current.Is(TokenType.Number))
    {
        return new ExpresionData(ExpresionType.Number) { Sign = current.Context };
    }

    if (current.Is(TokenType.Null))
    {
        return new ExpresionData(ExpresionType.Null);
    }

    if (current.Is(TokenType.Bool))
    {
        return new ExpresionData(ExpresionType.Bool) { Sign = current.Context };
    }

    if (current.Is(TokenType.Identify))
    {
        return new ExpresionData(ExpresionType.Identify) { Name = current.Context };
    }

    if (current.Is(TokenType.String))
    {
        return new ExpresionData(ExpresionType.String) { Sign = current.Context };
    }

    if (current.Is(TokenType.Punctor, "("))
    {
        // Parenthesized sub-expression: parse it, then require the ')'.
        ExpresionData inner = Expresion(token);
        token.Current().Excepect(TokenType.Punctor, ")");
        token.Next();
        return inner;
    }

    throw new EcmaRuntimeException("Unknown token detected " + current.Context + "(" + current.Type.ToString() + ") on line " + current.LineStart);
}
// AddSubOp : AddOp | SubOp
/// <summary>Matches a '+' or '-' operator token; null when neither follows.</summary>
internal static AddSubOp Produce(TokenBuffer tokens)
{
    var plus = tokens.GetTerminal<AdditionOperator>();
    if (plus != null)
    {
        return new AddSubOp(plus);
    }

    var minus = tokens.GetTerminal<SubtractionOperator>();
    return minus != null ? new AddSubOp(minus) : null;
}
/// <summary>
/// Entry point: parses an HTML document read from stdin. Any failure is
/// reported to stderr and terminates the process with exit code 1.
/// </summary>
public static void Main(string[] args)
{
    try
    {
        var lexer = new HTMLLexer(new ByteBuffer(Console.OpenStandardInput()));
        var parser = new HTMLParser(new TokenBuffer(lexer));
        parser.document();
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("exception: " + e);
        Environment.Exit(1);
    }
}
/// <summary>
/// Parses a "[[key]]" table-array header and returns the dotted key chain.
/// </summary>
public static IList<TomlKey> Apply(TokenBuffer tokens)
{
    tokens.ExpectAndConsume(TokenType.LBrac);
    tokens.ExpectAndConsume(TokenType.LBrac);

    var keyChain = TableKeyProduction.Apply(tokens);

    tokens.ExpectAndConsume(TokenType.RBrac);
    tokens.ExpectAndConsume(TokenType.RBrac);

    return keyChain;
}
// Value : Identifier | Number
/// <summary>Parses a value: an identifier if present, otherwise a number; null when neither matches.</summary>
internal static Value Produce(TokenBuffer tokens)
{
    var identifier = Identifier.Produce(tokens);
    if (identifier != null)
    {
        return new Value(identifier);
    }

    var number = Number.Produce(tokens);
    return number != null ? new Value(number) : null;
}
/// <summary>
/// Converts the input text into a token buffer one character at a time,
/// skipping characters that map to no token, and terminates with EOF.
/// </summary>
public TokenBuffer Tokenize(string input)
{
    var buffer = new TokenBuffer();

    foreach (var ch in input)
    {
        var token = GetToken(ch);
        if (token != null)
        {
            buffer.Add(token);
        }
    }

    buffer.Add(new EOF());
    return buffer;
}
/// <summary>
/// Parses a "[[key]]" table-array header and returns the dotted key chain.
/// </summary>
public static IList<string> Apply(TokenBuffer tokens)
{
    tokens.ExpectAndConsume(TokenType.LBrac);
    tokens.ExpectAndConsume(TokenType.LBrac);

    var keyChain = TableKeyProduction.Apply(tokens);

    tokens.ExpectAndConsume(TokenType.RBrac);
    tokens.ExpectAndConsume(TokenType.RBrac);

    return keyChain;
}
// MulDivOp : MulOp | DivOp
/// <summary>Matches a '*' or '/' operator token; null when neither follows.</summary>
internal static MulDivOp Produce(TokenBuffer tokens)
{
    var times = tokens.GetTerminal<MultiplicationOperator>();
    if (times != null)
    {
        return new MulDivOp(times);
    }

    var divide = tokens.GetTerminal<DivisionOperator>();
    return divide != null ? new MulDivOp(divide) : null;
}
/// <summary>
/// Parses '[' inline-table (',' inline-table)* ']' into a TomlTableArray
/// while attaching surrounding comments: comments before the first table are
/// prepended to it (or to the array itself when it is empty), comments after
/// a comma or after the closing ']' are appended.
/// </summary>
private static TomlTableArray Apply(ITomlRoot root, TokenBuffer tokens)
{
    tokens.ExpectAndConsume(TokenType.LBrac);
    tokens.ConsumeAllNewlines();
    // Comments seen before any table; handed to the first table (or to the
    // array when no table follows).
    var prep = CommentProduction.TryParseComments(tokens, CommentLocation.Prepend);
    var arr = new TomlTableArray(root);
    while (true)
    {
        var tbl = InlineTableProduction.TryApply(root, tokens);
        if (tbl == null) { break; }
        if (prep != null)
        {
            // Only the first table receives the prepend comments.
            tbl.AddComments(prep);
            prep = null;
        }
        arr.Add(tbl);
        if (tokens.TryExpect(TokenType.Comma))
        {
            tokens.Consume();
            tokens.ConsumeAllNewlines();
            // Comments after the separating comma belong to the table that
            // precedes it.
            tbl.AddComments(CommentProduction.TryParseComments(tokens, CommentLocation.Append));
        }
        else
        {
            break;
        }
    }
    tokens.ConsumeAllNewlines();
    if (arr.Count > 0)
    {
        arr.Last().AddComments(CommentProduction.TryParseComments(tokens, CommentLocation.Append));
    }
    else
    {
        // Empty array: the prepend comments have no table to attach to.
        arr.AddComments(prep);
    }
    tokens.ExpectAndConsume(TokenType.RBrac);
    // Trailing comments after ']' are appended to the array itself.
    arr.AddComments(CommentProduction.TryParseComments(tokens, CommentLocation.Append));
    return(arr);
}
// ExpRootOp : ExpOp | RootOp
/// <summary>Matches an exponentiation or root-extraction operator token; null when neither follows.</summary>
internal static ExpRootOp Produce(TokenBuffer tokens)
{
    var power = tokens.GetTerminal<ExponentiationOperator>();
    if (power != null)
    {
        return new ExpRootOp(power);
    }

    var root = tokens.GetTerminal<RootExtractionOperator>();
    return root != null ? new ExpRootOp(root) : null;
}
/// <summary>
/// Parses a "[[" table-array header when present at the head of the buffer;
/// otherwise returns null without consuming anything.
/// </summary>
public static IList<TomlKey> TryApply(TokenBuffer tokens)
    => tokens.TryExpectAt(0, TokenType.LBrac) && tokens.TryExpectAt(1, TokenType.LBrac)
        ? Apply(tokens)
        : null;
/// <summary>
/// Collects all comments (with their surrounding newlines) that precede an
/// expression, tagging them as prepend comments.
/// </summary>
/// <remarks>
/// NOTE(review): "Commenst" is a typo, but renaming would break callers, so
/// the name is kept as-is.
/// </remarks>
public static IList<TomlComment> TryParsePreExpressionCommenst(TokenBuffer tokens)
{
    var prependComments = new List<TomlComment>();

    while (tokens.TryExpect(TokenType.Comment))
    {
        prependComments.Add(new TomlComment(tokens.Consume().value, CommentLocation.Prepend));
        tokens.ConsumeAllNewlines();
    }

    tokens.ConsumeAllNewlines();
    return prependComments;
}
/// <summary>
/// Collects all comments (with their surrounding newlines) at the current
/// position, tagging each with the given <paramref name="location"/>.
/// </summary>
public static IList<TomlComment> TryParseComments(TokenBuffer tokens, CommentLocation location)
{
    var parsed = new List<TomlComment>();

    while (tokens.TryExpect(TokenType.Comment))
    {
        parsed.Add(new TomlComment(tokens.Consume().value, location));
        tokens.ConsumeAllNewlines();
    }

    tokens.ConsumeAllNewlines();
    return parsed;
}
/// <summary>
/// Parses an inline table array when the buffer starts with '[' followed by
/// '{'; otherwise returns null without consuming anything.
/// </summary>
public static TomlTableArray TryApply(ITomlRoot root, TokenBuffer tokens)
    => tokens.TryExpectAt(0, TokenType.LBrac) && tokens.TryExpectAt(1, TokenType.LCurly)
        ? Apply(root, tokens)
        : null;
/// <summary>
/// Collects all comments (with their surrounding newlines) that precede an
/// expression, tagging them as prepend comments.
/// </summary>
/// <remarks>
/// NOTE(review): "Commenst" is a typo, but renaming would break callers, so
/// the name is kept as-is.
/// </remarks>
public static IList<TomlComment> TryParsePreExpressionCommenst(TokenBuffer tokens)
{
    var collected = new List<TomlComment>();

    while (tokens.TryExpect(TokenType.Comment))
    {
        collected.Add(new TomlComment(tokens.Consume().value, CommentLocation.Prepend));
        tokens.ConsumeAllNewlines();
    }

    tokens.ConsumeAllNewlines();
    return collected;
}
/// <summary>
/// Collects all comments (with their surrounding newlines) at the current
/// position, tagging each with the given <paramref name="location"/>.
/// </summary>
public static IList<TomlComment> TryParseComments(TokenBuffer tokens, CommentLocation location)
{
    var collected = new List<TomlComment>();

    while (tokens.TryExpect(TokenType.Comment))
    {
        collected.Add(new TomlComment(tokens.Consume().value, location));
        tokens.ConsumeAllNewlines();
    }

    tokens.ConsumeAllNewlines();
    return collected;
}
/// <summary>
/// Parses a dotted key chain: key ('.' key)*.
/// </summary>
public static IList<string> Apply(TokenBuffer tokens)
{
    List<string> keyChain = new List<string>();
    var key = KeyProduction.Apply(tokens);
    keyChain.Add(key);
    while (tokens.TryExpect(TokenType.Dot))
    {
        tokens.Consume();
        // NOTE(review): TryApply presumably yields null for a trailing dot
        // with no key, which would place a null entry in the chain — confirm
        // whether Apply (which raises a parse error) was intended here.
        keyChain.Add(KeyProduction.TryApply(tokens));
    }
    return keyChain;
}
/// <summary>
/// Parses a dotted key chain: key ('.' key)*.
/// </summary>
public static IList<TomlKey> Apply(TokenBuffer tokens)
{
    List<TomlKey> keyChain = new List<TomlKey>();
    var key = KeyProduction.Apply(tokens);
    keyChain.Add(key);
    while (tokens.TryExpect(TokenType.Dot))
    {
        tokens.Consume();
        // NOTE(review): TryApply presumably yields a null/empty key for a
        // trailing dot with no key — confirm whether Apply (which raises a
        // parse error) was intended here.
        keyChain.Add(KeyProduction.TryApply(tokens));
    }
    return(keyChain);
}
/// <summary>
/// Parses a "[key]" table header and verifies it is terminated by a newline,
/// a comment, or the end of input; anything else is a parse error.
/// </summary>
public static IList<string> Apply(TokenBuffer tokens)
{
    tokens.ExpectAndConsume(TokenType.LBrac);
    IList<string> tableKeyChain = TableKeyProduction.Apply(tokens);
    tokens.ExpectAndConsume(TokenType.RBrac);

    // Each alternative consumes its terminator token when it matches.
    bool terminated = tokens.TryExpectAndConsume(TokenType.NewLine)
        || tokens.TryExpectAndConsume(TokenType.Comment)
        || tokens.End;

    if (!terminated)
    {
        var msg = $"Expected newline after table specifier. "
            + $"Token of type '{tokens.Peek().type}' with value '{tokens.Peek().value}' on same line.";
        throw Parser.CreateParseError(tokens.Peek(), msg);
    }

    return tableKeyChain;
}
/// <summary>
/// Detects a "[[" table-array opener using an imaginary (non-consuming)
/// context, then delegates to Apply on the real buffer; null when absent.
/// </summary>
public static IList<TomlKey> TryApply(TokenBuffer tokens)
{
    var lookahead = tokens.GetImaginaryContext();

    if (lookahead.TryExpectAndConsume(TokenType.LBrac) && lookahead.TryExpectAndConsume(TokenType.LBrac))
    {
        return Apply(tokens);
    }

    return null;
}
/// <summary>
/// Detects a '[' followed (after any newlines/comments) by '{' using an
/// imaginary (non-consuming) context; parses the inline table array on a
/// match, otherwise returns null.
/// </summary>
public static TomlTableArray TryApply(ITomlRoot root, TokenBuffer tokens)
{
    var lookahead = tokens.GetImaginaryContext();

    if (!lookahead.TryExpectAndConsume(TokenType.LBrac))
    {
        return null;
    }

    lookahead.ConsumeAllNewlinesAndComments();

    return lookahead.TryExpect(TokenType.LCurly) ? Apply(root, tokens) : null;
}
// Digits : Digit+
/// <summary>Greedily consumes one or more digit terminals; null when none is present.</summary>
internal static Digits Produce(TokenBuffer tokens)
{
    var collected = new List<Digit>();

    for (var d = tokens.GetTerminal<Digit>(); d != null; d = tokens.GetTerminal<Digit>())
    {
        collected.Add(d);
    }

    return collected.Count > 0 ? new Digits(collected) : null;
}
/// <summary>
/// Returns the next significant token without consuming it, discarding any
/// leading whitespace/comment/preprocessor tokens from the buffer; null when
/// the buffer is disposed or fully drained.
/// </summary>
public Token GetHead()
{
    // Iterative form of the original tail recursion: re-check the drained/
    // disposed condition after every discarded trivia token.
    while (true)
    {
        if (Disposed || (IsBufferEmpty && Ended))
        {
            return null;
        }

        var kind = TokenBuffer[0].Key;
        if (kind == TokenSet.WhiteSpace || kind == TokenSet.Comment || kind == TokenSet.PreProcess)
        {
            TokenBuffer.RemoveAt(0);
            continue;
        }

        return CurrentToken = TokenBuffer[0];
    }
}
// See if the name is followed by an equals....
/// <summary>
/// True when a compound name at the head of the buffer is immediately
/// followed by an '=' token, i.e. a property assignment.
/// </summary>
public static bool LooksLikePropertyAssignment(this TokenBuffer<LexerTokenType> tokenBuffer)
{
    int nameLength;
    if (!tokenBuffer.LooksLikeCompoundName(out nameLength))
    {
        return false;
    }

    return tokenBuffer.LookAhead(nameLength, LexerTokenType.Unknown) == LexerTokenType.Equals;
}
/// <summary>
/// Parses a "[key]" table header and verifies it is terminated by a newline,
/// a comment, or the end of input; anything else is a parse error.
/// </summary>
public static IList<TomlKey> Apply(TokenBuffer tokens)
{
    tokens.ExpectAndConsume(TokenType.LBrac);
    IList<TomlKey> tableKeyChain = TableKeyProduction.Apply(tokens);
    tokens.ExpectAndConsume(TokenType.RBrac);

    // Each alternative consumes its terminator token when it matches.
    bool terminated = tokens.TryExpectAndConsume(TokenType.NewLine)
        || tokens.TryExpectAndConsume(TokenType.Comment)
        || tokens.End;

    if (!terminated)
    {
        var msg = $"Expected newline after table specifier. "
            + $"Token of type '{tokens.Peek().type}' with value '{tokens.Peek().value}' on same line.";
        throw Parser.CreateParseError(tokens.Peek(), msg);
    }

    return tableKeyChain;
}
/// <summary>
/// Folds "(AddSubOp MulDiv)*" onto <paramref name="lhs"/>, building a
/// left-associative AddSub chain; stops (rewinding the dangling operator)
/// when an operator has no right-hand side.
/// </summary>
private static INode BuildSubNodes(INode lhs, TokenBuffer tokens)
{
    var addSubOp = AddSubOp.Produce(tokens);
    // Chain ended cleanly. NOTE(review): the position saved by the caller is
    // not restored on this path — confirm TokenBuffer tolerates an unmatched
    // SavePosition.
    if (addSubOp == null) return lhs;
    var rhs = MulDiv.Produce(tokens);
    // NOTE(review): the recursion below does not re-save, so this
    // RestorePosition presumably rewinds to the caller's original save point
    // rather than just past the failed operator — verify this is intended.
    if (rhs == null) { tokens.RestorePosition(); return lhs; }
    lhs = new AddSub(lhs, addSubOp, rhs);
    return BuildSubNodes(lhs, tokens);
}
/// <summary>
/// Folds "(MulDivOp ExpRoot)*" onto <paramref name="lhs"/>, building a
/// left-associative MulDiv chain; stops (rewinding the dangling operator)
/// when an operator has no right-hand side.
/// </summary>
private static INode BuildSubNodes(INode lhs, TokenBuffer tokens)
{
    var mulDivOp = MulDivOp.Produce(tokens);
    // Chain ended cleanly. NOTE(review): the position saved by the caller is
    // not restored on this path — confirm TokenBuffer tolerates an unmatched
    // SavePosition.
    if (mulDivOp == null) return lhs;
    var rhs = ExpRoot.Produce(tokens);
    // NOTE(review): the recursion below does not re-save, so this
    // RestorePosition presumably rewinds to the caller's original save point
    // rather than just past the failed operator — verify this is intended.
    if (rhs == null) { tokens.RestorePosition(); return lhs; }
    lhs = new MulDiv(lhs, mulDivOp, rhs);
    return BuildSubNodes(lhs, tokens);
}
/// <summary>
/// Configures the statement to absorb everything up to the end of the current
/// line — whitespace, comments, and arbitrary tokens — and then, if input
/// remains, to require the terminating newline itself.
/// </summary>
public static StatementBuilder<StatementType, ParserTokenType, LexerTokenType> ReadToEndOfLine(
    this StatementBuilder<StatementType, ParserTokenType, LexerTokenType> statement,
    TokenBuffer<LexerTokenType> tokenBuffer)
{
    statement.Enable()
        .Accept(LexerTokenType.Whitespace, ParserTokenType.Whitespace)
        .Accept(LexerTokenType.Comment, ParserTokenType.Comment)
        .AggregateWhileNot(LexerTokenType.Newline, ParserTokenType.Unknown);

    // Only demand the newline when the buffer still has a current token;
    // at end of input there is no newline left to expect.
    if (tokenBuffer.CurrentToken != null)
    {
        statement.Enable()
            .Expect(LexerTokenType.Newline, ParserTokenType.Whitespace);
    }

    return(statement);
}
// Identifier : Character+
/// <summary>Greedily consumes one or more character terminals; null when none is present.</summary>
internal static Identifier Produce(TokenBuffer tokens)
{
    var collected = new List<Character>();

    for (var c = tokens.GetTerminal<Character>(); c != null; c = tokens.GetTerminal<Character>())
    {
        collected.Add(c);
    }

    return collected.Count > 0 ? new Identifier(collected) : null;
}
/// <summary>
/// Dispatches on the lookahead token type to the matching TOML value parser;
/// returns null when the token cannot begin a value.
/// </summary>
private static TomlValue ParseTomlValue(ITomlRoot root, TokenBuffer tokens)
{
    if (tokens.TryExpect(TokenType.Integer)) { return ParseTomlInt(root, tokens); }
    if (tokens.TryExpect(TokenType.Float)) { return ParseTomlFloat(root, tokens); }
    if (tokens.TryExpect(TokenType.DateTime)) { return TomlDateTime.Parse(root, tokens.Consume().value); }
    if (tokens.TryExpect(TokenType.Timespan)) { return new TomlTimeSpan(root, TimeSpan.Parse(tokens.Consume().value, CultureInfo.InvariantCulture)); }
    if (tokens.TryExpect(TokenType.String)) { return ParseStringValue(root, tokens); }
    if (tokens.TryExpect(TokenType.LiteralString)) { return ParseLiteralString(root, tokens); }
    if (tokens.TryExpect(TokenType.MultilineString)) { return ParseMultilineString(root, tokens); }
    if (tokens.TryExpect(TokenType.MultilineLiteralString)) { return ParseMultilineLiteralString(root, tokens); }
    if (tokens.TryExpect(TokenType.Bool)) { return new TomlBool(root, bool.Parse(tokens.Consume().value)); }
    if (tokens.TryExpect(TokenType.LBrac)) { return ParseTomlArray(root, tokens); }

    return null;
}
// ExpRoot : Unary ExpRootOp ExpRoot | Unary
/// <summary>
/// Parses a right-associative exponentiation/root chain. If an operator is
/// found but no right-hand operand follows, the operator is rewound and the
/// bare Unary alternative is used, as the grammar above specifies.
/// </summary>
internal static INode Produce(TokenBuffer tokens)
{
    var unary = Unary.Produce(tokens);
    if (unary == null) return null;
    tokens.SavePosition();
    var expRootOp = ExpRootOp.Produce(tokens);
    if (expRootOp == null) return unary;
    var rhs = ExpRoot.Produce(tokens);
    if (rhs != null) return new ExpRoot(unary, expRootOp, rhs);
    // BUGFIX: previously this returned null here, discarding the successfully
    // parsed Unary and failing the whole parse. The grammar's "| Unary"
    // fallback means we should rewind past the consumed operator and keep the
    // left operand instead.
    tokens.RestorePosition();
    return unary;
}
// Assignment : Identifier AssignOp Expression
/// <summary>
/// Parses "identifier = expression". A missing identifier or operator makes
/// this a non-match (null); a missing right-hand expression is a hard error.
/// </summary>
internal static Assignment Produce(TokenBuffer tokens)
{
    var assignee = Identifier.Produce(tokens);
    if (assignee == null)
    {
        // Not an assignment at all — let the caller try other productions.
        return null;
    }

    if (tokens.GetTerminal<AssignmentOperator>() == null)
    {
        // No '=' after the identifier — likewise treated as a non-match.
        return null;
    }

    var assigner = Expression.Produce(tokens);
    if (assigner == null)
    {
        throw new ParseException("Expected expression for the right side of the assignment.");
    }

    return new Assignment(assignee, assigner);
}
/// <summary>
/// Replaces this chameleon's single collapsed child with the fully parsed
/// YAML document subtree re-lexed from its own text.
/// </summary>
private void OpenChameleon()
{
    Assertion.Assert(!myOpened, "!myOpened");
    AssertSingleChild();

    var languageService = PsiLanguageTypeExtensions.LanguageService(Language);
    Assertion.Assert(languageService != null, "service != null");

    var textBuffer = GetTextAsBuffer();
    var bufferedLexer = new TokenBuffer(languageService.GetPrimaryLexerFactory().CreateLexer(textBuffer)).CreateLexer();
    var yamlParser = (YamlParser)languageService.CreateParser(bufferedLexer, null, GetSourceFile());

    var openedChameleon = yamlParser.ParseDocument();
    AssertTextLength(openedChameleon);

    DeleteChildRange(firstChild, lastChild);
    OpenChameleonFrom(openedChameleon);
}
// Unary : AddSubOp? Atom
/// <summary>
/// Parses an optionally signed Atom. A leading '+' is parsed but dropped
/// (unary plus is the identity); only '-' produces a Unary wrapper.
/// </summary>
internal static INode Produce(TokenBuffer tokens)
{
    // HACK: position is saved only when the lookahead is a sign, so a failed
    // Atom parse can rewind past the consumed operator.
    // NOTE(review): when the lookahead is NOT a sign and Atom fails,
    // RestorePosition below runs without a matching SavePosition — confirm
    // the TokenBuffer tolerates that.
    if (tokens.Current is AdditionOperator || tokens.Current is SubtractionOperator) tokens.SavePosition();
    var op = AddSubOp.Produce(tokens);
    var atom = Atom.Produce(tokens);
    if (atom != null)
    {
        if (op != null && op.IsMinus) return new Unary(op, atom);
        return atom;
    }
    tokens.RestorePosition();
    return null;
}
/// <summary>
/// Parses a mandatory TOML value; raises a descriptive parse error when the
/// next token cannot start a value.
/// </summary>
public static TomlObject Apply(ITomlRoot root, TokenBuffer tokens)
{
    var value = ParseTomlValue(root, tokens);
    if (value != null)
    {
        return value;
    }

    var errorToken = tokens.Peek();
    if (errorToken.IsEmpty || errorToken.IsEof || errorToken.IsNewLine)
    {
        throw Parser.CreateParseError(errorToken, "Value is missing.");
    }

    string msg = $"Expected a TOML value while parsing key value pair." + $" Token of type '{errorToken.type}' with value '{errorToken.value}' is invalid.";
    throw Parser.CreateParseError(errorToken, msg);
}
/// <summary>
/// Parses a mandatory TOML value; raises a descriptive parse error when the
/// next token cannot start a value.
/// </summary>
public static TomlObject Apply(ITomlRoot root, TokenBuffer tokens)
{
    var value = ParseTomlValue(root, tokens);
    if (value != null)
    {
        return value;
    }

    var errorToken = tokens.Peek();
    if (errorToken.IsEmpty || errorToken.IsEof || errorToken.IsNewLine)
    {
        throw Parser.CreateParseError(errorToken, "Value is missing.");
    }

    string msg = $"Expected a TOML value while parsing key value pair." + $" Token of type '{errorToken.type}' with value '{errorToken.value}' is invalid.";
    throw Parser.CreateParseError(errorToken, msg);
}
/// <summary>
/// Parses "key = value" where the value may be an inline table array, an
/// inline table, or a plain value (tried in that order).
/// </summary>
public static Tuple<string, TomlObject> Apply(ITomlRoot root, TokenBuffer tokens)
{
    var key = KeyProduction.Apply(tokens);
    tokens.ExpectAndConsume(TokenType.Assign);

    var inlineTableArray = InlineTableArrayProduction.TryApply(root, tokens);
    if (inlineTableArray != null)
    {
        return new Tuple<string, TomlObject>(key, inlineTableArray);
    }

    var inlineTable = InlineTableProduction.TryApply(root, tokens);
    if (inlineTable != null)
    {
        return new Tuple<string, TomlObject>(key, inlineTable);
    }

    return new Tuple<string, TomlObject>(key, ValueProduction.Apply(root, tokens));
}
/// <summary>
/// Parses an inline table "{ key = value (, key = value)* }". An immediately
/// closing brace yields an empty table.
/// </summary>
public static TomlTable Apply(ITomlRoot root, TokenBuffer tokens)
{
    TomlTable inlineTable = new TomlTable(root, TomlTable.TableTypes.Inline);
    tokens.ExpectAndConsume(TokenType.LCurly);
    // BUGFIX: the empty-table check previously tested for ']' (RBrac) instead
    // of '}' (RCurly), so an empty inline table "{}" failed to parse.
    if (!tokens.TryExpect(TokenType.RCurly))
    {
        var kvp = KeyValuePairProduction.Apply(root, tokens);
        inlineTable.AddRow(kvp.Item1, kvp.Item2);
        while (tokens.TryExpect(TokenType.Comma))
        {
            tokens.Consume();
            kvp = KeyValuePairProduction.Apply(root, tokens);
            inlineTable.AddRow(kvp.Item1, kvp.Item2);
        }
    }
    tokens.ExpectAndConsume(TokenType.RCurly);
    return(inlineTable);
}
/// <summary>
/// Parses an inline table "{ key = value (, key = value)* }". An immediately
/// closing brace yields an empty table.
/// </summary>
public static TomlTable Apply(ITomlRoot root, TokenBuffer tokens)
{
    TomlTable inlineTable = new TomlTable(root, TomlTable.TableTypes.Inline);
    tokens.ExpectAndConsume(TokenType.LCurly);
    // BUGFIX: the empty-table check previously tested for ']' (RBrac) instead
    // of '}' (RCurly), so an empty inline table "{}" failed to parse.
    if (!tokens.TryExpect(TokenType.RCurly))
    {
        var kvp = KeyValuePairProduction.Apply(root, tokens);
        inlineTable.Add(kvp.Item1, kvp.Item2);
        while (tokens.TryExpect(TokenType.Comma))
        {
            tokens.Consume();
            kvp = KeyValuePairProduction.Apply(root, tokens);
            inlineTable.Add(kvp.Item1, kvp.Item2);
        }
    }
    tokens.ExpectAndConsume(TokenType.RCurly);
    return inlineTable;
}
/// <summary>
/// Replaces this chameleon's collapsed child with content re-parsed from its
/// own text, preserving the recorded lexer/parser indent state.
/// </summary>
private void OpenChameleon()
{
    Assertion.Assert(!myOpened, "!myOpened");
    AssertSingleChild();

    var languageService = Language.LanguageService();
    Assertion.Assert(languageService != null, "service != null");

    var textBuffer = GetTextAsBuffer();
    var yamlLexer = new YamlLexer(textBuffer, true, true) { currentLineIndent = myLexerIndent };
    var bufferedLexer = new TokenBuffer(yamlLexer).CreateLexer();
    var yamlParser = (YamlParser)languageService.CreateParser(bufferedLexer, null, GetSourceFile());

    var openedChameleon = yamlParser.ParseContent(myParserIndent, myExpectedIndent);
    AssertTextLength(openedChameleon);

    DeleteChildRange(firstChild, lastChild);
    OpenChameleonFrom(openedChameleon);
}
/// <summary>
/// Parses "key = value" where the value may be an inline table array, an
/// inline table, or a plain value (tried in that order).
/// </summary>
public static Tuple<TomlKey, TomlObject> Apply(ITomlRoot root, TokenBuffer tokens)
{
    var key = KeyProduction.Apply(tokens);
    tokens.ExpectAndConsume(TokenType.Assign);

    var inlineTableArray = InlineTableArrayProduction.TryApply(root, tokens);
    if (inlineTableArray != null)
    {
        return new Tuple<TomlKey, TomlObject>(key, inlineTableArray);
    }

    var inlineTable = InlineTableProduction.TryApply(root, tokens);
    if (inlineTable != null)
    {
        return new Tuple<TomlKey, TomlObject>(key, inlineTable);
    }

    return Tuple.Create(key, ValueProduction.Apply(root, tokens));
}
/// <summary>
/// Creates a debugging LL(k) parser over the shared token buffer with
/// lookahead depth <paramref name="k_"/>, then runs local initialization.
/// </summary>
public LLkDebuggingParser(TokenBuffer tokenBuf, int k_) : base(tokenBuf, k_)
{
    InitBlock();
}
/// <summary>
/// Creates an iCal parser with the default lookahead depth of 3.
/// </summary>
public iCalParser(TokenBuffer tokenBuf) : this(tokenBuf,3)
{
}
/// <summary>
/// Creates an iCal parser with an explicit lookahead depth
/// <paramref name="k"/>, then runs the generated initialization.
/// </summary>
protected iCalParser(TokenBuffer tokenBuf, int k) : base(tokenBuf, k)
{
    initialize();
}