// Parses a bracketed attribute list of the form "[key, key=value, ...]" that
// precedes a code block, then delegates to ParseCodeBlock with the collected
// attributes. Keys and values may be bare identifiers or quoted strings;
// surrounding quotes are stripped via UnQuote before insertion.
private static ParsedBlock ParseCodeBlockWithAttributes(Tokenizer tokenizer, BlockType type) {
    // Require and consume the opening '['.
    ParseException.ExpectToken(tokenizer.Peek(), TokenType.AttrStart, "[");
    tokenizer.Take();
    var attrs = new AttributeDict();
    while (tokenizer.Peek().type != TokenType.EndOfFile) {
        tokenizer.SkipWhiteSpace();
        var token = tokenizer.Peek();
        // An immediate ']' ends the list; this also accepts "[]" and a
        // trailing comma before the closing bracket.
        if (token.type == TokenType.AttrEnd) {
            tokenizer.Take();
            break;
        }
        var key = ExpectIdentifierOrString(tokenizer);
        string value = null;
        tokenizer.SkipWhiteSpace();
        token = tokenizer.Peek();
        // Optional "= value" part; when absent the attribute is stored with a
        // null value (a bare flag).
        if (token.type == TokenType.Equal) {
            tokenizer.Take();
            tokenizer.SkipWhiteSpace();
            value = ExpectIdentifierOrString(tokenizer);
        }
        tokenizer.SkipWhiteSpace();
        token = tokenizer.Peek();
        // Each entry must be terminated by ',' (more entries follow) or ']'
        // (end of the attribute list); anything else is a syntax error.
        if (token.type == TokenType.Comma || token.type == TokenType.AttrEnd) {
            tokenizer.Take();
        } else {
            throw new ParseException(token, "Expect , or ]");
        }
        attrs.Add(UnQuote(key.Trim()), value == null ? null : UnQuote(value.Trim()));
        // The terminator we just consumed was ']' — the list is complete.
        if (token.type == TokenType.AttrEnd) {
            break;
        }
    }
    return(ParseCodeBlock(tokenizer, type, attrs));
}
/// <summary>
/// Splits <paramref name="s"/> on <paramref name="separator"/> and parses each
/// non-blank declaration into an attribute, collecting the results into a new
/// <see cref="AttributeDict"/>.
/// </summary>
public static AttributeDict Parse([Nullable] ActionAST action, string s, char separator, Grammar g) {
    AttributeDict result = new AttributeDict();
    foreach (System.Tuple <string, int> declaration in SplitDecls(s, separator)) {
        // Ignore entries that are empty or whitespace-only.
        if (declaration.Item1.Trim().Length == 0) {
            continue;
        }
        result.Add(ParseAttributeDef(action, declaration, g));
    }
    return result;
}
/// <summary>
/// Reports <paramref name="errorType"/> for every attribute name declared in
/// both <paramref name="attributes"/> and <paramref name="referenceAttributes"/>.
/// Errors are positioned at the conflicting attribute's own token when it has
/// one, otherwise at the rule's name token (the rule AST's first child).
/// Silently returns when either dictionary is null.
/// </summary>
protected virtual void CheckLocalConflictingDeclarations([NotNull] Rule r, [Nullable] AttributeDict attributes, [Nullable] AttributeDict referenceAttributes, [NotNull] ErrorType errorType) {
    // Nothing to compare when either side has no declarations.
    if (attributes == null || referenceAttributes == null) {
        return;
    }
    ISet <string> conflictingKeys = attributes.Intersection(referenceAttributes);
    foreach (string key in conflictingKeys) {
        // Look the attribute up once per conflict instead of twice (the
        // original re-ran attributes.Get(key) on both sides of a ternary).
        var token = attributes.Get(key).token;
        if (token == null) {
            // Fall back to the rule's name token when the attribute carries
            // no position information of its own.
            token = ((GrammarAST)r.ast.GetChild(0)).Token;
        }
        errMgr.GrammarError(
            errorType,
            g.fileName,
            token,
            key,
            r.name);
    }
}
/// <summary>
/// Reports <paramref name="errorType"/> for every attribute whose name
/// collides with an entry in <paramref name="ruleNames"/>. The error is
/// positioned at the attribute's own token when it has one, otherwise at the
/// rule's name token. Silently returns when <paramref name="attributes"/> is
/// null.
/// </summary>
protected virtual void CheckDeclarationRuleConflicts([NotNull] Rule r, [Nullable] AttributeDict attributes, [NotNull] ICollection <string> ruleNames, [NotNull] ErrorType errorType) {
    if (attributes == null) {
        return;
    }
    foreach (Attribute attr in attributes.attributes.Values) {
        if (!ruleNames.Contains(attr.name)) {
            continue;
        }
        // Prefer the attribute's token for error position; fall back to the
        // rule's name token when the attribute has none.
        var errorToken = attr.token != null ? attr.token : ((GrammarAST)r.ast.GetChild(0)).Token;
        errMgr.GrammarError(
            errorType,
            g.fileName,
            errorToken,
            attr.name,
            r.name);
    }
}
// Parses the body of a "<| ... |>" code block, accumulating its raw text into
// a ParsedBlock tagged with the given type and attributes. Comments and quoted
// strings are consumed whole via the tokenizer's specialized readers so that a
// "|>" appearing inside them cannot terminate the block. The closing "|>" must
// be followed (after optional whitespace) by a newline or end of file.
private static ParsedBlock ParseCodeBlock(Tokenizer tokenizer, BlockType type, AttributeDict attributes) {
    // Require the opening "<|"; remember its token for error reporting if the
    // block turns out to be unterminated.
    ParseException.ExpectToken(tokenizer.Peek(), TokenType.BlockStart, "<|");
    var startToken = tokenizer.Take();
    var sb = new StringBuilder();
    var matchFound = false;
    while (tokenizer.Peek().type != TokenType.EndOfFile) {
        var token = tokenizer.Peek();
        // Comments are copied verbatim via TakeComment, without scanning their
        // interior for a block terminator.
        if (token.type == TokenType.CommentStart) {
            sb.Append(tokenizer.TakeComment());
            continue;
        }
        // Likewise quoted strings are consumed as a single unit.
        if (token.type == TokenType.Quote) {
            sb.Append(tokenizer.TakeQuoted());
            continue;
        }
        tokenizer.Take();
        if (token.type == TokenType.BlockEnd) {
            matchFound = true;
            break;
        }
        sb.Append(token.text);
    }
    // Reached EOF without seeing "|>" — report the error at the block start.
    if (!matchFound) {
        throw new ParseException(startToken, "Unpaired block start <|");
    }
    tokenizer.SkipWhiteSpace();
    // Only trailing whitespace may follow the terminator on the same line.
    ParseException.ExpectToken(tokenizer.Peek(), new[] { TokenType.NewLine, TokenType.EndOfFile }, "new line or end of file after |>");
    tokenizer.Take();
    return(new ParsedBlock { type = type, attributes = attributes, content = sb.ToString() });
}
// Merges every attribute from otherDict into this object's attribute
// collection by delegating to AttributeDict.AddAll.
// NOTE(review): duplicate-key behavior is defined by AddAll — presumably later
// entries overwrite earlier ones; confirm against AttributeDict.
public void AddAllAttributes(AttributeDict otherDict) { attributes.AddAll(otherDict); }
// Creates a character with an initially empty attribute collection.
public Character() { attributes = new AttributeDict(); }