public static CompoundIdentifier Parse(MapLexer lexer)
{
    // Parses a compound identifier of the form [prefix:]part[/part...].
    // Capture the location of the first token so later errors can point at it.
    var result = new CompoundIdentifier
    {
        Filename = lexer.Token.Filename,
        LineNumber = lexer.Token.LineNumber,
        LinePosition = lexer.Token.LinePosition
    };

    // The first word is either a prefix (when followed by ':') or the first part.
    var firstWord = lexer.Consume(TokenKind.Identifier, TokenKind.Keyword);

    if (lexer.Token.Kind == TokenKind.Colon)
    {
        result.Prefix = firstWord;
        lexer.Consume(TokenKind.Colon);
        result.parts.Add(lexer.Consume(TokenKind.Identifier, TokenKind.Keyword));
    }
    else
    {
        result.parts.Add(firstWord);
    }

    // Any number of slash-separated trailing parts: a/b/c
    while (lexer.Token.Kind == TokenKind.Slash)
    {
        lexer.Advance();
        result.parts.Add(lexer.Consume(TokenKind.Identifier, TokenKind.Keyword));
    }

    return result;
}
private void ParseName(MapParserContext context, MapLexer lexer)
{
    // name "some name";
    lexer.Consume(TokenKind.Keyword, "name");

    var mapListName = lexer.Consume(TokenKind.String);
    context.MapList.Name = mapListName;

    lexer.Consume(TokenKind.Semicolon);
}
private void ParseMappingExtras(MapParserContext context, MapLexer lexer, ExpressiveMapping mapping)
{
    // { note "text"; } — attaches a single note to the mapping just created.
    lexer.Consume(TokenKind.LeftCurly);
    lexer.Consume(TokenKind.Keyword, "note");

    // The note text is recorded directly on the mapping.
    mapping.Notes.Add(lexer.Consume(TokenKind.String));

    lexer.Consume(TokenKind.Semicolon);
    lexer.Consume(TokenKind.RightCurly);
}
private void ParseIgnore(MapParserContext context, MapLexer lexer)
{
    // ignore { cident; cident; ... } — marks resolved attributes as ignored.
    lexer.Consume(TokenKind.Keyword, "ignore");
    lexer.Consume(TokenKind.LeftCurly);

    // Each entry is a compound identifier terminated by a semicolon.
    while (lexer.Token.Kind == TokenKind.Identifier || lexer.Token.Kind == TokenKind.Keyword)
    {
        var identifier = CompoundIdentifier.Parse(lexer);
        var attribute = context.Resolve(identifier);
        context.MapList.AddIgnored(attribute);
        lexer.Consume(TokenKind.Semicolon);
    }

    lexer.Consume(TokenKind.RightCurly);
}
private void ParseFunctionCall(MapLexer lexer, MapParserContext context, MappedDataExpression expression)
{
    // Parses ident(arg, arg, ...) into the given expression.
    // We're sitting on the function name...
    expression.FunctionName = lexer.Consume(TokenKind.Identifier);

    // Start of the argument list
    lexer.Consume(TokenKind.LeftParen);

    // Parse comma-separated arguments. An immediate ')' means an empty
    // argument list; the original code unconditionally parsed a first
    // argument, so 'f()' failed. A trailing comma is still rejected,
    // because after consuming a comma we parse another argument.
    if (lexer.Token.Kind != TokenKind.RightParen)
    {
        while (true)
        {
            ParseArgument(lexer, context, expression);

            if (lexer.Token.Kind != TokenKind.Comma)
            {
                break;
            }

            lexer.Consume(TokenKind.Comma);
        }
    }

    // End of the argument list
    lexer.Consume(TokenKind.RightParen);
}
private void ParseWith(MapParserContext context, MapLexer lexer)
{
    // with alias = prefix:ident ...statement...
    lexer.Consume(TokenKind.Keyword, "with");

    var alias = lexer.Consume(TokenKind.Identifier);
    lexer.Consume(TokenKind.Equals);

    // The aliased identifier must carry a prefix.
    var aliased = CompoundIdentifier.Parse(lexer);
    if (string.IsNullOrEmpty(aliased.Prefix))
    {
        throw new ParserException(aliased, "Right-hand side of 'with' statement must have a prefix.");
    }

    // TODO - perform some sort of validation on the RHS - does it resolve to a model, etc

    // The alias is only in scope for the nested statement.
    context.Push(alias, aliased);
    ParseStatement(context, lexer);
    context.Pop();
}
private void ParseStatement(MapParserContext context, MapLexer lexer)
{
    // Parses one statement. Recognized constructs:
    //   name ... / examples ... / ignore ... / with ...
    //   { statements }
    //   ident = model(ident);
    //   ident:ident = ident:ident;
    if (lexer.Token.Kind == TokenKind.Keyword)
    {
        switch (lexer.Token.Text)
        {
            case "with":
                ParseWith(context, lexer);
                return;
            case "name":
                ParseName(context, lexer);
                return;
            case "examples":
                ParseExamples(context, lexer);
                return;
            case "ignore":
                ParseIgnore(context, lexer);
                return;
            default:
                throw new ParserException($"Unexpected keyword '{lexer.Token.Text}'.", lexer.Token);
        }
    }

    // A braced block of nested statements.
    if (lexer.Token.Kind == TokenKind.LeftCurly)
    {
        lexer.Consume(TokenKind.LeftCurly);
        while (lexer.Token.Kind != TokenKind.RightCurly)
        {
            ParseStatement(context, lexer);
        }
        lexer.Consume(TokenKind.RightCurly);
        return;
    }

    // Otherwise this is an assignment: lhs = expression
    var lhs = CompoundIdentifier.Parse(lexer);
    lexer.Consume(TokenKind.Equals);
    var rhs = expressionParser.Parse(lexer, context);

    if (rhs.FunctionName == "model")
    {
        // Special handling: 'model(x)' registers a model alias rather than a
        // mapping. FirstOrDefault guards against an empty argument list, which
        // previously escaped as an InvalidOperationException from First()
        // instead of the intended ParserException.
        var model = rhs.Arguments.FirstOrDefault()?.Model;
        if (model == null)
        {
            throw new ParserException("The 'model' function requires a model argument.");
        }

        context.AddModelAlias(lhs.Parts.First(), model);
        lexer.Consume(TokenKind.Semicolon);
    }
    else
    {
        var target = context.Resolve(lhs);

        // Each mapping gets a one-character id chosen by how many maps
        // already target the same attribute. (Count(predicate) replaces the
        // redundant Where(...).Count() chain.)
        var mapCount = context.MapList.Maps.Count(x => x.TargetAttribute.Equals(target));
        var mapping = new ExpressiveMapping(IdNames.Substring(mapCount, 1), target, rhs);
        context.MapList.Maps.Add(mapping);

        // An optional '{ note ...; }' block may follow in place of the semicolon.
        if (lexer.Token.Kind == TokenKind.LeftCurly)
        {
            ParseMappingExtras(context, lexer, mapping);
        }
        else
        {
            lexer.Consume(TokenKind.Semicolon);
        }
    }
}
private void ParseExamples(MapParserContext context, MapLexer lexer)
{
    // examples { model { prefix: "..."; suffix: "..."; directory: "..."; } ... }
    // Builds up a definition describing where example files live; the actual
    // loading happens later.
    var examplesDefinition = new MapParserExamplesDefinition();

    lexer.Consume(TokenKind.Keyword, "examples");
    lexer.Consume(TokenKind.LeftCurly);

    // One block per model, introduced by the model's name.
    while (lexer.Token.Kind == TokenKind.Identifier || lexer.Token.Kind == TokenKind.Keyword)
    {
        // TODO - create a new model example class thingy
        var modelName = lexer.Consume(TokenKind.Identifier, TokenKind.Keyword);
        lexer.Consume(TokenKind.LeftCurly);

        var exampleModel = new MapParserExamplesModel(modelName);
        examplesDefinition.Models.Add(exampleModel);

        // Directives inside the model block are all keyword-introduced.
        while (lexer.Token.Kind == TokenKind.Keyword)
        {
            var directive = lexer.Token.Text;

            if (directive == "prefix")
            {
                lexer.Consume(TokenKind.Keyword, "prefix");
                lexer.Consume(TokenKind.Colon);
                exampleModel.Prefix = lexer.Consume(TokenKind.String);
                lexer.Consume(TokenKind.Semicolon);
            }
            else if (directive == "suffix")
            {
                lexer.Consume(TokenKind.Keyword, "suffix");
                lexer.Consume(TokenKind.Colon);
                exampleModel.Suffix = lexer.Consume(TokenKind.String);
                lexer.Consume(TokenKind.Semicolon);
            }
            else if (directive == "directory")
            {
                lexer.Consume(TokenKind.Keyword, "directory");
                lexer.Consume(TokenKind.Colon);
                exampleModel.Directories.Add(lexer.Consume(TokenKind.String));
                lexer.Consume(TokenKind.Semicolon);
            }
            else
            {
                throw new ParserException($"Unexpected keyword in examples block: '{directive}'.");
            }
        }

        // Close the model block
        lexer.Consume(TokenKind.RightCurly);
    }

    // Close the examples block
    lexer.Consume(TokenKind.RightCurly);

    // Record what we found
    context.Examples.Add(examplesDefinition);
}