/// <summary>
/// Registers the compiler plugin and indexes it by start token according to its type.
/// Disabled plugins are recorded in the master list but never indexed for lookup.
/// </summary>
/// <param name="plugin">The plugin to register.</param>
public void Register(CompilerPlugin plugin)
{
    _allPlugins.Add(plugin);
    if (!plugin.IsEnabled)
    {
        return;
    }
    if (plugin.PluginType == "expr" && plugin.StartTokens.Length > 0)
    {
        // Expression plugins: several plugins may share the same start token, so
        // append to the existing list. TryGetValue avoids the original's double
        // dictionary lookup (ContainsKey followed by the indexer).
        foreach (var startToken in plugin.StartTokens)
        {
            List<CompilerPlugin> tokenPlugins;
            if (!_pluginExprs.TryGetValue(startToken, out tokenPlugins))
            {
                tokenPlugins = new List<CompilerPlugin>();
                _pluginExprs[startToken] = tokenPlugins;
            }
            tokenPlugins.Add(plugin);
        }
    }
    else if (plugin.PluginType == "lexer" && plugin.StartTokens.Length > 0)
    {
        // Lexer plugins: one shared single-item list mapped from every start token.
        var list = new List<CompilerPlugin> { plugin };
        foreach (var startToken in plugin.StartTokens)
        {
            _pluginLexTokens[startToken] = list;
        }
    }
    else if (plugin.PluginType == "token")
    {
        // Token plugins get a replacement handler. They are indexed either by their
        // explicit start tokens or, when none are supplied, by each replacement alias.
        var tplugin = new TokenReplacePlugin();
        var hasStartTokens = plugin.StartTokens != null && plugin.StartTokens.Length > 0;
        var list = new List<CompilerPlugin> { plugin };
        plugin.Handler = tplugin;
        if (hasStartTokens)
        {
            foreach (var startToken in plugin.StartTokens)
            {
                _pluginTokens[startToken] = list;
            }
        }
        if (plugin.TokenReplacements != null && plugin.TokenReplacements.Count > 0)
        {
            foreach (var replacements in plugin.TokenReplacements)
            {
                // Each replacement row is [alias, replaceWith].
                var alias = replacements[0];
                var replaceWith = replacements[1];
                tplugin.SetupReplacement(alias, replaceWith);
                if (!hasStartTokens)
                {
                    _pluginTokens[alias] = list;
                }
            }
        }
    }
}
private MatchResult IsGrammarMatchOnExpression(CompilerPlugin plugin)
{
    // Run the expression grammar check starting at the current token (peek position 0),
    // then stamp the plugin onto the result for the caller.
    var namedArgs = new Dictionary<string, object>();
    var matchResult = CheckExpressionMatches(plugin, plugin.Matches, namedArgs, 0, 0);
    matchResult.Plugin = plugin;
    return matchResult;
}
/// <summary>
/// Validates that the compiler plugin has all required metadata and a build function.
/// </summary>
/// <param name="plugin">The plugin to validate.</param>
/// <returns>A result holding the error list, overall success flag and combined message.</returns>
public BoolMsgObj Validate(CompilerPlugin plugin)
{
    var errors = new List<string>();
    if (string.IsNullOrEmpty(plugin.Grammar))
        errors.Add("Grammar not supplied");
    if (string.IsNullOrEmpty(plugin.Name))
        errors.Add("Compiler plugin name not supplied");
    if (string.IsNullOrEmpty(plugin.FullName))
        errors.Add("Compiler plugin full name not supplied");
    if (plugin.StartTokens.Length == 0)
        errors.Add("Start tokens not supplied");
    if (string.IsNullOrEmpty(plugin.PluginType))
        errors.Add("Plugin type not supplied");
    if (plugin.BuildExpr == null)
        errors.Add("Plugin parse function not supplied");

    var success = errors.Count == 0;
    var message = string.Empty;
    // Each error is terminated (not separated) by CRLF, matching the original format.
    for (var ndx = 0; ndx < errors.Count; ndx++)
        message += errors[ndx] + "\r\n";

    return new BoolMsgObj(errors, success, message);
}
/// <summary>
/// Populates the plugin's descriptive and behavioral properties from script memory,
/// falling back to the defaults below when the script did not supply a value.
/// </summary>
/// <param name="plugin">The plugin being built from the meta-plugin script.</param>
private void LoadDefaultProperties(CompilerPlugin plugin)
{
    // 2. Assign all plugin properties
    plugin.Desc = this.GetOrDefaultString("desc", string.Empty);
    // Plugin type defaults to "expr"; "token" (and "lexer") are handled differently at registration.
    plugin.PluginType = this.GetOrDefaultString("type", "expr");
    plugin.Author = this.GetOrDefaultString("author", "Kishore Reddy");
    plugin.Company = this.GetOrDefaultString("company", "CodeHelix Solutions Inc.");
    plugin.Url = this.GetOrDefaultString("url", "http://www.codehelixsolutions.com");
    plugin.Url2 = this.GetOrDefaultString("url2", "http://fluentscript.codeplex.com");
    plugin.Doc = this.GetOrDefaultString("doc", "http://fluentscript.codeplex.com/documentation");
    plugin.Version = this.GetOrDefaultString("version", "0.9.8.10");
    // Behavioral flags: statements require end-of-statement by default; plugins are enabled by default.
    plugin.IsStatement = this.GetOrDefaultBool("isStatement", true);
    plugin.IsEndOfStatementRequired = this.GetOrDefaultBool("isEOSRequired", true);
    plugin.IsEnabled = this.GetOrDefaultBool("isEnabled", true);
    plugin.IsSystemLevel = this.GetOrDefaultBool("isSystemLevel", false);
    plugin.IsAutoMatched = this.GetOrDefaultBool("isAutoMatched", false);
    plugin.IsAssignmentSupported = this.GetOrDefaultBool("isAssignmentSupported", false);
}
private Expr ParseExpressionGrammar(Expr buildexpr, CompilerPlugin plugin, List<TokenMatch> matches, int peekCount)
{
    // NOTE(review): "current" is reassigned each pass but never read - confirm
    // TokenIt.Peek has no required side effects before removing the calls.
    var current = peekCount == 0 ? TokenIt.NextToken : TokenIt.Peek(peekCount);
    foreach (var match in matches)
    {
        // Only a grammar entry of the form @expr:block bound to "buildexpr" triggers block parsing.
        var isBlockRef = match.TokenType == "@expr"
                      && match.TokenPropValue == "block"
                      && match.Ref == "buildexpr";
        if (isBlockRef)
            Parser.ParseBlock(buildexpr as IBlockExpr);

        // Advance the peek position for the next grammar entry.
        peekCount++;
        current = TokenIt.Peek(peekCount, false);
    }
    return buildexpr;
}
/// <summary>
/// Checks grammar matches for a lexer-type plugin.
/// </summary>
/// <remarks>
/// TODO: not implemented - always returns null.
/// NOTE(review): callers must tolerate a null MatchResult from this method.
/// </remarks>
private MatchResult CheckMatchesForLexer(CompilerPlugin plugin, List <TokenMatch> matches, Dictionary <string, object> args, int peekCount, int matchCount)
{
    return (null);
}
/// <summary>
/// Loads the plugin's usage examples from the "examples" array in script memory, if supplied.
/// </summary>
/// <param name="plugin">The plugin whose Examples array is populated.</param>
private void LoadExamples(CompilerPlugin plugin)
{
    // 3. Examples
    var examplesList = this.Ctx.Memory.Get<object>("examples") as LArray;
    if (examplesList == null || examplesList.Value == null || examplesList.Value.Count == 0)
        return;

    var examples = new List<string>();
    foreach (var lobj in examplesList.Value)
    {
        // Fix: guard against non-LObject entries and null values, which previously
        // caused a NullReferenceException on example.GetValue().ToString().
        var example = lobj as LObject;
        if (example == null)
            continue;
        var value = example.GetValue();
        if (value == null)
            continue;
        examples.Add(value.ToString());
    }
    plugin.Examples = examples.ToArray();
}
/// <summary>
/// Registers the compiler plugin and indexes it by start token according to its type.
/// </summary>
/// <param name="plugin">The plugin to register.</param>
public void Register(CompilerPlugin plugin)
{
    _allPlugins.Add(plugin);
    if (plugin.PluginType == "expr" && plugin.StartTokens.Length > 0)
    {
        // Expression plugins: several plugins may share the same start token, so
        // append to the existing list. TryGetValue avoids the original's double
        // dictionary lookup (ContainsKey followed by the indexer).
        foreach (var startToken in plugin.StartTokens)
        {
            List<CompilerPlugin> tokenPlugins;
            if (!_pluginExprs.TryGetValue(startToken, out tokenPlugins))
            {
                tokenPlugins = new List<CompilerPlugin>();
                _pluginExprs[startToken] = tokenPlugins;
            }
            tokenPlugins.Add(plugin);
        }
    }
    else if (plugin.PluginType == "token")
    {
        // Token plugins get a replacement handler. They are indexed either by their
        // explicit start tokens or, when none are supplied, by each replacement alias.
        var tplugin = new TokenReplacePlugin();
        var hasStartTokens = plugin.StartTokens != null && plugin.StartTokens.Length > 0;
        var list = new List<CompilerPlugin> { plugin };
        plugin.Handler = tplugin;
        if (hasStartTokens)
        {
            foreach (var startToken in plugin.StartTokens)
            {
                _pluginTokens[startToken] = list;
            }
        }
        if (plugin.TokenReplacements != null && plugin.TokenReplacements.Count > 0)
        {
            foreach (var replacements in plugin.TokenReplacements)
            {
                // Each replacement row is [alias, replaceWith].
                var alias = replacements[0];
                var replaceWith = replacements[1];
                tplugin.SetupReplacement(alias, replaceWith);
                if (!hasStartTokens)
                {
                    _pluginTokens[alias] = list;
                }
            }
        }
    }
}
/// <summary>
/// Initializes a match result.
/// </summary>
/// <param name="success">Whether the grammar match succeeded.</param>
/// <param name="plugin">The matched plugin; callers sometimes pass null and assign Plugin afterwards.</param>
/// <param name="args">Named tokens/values captured during matching.</param>
public MatchResult(bool success, CompilerPlugin plugin, IDictionary <string, object> args)
{
    this.Args = args;
    this.Plugin = plugin;
    this.Success = success;
}
private void LoadStartTokensAsMap(CompilerPlugin plugin)
{
    // Pull the "start_tokens" map from script memory; absent or empty means nothing to set up.
    var map = this.Ctx.Memory.Get<object>("start_tokens") as LMap;
    if (map == null || map.Value.Count == 0)
        return;

    plugin.StartTokenMap = map.Value;

    // The start tokens themselves are the map's keys.
    var tokens = new List<string>();
    foreach (var pair in map.Value)
        tokens.Add(pair.Key);
    plugin.StartTokens = tokens.ToArray();
}
private void LoadStartTokensAsList(CompilerPlugin plugin)
{
    // No start tokens declared by the script at all.
    if (!this.Ctx.Memory.Contains("start_tokens"))
        return;

    var list = this.Ctx.Memory.Get<object>("start_tokens") as LArray;
    if (list == null || list.Value.Count == 0)
        return;

    // Collect each entry's string value as a start token.
    var tokens = new List<string>();
    foreach (var item in list.Value)
    {
        var text = item as LString;
        tokens.Add(text.Value);
    }
    if (tokens.Count > 0)
        plugin.StartTokens = tokens.ToArray();
}
private void LoadGrammar(CompilerPlugin plugin)
{
    // 6. Grammar text comes from the "grammar_parse" entry; empty means no grammar supplied.
    plugin.Grammar = this.GetOrDefaultString("grammar_parse", "");
    if (string.IsNullOrEmpty(plugin.Grammar))
        return;

    // 7a. Parse the grammar text into token matches.
    plugin.Matches = new GrammerParser().Parse(plugin.Grammar);
}
private MatchResult IsGrammarMatch(CompilerPlugin plugin)
{
    // Run the grammar check from the current token (peek position 0),
    // then stamp the plugin onto the result for the caller.
    var namedArgs = new Dictionary<string, object>();
    var matchResult = CheckMatches(plugin, plugin.Matches, namedArgs, 0);
    matchResult.Plugin = plugin;
    return matchResult;
}
/// <summary>
/// Walks the plugin's grammar matches against upcoming tokens (via peeking, without
/// consuming) and reports whether the token stream satisfies the grammar.
/// </summary>
/// <param name="plugin">Plugin whose StartTokenMap supplies replacement values for named matches.</param>
/// <param name="matches">Grammar entries to test, in order; groups recurse.</param>
/// <param name="args">Receives named tokens captured during matching.</param>
/// <param name="peekCount">Peek offset to start from; 0 means the current token.</param>
/// <returns>A MatchResult whose TokenCount is the final peek offset reached.</returns>
private MatchResult CheckMatches(CompilerPlugin plugin, List<TokenMatch> matches, Dictionary<string, object> args, int peekCount)
{
    var isMatch = true;
    // Peek offset 0 is the current token; anything beyond is looked ahead.
    var token = peekCount == 0 ? this.TokenIt.NextToken : this.TokenIt.Peek(peekCount);
    foreach (var match in matches)
    {
        var incrementPeek = false;
        // Check 1: Group tokens ?
        if(match.IsGroup)
        {
            var submatches = ((TokenGroup) match).Matches;
            var result = CheckMatches(plugin, submatches, args, peekCount);
            // NOTE(review): a successful group does not advance peekCount here via
            // result.TokenCount, unlike CheckExpressionMatches - confirm intentional.
            if(match.IsRequired && !result.Success)
            {
                isMatch = false;
                break;
            }
        }
        // Check 2: starttoken?
        else if (match.TokenType == "@starttoken")
        {
            incrementPeek = true;
        }
        // Check 3a: Optional words with text that does not match - skip without consuming.
        else if (!match.IsRequired && match.Text != null && match.Text != token.Token.Text)
        {
            incrementPeek = false;
        }
        // Check 3b: Optional words matched
        else if (!match.IsRequired && match.IsMatchingValue(token.Token))
        {
            incrementPeek = true;
        }
        // Check 4: Optional word not matched - skip without consuming.
        else if (!match.IsRequired && !match.IsMatchingValue(token.Token))
        {
            incrementPeek = false;
        }
        // Check 5: Expected literal word matched exactly.
        else if (match.IsRequired && match.TokenType == null && match.Text == token.Token.Text)
        {
            incrementPeek = true;
        }
        // Check 6: check the type of n1
        else if (match.IsMatchingType(token.Token))
        {
            incrementPeek = true;
        }
        // No rule matched: the grammar fails at this entry.
        else
        {
            isMatch = false;
            break;
        }
        if (incrementPeek)
        {
            // Capture named matches; "-value" carries the start-token replacement value.
            if (!string.IsNullOrEmpty(match.Name))
            {
                args[match.Name] = token;
                if(match.TokenPropEnabled && match.TokenPropValue == "value")
                {
                    var startToken = token.Token.Text;
                    args[match.Name + "-value"] = plugin.StartTokenMap[startToken];
                }
            }
            // Matched: increment.
            peekCount++;
            token = this.TokenIt.Peek(peekCount);
        }
    }
    // Plugin is left null here; callers (e.g. IsGrammarMatch) assign it on the result.
    var res = new MatchResult(isMatch, null, args);
    res.TokenCount = peekCount;
    return res;
}
/// <summary>
/// Validates that the compiler plugin has all required metadata and a parse function.
/// </summary>
/// <param name="plugin">The plugin to validate.</param>
/// <returns>A result holding the error list, overall success flag and combined message.</returns>
public BoolMsgObj Validate(CompilerPlugin plugin)
{
    // Table of (failed?, message) checks, evaluated in the original order.
    var checks = new List<KeyValuePair<bool, string>>
    {
        new KeyValuePair<bool, string>(string.IsNullOrEmpty(plugin.Grammar),    "Grammar not supplied"),
        new KeyValuePair<bool, string>(string.IsNullOrEmpty(plugin.Name),       "Compiler plugin name not supplied"),
        new KeyValuePair<bool, string>(string.IsNullOrEmpty(plugin.FullName),   "Compiler plugin full name not supplied"),
        new KeyValuePair<bool, string>(plugin.StartTokens.Length == 0,          "Start tokens not supplied"),
        new KeyValuePair<bool, string>(string.IsNullOrEmpty(plugin.PluginType), "Plugin type not supplied"),
        new KeyValuePair<bool, string>(plugin.ParseExpr == null,                "Plugin parse function not supplied")
    };

    var errors = new List<string>();
    foreach (var check in checks)
    {
        if (check.Key)
            errors.Add(check.Value);
    }

    var success = errors.Count == 0;
    // Each error is terminated (not separated) by CRLF, matching the original format.
    var message = string.Empty;
    foreach (var error in errors)
        message += error + "\r\n";

    return new BoolMsgObj(errors, success, message);
}
/// <summary>
/// Walks the plugin's grammar matches against upcoming tokens (via peeking) for
/// expression-type plugins, tracking how many required entries matched and capturing
/// named tokens/values into <paramref name="args"/>.
/// </summary>
/// <param name="plugin">Plugin supplying token maps, start-token map and TotalRequiredMatches.</param>
/// <param name="matches">Grammar entries to test, in order; groups recurse.</param>
/// <param name="args">Receives named tokens and "...Value" entries captured during matching.</param>
/// <param name="peekCount">Peek offset to start from; 0 means the current token.</param>
/// <param name="matchCount">Running count of matches from an enclosing group, carried into this call.</param>
/// <returns>A MatchResult with Success, TotalMatched and the final peek offset (TokenCount).</returns>
private MatchResult CheckExpressionMatches(CompilerPlugin plugin, List <TokenMatch> matches, Dictionary <string, object> args, int peekCount, int matchCount)
{
    var isMatch = true;
    var token = peekCount == 0 ? this.TokenIt.NextToken : this.TokenIt.Peek(peekCount);
    var totalMatched = matchCount;
    foreach (var match in matches)
    {
        var continueCheck = false;
        var trackNamedArgs = true;
        var valueMatched = false;
        // Terminators: expression-ending tokens stop matching; success depends on
        // whether enough required entries matched so far.
        if (match.TokenType == "@exprTerminators"
            && (Terminators.ExpFlexibleEnd.ContainsKey(token.Token) || Terminators.ExpThenEnd.ContainsKey(token.Token)))
        {
            // Don't increment the peekcount
            isMatch = totalMatched >= plugin.TotalRequiredMatches;
            break;
        }
        // Check for ";" and EOF ( end of file/text )
        if (token.Token == Tokens.Semicolon || token.Token == Tokens.EndToken)
        {
            isMatch = totalMatched >= plugin.TotalRequiredMatches;
            break;
        }
        // Check 1: Group tokens ? Recurse into the group's sub-matches.
        if (match.IsGroup)
        {
            var submatches = ((TokenGroup)match).Matches;
            var result = CheckExpressionMatches(plugin, submatches, args, peekCount, totalMatched);
            if (match.IsRequired && !result.Success)
            {
                isMatch = false;
                break;
            }
            if (result.Success)
            {
                // A successful group consumes its tokens: jump to its final peek offset.
                peekCount = result.TokenCount;
                if (match.IsRequired)
                {
                    totalMatched += result.TotalMatched;
                }
            }
        }
        // Check 2: starttoken?
        else if (match.TokenType == "@starttoken")
        {
            continueCheck = true;
            totalMatched++;
        }
        // Check 2a: tokenmap1 - token text must be a key of the plugin's first token map.
        else if (match.TokenType == "@tokenmap1")
        {
            if (plugin.TokenMap1 == null || !plugin.TokenMap1.ContainsKey(token.Token.Text))
            {
                isMatch = false;
                break;
            }
            continueCheck = true;
            totalMatched++;
        }
        // Check 2b: tokenmap2 - same as above against the second token map.
        else if (match.TokenType == "@tokenmap2")
        {
            if (plugin.TokenMap2 == null || !plugin.TokenMap2.ContainsKey(token.Token.Text))
            {
                isMatch = false;
                break;
            }
            continueCheck = true;
            totalMatched++;
        }
        // Check 2c: "identSymbol" must exist in the symbol table.
        else if (match.TokenType == "@identsymbol")
        {
            var symbolExists = this.Symbols.Contains(token.Token.Text);
            continueCheck = symbolExists;
            if (!continueCheck)
            {
                isMatch = false;
                break;
            }
            totalMatched++;
        }
        // Check 2c': singular symbol - the pluralized form (text + "s") must exist.
        else if (match.TokenType == "@singularsymbol")
        {
            var plural = token.Token.Text + "s";
            var symbolExists = this.Symbols.Contains(plural);
            continueCheck = symbolExists;
            if (!continueCheck)
            {
                isMatch = false;
                break;
            }
            totalMatched++;
        }
        // Check 2d: paramlist = @word ( , @word )* parameter names, capped at maxParams.
        else if (match.TokenType == "@paramnames")
        {
            var isvalidParamList = true;
            var maxParams = 10;
            var totalParams = 0;
            var paramList = new List <object>();
            while (totalParams <= maxParams)
            {
                var token2 = this.TokenIt.Peek(peekCount, false);
                if (token2.Token == Tokens.Comma)
                {
                    peekCount++;
                }
                else if (token2.Token.Kind == TokenKind.Ident)
                {
                    paramList.Add(token2.Token.Text);
                    peekCount++;
                }
                else
                {
                    // Non-param token: back up one so the shared increment below re-balances.
                    peekCount--;
                    break;
                }
                totalParams++;
            }
            isMatch = isvalidParamList;
            continueCheck = isMatch;
            if (continueCheck)
            {
                // Param list stores its own args; skip the generic named-arg capture below.
                trackNamedArgs = false;
                if (!string.IsNullOrEmpty(match.Name))
                {
                    args[match.Name] = token;
                    args[match.Name + "Value"] = new LArray(paramList);
                }
                totalMatched++;
            }
            else
            {
                break;
            }
        }
        // Check 3a: Optional words with text that does not match - skip without consuming.
        else if (!match.IsRequired && match.Text != null && match.Text != token.Token.Text)
        {
            continueCheck = false;
        }
        // Check 3b: Optional words matched
        else if (!match.IsRequired && match.IsMatchingValue(token.Token))
        {
            continueCheck = true;
        }
        // Check 4: Optional word not matched - skip without consuming.
        else if (!match.IsRequired && !match.IsMatchingValue(token.Token))
        {
            continueCheck = false;
        }
        // Check 5a: Expected word matched exactly.
        else if (match.IsRequired && match.TokenType == null && match.Text == token.Token.Text)
        {
            continueCheck = true;
            totalMatched++;
        }
        // Check 5b: Expected word in a list of allowed values.
        else if (match.IsRequired && match.TokenType == null && match.Values != null)
        {
            if (!match.IsMatchingValue(token.Token))
            {
                isMatch = false;
                break;
            }
            continueCheck = true;
            valueMatched = true;
            totalMatched++;
        }
        // Check 6: check the type of n1
        else if (match.IsMatchingType(token.Token))
        {
            continueCheck = true;
            totalMatched++;
        }
        // No rule matched: the grammar fails at this entry.
        else
        {
            isMatch = false;
            break;
        }
        if (continueCheck)
        {
            if (!string.IsNullOrEmpty(match.Name) && trackNamedArgs)
            {
                args[match.Name] = token;
                if (match.TokenPropEnabled)
                {
                    // 1. figure out which token map to use.
                    var lookupmap = plugin.StartTokenMap;
                    if (match.TokenType == "@tokenmap1")
                    {
                        lookupmap = plugin.TokenMap1;
                    }
                    else if (match.TokenType == "@tokenmap2")
                    {
                        lookupmap = plugin.TokenMap2;
                    }
                    // Case 1: Start token replacement value
                    if (match.TokenPropValue == "value")
                    {
                        var startToken = token.Token.Text;
                        args[match.Name + "Value"] = lookupmap[startToken];
                    }
                    // Case 2: Token value wrapped in the matching fluentscript LObject type.
                    else if (match.TokenPropValue == "tvalue")
                    {
                        LObject val = LObjects.Null;
                        if (match.TokenType == "@number")
                        {
                            val = new LNumber((double)token.Token.Value);
                        }
                        else if (match.TokenType == "@time")
                        {
                            val = new LTime((TimeSpan)token.Token.Value);
                        }
                        else if (match.TokenType == "@word")
                        {
                            val = new LString((string)token.Token.Value);
                        }
                        else if (match.TokenType == "@starttoken")
                        {
                            val = new LString(token.Token.Text);
                        }
                        args[match.Name + "Value"] = val;
                    }
                    // Case 3: Token value coerced to an LString.
                    else if (match.TokenPropValue == "tvaluestring")
                    {
                        LObject val = LObjects.Null;
                        if (match.TokenType == "@number")
                        {
                            val = new LString(((double)token.Token.Value).ToString(CultureInfo.InvariantCulture));
                        }
                        else if (match.TokenType == "@time")
                        {
                            val = new LString(((TimeSpan)token.Token.Value).ToString());
                        }
                        else if (match.TokenType == "@starttoken")
                        {
                            val = new LString(token.Token.Text);
                        }
                        else if (match.TokenType == "@word")
                        {
                            val = new LString(token.Token.Text);
                        }
                        else if (match.TokenType == "@singularsymbol")
                        {
                            val = new LString(token.Token.Text);
                        }
                        args[match.Name + "Value"] = val;
                    }
                }
                // matching values
                else if (valueMatched)
                {
                    args[match.Name + "Value"] = token.Token.Text;
                }
            }
            // Matched: increment.
            peekCount++;
            token = this.TokenIt.Peek(peekCount, false);
        }
    }
    // Plugin is left null here; callers (e.g. IsGrammarMatchOnExpression) assign it.
    var res = new MatchResult(isMatch, null, args);
    res.TotalMatched = totalMatched;
    res.TokenCount = peekCount;
    return (res);
}
/// <summary>
/// Loads token replacements from the "tokens" entry in script memory:
/// an array of [alias, replacement] string pairs.
/// </summary>
/// <param name="plugin">The plugin whose TokenReplacements list is populated.</param>
private void LoadTokenReplacements(CompilerPlugin plugin)
{
    if (!this.Ctx.Memory.Contains("tokens"))
        return;

    var array = this.Ctx.Memory.Get<object>("tokens") as LArray;
    // Fix: the original dereferenced array.Value without checking the cast succeeded,
    // which threw a NullReferenceException when "tokens" was not an LArray.
    if (array == null || array.Value == null)
        return;

    var replacements = new List<string[]>();
    foreach (var record in array.Value)
    {
        var list = record as LArray;
        if (list == null)
            continue;
        var columns = list.Value;
        // Fix: a row with fewer than 2 columns previously threw ArgumentOutOfRangeException.
        if (columns == null || columns.Count < 2)
            continue;
        var alias = columns[0] as LString;
        var replacement = columns[1] as LString;
        if (alias != null && replacement != null)
        {
            replacements.Add(new string[2] { alias.Value, replacement.Value });
        }
    }
    plugin.TokenReplacements = replacements;
}
private void SetupPlugin()
{
    // 1. Create the meta plugin shell.
    var plugin = new CompilerPlugin { Name = this.Name };

    // 2. Defaults (desc, company, ...) and 3. examples come from script memory.
    this.LoadDefaultProperties(plugin);
    this.LoadExamples(plugin);

    // 4. Wire up the plugin by type.
    if (plugin.PluginType == "expr")
    {
        // Expression plugins always have a parse function, a start-token map and a grammar.
        plugin.ParseExpr = this.GetFunc("parse");
        this.LoadStartTokensAsMap(plugin);
        this.LoadGrammar(plugin);
    }
    else if (plugin.PluginType == "token")
    {
        // Token plugins: grammar, start tokens and replacements; parse function is optional.
        this.LoadGrammar(plugin);
        this.LoadStartTokensAsList(plugin);
        this.LoadTokenReplacements(plugin);
        if (this.Ctx.Memory.Contains("parse"))
            plugin.ParseExpr = this.GetFunc("parse");
    }

    // 5. Finally register the plugin.
    this.Ctx.PluginsMeta.Register(plugin);
}
/// <summary>
/// After parsing completes, registers the parsed alias as a token-replacement plugin.
/// </summary>
/// <param name="node">The parsed alias statement.</param>
public override void OnParseComplete(AstNode node)
{
    var stmt = node as AliasStmt;

    // The alias becomes a lightweight "token" plugin whose single replacement
    // maps the alias text to the actual token's text.
    var plugin = new CompilerPlugin
    {
        PluginType = "token",
        TokenReplacements = new List<string[]> { new[] { stmt.Alias, stmt.Actual.Text } },
        Precedence = 1
    };
    Ctx.PluginsMeta.Register(plugin);
}
/// <summary>
/// Initializes a match result.
/// </summary>
/// <param name="success">Whether the grammar match succeeded.</param>
/// <param name="plugin">The matched plugin; callers sometimes pass null and assign Plugin afterwards.</param>
/// <param name="args">Named tokens/values captured during matching.</param>
public MatchResult(bool success, CompilerPlugin plugin, IDictionary<string, object> args)
{
    this.Args = args;
    this.Plugin = plugin;
    this.Success = success;
}