/// <summary>
///     Tokenizes <paramref name="expr"/>, parses it into an <see cref="Expression"/> and returns
///     both the parsed expression and its token walker, rewound to the first token.
/// </summary>
/// <param name="expr">The raw expression source to tokenize and parse.</param>
/// <returns>The parsed expression paired with the (reset) walker it was parsed from.</returns>
public static (Expression Expression, ExpressionWalker ExpressionWalker) GetExpressionWithWalker(string expr) {
    var tokenWalker = new ExpressionWalker(ExpressionLexer.Tokenize(expr));
    var parsed = ParseExpression(tokenWalker);
    //rewind so the caller receives a walker positioned at the first token again.
    tokenWalker.Reset();
    return (parsed, tokenWalker);
}
/// <summary>
///     Tokenizes <paramref name="code"/> and parses it into a single <see cref="ParserAction"/>.
///     A leading Mod token, if present, is consumed before parsing.
/// </summary>
/// <param name="code">The raw code to parse.</param>
/// <returns>The parsed action.</returns>
public static ParserAction Parse(string code) {
    var tokenWalker = new ExpressionWalker(ExpressionLexer.Tokenize(code));

    //skip a leading Mod marker token before handing off to the parser.
    if (tokenWalker.Current.Token == ExpressionToken.Mod) {
        tokenWalker.NextOrThrow();
    }

    //NOTE(review): the first line is dropped — presumably the statement's own header line; confirm against LineBuilder.
    var lineBuilder = new LineBuilder(code);
    lineBuilder.Lines.RemoveAt(0);

    return Parse(tokenWalker, code, lineBuilder);
}
/// <summary>
///     Executes a single parsed <paramref name="action"/> against the interpreter context:
///     imports a type or assembly file, declares a variable, evaluates inline Mod-prefixed
///     (...) expressions inside a line, or compiles a foreach loop.
/// </summary>
/// <param name="action">The action to compile; returns immediately when already consumed.</param>
/// <param name="actions">All actions of the current compilation. NOTE(review): unused in this body — confirm whether an override/caller depends on it.</param>
/// <exception cref="ExpressionCompileException">When an imported type cannot be resolved, or a declared variable name collides with a builtin keyword.</exception>
/// <exception cref="ArgumentOutOfRangeException">When <paramref name="action"/> carries an unhandled <c>ParserToken</c>.</exception>
public void CompileAction(ParserAction action, OList<ParserAction> actions) {
    if (action.Consumed) {
        return;
    }

    switch (action.Token) {
        case ParserToken.Import: {
            var expr = (ImportExpression)action.Related.Single();
            var type = expr.Type;
            var alias = expr.As;
            //when the import target is a file on disk, load it as an assembly and stop.
            //NOTE(review): this path does not add anything to Context.Imports, unlike the type path below — confirm intended.
            if (File.Exists(type)) {
                Assembly.LoadFile(type);
                Debug.WriteLine($"{type} was loaded successfully.");
                break;
            }

            //otherwise resolve the type name against every assembly already loaded in the AppDomain.
            Type foundtype;
            foreach (var asm in AppDomain.CurrentDomain.GetAssemblies()) {
                foundtype = asm.GetType(type);
                if (foundtype == null) {
                    continue;
                }

                goto _found;
            }

            throw new ExpressionCompileException($"Unable to find type: {type}");
            _found:
            Debug.WriteLine($"{type} was loaded successfully.");
            if (alias != null) {
                Context.Imports.AddType(foundtype, alias);
            } else {
                Context.Imports.AddType(foundtype);
            }

            break;
        }

        case ParserToken.Declaration: {
            var expr = (VariableDeclarationExpression)action.Related.Single();
            var name = expr.Name.AsString();
            //validate name: builtin interpreter keywords may not be shadowed.
            {
                if (InterpreterOptions.BuiltinKeywords.Any(w => w.Equals(name, StringComparison.Ordinal))) {
                    throw new ExpressionCompileException($"Variable named '{name}' is taken by the interpreter.");
                }
            }

            var right = expr.Right;
            var evaluation = EvaluateExpression(right);
            Context.Variables[name] = Data.Create(evaluation);
            break;
        }

        case ParserToken.Expression: {
            var line = action.RelatedLines.Single();
            //guard against compiling the same line twice (metadata acts as a marker).
            if (line.Metadata.Contains("ParserToken.Expression")) {
                break;
            }

            line.Metadata.Add("ParserToken.Expression");
            line.MarkedForDeletion = false; //they are all true by default, well all lines that were found relevant to ParserAction
            var copy = line.Content;
            var ew = new ExpressionWalker(ExpressionLexer.Tokenize(copy));
            var vars = Context.Variables;
            bool changed = false;
            int last_access_index = 0;
            //we reparse the line and handle all expressions.
            if (ew.HasNext) {
                do {
                    _restart:
                    //after a splice the token indices are stale: blank the already-processed prefix
                    //with spaces (keeps match indices stable) and re-tokenize the remainder.
                    if (changed) {
                        changed = false;
                        var cleanedCopy = new string(' ', last_access_index) + copy.Substring(last_access_index);
                        ew = new ExpressionWalker(ExpressionLexer.Tokenize(cleanedCopy));
                        if (ew.Count == 0) {
                            break;
                        }
                    }

                    var current = ew.Current;
                    //iterate all tokens of that line
                    if (current.Token != ExpressionToken.Mod || !ew.HasNext) {
                        continue;
                    }

                    var mod = ew.Current;
                    current = ew.NextToken();
                    switch (current.Token) {
                        case ExpressionToken.LeftParen: {
                            //it is an expression: parse up to the matching right paren and evaluate.
                            ew.NextOrThrow();
                            var expression = Expression.ParseExpression(ew);
                            object val = EvaluateObject(expression, line);
                            if (val is ReferenceData rd) //make sure references are unpacked
                            {
                                val = rd.UnpackReference(Context);
                            }

                            ew.IsCurrentOrThrow(ExpressionToken.RightParen);
                            var emit = val is Data d ? d.Emit() : val.ToString();
                            //splice the evaluated text over the original Mod...RightParen span.
                            copy = copy
                                .Remove(mod.Match.Index, ew.Current.Match.Index + 1 - mod.Match.Index)
                                .Insert(mod.Match.Index, emit);
                            last_access_index = mod.Match.Index + emit.Length;
                            changed = true;
                            goto _restart;
                        }

                        default:
                            continue;
                    }
                } while (ew.Next());
            }

            //ensure the emitted line is newline-terminated.
            line.Replace(copy + (copy.EndsWith("\n") ? "" : "\n"));
            break;
        }

        case ParserToken.ForeachLoop: {
            _compileForeach(action);
            break;
        }

        case ParserToken.Template:
            break;

        default:
            throw new ArgumentOutOfRangeException();
    }
}
/// <summary>
///     Executes a parsed foreach template block: iterates the foreach arguments in lockstep
///     (bounded by the shortest argument), substitutes #n argument references, #(...) expressions,
///     inline declarations and nested foreach blocks inside the related lines, and appends the
///     expanded text onto <paramref name="baseLine"/>.
/// </summary>
/// <param name="expr">The foreach expression to execute; <c>Depth</c> distinguishes nested loops.</param>
/// <param name="relatedLines">The template lines forming the loop body.</param>
/// <param name="baseLine">The line the expanded output is written to.</param>
private void _executeForeach(ForeachExpression expr, List<Line> relatedLines, Line baseLine) {
    var contents = relatedLines.Select(l => l.Content).ToArray();
    //evaluate every foreach argument into an indexable IList.
    var iterateThose = expr.Arguments.Arguments.Select(parseExpr).ToList();
    unpackPackedArguments();
    //get smallest index and iterate it.
    var min = iterateThose.Min(i => i.Count);
    var vars = Context.Variables;
    for (int i = 0; i < min; i++) {
        //set variables: expose the counter as 'i' (or 'i{Depth}' in nested loops)...
        if (expr.Depth > 0) {
            vars[$"i{expr.Depth}"] = new NumberScalar(i);
        } else {
            vars["i"] = new NumberScalar(i);
        }

        //...and the current element of argument j under a depth-offset key (__{j+1+Depth*100}__) to avoid nested-loop collisions.
        for (int j = 0; j < iterateThose.Count; j++) {
            vars[$"__{j + 1 + expr.Depth * 100}__"] = iterateThose[j][i];
        }

        var variables = new List<string>(); //a list of all added variables that will be cleaned after this i iteration.
        //now here we iterate contents and set all variables in it.
        for (var contentIndex = 0; contentIndex < contents.Length; contentIndex++) {
            var content = contents[contentIndex];
            //iterate lines, one at a time
            // ReSharper disable once RedundantToStringCall
            var copy = content.ToString().Replace("|#", "#");
            bool changed = false;
            int last_access_index = 0;
            //balanced-parentheses matcher for unescaped #(...) spans (uses .NET regex balancing groups).
            const string HashtagExpressionRegex = @"(?<!\\)\#\((?:[^()]|(?<open>\()|(?<-open>\)))+(?(open)(?!))\)";
            var hashtagExprs = Regex.Matches(copy, HashtagExpressionRegex, Regexes.DefaultRegexOptions).Cast<Match>().ToArray();
            //replace all emit commands: bare #n becomes the emitted value, but #n nested inside a
            //#(...) expression is rewritten to its __n__ variable key so the expression evaluator sees it.
            copy = ExpressionLexer.ReplaceRegex(copy, @"(?<!\\)\#([0-9]+)", match => {
                var key = $"__{match.Groups[1].Value}__";
                if (hashtagExprs.Any(m => m.IsMatchNestedTo(match))) {
                    //it is inside hashtagExpr #(...)
                    return (key);
                }

                return (_emit(vars[key]));
            });
            var ew = new ExpressionWalker(ExpressionLexer.Tokenize(copy, ExpressionToken.StringLiteral));
            if (ew.HasNext) {
                do {
                    _restart:
                    //after a splice the token indices are stale: blank the processed prefix with spaces
                    //(keeps match indices stable) and re-tokenize.
                    //NOTE(review): unlike CompileAction's Expression case there is no ew.Count == 0 guard here — confirm a fully-consumed line cannot occur.
                    if (changed) {
                        changed = false;
                        var cleanedCopy = new string(' ', last_access_index) + copy.Substring(last_access_index);
                        ew = new ExpressionWalker(ExpressionLexer.Tokenize(cleanedCopy, ExpressionToken.StringLiteral));
                    }

                    var current = ew.Current;
                    //iterate all tokens of that line
                    if (current.Token == ExpressionToken.Mod && ew.HasNext) {
                        //skip escaped Mod tokens.
                        if (ew.HasBack && ew.PeakBack.Token == ExpressionToken.Escape) {
                            continue;
                        }

                        var expr_ew = new ExpressionWalker(ExpressionLexer.Tokenize(copy.Substring(current.Match.Index)));
                        //var offset = current.Match.Index;
                        //var hashtag = expr_ew.Current;
                        current = expr_ew.NextToken();
                        switch (current.Token) {
                            case ExpressionToken.Foreach:
                                //nested %foreach: re-parse from this line to the end of the block and recurse one depth deeper.
                                var code = contents.SkipWhile(s => s != content).StringJoin();
                                var e = ForeachExpression.Parse(code);
                                var foreachExpr = (ForeachExpression)e.Related[0];
                                foreachExpr.Depth = expr.Depth + 1;
                                //no need to mark lines from e for deletion, they are already marked beforehand.
                                _executeForeach(foreachExpr, e.RelatedLines, baseLine);
                                contentIndex += e.RelatedLines.Count + 2 - 1; //first for the %foreach line, second for the closer %, -1 because we increment index by one on next iteration.
                                goto _skipline;
                            default:
                                continue;
                        }
                    }

                    if (current.Token == ExpressionToken.Hashtag && ew.HasNext) {
                        //skip escaped Hashtag tokens.
                        if (ew.HasBack && ew.PeakBack.Token == ExpressionToken.Escape) {
                            continue;
                        }

                        var offset = current.Match.Index;
                        var expr_ew = new ExpressionWalker(ExpressionLexer.Tokenize(copy.Substring(current.Match.Index)));
                        var hashtag = expr_ew.Current;
                        current = expr_ew.NextToken();
                        switch (current.Token) {
                            case ExpressionToken.Literal:
                                //this is variable declaration %varname = expr
                                var peak = expr_ew.PeakNext.Token;
                                if (peak == ExpressionToken.Equal) {
                                    var e = VariableDeclarationExpression.Parse(expr_ew);
                                    var varname = e.Name.AsString();
                                    //remember only variables that are new, so they can be removed after this iteration.
                                    if (!Context.Variables.ContainsKey(varname)) {
                                        variables.Add(varname);
                                    }

                                    CompileAction(new ParserAction(ParserToken.Declaration, new List<Expression>() { e }), new OList<ParserAction>(0));
                                    goto _skipline;
                                }

                                break;
                            case ExpressionToken.LeftParen: {
                                //it is an expression: parse to the matching right paren and evaluate.
                                expr_ew.NextOrThrow();
                                var expression = Expression.ParseExpression(expr_ew);
                                object val = EvaluateObject(expression, baseLine);
                                if (val is ReferenceData rd) //make sure references are unpacked
                                {
                                    val = rd.UnpackReference(Context);
                                }

                                expr_ew.IsCurrentOrThrow(ExpressionToken.RightParen);
                                var emit = val is Data d ? d.Emit() : val.ToString();
                                //splice the evaluated text over the #(...) span (expr_ew indices are relative to 'offset').
                                copy = copy
                                    .Remove(offset + hashtag.Match.Index, expr_ew.Current.Match.Index + 1 - hashtag.Match.Index)
                                    .Insert(offset + hashtag.Match.Index, emit);
                                //NOTE(review): 'offset' is not added here, unlike the Remove/Insert above — confirm last_access_index is intentionally relative.
                                last_access_index = hashtag.Match.Index + emit.Length;
                                changed = true;
                                goto _restart;
                            }

                            case ExpressionToken.NumberLiteral: {
                                if (expr_ew.HasNext && expr_ew.PeakNext.Token == ExpressionToken.LeftBracet) {
                                    //it is an indexer call.
                                    //todo indexer
                                } else {
                                    //it is a simple emit: replace every occurrence of #n with the emitted value of __n__.
                                    var key = $"#{expr_ew.Current.Match.Value}";
                                    object val = vars[$"__{expr_ew.Current.Match.Value}__"];
                                    copy = Regex.Replace(copy, Regex.Escape(key), _emit(val));
                                    changed = true;
                                }

                                goto _restart;
                            }

                            default:
                                continue;
                        }
                    }

                    //incase it is escaped, continue.
                } while (ew.Next());
            }

            _nextline: //NOTE(review): this label is never targeted by a goto — dead label, kept as-is (compiler warning CS0164).
            //cleanup escapes
            copy = copy.Replace("\\#", "#");
            baseLine.ReplaceOrAppend(copy + (copy.EndsWith("\n") ? "" : "\n"));
            _skipline: ;
        }

        //drop variables that were declared during this iteration.
        foreach (var variable in variables) {
            Context.Variables.Remove(variable);
        }
    }

    //remove the loop counter and the __n__ argument variables from the context.
    if (expr.Depth == 0) {
        Context.Variables.Remove("i");
    } else {
        Context.Variables.Remove($"i{expr.Depth}");
    }

    for (var i = 0; i < iterateThose.Count; i++) {
        Context.Variables.Remove($"__{i + 1 + expr.Depth * 100}__");
    }

    if (!baseLine.ContentWasModified) {
        baseLine.MarkedForDeletion = true;
    }

    //evaluates a foreach argument into an indexable list: unpacks references, expands string
    //scalars to char arrays and unwraps .NET object wrappers.
    IList parseExpr(Expression arg) {
        var ev = EvaluateObject(arg, baseLine);
        if (ev is ReferenceData d) {
            ev = d.UnpackReference(Context);
        }

        if (ev is StringScalar ss) {
            return (ss.ToCharArray());
        }

        if (ev is NetObject no) {
            ev = no.Value;
        }

        return ((IList)ev);
    }

    //flattens any PackedArguments entries in-place so every element of iterateThose is a plain IList.
    void unpackPackedArguments() {
        //unpack PackedArguments
        for (var i = iterateThose.Count - 1; i >= 0; i--) {
            if (iterateThose[i] is PackedArguments pa) {
                iterateThose.InsertRange(i, pa.Objects.Select(o => (IList)o));
            }
        }

        iterateThose.RemoveAll(it => it is PackedArguments);
    }
}