/// <summary>
/// Finds every operator token (meta is DelimOp) in the token tree and applies each one,
/// deepest-nested first, then by operator precedence, then left-to-right.
/// </summary>
public void ApplyOperators() {
	// Collect the index-path of every operator token in the tree.
	List<int[]> paths = FindTokenPaths(t => t.meta is DelimOp);
	paths.Sort((a, b) => {
		// deeper (longer) paths resolve first
		int comp = b.Length.CompareTo(a.Length);
		if (comp != 0) { return comp; }
		Context.Entry e = null;
		Token ta = GetTokenAt(tokens, a, ref e);
		Token tb = GetTokenAt(tokens, b, ref e);
		DelimOp da = ta.meta as DelimOp;
		DelimOp db = tb.meta as DelimOp;
		// then by operator precedence...
		comp = da.order.CompareTo(db.order);
		// ...then by source position (left-to-right)
		if (comp == 0) { comp = ta.index.CompareTo(tb.index); }
		return comp;
	});
	for (int i = 0; i < paths.Count; ++i) {
		Context.Entry pathNode = null;
		Token t = GetTokenAt(tokens, paths[i], ref pathNode);
		DelimOp op = t.meta as DelimOp;
		// isSyntaxValid restructures pathNode.tokens as a side effect. Its returned
		// entry was previously stored in an unused local (removed here).
		op.isSyntaxValid.Invoke(this, pathNode.tokens, paths[i][paths[i].Length - 1]);
		// keep the cached count in sync after any restructuring
		if (pathNode.tokenCount != pathNode.tokens.Count) {
			pathNode.tokenCount = pathNode.tokens.Count;
		}
	}
}
/// <summary>
/// The '%' operator: string % args performs formatting; numeric operands perform modulo.
/// On failure, an error is recorded and the entry itself is returned.
/// </summary>
public static object op_mod(Tokenizer tok, Context.Entry e, object scope) {
	op_BinaryArgs(tok, e, scope, out object left, out object right, out Type lType, out Type rType);
	if (lType == typeof(string)) {
		// string formatting: a lone right-hand value is wrapped into a one-element list
		string format = left as string;
		List<object> args;
		if (rType != typeof(List<object>)) {
			args = new List<object> { right };
		} else {
			args = right as List<object>;
		}
		return Format(format, args, scope, tok, e.tokens[0].index);
	}
	bool numeric = rType != typeof(string)
		&& CodeConvert.IsConvertable(lType) && CodeConvert.IsConvertable(rType);
	if (numeric) {
		CodeConvert.TryConvert(ref left, typeof(double));
		CodeConvert.TryConvert(ref right, typeof(double));
		return ((double)left) % ((double)right);
	}
	tok.AddError(e.tokens[1], "unable to modulo " + lType + " and " + rType + " : " + left + " % " + right);
	return e;
}
/// <summary>Logical OR: resolves both operands and reduces each to a boolean.</summary>
public static object op_or_(Tokenizer tok, Context.Entry e, object scope) {
	op_BinaryArgs(tok, e, scope, out object lhs, out object rhs, out Type lhsType, out Type rhsType);
	// short-circuits: rhs is only reduced when lhs is false
	return op_reduceToBoolean(lhs, lhsType) || op_reduceToBoolean(rhs, rhsType);
}
/// <summary>
/// Resolves this token to its runtime value, based on what kind of metadata it carries.
/// An invalidated token (index and length both -1) yields its meta payload directly.
/// </summary>
public object Resolve(Tokenizer tok, object scope) {
	if (index == -1 && length == -1) { return meta; }
	switch (meta) {
	case null: throw new NullReferenceException();
	case string str: return ToString(str);            // substring of source text
	case TokenSubstitution sub: return sub.value;     // pre-parsed replacement value
	case Delim delim: return delim.text;              // delimiter resolves to its text
	case Context.Entry entry: return entry.Resolve(tok, scope); // nested context
	}
	throw new DecoderFallbackException();
}
/// <summary>Creates a new Entry owned by this context, spanning the given tokens.</summary>
public Entry GetEntry(List<Token> tokens, int startTokenIndex, object meta, Context.Entry parent = null) {
	return new Entry {
		context = this,
		tokens = tokens,
		tokenStart = startTokenIndex,
		sourceMeta = meta,
		parent = parent
	};
}
/// <summary>True only when this token is the closing delimiter token of its context entry.</summary>
public bool IsContextEnding() {
	Context.Entry entry = GetAsContextEntry();
	if (entry == null) { return false; }
	return entry.GetEndToken() == this;
}
/// <summary>
/// Debug helper: one line per path, showing the index chain, the token it lands on,
/// and that token's position in the source file.
/// </summary>
protected string PrintTokenPaths(IList<int[]> paths) {
	return paths.Join("\n", path => {
		Context.Entry unused = null;
		Token token = GetTokenAt(tokens, path, ref unused);
		return path.Join(", ") + ":" + token + " @" + ParseError.FilePositionOf(token, rows);
	});
}
/// <summary>Greater-than-or-equal; returns the entry itself when the operands can't be compared.</summary>
public static object op_gte(Tokenizer tok, Context.Entry e, object scope) {
	int comparison;
	if (!op_Compare(tok, e, scope, out comparison)) { return e; }
	return comparison >= 0;
}
/// <summary>
/// The '*' operator. Numeric operands multiply as doubles. A string times a number
/// repeats the string: whole repetitions for the integer part, plus a leading substring
/// proportional to the fractional part (e.g. "ab" * 2.5 => "ab"+"ab"+"a").
/// On failure, an error is recorded and the entry itself is returned.
/// </summary>
public static object op_mul(Tokenizer tok, Context.Entry e, object scope) {
	object left, right; Type lType, rType;
	op_BinaryArgs(tok, e, scope, out left, out right, out lType, out rType);
	do {
		bool lString = lType == typeof(string);
		bool rString = rType == typeof(string);
		// if one of them is a string, there is some string multiplication logic to do!
		if (lString != rString) {
			string meaningfulString;
			double meaningfulNumber;
			if (lString) {
				if (!CodeConvert.IsConvertable(rType)) { break; }
				meaningfulString = left.ToString();
				CodeConvert.TryConvert(ref right, typeof(double));
				meaningfulNumber = (double)right;
			} else {
				if (!CodeConvert.IsConvertable(lType)) { break; }
				meaningfulString = right.ToString();
				CodeConvert.TryConvert(ref left, typeof(double));
				meaningfulNumber = (double)left;
			}
			StringBuilder sb = new StringBuilder();
			// BUGFIX: loop bound was `i < meaningfulNumber`, which ran ceil(n) times for
			// fractional n and then appended the fractional part again (an extra full copy).
			int wholeRepeats = (int)meaningfulNumber;
			for (int i = 0; i < wholeRepeats; ++i) { sb.Append(meaningfulString); }
			double fraction = meaningfulNumber - wholeRepeats;
			int count = (int)(meaningfulString.Length * fraction);
			if (count > 0) { sb.Append(meaningfulString.Substring(0, count)); }
			return sb.ToString();
		}
		if (CodeConvert.IsConvertable(lType) && CodeConvert.IsConvertable(rType)) {
			CodeConvert.TryConvert(ref left, typeof(double));
			CodeConvert.TryConvert(ref right, typeof(double));
			return ((double)left) * ((double)right);
		}
	} while (false);
	tok.AddError(e.tokens[1], "unable to multiply " + lType + " and " + rType);
	return e;
}
/// <summary>Less-than-or-equal; returns the entry itself when the operands can't be compared.</summary>
public static object op_lte(Tokenizer tok, Context.Entry e, object scope) {
	if (op_Compare(tok, e, scope, out int comparison)) {
		return comparison <= 0;
	}
	return e;
}
// spaceship operator
/// <summary>
/// Compares the two resolved operands. Returns false (with compareValue 0 and an error
/// recorded) when the operand types differ; comparing mixed types is unsupported.
/// </summary>
public static bool op_Compare(Tokenizer tok, Context.Entry e, object scope, out int compareValue) {
	op_BinaryArgs(tok, e, scope, out object left, out object right, out Type lType, out Type rType);
	if (lType != rType) {
		compareValue = 0;
		tok.AddError(e.tokens[1].index, "can't operate (" + lType + ")" + left + " " + e.tokens[1] + " (" + rType + ")" + right);
		return false;
	}
	return lType.TryCompare(left, right, out compareValue);
}
/// <summary>
/// Follows an index chain into the token tree: the first index selects from currentPath,
/// each subsequent index descends into the selected token's context entry.
/// lastPathNode is left pointing at the entry of the last descent (unchanged for a 1-step path).
/// </summary>
Token GetTokenAt(List<Token> currentPath, IList<int> index, ref Context.Entry lastPathNode) {
	Token found = currentPath[index[0]];
	for (int depth = 1; depth < index.Count; ++depth) {
		lastPathNode = found.GetAsContextEntry();
		found = lastPathNode.tokens[index[depth]];
	}
	return found;
}
/// <summary>
/// Advances past comment tokens. Returns false if Increment() runs out of tokens.
/// When incrementAtLeastOnce is set, a non-null placeholder forces one Increment() first.
/// </summary>
protected bool SkipComments(bool incrementAtLeastOnce = false) {
	Context.Entry entry = incrementAtLeastOnce ? Context.Entry.None : null;
	while (true) {
		if (entry != null && !Increment()) { return false; }
		entry = Current.Token.GetAsContextEntry();
		if (entry == null || !entry.IsComment()) { return true; }
	}
}
/// <summary>
/// Resolves the left (tokens[0]) and right (tokens[2]) operands of a binary expression.
/// </summary>
public static void op_BinaryArgs(Tokenizer tok, Context.Entry e, object scope, out object left, out object right, out Type lType, out Type rType) {
	op_ResolveToken(tok, e.tokens[0], scope, out left, out lType);
	op_ResolveToken(tok, e.tokens[2], scope, out right, out rType);
	// upcast to double. all the math operations expect doubles only, for algorithm simplicity
	void Promote(ref object value, ref Type type) {
		if (type == typeof(string) || type == typeof(double) || !CodeConvert.IsConvertable(type)) { return; }
		CodeConvert.TryConvert(ref value, typeof(double));
		type = typeof(double);
	}
	Promote(ref left, ref lType);
	Promote(ref right, ref rType);
}
// Ensures the binary operator at tokens[index] is wrapped in a context entry named
// contextName, creating one (spanning left operand, operator, right operand) if needed.
// Returns the entry, or null when an operand is missing (error recorded in tok).
public static Context.Entry opinit_Binary(List <Token> tokens, Tokenizer tok, int index, string contextName) {
	Token t = tokens[index];
	Context.Entry e = tokens[index].GetAsContextEntry();
	if (e != null) {
		// already wrapped in a context; it must be the expected one
		if (e.context.name != contextName) {
			throw new Exception(tok.AddError(t, "expected context: " + contextName + ", found " + e.context.name).ToString());
		}
		return(e);
	}
	// a binary operator needs a token on each side
	if (index - 1 < 0) { tok.AddError(t, "missing left operand"); return(null); }
	if (index + 1 >= tokens.Count) { tok.AddError(t, "missing right operand"); return(null); }
	Context foundContext;
	Context.allContexts.TryGetValue(contextName, out foundContext);
	if (foundContext == null) {
		throw new Exception(tok.AddError(t, "context '" + contextName + "' does not exist").ToString());
	}
	// find the entry that owns this token list, to serve as the new entry's parent
	Context.Entry parent = null;
	int pIndex;
	for (pIndex = 0; pIndex < tokens.Count; ++pIndex) {
		e = tokens[pIndex].GetAsContextEntry();
		if (e != null && e.tokens == tokens) { parent = e; break; }
	}
	// the operator token must not claim itself as its own parent
	if (pIndex == index) { throw new Exception(tok.AddError(t, "parent context recursion").ToString()); }
	// wrap [left, operator, right] into a new 3-token entry anchored at the operator
	e = foundContext.GetEntry(tokens, index - 1, t.meta, parent);
	e.tokenCount = 3;
	t.meta = e;
	tokens[index] = t; // write the modified token back (NOTE(review): suggests Token is a value type — confirm)
	tok.ExtractContextAsSubTokenList(e);
	return(e);
}
/// <summary>
/// Short display form of this token: a context token prints as just its opening or
/// closing delimiter; any other token prints via ToString().
/// </summary>
public string GetAsSmallText() {
	Context.Entry entry = GetAsContextEntry();
	if (entry != null && IsContextBeginning()) { return entry.beginDelim.ToString(); }
	if (entry != null && IsContextEnding()) { return entry.endDelim.ToString(); }
	return ToString();
}
/// <summary>
/// Source-file position of a token. Invalid tokens are followed down to their context's
/// first child token; cycles (or a dead end) yield "???".
/// </summary>
public string FilePositionOf(Token token) {
	List<Context.Entry> seen = new List<Context.Entry>();
	while (!token.IsValid) {
		Context.Entry entry = token.GetAsContextEntry();
		if (entry == null || seen.Contains(entry)) { return "???"; }
		seen.Add(entry);
		token = entry.tokens[0];
	}
	return ParseError.FilePositionOf(token, rows);
}
/// <summary>
/// The '+' operator: concatenation when either operand is a string, otherwise double
/// addition. On failure, an error is recorded and the entry itself is returned.
/// </summary>
public static object op_add(Tokenizer tok, Context.Entry e, object scope) {
	op_BinaryArgs(tok, e, scope, out object left, out object right, out Type lType, out Type rType);
	bool eitherIsString = lType == typeof(string) || rType == typeof(string);
	if (eitherIsString) {
		return left.ToString() + right.ToString();
	}
	if (CodeConvert.IsConvertable(lType) && CodeConvert.IsConvertable(rType)) {
		CodeConvert.TryConvert(ref left, typeof(double));
		CodeConvert.TryConvert(ref right, typeof(double));
		return (double)left + (double)right;
	}
	tok.AddError(e.tokens[1], "unable to add " + lType + " and " + rType + " : " + left + " + " + right);
	return e;
}
// Renders a token list for debugging: leaf tokens are quoted, nested contexts are
// printed recursively with one extra indent level, and a context's own placeholder
// tokens print as its begin/end delimiters.
public static string DebugPrint(IList <Token> tokens, int depth = 0, string indent = "  ") {
	StringBuilder sb = new StringBuilder();
	for (int i = 0; i < tokens.Count; ++i) {
		Token t = tokens[i];
		Context.Entry e = t.GetAsContextEntry();
		if (e != null) {
			if (e.tokens != tokens) {
				// token refers to a child context: recurse into its own token list.
				// consecutive child contexts stay on one line; otherwise start a new line.
				Context.Entry prevEntry = i > 0 ? tokens[i - 1].GetAsContextEntry() : null;
				if (prevEntry != null && prevEntry.tokens != tokens) {
					sb.Append(indent);
				} else {
					sb.Append("\n").Append(Show.Indent(depth + 1, indent));
				}
				// NOTE(review): the recursive call does not forward `indent`, so a custom
				// indent string only applies at the top level — confirm this is intentional.
				sb.Append(DebugPrint(e.tokens, depth + 1)).
					Append("\n").Append(Show.Indent(depth, indent));
			} else {
				// token belongs to this very list: print its delimiters / source meta
				if (i == 0) { sb.Append(e.beginDelim); } else if (i == tokens.Count - 1) { sb.Append(e.endDelim); } else {
					sb.Append(" ").Append(e.sourceMeta).Append(" ");
				}
			}
		} else {
			sb.Append("'").Append(tokens[i].GetAsSmallText()).Append("'");
		}
	}
	return(sb.ToString());
}
// Closes any context entries left open at end-of-input, extending them to the last token.
// Unterminated contexts are parse errors, except line comments (legitimately ended by EOF).
private void FinalTokenCleanup() {
	for (int i = 0; i < tokens.Count; ++i) {
		// any unfinished contexts must end. the last place they could end is the end of this string
		Context.Entry e = tokens[i].GetAsContextEntry();
		if (e != null && e.tokenCount < 0) { // negative tokenCount marks an unterminated context
			e.tokenCount = tokens.Count - e.tokenStart;
			ExtractContextAsSubTokenList(e);
			if (e.context != CodeRules.CommentLine) // this is an error, unless it's a comment
			{
				errors.Add(new ParseError(tokens[i], rows, "missing closing token"));
			}
		}
	}
}
// Collapses the tokens belonging to the given entry out of the parent token list into
// the entry's own sub-list, leaving a single invalidated placeholder token behind.
// Statement order matters: the span is copied before it is removed from the parent.
internal void ExtractContextAsSubTokenList(Context.Entry entry) {
	if (entry.tokenCount <= 0) { throw new Exception("what just happened?"); }
	int indexWhereItHappens = entry.tokenStart;
	// copy the entry's token span before mutating the parent list
	List <Token> subTokens = entry.tokens.GetRange(entry.tokenStart, entry.tokenCount);
	// locate the token that carries this entry as its meta; it becomes the placeholder
	int index = subTokens.FindIndex(t => t.GetAsContextEntry() == entry);
	// remove all but one of the original tokens from the parent list
	entry.RemoveTokenRange(entry.tokenStart, entry.tokenCount - 1);
	Token entryToken = subTokens[index];
	entryToken.Invalidate(); // placeholder resolves via meta, not source text
	entry.tokens[indexWhereItHappens] = entryToken;
	// the entry now owns its own token list, re-based at zero
	entry.tokens = subTokens;
	entry.tokenStart = 0;
	entry.tokenCount = subTokens.Count;
}
// Reads the next member name from the token stream into memberId/memberToken and kicks
// off resolution of its type. Returns true to continue parsing with the current state,
// or the result of CalculateMemberTypeBasedOnName() once a name was found.
protected bool GetMemberNameAndAssociatedType() {
	memberToken = Current.Token;
	// structural delimiters (separators etc.) are consumed, not treated as names
	if (SkipStructuredDelimiters(memberToken.GetAsDelimiter())) { memberToken.Invalidate(); return(true); }
	memberId = null;
	Context.Entry e = memberToken.GetAsContextEntry();
	if (e != null) {
		if (dictionaryAdd == null) {
			// member of a regular object: the name must be plain text
			if (e.IsText()) { memberId = e.GetText(); } else {
				AddError("unable to parse member (" + e.context.name + ") as member name for " + resultType);
			}
		} else {
			// dictionary member value will be resolved later
			memberId = e.Resolve(tok, scope);
		}
		// skip past the context's tokens when they live in the current stream
		if (e.tokens == Current.tokens) { Current.tokenIndex += e.tokenCount - 1; }
	} else {
		memberId = memberToken.GetAsBasicToken();
	}
	if (memberId == null) {
		// no usable name: mark the token consumed and keep the current state as the value
		memberToken.index = -1;
		memberValue = state;
		return(true);
	}
	memberValue = null;
	return(CalculateMemberTypeBasedOnName());
}
/// <summary>
/// The exponent operator: both operands must be numeric (convertible to double).
/// On failure, an error is recorded and the entry itself is returned.
/// </summary>
public static object op_pow(Tokenizer tok, Context.Entry e, object scope) {
	op_BinaryArgs(tok, e, scope, out object left, out object right, out Type lType, out Type rType);
	bool numericOperands = lType != typeof(string) && rType != typeof(string)
		&& CodeConvert.IsConvertable(lType) && CodeConvert.IsConvertable(rType);
	if (numericOperands) {
		CodeConvert.TryConvert(ref left, typeof(double));
		CodeConvert.TryConvert(ref right, typeof(double));
		return Math.Pow((double)left, (double)right);
	}
	tok.AddError(e.tokens[1], "unable to exponent " + lType + " and " + rType + " : " + left + " ^^ " + right);
	return e;
}
/// <summary>
/// The '/' operator: both operands must be numeric (convertible to double).
/// On failure, an error is recorded and the entry itself is returned.
/// </summary>
public static object op_div(Tokenizer tok, Context.Entry e, object scope) {
	object left, right; Type lType, rType;
	op_BinaryArgs(tok, e, scope, out left, out right, out lType, out rType);
	bool numericOperands = lType != typeof(string) && rType != typeof(string)
		&& CodeConvert.IsConvertable(lType) && CodeConvert.IsConvertable(rType);
	if (numericOperands) {
		CodeConvert.TryConvert(ref left, typeof(double));
		CodeConvert.TryConvert(ref right, typeof(double));
		return (double)left / (double)right;
	}
	tok.AddError(e.tokens[1], "unable to divide " + lType + " and " + rType + " : " + left + " / " + right);
	return e;
}
/// <summary>
/// Display form of this token. Context tokens show their delimiter, raw text, or
/// context name; anything else shows its resolved value.
/// </summary>
public override string ToString() {
	Context.Entry entry = meta as Context.Entry;
	if (entry == null) {
		// non-context token: print its resolved value
		// NOTE(review): Resolve(null, null) may return null here — confirm upstream guarantees
		return Resolve(null, null).ToString();
	}
	if (entry.sourceMeta is Delim delim) { return delim.ToString(); }
	if (IsValid) { return ToString(entry.TextRaw); }
	string output = entry.context.name;
	if (entry.IsText()) { output += "(" + entry.GetText() + ")"; }
	return output;
}
// Iterative depth-first search over the token tree for tokens matching predicate.
// Each match is recorded as an index path: one index per nesting level.
List <int[]> FindTokenPaths(Func <Token, bool> predicate, bool justOne = false) {
	if (tokens.Count == 0) { return(new List <int[]>()); }
	// parallel stacks: the token list at each depth, and the cursor within it
	List <List <Token> > path = new List <List <Token> >();
	List <int> position = new List <int>();
	List <int[]> paths = new List <int[]>();
	path.Add(tokens);
	position.Add(0);
	while (position[position.Count - 1] < path[path.Count - 1].Count) {
		List <Token> currentTokens = path[path.Count - 1];
		int currentIndex = position[position.Count - 1];
		Token token = currentTokens[currentIndex];
		// the current cursor stack IS the path of this token
		if (predicate(token)) { paths.Add(position.ToArray()); }
		Context.Entry e = token.GetAsContextEntry();
		bool incremented = false;
		if (e != null) {
			// descend into a child context (unless it's the list we're already walking)
			if (currentTokens != e.tokens) {
				position.Add(0);
				path.Add(e.tokens);
				if (justOne) { break; } // stop early when only one path is wanted
				currentIndex = position[position.Count - 1];
				currentTokens = path[path.Count - 1];
				incremented = true;
			}
		}
		if (!incremented) {
			// advance the cursor, popping back up whenever a level is exhausted
			do {
				position[position.Count - 1] = ++currentIndex;
				if (currentIndex >= currentTokens.Count) {
					position.RemoveAt(position.Count - 1);
					path.RemoveAt(path.Count - 1);
					if (position.Count <= 0) { break; }
					// resume the parent level just past the child we finished
					currentIndex = position[position.Count - 1];
					position[position.Count - 1] = ++currentIndex;
					currentTokens = path[path.Count - 1];
				}
			} while (currentIndex >= currentTokens.Count);
		}
		if (position.Count <= 0) { break; } // walked off the root: done
	}
	return(paths);
}
// Converts the delimiter found at the current position into a token, running the
// delimiter's parse rule (if any) and opening/closing context entries for
// context-delimiting delimiters. index is advanced past the consumed characters
// (minus one, to account for the caller's loop increment).
private void HandleDelimiter(Delim delim, ref int index, List <Context.Entry> contextStack, ref Context currentContext, Context defaultContext) {
	Token delimToken = new Token(delim, index, delim.text.Length);
	if (delim.parseRule != null) {
		// delimiter has custom parse logic; it may replace the token's value entirely
		ParseResult pr = delim.parseRule.Invoke(str, index);
		if (pr.IsError && errors != null) {
			pr.error.OffsetBy(delimToken.index, rows); // rule errors are rule-relative
			errors.Add(pr.error);
		}
		if (pr.replacementValue != null) {
			delimToken.length = pr.lengthParsed;
			delimToken.meta = new TokenSubstitution(str, pr.replacementValue);
		}
		index += pr.lengthParsed - 1;
	} else {
		index += delim.text.Length - 1;
	}
	DelimCtx dcx = delim as DelimCtx;
	Context.Entry endedContext = null;
	if (dcx != null) {
		// closing delimiter for the context currently on top of the stack?
		if (contextStack.Count > 0 && dcx.Context == currentContext && dcx.isEnd) {
			endedContext = contextStack[contextStack.Count - 1];
			endedContext.endDelim = dcx;
			delimToken.meta = endedContext;
			endedContext.tokenCount = (tokens.Count - endedContext.tokenStart) + 1; // +1 for this closing token
			contextStack.RemoveAt(contextStack.Count - 1);
			// fall back to the parent context, or the default when the stack empties
			if (contextStack.Count > 0) { currentContext = contextStack[contextStack.Count - 1].context; } else { currentContext = defaultContext; }
		}
		// opening delimiter — only when it did not just close a context
		// (a symmetric delimiter like a quote can be both isStart and isEnd)
		if (endedContext == null && dcx.isStart) {
			Context.Entry parentCntx = (contextStack.Count > 0) ? contextStack[contextStack.Count - 1] : null;
			Context.Entry newContext = dcx.Context.GetEntry(tokens, tokens.Count, str, parentCntx);
			newContext.beginDelim = dcx;
			currentContext = dcx.Context;
			delimToken.meta = newContext;
			contextStack.Add(newContext);
		}
	}
	tokens.Add(delimToken);
	// a finished context collapses into a sub-token list (must happen after the Add above)
	if (endedContext != null) { ExtractContextAsSubTokenList(endedContext); }
}
/// <summary>Logical AND: resolves both operands and reduces each to a boolean.</summary>
public static object op_and(Tokenizer tok, Context.Entry e, object scope) {
	op_BinaryArgs(tok, e, scope, out object lhs, out object rhs, out Type lhsType, out Type rhsType);
	// short-circuits: rhs is only reduced when lhs is true
	if (!op_reduceToBoolean(lhs, lhsType)) { return false; }
	return op_reduceToBoolean(rhs, rhsType);
}
/// <summary>
/// The assignment operator: always yields the literal "=" string.
/// NOTE(review): presumably the actual assignment is performed by a caller that
/// recognizes this result — confirm against the operator dispatch code.
/// </summary>
public static object op_asn(Tokenizer tok, Context.Entry e, object scope) {
	return "=";
}
// Resolves a token to a runtime value. Literals pass through; strings may be keyword
// literals (null/true/false) or names looked up in scope — dictionary keys, fields, or
// properties, with '*' wildcard support at the start or end of the name. Lists are
// resolved element-by-element.
public static void op_ResolveToken(Tokenizer tok, Token token, object scope, out object value, out Type type) {
	value = token.Resolve(tok, scope);
	type = (value != null) ? value.GetType() : null;
	if (scope == null || type == null) { return; } // no scope, or no data, easy. we're done.
	string name = value as string;
	if (name == null) // data not a string (can't be a reference from scope), also easy. done.
	{
		List <object> args = value as List <object>;
		if (args != null) {
			// resolve each list element in turn; "," separator entries are dropped
			for (int i = 0; i < args.Count; ++i) {
				bool remove = false;
				switch (args[i]) { case ",": remove = true; break; }
				if (remove) { args.RemoveAt(i--); } else {
					op_ResolveToken(tok, new Token(args[i], -1, -1), scope, out value, out type);
					args[i] = value;
				}
			}
			value = args;
			type = args.GetType();
		}
		return;
	}
	Context.Entry e = token.GetAsContextEntry();
	if (e != null && e.IsText()) { return; } // data is explicitly meant to be a string, done.
	// keyword literals
	switch (name) {
	case "null": value = null; type = null; return;
	case "true": value = true; type = typeof(bool); return;
	case "false": value = false; type = typeof(bool); return;
	}
	// otherwise, we search for the data within the given context
	Type scopeType = scope.GetType();
	KeyValuePair <Type, Type> dType = scopeType.GetIDictionaryType();
	if (dType.Key != null) {
		// scope is a dictionary: resolve the name as a key
		IDictionary dict = scope as IDictionary;
		if (dType.Key == typeof(string) && (name.StartsWith(Parser.Wildcard) || name.EndsWith(Parser.Wildcard))) {
			// wildcard key: scan entries, reading each Key via reflection (cached accessor)
			MethodInfo getKey = null;
			foreach (var kvp in dict) {
				if (getKey == null) { getKey = kvp.GetType().GetProperty("Key").GetGetMethod(); }
				string memberName = getKey.Invoke(kvp, null) as string;
				if (Parser.IsWildcardMatch(memberName, name)) { name = memberName; break; }
			}
		}
		// on a missing key, value keeps the name string (no assignment happens)
		if (dict.Contains(name)) { value = dict[name]; }
		type = (value != null) ? value.GetType() : null;
		return;
	}
	if (name.StartsWith(Parser.Wildcard) || name.EndsWith(Parser.Wildcard)) {
		// wildcard search over the scope object's fields, then properties
		FieldInfo[] fields = scopeType.GetFields();
		string[] names = Array.ConvertAll(fields, f => f.Name);
		int index = Parser.FindIndexWithWildcard(names, name, false);
		if (index >= 0) { value = fields[index].GetValue(scope); type = (value != null) ? value.GetType() : null; return; }
		PropertyInfo[] props = scopeType.GetProperties();
		names = Array.ConvertAll(props, p => p.Name);
		index = Parser.FindIndexWithWildcard(names, name, false);
		if (index >= 0) { value = props[index].GetValue(scope); type = (value != null) ? value.GetType() : null; return; }
	} else {
		// exact-name field or property lookup
		FieldInfo field = scopeType.GetField(name);
		if (field != null) { value = field.GetValue(scope); type = (value != null) ? value.GetType() : null; return; }
		PropertyInfo prop = scopeType.GetProperty(name);
		if (prop != null) { value = prop.GetValue(scope); type = (value != null) ? value.GetType() : null; return; }
	}
}