/*********
** Private methods
*********/
/// <summary>Get a new string with tokens substituted.</summary>
/// <param name="context">Provides access to contextual tokens.</param>
/// <param name="result">The input string with tokens substituted.</param>
/// <param name="isReady">Whether all tokens in the <paramref name="result"/> have been replaced.</param>
private void GetApplied(IContext context, out string result, out bool isReady)
{
    bool replacedAll = true;
    StringBuilder output = new StringBuilder();
    foreach (ILexToken bit in this.LexTokens)
    {
        // only token bits are substituted; every other bit is literal text
        if (bit is LexTokenToken tokenBit)
        {
            TokenName tokenName = new TokenName(tokenBit.Name, tokenBit.InputArg?.Text);
            IToken found = context.GetToken(tokenName, enforceContext: true);
            if (found != null)
            {
                output.Append(found.GetValues(tokenName).FirstOrDefault());
            }
            else
            {
                // unknown token: keep the raw placeholder and mark the string not ready
                replacedAll = false;
                output.Append(bit.Text);
            }
        }
        else
        {
            output.Append(bit.Text);
        }
    }

    result = output.ToString();
    isReady = replacedAll;
}
/*********
** Public methods
*********/
/// <summary>Construct an instance.</summary>
/// <param name="raw">The raw string before token substitution.</param>
/// <param name="tokenContext">The available token context.</param>
public TokenString(string raw, IContext tokenContext)
{
    // FIX: guard against null/blank input (previously raw.Trim() threw a
    // NullReferenceException on null), consistent with the other overload.
    this.Raw = raw?.Trim();
    if (string.IsNullOrWhiteSpace(this.Raw))
        return;

    // extract tokens
    int tokensFound = 0;
    foreach (Match match in TokenString.TokenPattern.Matches(raw))
    {
        tokensFound++;
        string rawToken = match.Groups[1].Value.Trim();
        if (TokenName.TryParse(rawToken, out TokenName name))
        {
            // track known tokens separately from unrecognized ones
            if (tokenContext.Contains(name, enforceContext: false))
                this.Tokens.Add(name);
            else
                this.InvalidTokens.Add(rawToken);
        }
        else
            this.InvalidTokens.Add(rawToken);
    }

    // single-token iff exactly one match was found and removing it leaves nothing
    this.IsSingleTokenOnly = tokensFound == 1 && TokenString.TokenPattern.Replace(this.Raw, "", 1) == "";
}
/*********
** Public methods
*********/
/// <summary>Construct an instance.</summary>
/// <param name="raw">The raw string before token substitution.</param>
/// <param name="tokenContext">The available token context.</param>
public TokenString(string raw, IContext tokenContext)
{
    // normalize the raw value; a null/blank string has no tokens to extract
    this.Raw = raw?.Trim();
    if (string.IsNullOrWhiteSpace(this.Raw))
    {
        return;
    }

    // scan for token placeholders
    int matchCount = 0;
    foreach (Match match in TokenString.TokenPattern.Matches(raw))
    {
        matchCount++;
        string rawName = match.Groups[1].Value.Trim();

        // names that don't parse, or aren't known to the context, are invalid
        if (!TokenName.TryParse(rawName, out TokenName parsed))
        {
            this.InvalidTokens.Add(rawName);
            continue;
        }
        if (tokenContext.Contains(parsed, enforceContext: false))
        {
            this.Tokens.Add(parsed);
        }
        else
        {
            this.InvalidTokens.Add(rawName);
        }
    }

    // derived metadata
    this.IsSingleTokenOnly = matchCount == 1 && TokenString.TokenPattern.Replace(this.Raw, "", 1) == "";
    this.IsMutable = this.Tokens.Any();
}
/// <summary>Construct a token.</summary>
/// <param name="name">The token's name/type.</param>
/// <param name="value">The token's raw text value.</param>
/// <param name="line">The line on which the token appears.</param>
/// <param name="column">The column at which the token starts.</param>
public Token(TokenName name, string value, int line, int column)
{
    this.Name = name;
    this.Value = value;
    this.Line = line;
    this.Column = column;
}
/// <summary>Tokenize the input string into a lazy sequence of tokens, ending with an end-of-file token.</summary>
/// <param name="input">The raw source text to tokenize.</param>
/// <returns>The tokens in source order; the final token is always <see cref="TokenName.EndOfFile"/>.</returns>
/// <exception cref="UnexpectedCharacterException">A character was reached that no token pattern matches.</exception>
public IEnumerable <Token> GetTokens(string input)
{
    string trimmedInput = TrimInput(input);
    var lineNumber = 1;
    int currentPosition = 0; // index just past the last successful match
    var lineStart = 0; // offset used to compute a token's column on the current line
    var regex = _tokensRegexProvider.GetRegex();
    var match = regex.Match(trimmedInput);
    while (match.Success)
    {
        var type = GetTokenName(match);
        if (type == TokenName.NewLine)
        {
            // NOTE(review): lineStart is set to the end of the PREVIOUS match rather than
            // the end of the newline match itself; if any text can separate them, columns
            // on the next line would be offset — confirm the regex guarantees adjacency.
            lineStart = currentPosition;
            lineNumber++;
        }
        else if (!TokenName.IsSkippable(type))
        {
            var value = match.Groups[type.Name]?.Value;
            // a keyword may be re-classified as an identifier depending on the next token
            var nextTokenName = GetTokenName(match.NextMatch());
            type = _keywordTokenizer.TransformToIdentifierIfNeccessary(type, nextTokenName, value);
            yield return(new Token(type, value, lineNumber, match.Index - lineStart));
        }
        currentPosition = match.Index + match.Length;
        match = regex.Match(trimmedInput, currentPosition);
    }

    // any unmatched trailing character is a lexical error
    if (currentPosition != trimmedInput.Length)
    {
        throw new UnexpectedCharacterException(trimmedInput[currentPosition], lineNumber);
    }

    yield return(new Token(TokenName.EndOfFile, string.Empty, lineNumber, currentPosition - lineStart));
}
/// <summary>Build a comparison expression for the given operator token.</summary>
/// <param name="tokenName">The comparison operator token.</param>
/// <param name="left">The left operand.</param>
/// <param name="right">The right operand.</param>
/// <returns>The binary comparison expression.</returns>
/// <exception cref="ApplicationException">The token is not a known comparison operator.</exception>
private Expression GetComparationExpression(TokenName tokenName, Expression left, Expression right)
{
    switch (tokenName)
    {
        case TokenName.GREATER:
            return(Expression.GreaterThan(left, right));
        case TokenName.LESS:
            return(Expression.LessThan(left, right));
        case TokenName.GREATER_EQUAL:
            return(Expression.GreaterThanOrEqual(left, right));
        case TokenName.LESS_EQUAL:
            return(Expression.LessThanOrEqual(left, right));
        case TokenName.ISEQUAL:
            return(Expression.Equal(left, right));
        case TokenName.NOT_EQUAL:
            // FIX: compare against the right operand; the previous code compared against
            // Expression.Constant(null), turning every '!=' into a null check of the left side.
            return(Expression.NotEqual(left, right));
        default:
            throw new ApplicationException("Unknown COMPARISON_OPERATOR: " + tokenName);
    }
}
/// <summary>Construct a token; with no arguments, yields an empty default token at (0, 0).</summary>
/// <param name="name">The token type; defaults to <see cref="TokenName.DEFAULT"/>.</param>
/// <param name="value">The token's text; defaults to an empty string.</param>
/// <param name="row">The row at which the token appears.</param>
/// <param name="col">The column at which the token appears.</param>
public Token(TokenName name = TokenName.DEFAULT, string value = "", int row = 0, int col = 0)
{
    Name = name;
    Value = value;
    Row = row;
    Col = col;
}
/// <summary>Re-classify a keyword token as an identifier when its usage requires it.</summary>
/// <param name="tokenName">The token's current classification.</param>
/// <param name="nextTokenName">The classification of the following token.</param>
/// <param name="value">The token's raw text (surrounding double quotes are stripped).</param>
/// <returns>A new identifier token name, or the original classification unchanged.</returns>
public TokenName TransformToIdentifierIfNeccessary(TokenName tokenName, TokenName nextTokenName, string value)
{
    string unquoted = value.Trim('"');
    if (IsKeyword(tokenName, nextTokenName, unquoted))
    {
        return new TokenName(unquoted);
    }
    return tokenName;
}
/// <summary>A name token surrounded by whitespace is parsed and the whitespace is ignored.</summary>
public void NamePosition()
{
    Tokenizer tokenizer = new Tokenizer(StringToStream(" /Name "));

    TokenName name = tokenizer.GetToken() as TokenName;
    Assert.NotNull(name);
    Assert.True(name.Value == "Name");
    Assert.True(tokenizer.GetToken() is TokenEmpty);
}
/// <summary>Consecutive #xx escapes inside a name are each decoded.</summary>
public void NameEscaped2()
{
    Tokenizer tokenizer = new Tokenizer(StringToStream("/A#20#20#20B"));

    TokenName name = tokenizer.GetToken() as TokenName;
    Assert.NotNull(name);
    Assert.True(name.Value == "A B");
    Assert.True(tokenizer.GetToken() is TokenEmpty);
}
/// <summary>A name whose text looks like a number is still parsed as a name.</summary>
public void NameLikeNumber()
{
    Tokenizer tokenizer = new Tokenizer(StringToStream("/1.2"));

    TokenName name = tokenizer.GetToken() as TokenName;
    Assert.NotNull(name);
    Assert.True(name.Value == "1.2");
    Assert.True(tokenizer.GetToken() is TokenEmpty);
}
/// <summary>Punctuation characters allowed in names are kept verbatim.</summary>
public void NameSpecialChars()
{
    Tokenizer tokenizer = new Tokenizer(StringToStream("/A;_-*B?"));

    TokenName name = tokenizer.GetToken() as TokenName;
    Assert.NotNull(name);
    Assert.True(name.Value == "A;_-*B?");
    Assert.True(tokenizer.GetToken() is TokenEmpty);
}
/// <summary>A bare slash followed by whitespace yields a name token with an empty value.</summary>
public void NameZeroLength2()
{
    Tokenizer tokenizer = new Tokenizer(StringToStream("/ "));

    TokenName name = tokenizer.GetToken() as TokenName;
    Assert.NotNull(name);
    Assert.True(name.Value == "");
    Assert.True(tokenizer.GetToken() is TokenEmpty);
}
/**
 * @brief build dictionary of internal functions from an interface.
 * @param iface = interface with function definitions
 * @param inclSig = true: catalog by name with arg sig, eg, llSay(integer,string)
 *                  false: catalog by simple name only, eg, state_entry
 * @returns dictionary of function definition tokens
 *
 * NOTE(review): inclSig is not referenced anywhere in this constructor body;
 * the keying appears to be decided inside AddEntry() — confirm the @param text.
 */
public InternalFuncDict(Type iface, bool inclSig) : base(false)
{
    /*
     * Loop through list of all methods declared in the interface.
     */
    System.Reflection.MethodInfo[] ifaceMethods = iface.GetMethods();
    foreach (System.Reflection.MethodInfo ifaceMethod in ifaceMethods)
    {
        string key = ifaceMethod.Name;

        /*
         * Only do ones that begin with lower-case letters...
         * as any others can't be referenced by scripts
         */
        if ((key[0] < 'a') || (key[0] > 'z'))
        {
            continue;
        }
        try
        {
            /*
             * Create a corresponding TokenDeclVar struct:
             * translate each reflected parameter into a (type, name) argument entry.
             */
            System.Reflection.ParameterInfo[] parameters = ifaceMethod.GetParameters();
            TokenArgDecl argDecl = new TokenArgDecl(null);
            for (int i = 0; i < parameters.Length; i++)
            {
                System.Reflection.ParameterInfo param = parameters[i];
                TokenType type = TokenType.FromSysType(null, param.ParameterType);
                TokenName name = new TokenName(null, param.Name);
                argDecl.AddArg(type, name);
            }
            TokenDeclVar declFunc = new TokenDeclVar(null, null, null);
            declFunc.name = new TokenName(null, key);
            declFunc.retType = TokenType.FromSysType(null, ifaceMethod.ReturnType);
            declFunc.argDecl = argDecl;

            /*
             * Add the TokenDeclVar struct to the dictionary.
             */
            this.AddEntry(declFunc);
        }
        catch (Exception except)
        {
            // best-effort: skip any method whose types can't be translated,
            // logging only the first line of the exception text
            string msg = except.ToString();
            int i = msg.IndexOf("\n");
            if (i > 0)
            {
                msg = msg.Substring(0, i);
            }
            Console.WriteLine("InternalFuncDict*: {0}: {1}", key, msg);

            ///??? IGNORE ANY THAT FAIL - LIKE UNRECOGNIZED TYPE ???///
        }
    }
}
/// <summary>Build an argument declaration list from reflected method parameters.</summary>
/// <param name="parameters">The reflected parameters of the method.</param>
/// <returns>An argument declaration containing one (type, name) entry per parameter.</returns>
private static TokenArgDecl GetArgDecl(ParameterInfo[] parameters)
{
    TokenArgDecl decl = new TokenArgDecl(null);
    foreach (ParameterInfo parameter in parameters)
    {
        TokenType argType = TokenType.FromSysType(null, parameter.ParameterType);
        TokenName argName = new TokenName(null, parameter.Name);
        decl.AddArg(argType, argName);
    }
    return decl;
}
/*********
** Private methods
*********/
/// <summary>Get a new string with tokens substituted.</summary>
/// <param name="raw">The raw string before token substitution.</param>
/// <param name="context">Provides access to contextual tokens.</param>
private string Apply(string raw, IContext context)
{
    return TokenString.TokenPattern.Replace(raw, match =>
    {
        // resolve the token; leave the placeholder text unchanged if it's unknown
        TokenName name = TokenName.Parse(match.Groups[1].Value);
        IToken token = context.GetToken(name, enforceContext: true);
        if (token == null)
            return match.Value;
        return token.GetValues(name).FirstOrDefault();
    });
}
/// <summary>Parse a boolean <see cref="PatchConfig.Enabled"/> value from a string which can contain tokens, and validate that it's valid.</summary>
/// <param name="rawValue">The raw string which may contain tokens.</param>
/// <param name="tokenContext">The tokens available for this content pack.</param>
/// <param name="migrator">The migrator which validates and migrates content pack data.</param>
/// <param name="error">An error phrase indicating why parsing failed (if applicable).</param>
/// <param name="parsed">The parsed value.</param>
/// <returns>Returns whether parsing succeeded; on failure, <paramref name="error"/> describes the reason.</returns>
private bool TryParseEnabled(string rawValue, IContext tokenContext, IMigration migrator, out string error, out bool parsed)
{
    parsed = false;

    // analyse string
    if (!this.TryParseTokenString(rawValue, tokenContext, migrator, out error, out TokenString tokenString))
    {
        return(false);
    }

    // validate & extract tokens
    string text = rawValue;
    if (tokenString.HasAnyTokens)
    {
        // only one token allowed
        if (!tokenString.IsSingleTokenOnly)
        {
            error = "can't be treated as a true/false value because it contains multiple tokens.";
            return(false);
        }

        // check token options: the token must exist, be immutable, and be valid in context
        TokenName tokenName = tokenString.Tokens.First();
        IToken token = tokenContext.GetToken(tokenName, enforceContext: false);
        InvariantHashSet allowedValues = token?.GetAllowedValues(tokenName);
        if (token == null || token.IsMutable || !token.IsValidInContext)
        {
            error = $"can only use static tokens in this field, consider using a {nameof(PatchConfig.When)} condition instead.";
            return(false);
        }

        // every allowed value must itself parse as a boolean
        if (allowedValues == null || !allowedValues.All(p => bool.TryParse(p, out _)))
        {
            error = "that token isn't restricted to 'true' or 'false'.";
            return(false);
        }

        // a multi-valued token can't yield a single true/false result
        if (token.CanHaveMultipleValues(tokenName))
        {
            error = "can't be treated as a true/false value because that token can have multiple values.";
            return(false);
        }

        // substitute the token's single current value
        text = token.GetValues(tokenName).First();
    }

    // parse text
    if (!bool.TryParse(text, out parsed))
    {
        error = $"can't parse {tokenString.Raw} as a true/false value.";
        return(false);
    }
    return(true);
}
/// <summary>Every token type separated by whitespace is parsed in sequence, including a trailing comment.</summary>
public void OneOfEachSpaced()
{
    Tokenizer tokenizer = new Tokenizer(StringToStream(" [ <64 65> << true /Name 1 3.14 >> ] %comment"));
    tokenizer.IgnoreComments = false;

    TokenArrayOpen arrayOpen = tokenizer.GetToken() as TokenArrayOpen;
    Assert.NotNull(arrayOpen);

    TokenStringHex hexString = tokenizer.GetToken() as TokenStringHex;
    Assert.NotNull(hexString);
    Assert.True(hexString.Raw == "64 65");

    TokenDictionaryOpen dictOpen = tokenizer.GetToken() as TokenDictionaryOpen;
    Assert.NotNull(dictOpen);

    TokenKeyword keyword = tokenizer.GetToken() as TokenKeyword;
    Assert.NotNull(keyword);
    Assert.True(keyword.Value == ParseKeyword.True);

    TokenName name = tokenizer.GetToken() as TokenName;
    Assert.NotNull(name);
    Assert.True(name.Value == "Name");

    TokenInteger integer = tokenizer.GetToken() as TokenInteger;
    Assert.NotNull(integer);
    Assert.True(integer.Value == 1);

    TokenReal real = tokenizer.GetToken() as TokenReal;
    Assert.NotNull(real);
    Assert.True(real.Value == 3.14f);

    TokenDictionaryClose dictClose = tokenizer.GetToken() as TokenDictionaryClose;
    Assert.NotNull(dictClose);

    TokenArrayClose arrayClose = tokenizer.GetToken() as TokenArrayClose;
    Assert.NotNull(arrayClose);

    TokenComment comment = tokenizer.GetToken() as TokenComment;
    Assert.NotNull(comment);
    Assert.True(comment.Value == "%comment");

    Assert.True(tokenizer.GetToken() is TokenEmpty);
}
/// <summary>Construct a NEP-5 token service.</summary>
/// <param name="client">The RPC client used to invoke the token contract.</param>
/// <exception cref="ArgumentNullException">The client is null.</exception>
public NeoNep5Service(IClient client)
    : base(client ?? throw new ArgumentNullException(nameof(client)))
{
    // FIX: validate the client before the base constructor runs; previously the
    // null check only executed after base(client) had already received null.
    GetTokenBalance = new TokenBalanceOf(client);
    GetTokenDecimals = new TokenDecimals(client);
    GetTokenName = new TokenName(client);
    GetTokenTotalSupply = new TokenTotalSupply(client);
    GetTokenSymbol = new TokenSymbol(client);
}
/// <summary>Get the underlying token which handles a name.</summary>
/// <param name="name">The token name.</param>
/// <param name="enforceContext">Whether to only consider tokens that are available in the context.</param>
/// <returns>Returns the matching token, or <c>null</c> if none was found.</returns>
public IToken GetToken(TokenName name, bool enforceContext)
{
    // search each context in priority order; the first match wins
    IToken match = null;
    foreach (IContext context in this.Contexts)
    {
        match = context.GetToken(name, enforceContext);
        if (match != null)
            break;
    }
    return match;
}
/// <summary>Combine the hash codes of all identifying fields (unchecked multiply-xor chain with factor 397).</summary>
/// <returns>The combined hash code; null fields contribute zero.</returns>
public override int GetHashCode()
{
    unchecked
    {
        // null-safe hash of a single component
        int Hash(object component) => component != null ? component.GetHashCode() : 0;

        int combined = Hash(FromAddress);
        combined = (combined * 397) ^ Hash(ToAddress);
        combined = (combined * 397) ^ Hash(TokenName);
        combined = (combined * 397) ^ Hash(Symbol);
        combined = (combined * 397) ^ Hash(TokenType);
        combined = (combined * 397) ^ Hash(TokenId);
        return combined;
    }
}
/// <summary>Migrate a token name.</summary>
/// <param name="name">The token name to migrate.</param>
/// <param name="error">An error message which indicates why migration failed (if any).</param>
/// <returns>Returns whether migration succeeded.</returns>
public virtual bool TryMigrate(ref TokenName name, out string error)
{
    // reject tokens that were only added in a later format version
    if (this.AddedTokens.Contains(name.Key))
    {
        error = this.GetNounPhraseError($"using token {name}");
        return false;
    }

    // no issue found
    error = null;
    return true;
}
/// <summary>Run the DFA lexer over the input and return the recognized tokens (maximal munch with backtracking).</summary>
/// <param name="input">The raw input string to tokenize.</param>
/// <returns>The recognized tokens in input order.</returns>
/// <exception cref="LexerException">A character was reached from which no token can be recognized.</exception>
List <ResultToken> DFA(string input)
{
    List <ResultToken> Tokens = new List <ResultToken>(); // list of output tokens
    ResultToken LastToken = null; // last token successfully recognized so far
    string Output = ""; // accumulated text of the current token
    int CurState = 0; // current DFA state (-1 = no transition)
    int InIndex = 0; // current index into the input string
    int LastInIndex = 0; // input index just after the last successful token (backtrack target)
    Action Action = Action.Continue; // current analyzer action
    while (Action != Action.Stop) // read the input string until the analyzer signals a stop
    {
        // reset per-token state before scanning the next token
        Output = "";
        CurState = 0;
        LastToken = null;
        while (CurState != -1) // consume characters until there is no transition
        {
            // if the current state is accepting, remember the longest token recognized so far
            TokenName temp = FromStateToTokenName(CurState);
            if (temp != TokenName.Nothing)
            {
                LastToken = new ResultToken(temp.ToString(), Output);
                LastInIndex = InIndex;
            }
            if (InIndex > input.Length - 1) // check for end of input
            {
                Action = Action.Stop;
                break;
            }
            CurState = TransitionTable(CurState, input[InIndex]); // next state from the transition table
            if (LastToken == null && CurState == -1) // stop the analyzer if an unrecognizable character is hit
            {
                throw new LexerException(input[InIndex]);
            }
            if (CurState > 0) // append the character to the token text (if not a space)
            {
                Output += input[InIndex];
            }
            InIndex++;
        }
        InIndex = LastInIndex; // backtrack the input to the end of the last successful token
        if (LastToken != null)
        {
            Tokens.Add(LastToken);
        }
    }
    return(Tokens);
}
/// <summary>Migrate a token name.</summary>
/// <param name="name">The token name to migrate.</param>
/// <param name="error">An error message which indicates why migration failed (if any).</param>
/// <returns>Returns whether migration succeeded.</returns>
public bool TryMigrate(ref TokenName name, out string error)
{
    // run each migration in order; stop at the first failure
    foreach (IMigration migration in this.Migrations)
    {
        bool migrated = migration.TryMigrate(ref name, out error);
        if (!migrated)
            return false;
    }

    // no issues found
    error = null;
    return true;
}
/// <summary>Construct an inline-function entry from a reflected method and register it in a dictionary.</summary>
/// <param name="ifd">The dictionary to register with; null selects the shared inline-function dictionary.</param>
/// <param name="doCheckRun">True iff the generated code or the function itself can possibly call CheckRun().</param>
/// <param name="methInfo">The method whose name, return type and parameters define the inline function.</param>
protected TokenDeclInline(VarDict ifd, bool doCheckRun, MethodInfo methInfo) : base(null, null, null)
{
    isTaggedCallsCheckRun = IsTaggedCallsCheckRun(methInfo);
    name = new TokenName(null, methInfo.Name);
    retType = GetRetType(methInfo, TokenType.FromSysType(null, methInfo.ReturnType));
    argDecl = GetArgDecl(methInfo.GetParameters());

    // anything that may call CheckRun() (directly or via its tag) is not trivial
    triviality = (doCheckRun || isTaggedCallsCheckRun) ? Triviality.complex : Triviality.trivial;
    location = new CompValuInline(this);

    // register in the given dictionary, defaulting to the shared inline-function dictionary
    if(ifd == null)
        ifd = inlineFunctions;
    ifd.AddEntry(this);
}
/*********
** Private methods
*********/
/// <summary>Get a new string with tokens substituted.</summary>
/// <param name="context">Provides access to contextual tokens.</param>
private string GetApplied(IContext context)
{
    // an immutable string never changes, so return the raw value directly
    if (!this.IsMutable)
        return this.Raw;

    // replace each placeholder with its current value; unknown tokens are kept as-is
    return TokenString.TokenPattern.Replace(this.Raw, match =>
    {
        TokenName name = TokenName.Parse(match.Groups[1].Value);
        IToken token = context.GetToken(name, enforceContext: true);
        if (token == null)
            return match.Value;
        return token.GetValues(name).FirstOrDefault();
    });
}
/*********
** Public methods
*********/
/// <summary>Construct an instance.</summary>
/// <param name="raw">The raw string before token substitution.</param>
/// <param name="tokenContext">The available token context.</param>
public TokenString(string raw, IContext tokenContext)
{
    // set raw value; a null/blank string is immediately ready with its raw value
    this.Raw = raw?.Trim();
    if (string.IsNullOrWhiteSpace(this.Raw))
    {
        this.ValueImpl = this.Raw;
        this.IsReadyImpl = true;
        return;
    }

    // extract tokens via the lexer; non-token bits are literal text
    this.LexTokens = new Lexer().ParseBits(raw, impliedBraces: false).ToArray();
    bool isMutable = false;
    foreach (LexTokenToken lexToken in this.LexTokens.OfType <LexTokenToken>())
    {
        TokenName name = new TokenName(lexToken.Name, lexToken.InputArg?.Text);
        IToken token = tokenContext.GetToken(name, enforceContext: false);
        if (token != null)
        {
            // the string is mutable if any of its tokens can change value
            this.Tokens.Add(name);
            isMutable = isMutable || token.IsMutable;
        }
        else
        {
            this.InvalidTokens.Add(lexToken.Text);
        }
    }

    // set metadata; an immutable string can be resolved once up front
    this.IsMutable = isMutable;
    if (!isMutable)
    {
        if (this.InvalidTokens.Any())
        {
            // unknown tokens can never be substituted, so the string is permanently not ready
            this.IsReadyImpl = false;
        }
        else
        {
            this.GetApplied(tokenContext, out string finalStr, out bool isReady);
            this.ValueImpl = finalStr;
            this.IsReadyImpl = isReady;
        }
    }
    this.IsSingleTokenOnly = this.LexTokens.Length == 1 && this.LexTokens.First().Type == LexTokenType.Token;
}
/// <summary>Construct a NEP-5 token service for a specific token contract.</summary>
/// <param name="client">The RPC client used to invoke the token contract.</param>
/// <param name="tokenScriptHash">The script hash of the token contract.</param>
/// <exception cref="ArgumentNullException">The client is null, or the script hash is null or empty.</exception>
public NeoNep5Service(IClient client, string tokenScriptHash)
    : base(client ?? throw new ArgumentNullException(nameof(client)))
{
    // FIX: the client is now validated before the base constructor runs; previously
    // base(client) executed before the null checks in the constructor body.
    if (string.IsNullOrEmpty(tokenScriptHash))
    {
        throw new ArgumentNullException(nameof(tokenScriptHash));
    }
    GetTokenBalance = new TokenBalanceOf(client, tokenScriptHash);
    GetTokenDecimals = new TokenDecimals(client, tokenScriptHash);
    GetTokenName = new TokenName(client, tokenScriptHash);
    GetTokenTotalSupply = new TokenTotalSupply(client, tokenScriptHash);
    GetTokenSymbol = new TokenSymbol(client, tokenScriptHash);
}
/**
 * @brief Add an inline function definition to the dictionary.
 * @param ifd = dictionary to add inline definition to
 * @param doCheckRun = true iff the generated code or the function itself can possibly call CheckRun()
 * @param nameArgSig = inline function signature string, in form <name>(<arglsltypes>,...)
 * @param retType = return type, use TokenTypeVoid if no return value
 */
protected TokenDeclInline(VarDict ifd, bool doCheckRun, string nameArgSig, TokenType retType) : base(null, null, null)
{
    this.retType = retType;
    this.triviality = doCheckRun ? Triviality.complex : Triviality.trivial;

    // the function name is everything before '('; j is left pointing just past it
    int j = nameArgSig.IndexOf('(');
    this.name = new TokenName(null, nameArgSig.Substring(0, j++));

    // parse the comma-separated LSL type list between the parentheses;
    // arguments get synthetic names arg0, arg1, ...
    this.argDecl = new TokenArgDecl(null);
    if (nameArgSig[j] != ')')
    {
        int i;
        TokenName name;
        TokenType type;
        for (i = j; nameArgSig[i] != ')'; i++)
        {
            if (nameArgSig[i] == ',')
            {
                // finish the argument ending at this comma
                type = TokenType.FromLSLType(null, nameArgSig.Substring(j, i - j));
                name = new TokenName(null, "arg" + this.argDecl.varDict.Count);
                this.argDecl.AddArg(type, name);
                j = i + 1;
            }
        }

        // the final argument runs up to the closing ')'
        type = TokenType.FromLSLType(null, nameArgSig.Substring(j, i - j));
        name = new TokenName(null, "arg" + this.argDecl.varDict.Count);
        this.argDecl.AddArg(type, name);
    }
    this.location = new CompValuInline(this);

    // register in the given dictionary, defaulting to the shared inline-function dictionary
    if (ifd == null)
    {
        ifd = inlineFunctions;
    }
    ifd.AddEntry(this);
}
/// <summary>Get the r-value type for a member access by name.</summary>
/// <param name="name">The member name token being accessed.</param>
/// <returns>The member's type; unknown members yield a void type.</returns>
public static TokenType GetRValType(TokenName name)
{
    switch (name.val)
    {
        case "count":
            return new TokenTypeInt(name);
        case "clear":
            return clearDelegate;
        case "index":
            return indexDelegate;
        case "value":
            return valueDelegate;
        default:
            return new TokenTypeVoid(name);
    }
}
/// <summary>Construct a token from its name and raw text.</summary>
/// <param name="name_">The token's name/type.</param>
/// <param name="value_">The token's raw text value.</param>
public Token(TokenName name_, string value_)
{
    this.name = name_;
    this.value = value_;
}
/// <summary>Read the next token and check whether it has the expected name.</summary>
/// <param name="name">The token name to expect.</param>
/// <returns>True if a token was read and its name matches; false otherwise.</returns>
public bool Expect(TokenName name)
{
    // declare the out variable inline instead of allocating a throwaway Token first
    return this.Next(out Token token) && token.name == name;
}