/// <summary>
/// Restores this object's state from an interoperable stack item.
/// Delegates to the base implementation, then reads element [1] of the
/// struct as an array and copies each entry's raw bytes into Tokens.
/// </summary>
/// <param name="stackItem">Struct-shaped stack item to deserialize from.</param>
public override void FromStackItem(StackItem stackItem)
{
    base.FromStackItem(stackItem);
    // Element 1 of the struct carries the serialized token list.
    Array tokenItems = (Array)((Struct)stackItem)[1];
    foreach (StackItem item in tokenItems)
    {
        Tokens.Add(item.GetSpan().ToArray());
    }
}
/// <summary>
/// Scans an interpolated-string body: literal text becomes STRING tokens and
/// each '{expression}' hole is re-scanned with the full Scanner, with the whole
/// result bracketed by INTERPOLATION_START / INTERPOLATION_END markers.
/// Returns the shared Tokens list.
/// </summary>
public List<Token> Scan()
{
    Tokens.Add(new Token(TokenType.INTERPOLATION_START, null, null, 0));
    // Pos marks the start of the current literal run; Current is the cursor.
    Int32 Pos = 0;
    while (!IsAtEnd())
    {
        Char c = Advance();
        if (c == '{')
        {
            if (Match('{'))
            {
                // This is the end
                // NOTE(review): the '{{' escape branch is empty — it neither emits
                // a literal '{' nor resets Pos; looks like escaped braces fall
                // through into the surrounding literal. TODO confirm intended.
            }
            else
            {
                // We have a string literal
                // Emit everything from Pos up to (but excluding) the '{' just consumed.
                Tokens.Add(new Token(TokenType.STRING, Source.Substring(Pos, Current - Pos - 1), Source.Substring(Pos, Current - Pos - 1), -1));
                Pos = Current;
                // Skip ahead to the matching '}' of the interpolation hole.
                // NOTE(review): an unterminated '{' would let Advance() run past
                // the end of Source — presumably inputs are pre-validated; verify.
                while ((c = Advance()) != '}')
                {
                }
                // Re-scan the hole contents (Pos-1 .. Current) with the full scanner
                // and splice its tokens in.
                Tokens.AddRange(new Scanner(Source.Substring(Pos - 1, Current - Pos + 1)).Scan(false));
                Pos = Current;
            }
        }
    }
    // Flush any trailing literal text after the last hole.
    if (Pos < Current)
    {
        Tokens.Add(new Token(TokenType.STRING, Source.Substring(Pos, Current - Pos), Source.Substring(Pos, Current - Pos), -1));
    }
    Tokens.Add(new Token(TokenType.INTERPOLATION_END, null, null, 0));
    return (Tokens);
}
// Re-tokenizes the current Text via the analyzer and replaces the Tokens list
// with the result.
// NOTE(review): 'async void' means exceptions thrown by AnalyzeAsync are
// unobservable and callers cannot await completion — acceptable only if this
// is wired as an event handler; confirm. Also, Tokens is cleared *after* the
// await, so two overlapping invocations could interleave and leave a mixed
// token list — presumably single-threaded (UI context) usage; verify.
private async void OnAnalyzing(IAnalyzer analyzer) { var tokens = await analyzer.AnalyzeAsync(Text); Tokens.Clear(); Tokens.AddRange(tokens); }
/// <summary>
/// Initializes the token set from a project node, copying the node's name and
/// token list, and stores the supplied random source for later use.
/// </summary>
/// <param name="node">Must be a <see cref="TokenSetNode"/>; its Name and Tokens are copied.</param>
/// <param name="random">Random source kept in <c>m_Random</c>.</param>
/// <exception cref="ArgumentException">
/// Thrown when <paramref name="node"/> is null or not a <see cref="TokenSetNode"/>.
/// </exception>
public TokenSet(IProjectNode node, IRandom random)
{
    // Fail fast with a descriptive error instead of the NullReferenceException
    // the previous unchecked 'as' cast produced for a null or wrong-typed node.
    if (!(node is TokenSetNode tsn))
    {
        throw new ArgumentException($"Expected a {nameof(TokenSetNode)}, got {node?.GetType().Name ?? "null"}.", nameof(node));
    }
    Name = tsn.Name;
    Tokens.AddRange(tsn.Tokens);
    m_Random = random;
}
/// <summary>
/// Fills Tokens with one display string per recognized token, formatted as
/// "TYPE (line:column)", with " : text" appended for token kinds that carry
/// meaningful literal text.
/// </summary>
/// <param name="scanner">Scanner whose RecognizedTokens are formatted.</param>
private void PopulateTokens(Scanner scanner)
{
    // Token kinds whose literal text is worth showing alongside position info.
    TokenType[] tokenWithTexts =
    {
        TokenType.STRING, TokenType.BOOL, TokenType.COMMENT, TokenType.COMP,
        TokenType.DOUBLE, TokenType.INTEGER, TokenType.IDENTIFIER, TokenType.OPER
    };
    Tokens.AddRange(scanner.RecognizedTokens.Select(t =>
    {
        // Interpolation calls ToString() implicitly; the explicit call was redundant.
        var result = $"{t.Type} ({t.Line}:{t.Column})";
        if (tokenWithTexts.Contains(t.Type))
        {
            result += $" : {t.Text}";
        }
        return result;
    }));
}
/// <summary>
/// Builds a token-associate transaction body linking the given account to the
/// given token addresses.
/// </summary>
/// <param name="tokens">Non-null, non-empty sequence of token addresses; entries may not be null/none.</param>
/// <param name="account">Account to associate with the tokens.</param>
/// <exception cref="ArgumentNullException">The token list is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">The token list is empty or contains an empty/null address.</exception>
internal TokenAssociateTransactionBody(IEnumerable<Address> tokens, Address account) : this()
{
    if (tokens is null)
    {
        throw new ArgumentNullException(nameof(tokens), "The list of tokens cannot be null.");
    }
    // Validate and convert each address as we go; an invalid entry aborts
    // construction immediately.
    foreach (var token in tokens)
    {
        if (token.IsNullOrNone())
        {
            throw new ArgumentOutOfRangeException(nameof(tokens), "The list of tokens cannot contain an empty or null address.");
        }
        Tokens.Add(new TokenID(token));
    }
    if (Tokens.Count == 0)
    {
        throw new ArgumentOutOfRangeException(nameof(tokens), "The list of tokens cannot be empty.");
    }
    Account = new AccountID(account);
}
/// <summary>
/// Consumes the next source character and emits the matching token: brackets,
/// multi-character operators ('++', '-=', '&lt;&lt;', '&amp;&amp;', …), line and block
/// comments, interpolated strings ('$"..."'), plain strings, numbers,
/// identifiers, and whitespace/newline bookkeeping.
/// </summary>
protected override void ScanToken()
{
    Char c = Advance();
    switch (c)
    {
        case '[':
            AddToken(TokenType.LSQRBRACE);
            // TODO
            break;
        case ']':
            AddToken(TokenType.RSQRBRACE);
            // TODO
            break;
        case '$':
            // Interpolated string: '$' must be immediately followed by a quoted
            // string whose body is handed to the interpolation scanner.
            AddToken(TokenType.DOLLAR);
            if (Match('"'))
            {
                var value = String().Substring(1); // Strip of the leading quote
                var scanner = new StringInterpolationScanner(value);
                var t = scanner.Scan();
                Tokens.AddRange(t);
            }
            else
            {
                Cox.Error(Line, $"Expected '\"'");
            }
            break;
        case '(': AddToken(TokenType.LPAREN); break;
        case ')': AddToken(TokenType.RPAREN); break;
        case '{': AddToken(TokenType.LBRACE); break;
        case '}': AddToken(TokenType.RBRACE); break;
        case ',': AddToken(TokenType.COMMA); break;
        case '.': AddToken(TokenType.DOT); break;
        case '-':
            // '--', '-=' or plain '-'.
            if (Match('-'))
            {
                AddToken(TokenType.MINUS_MINUS);
            }
            else
            {
                if (Match('='))
                {
                    AddToken(TokenType.MINUS_EQUALS);
                }
                else
                {
                    AddToken(TokenType.MINUS);
                }
            }
            break;
        case '+':
            // '++', '+=' or plain '+'.
            if (Match('+'))
            {
                AddToken(TokenType.PLUS_PLUS);
            }
            else
            {
                if (Match('='))
                {
                    AddToken(TokenType.PLUS_EQUALS);
                }
                else
                {
                    AddToken(TokenType.PLUS);
                }
            }
            break;
        case '*':
            if (Match('='))
            {
                AddToken(TokenType.MUL_EQUALS);
            }
            else
            {
                AddToken(TokenType.MUL);
            }
            break;
        case '/':
            // '/=' operator, '//' line comment, '/*' block comment, or plain '/'.
            if (Match('='))
            {
                AddToken(TokenType.DIV_EQUALS);
            }
            else
            {
                if (Match('/'))
                {
                    // A comment goes until the end of the line.
                    while (Peek() != '\n' && !IsAtEnd())
                    {
                        Advance();
                    }
                }
                else
                {
                    if (Match('*'))
                    {
                        // Multi line comment, we need to look for a */
                        while (!(Peek() == '*' && PeekNext() == '/') && !IsAtEnd())
                        {
                            if (Peek() == '\n')
                            {
                                // Keep line numbers accurate inside the comment.
                                Line++;
                            }
                            Advance();
                        }
                        if (IsAtEnd())
                        {
                            Cox.Error(Line, $"End-of-File found, '*/' expected");
                        }
                        else
                        {
                            // Consume the closing '*' and '/'.
                            Advance();
                            Advance();
                        }
                    }
                    else
                    {
                        AddToken(TokenType.DIV);
                    }
                }
            }
            break;
        case ';': AddToken(TokenType.SEMICOLON); break;
        case ':': AddToken(TokenType.COLON); break;
        case '?': AddToken(TokenType.QUESTIONMARK); break;
        case '&':
            // '&&' logical vs '&' bitwise.
            if (Match('&'))
            {
                AddToken(TokenType.AND);
            }
            else
            {
                AddToken(TokenType.BINARY_AND);
            }
            break;
        case '|':
            // '||' logical vs '|' bitwise.
            if (Match('|'))
            {
                AddToken(TokenType.OR);
            }
            else
            {
                AddToken(TokenType.BINARY_OR);
            }
            break;
        case '^': AddToken(TokenType.BINARY_XOR); break;
        case '~': AddToken(TokenType.BINARY_NOT); break;
        case '!': AddToken(Match('=') ? TokenType.BANG_EQUAL : TokenType.BANG); break;
        case '=': AddToken(Match('=') ? TokenType.EQUAL_EQUAL : TokenType.EQUAL); break;
        case '<':
            // '<<', '<=' or '<'.
            if (Match('<'))
            {
                AddToken(TokenType.BINARY_LEFTSHIFT);
            }
            else if (Match('='))
            {
                AddToken(TokenType.LESS_EQUAL);
            }
            else
            {
                AddToken(TokenType.LESS);
            }
            break;
        case '>':
            // '>>', '>=' or '>'.
            if (Match('>'))
            {
                AddToken(TokenType.BINARY_RIGHTSHIFT);
            }
            else if (Match('='))
            {
                AddToken(TokenType.GREATER_EQUAL);
            }
            else
            {
                AddToken(TokenType.GREATER);
            }
            break;
        case '"':
            {
                // String() may return null (presumably after reporting an error
                // itself — verify); only emit a token on success.
                String value = String();
                if (value != null)
                {
                    AddToken(TokenType.STRING, value);
                }
            }
            break;
        case ' ':
        case '\t':
        case '\r':
            // Insignificant whitespace — no token.
            break;
        case '\n':
            Line++;
            break;
        default:
            if (Char.IsDigit(c))
            {
                Number();
            }
            else if (Char.IsLetter(c) || c == '_')
            {
                Identifier();
            }
            else
            {
                Cox.Error(Line, $"Unexpected Character {c}");
            }
            break;
    }
}
/// <summary>
/// Tokenizes the given source lines one line at a time by driving the
/// state-machine table <c>stateMachine</c> (current state → handler(ch, guess)
/// → next state). After each line, an «Include "name"» token pair is replaced
/// inline by the tokens of the referenced ScriptSource (recursively);
/// otherwise an EOL token is appended.
/// </summary>
/// <param name="lines">Source lines to tokenize.</param>
/// <exception cref="TokenizationException">A handler produced a state with no table entry.</exception>
/// <exception cref="ArgumentTypeException">'Include' was not followed by a string token.</exception>
/// <exception cref="MissingResourceException">An included Source resource could not be found.</exception>
public void Tokenize(IEnumerable<string> lines)
{
    afterWord = LexerState.None;
    parensDepth = 0;
    Line = 0;
    foreach (string line in lines)
    {
        Line++;
        if (line.Length == 0)
        {
            // Blank lines produce no tokens at all — not even an EOL.
            continue;
        }
        CurrentLine = line;
        Column = 0;
        State = LexerState.None;
        endOfLine = false;
        currentToken = string.Empty;
        while (!endOfLine)
        {
            char ch = Next();
            LexerState guess = Guess(ch);
            // NOTE(review): the resulting State is validated only after the
            // handler runs; the initial state (None) is assumed to always be
            // present in the table — confirm.
            State = stateMachine[State](ch, guess);
            if (!stateMachine.ContainsKey(State))
            {
                throw new TokenizationException($"Unknown tokenizer state: {State}");
            }
        }
        // process end of line in case something is missing
        stateMachine[State]('\n', LexerState.EndOfLine);
        // special handling for 'Include'
        if (Tokens.Count >= 2 && Tokens[Tokens.Count - 2].Value == "Include")
        {
            Token filename = Tokens[Tokens.Count - 1];
            // Remove the 'Include <name>' pair; it is replaced by the included tokens.
            Tokens.RemoveRange(Tokens.Count - 2, 2);
            Tokenizer jt = new Tokenizer(Context);
            if (filename.Type != TokenType.String)
            {
                throw new ArgumentTypeException("Can only Include a (filename) string");
            }
            ScriptSource include = Context.Djn.FindByName<ScriptSource>(filename.Value);
            if (include == null)
            {
                throw new MissingResourceException($"Unknown Source resource: {filename.Value}");
            }
            // Recursively tokenize the included source and splice its tokens in.
            // NOTE(review): no cycle detection — a self-including source would
            // recurse forever; presumably inputs are trusted. Verify.
            jt.Tokenize(include.Source.Split('\n'));
            Tokens.AddRange(jt.Tokens);
        }
        else
        {
            Tokens.Add(new Token { Type = TokenType.EOL });
        }
    }
}