/// <summary>Registers a highlight entry mapping the token category to the given foreground color.</summary>
private void Add(TokenCategory category, Color foreColor)
{
    var entry = new SyntaxHighlightProperties { ForeColor = foreColor };
    properties.Add(category, entry);
}
/// <summary>
/// True for token categories the caller treats as skippable "whitespace":
/// all comment flavors plus literal whitespace.
/// </summary>
private static bool IsWhitespace(TokenCategory category)
{
    switch (category)
    {
        case TokenCategory.Comment:
        case TokenCategory.DocComment:
        case TokenCategory.LineComment:
        case TokenCategory.WhiteSpace:
            return true;
        default:
            return false;
    }
}
/// <summary>Creates a token carrying its lexeme, category, and source position (row/column).</summary>
public Token(string lexeme, TokenCategory category, int row, int column)
{
    this.lexeme = lexeme;
    this.category = category;
    // Source coordinates, used for error reporting.
    this.row = row;
    this.column = column;
}
/// <summary>Creates a token from its category, lexeme text, and [start, end) character span.</summary>
public Token(TokenCategory category, string lexem, int start, int end)
{
    Category = category;
    Lexem = lexem;
    Start = start;
    End = end;
}
Token _lastNonWS; // most recent "real", non-whitespace value of _current

// Pulls the next token, preferring any tokens queued in _pending (e.g. synthesized
// indent/dedent tokens) over the raw lexer stream. Tracks line number, bracket
// nesting depth, and the last token category so HandleNextToken can be invoked
// at line starts and after IndentTrigger tokens. Also stores the result in
// _current before returning it.
// NOTE(review): _bracketDepth += (short)nextCat relies on the TokenCategory enum
// being numerically encoded so that open/close brackets map to +1/-1 — confirm
// against the enum definition.
public override Maybe <Token> NextToken() {
    Maybe <Token> next;
    var pending = _pending.TryPopFirst();
    if (pending.HasValue) {
        // Pending tokens bypass all line/indent bookkeeping below.
        _lastNonWS = pending.Value;
        return(_current = pending);
    } else {
        next = Lexer.NextToken();
    }
    if (next.HasValue) {
        var nextVal = next.Value;
        TokenCategory nextCat = GetTokenCategory(nextVal);
        if (_bracketDepth > 0) {
            // Processing disabled... wait for close brackets
            _curLine = Lexer.LineNumber;
            _lastNonWS = nextVal;
            _bracketDepth += (short)nextCat;
            _curCat = nextCat;
            return(_current = next);
        } else if (nextCat == TokenCategory.Whitespace) {
            return(_current = next); // ignore whitespace, pass it through
        }
        int nextLine = Lexer.LineNumber;
        bool newline = nextLine != _curLine;
        if (newline || _curCat == TokenCategory.IndentTrigger) {
            // A new line (or a pending indent trigger) may synthesize tokens
            // and/or replace `next`; re-read the value afterwards.
            HandleNextToken(ref next, ref nextCat, newline);
            nextVal = next.Value;
        }
        _lastNonWS = nextVal;
        _bracketDepth += (short)nextCat;
        _curLine = nextLine;
        _curCat = nextCat;
        return(_current = next);
    } else {
        // EOF
        if (!_eofHandledAlready) {
            // Give HandleNextToken one chance to emit trailing dedents/EOF tokens.
            _eofHandledAlready = true;
            TokenCategory nextCat = TokenCategory.Other;
            HandleNextToken(ref next, ref nextCat, true);
        }
        return(_current = next);
    }
}
/// <summary>Raised when the parser expected one token category but saw another.</summary>
public SyntaxError(TokenCategory expectedCategory, Token token)
    : base($"Syntax Error: Expecting {expectedCategory} but found {token.Category}")
{
}
/// <summary>
/// POST handler that deletes the TokenCategory with the given id and redirects to the index.
/// </summary>
public ActionResult DeleteConfirmed(int id)
{
    TokenCategory tokenCategory = db.Tokens.Find(id);
    // Find returns null when the row no longer exists (e.g. deleted by a
    // concurrent request); without this guard Remove throws a NullReferenceException.
    if (tokenCategory == null)
    {
        return HttpNotFound();
    }
    db.Tokens.Remove(tokenCategory);
    db.SaveChanges();
    return RedirectToAction("Index");
}
/// <summary>
/// Fetches the next token from the lexer when <paramref name="next"/> is empty and
/// we have not reached EOF, updating <paramref name="nextCat"/> to match.
/// </summary>
private void AutoNext(ref Maybe <Token> next, ref TokenCategory nextCat, bool atEof)
{
    if (next.HasValue || atEof)
    {
        return;
    }
    next = Lexer.NextToken();
    // Other is the sentinel category when the lexer is exhausted.
    nextCat = next.HasValue ? GetTokenCategory(next.Value) : TokenCategory.Other;
}
// Maps a token category to a foreground color in the highlight table.
void Add(TokenCategory category, Color foreColor)
{
    var props = new SyntaxHighlightProperties();
    props.ForeColor = foreColor;
    _properties.Add(category, props);
}
/// <summary>
/// Verifies the current token matches both the expected string and category, then
/// advances the cursor. Throws FormatException when either part does not match.
/// </summary>
public bool expectToken(string tokStr, TokenCategory tc)
{
    bool matches = Curs.TokenObj.TokenCategory == tc && Curs.TokenObj.theToken.Equals(tokStr);
    if (!matches)
    {
        throw new FormatException($"Fatal Format Error in {TokFile.FilePath}:\n\t Expected <{tokStr}:{tc}> got <{Curs.TokenObj.theToken}:{Curs.TokenObj.TokenCategory}>.");
    }
    return Curs.advance();
}
/// <summary>
/// Creates a health token with a display label (whose first character becomes the
/// initial), attack value, category, and fear generated when it is destroyed.
/// </summary>
/// <exception cref="ArgumentException">When <paramref name="label"/> is null or empty.</exception>
public HealthTokenClass(string label, int attack, TokenCategory category, int fearGeneratedWhenDestroyed)
{
    // label[0] below would throw an opaque IndexOutOfRangeException (or NRE) for
    // an empty/null label; fail with an explicit argument error instead.
    if (string.IsNullOrEmpty(label))
    {
        throw new ArgumentException("Label must be a non-empty string.", nameof(label));
    }
    Label = label;
    Initial = label[0];
    Category = category;
    Attack = attack;
    this.FearGeneratedWhenDestroyed = fearGeneratedWhenDestroyed;
}
/// <summary>Consumes and returns the current token if it has the expected category; otherwise throws SyntaxError.</summary>
public Token Expect(TokenCategory category)
{
    if (Current != category)
    {
        throw new SyntaxError();
    }
    Token matched = tokenStream.Current;
    tokenStream.MoveNext();
    return matched;
}
// POST: marks the bound entity as modified and persists it; redisplays the form on validation failure.
public ActionResult Edit([Bind(Include = "ID,Type,Content,Value,ValueLabel,ValueRange,ValueMin")] TokenCategory tokenCategory)
{
    if (!ModelState.IsValid)
    {
        return View(tokenCategory);
    }
    db.Entry(tokenCategory).State = EntityState.Modified;
    db.SaveChanges();
    return RedirectToAction("Index");
}
// For Functions: fixed-arity overload (min == max arguments).
public EqOperator(string name, TokenCategory type, int maxArgs, CalculationMethod calculationMethod)
{
    Name = name;
    Type = type;
    _calculation = calculationMethod;
    MinArguments = maxArgs;
    MaxArguments = maxArgs;
    IsLeftAssociative = true;
    Precedence = 8; // functions have the highest precedence!
}
/// <summary>Builds a token descriptor; precedence and associativity default to 0.</summary>
public TokenInfo(TokenKind kind, TokenCategory category, string pattern, TokenAction action, TokenAction action2)
{
    Kind = kind;
    Category = category;
    Pattern = pattern;
    // Primary and secondary actions fired when this token is matched.
    Action = action;
    Action2 = action2;
    Prec = 0;
    Assoc = 0;
}
/// <summary>Creates a terminal with a token category and extra flags; Outline terminals are marked as punctuation.</summary>
public Terminal(string name, TokenCategory category, TermFlags flags) : base(name)
{
    Category = category;
    this.Flags |= flags;
    if (Category == TokenCategory.Outline)
    {
        this.SetFlag(TermFlags.IsPunctuation);
    }
    // This terminal emits itself (no redirection to another terminal).
    OutputTerminal = this;
}
// For Functions: variable-arity overload with explicit min/max argument counts.
public EqOperator(string name, TokenCategory type, int minArgs, int maxArgs, CalculationMethod calculationMethod)
{
    Name = name;
    Type = type;
    _calculation = calculationMethod;
    MinArguments = minArgs;
    MaxArguments = maxArgs;
    IsLeftAssociative = true;
    Precedence = 0;
}
/// <summary>
/// Reads the next token from the tokenizer and packages its source span,
/// category, and trigger information into a TokenInfo.
/// </summary>
public override TokenInfo ReadToken()
{
    var token = tokenizer.GetNext();
    var startLocation = new SourceLocation(token.StartPosition, token.StartLine, token.StartColumn);
    // Span runs from the token start to the tokenizer's current position.
    var span = new SS.SourceSpan(ConvertToSSSrcLocation(startLocation), ConvertToSSSrcLocation(tokenizer.Position));
    TokenTriggers trigger = GetTrigger(token.Kind);
    TokenCategory category = GetCategory(token.Kind);
    return new TokenInfo(span, category, trigger);
}
// Returns the highlight properties mapped to the category, falling back to the
// plain-text properties for unmapped categories.
SyntaxHighlightProperties GetTokensMapping(TokenCategory category)
{
    // Single TryGetValue lookup instead of ContainsKey + indexer (avoids hashing twice);
    // matches the pattern already used by CalculateTokenCategoryHighlight in this file.
    if (TokensMapping.TryGetValue(category, out var result))
    {
        return result;
    }
    return textProperties;
}
// For Operators and Constants: explicit precedence/associativity; max arguments unlimited.
public EqOperator(string name, TokenCategory type, int precedence, int arguments, bool isLeftAssociated, CalculationMethod calculationMethod)
{
    Name = name;
    Type = type;
    _calculation = calculationMethod;
    Precedence = precedence;
    IsLeftAssociative = isLeftAssociated;
    MinArguments = arguments;
    MaxArguments = -1; // Default Max Arguments - Unlimited
}
/// <summary>Raised when the parser expected one token category but saw another; includes lexeme and position.</summary>
public SyntaxError(TokenCategory expectedCategory, Token token)
    : base($"Syntax Error: Expecting {expectedCategory} \nbut found {token.Category} (\"{token.Lexeme}\") at row {token.Row}, column {token.Column}.")
{
}
/// <summary>Resolves the highlight for a category, falling back to the Text highlight when unmapped.</summary>
public SyntaxHighlightProperties CalculateTokenCategoryHighlight(TokenCategory category)
{
    SyntaxHighlightProperties highlight;
    if (!properties.TryGetValue(category, out highlight))
    {
        highlight = properties[TokenCategory.Text];
    }
    return highlight;
}
// POST: inserts the bound entity and redirects to the index; redisplays the form on validation failure.
public ActionResult Create([Bind(Include = "ID,Type,Content,Value,ValueLabel,ValueRange,ValueMin")] TokenCategory tokenCategory)
{
    if (!ModelState.IsValid)
    {
        return View(tokenCategory);
    }
    db.Tokens.Add(tokenCategory);
    db.SaveChanges();
    return RedirectToAction("Index");
}
/// <summary>
/// Consumes the current token when it matches the expected category, remembering it
/// as <c>previous</c>. On mismatch, reports the position of the previously consumed token.
/// </summary>
public Token Expect(TokenCategory category)
{
    if (Current != category)
    {
        // NOTE: position comes from `previous`, i.e. the last successfully consumed token.
        throw new SyntaxError(String.Format("Syntax Error: Expected {0}, given '{1}' at ({2}, {3})", ErrorFormat(category.ToString()), stream.Current.Value, previous.Row, previous.LastIndex()));
    }
    previous = stream.Current;
    stream.MoveNext();
    return previous;
}
// Ensures the terminal carries a TokenInfo config with the given category,
// then registers it under the given token type.
protected void Term(Terminal terminal, TokenCategory category, TokenType type)
{
    var existing = (TokenInfo)terminal.AstNodeConfig;
    if (existing == null)
    {
        terminal.AstNodeConfig = new TokenInfo { TokenCategory = category };
    }
    TokenTypeToTerminals.Add(type, terminal);
}
/// <summary>Consumes and returns the current token if it matches the expected category; otherwise throws SyntaxError.</summary>
private Token Expect(TokenCategory category)
{
    if (CurrentToken != category)
    {
        throw new SyntaxError(category, tokenStream.Current);
    }
    Token matched = tokenStream.Current;
    tokenStream.MoveNext();
    return matched;
}
// GET: TokenCategories/Edit/5
// Renders the edit form for the requested entity; 400 for a missing id, 404 for an unknown one.
public ActionResult Edit(int?id)
{
    if (!id.HasValue)
    {
        return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
    }
    TokenCategory tokenCategory = db.Tokens.Find(id);
    if (tokenCategory == null)
    {
        return HttpNotFound();
    }
    return View(tokenCategory);
}
/// <summary>
/// For implicit tokens: returns the cached TokenCategory for the (ERE-escaped)
/// token string, creating and caching a new one on first use.
/// </summary>
/// <param name="token">The raw token text; escaped before lookup.</param>
/// <returns>The cached or newly created category.</returns>
private static TokenCategory Token(this string token)
{
    token = EscapeEre(token);
    // Single TryGetValue lookup instead of ContainsKey + indexer (avoids hashing twice).
    if (TokenCategory.ImplicitTokenCategories.TryGetValue(token, out TokenCategory result))
    {
        return result;
    }
    result = token; // implicit string -> TokenCategory conversion
    TokenCategory.ImplicitTokenCategories.Add(token, result);
    return result;
}
/// <summary>Consumes and returns the current token if it matches the expected category; otherwise throws SyntaxError.</summary>
public Token Expect(TokenCategory category)
{
    // NOTE(review): removed leftover debug output `Console.WriteLine(CurrentToken2);`
    // that polluted stdout on every call.
    if (CurrentToken == category)
    {
        Token current = tokenStream.Current;
        tokenStream.MoveNext();
        return current;
    }
    throw new SyntaxError(category, tokenStream.Current);
}
/// <summary>Consumes and returns the current token if it has the expected category; otherwise throws SyntaxError.</summary>
public Token Expect(TokenCategory category)
{
    if (Current != category)
    {
        throw new SyntaxError();
    }
    Token matched = tokenStream.Current;
    tokenStream.MoveNext();
    return matched;
}
/// <summary>
/// Consumes characters while <paramref name="predicate"/> holds for the current
/// character and its lookahead, then emits one token of the given category for
/// the consumed lexeme (if anything was consumed).
/// </summary>
public void MoveUntil(Func<char, char, bool> predicate, TokenCategory tokenCategory)
{
    while (_position < _input.Length && predicate(_input[_position], GetChar(_position + 1)))
    {
        _lexemBuffer.Append(_input[_position]);
        _position++;
    }
    if (_lexemBuffer.Length == 0)
    {
        return;
    }
    int start = _position - _lexemBuffer.Length;
    Put(new Token(tokenCategory, _lexemBuffer.ToString(), start, start + _lexemBuffer.Length));
    _lexemBuffer.Clear();
}
/// <summary>Creates a single-character token; Bool-category tokens are flagged as variables.</summary>
public Token(char symbol, TokenCategory cat = TokenCategory.Undefined, int precedence = -1, int argCount = 0, bool isLeftAssoc = true)
{
    if (cat == TokenCategory.Bool)
    {
        isVariable = true;
    }
    Category = cat;
    Symbol = symbol;
    Precedence = precedence;
    ArgCount = argCount;
    IsLeftAssoc = isLeftAssoc;
}
/// <summary>Creates a lexeme-less token from a category and its [start, end) character span.</summary>
public Token(TokenCategory category, int start, int end)
{
    Start = start;
    End = end;
    Category = category;
}
// Attempts to match the closing-element production: '/' Name Whitespace?
// Returns false (consuming nothing) when the current character is not '/';
// otherwise a Name is mandatory, trailing whitespace is optional, and the
// matched category is reported through the ref parameter.
private bool ElementEnd(ref TokenCategory tokenCategory)
{
    if (!_lexerState.Can('/', TokenCategory.Slash))
        return false;
    _lexerState.Must(() => _commonGrammar.Name(), TokenCategory.Name);
    _lexerState.Can(() => _commonGrammar.Whitespace(), TokenCategory.Whitespace);
    tokenCategory = TokenCategory.ElementEnd;
    return true;
}
// Maps a token category to a foreground color in the highlight table.
void Add(TokenCategory category, Color foreColor)
{
    var entry = new SyntaxHighlightProperties();
    entry.ForeColor = foreColor;
    _properties.Add(category, entry);
}
/// <summary>Resolves the highlight for a category, falling back to the Text highlight when unmapped.</summary>
public SyntaxHighlightProperties CalculateTokenCategoryHighlight(TokenCategory category)
{
    SyntaxHighlightProperties match;
    if (_properties.TryGetValue(category, out match))
    {
        return match;
    }
    return _properties[TokenCategory.Text];
}
/// <summary>
/// Builds a composite token spanning from the earliest child's start to the latest
/// child's end; its lexeme is the children's lexemes joined in start order.
/// </summary>
public CompositeToken(TokenCategory category, IList<Token> tokens)
    : base(category, tokens.Min(t => t.Start), tokens.Max(t => t.End))
{
    Tokens = tokens;
    // string.Concat over a projection replaces the original Aggregate with `+`,
    // which rebuilt the string on every step (O(n^2) character copies).
    Lexem = string.Concat(tokens.OrderBy(t => t.Start).Select(t => t.Lexem));
}
/// <summary>
/// Initializes a new instance of the <see cref="TokenInfo"/> class with
/// case-sensitive matching for the given token category.
/// </summary>
/// <param name="tokenCategory">The token category.</param>
public TokenInfo(TokenCategory tokenCategory)
{
    IsCaseInsensitive = false;
    TokenCategory = tokenCategory;
}
/// <summary>Creates a grammar from its token categories and operator precedence groups.</summary>
public Grammar(TokenCategory[] tokenCategories, params PrecedenceGroup[] precedenceGroups)
{
    _precedenceGroups = precedenceGroups;
    _tokenCategories = tokenCategories;
}
/// <summary>
/// Creates a composite token from a pre-computed lexeme and span; all state is
/// carried by the base Token, so the body is intentionally empty.
/// </summary>
public CompositeToken(TokenCategory category, string lexem, int start, int end)
    : base(category, lexem, start, end)
{
}
/// <summary>
/// Requires a token of the given category: reports a problem at EOF, invokes the
/// production <paramref name="name"/>, then reports a problem if the resulting
/// current token's category does not match. Returns the current token.
/// </summary>
public Token Must(Action name, TokenCategory tokenCategory)
{
    if (Eof())
    {
        Problem($"Unexpected end of file. Expected a {tokenCategory} token.");
    }
    name();
    if (_currentToken.Category != tokenCategory)
    {
        Problem($"Expected '{tokenCategory}'.");
    }
    return _currentToken;
}
/// <summary>
/// Requires the current character to equal <paramref name="c"/> (reporting a
/// problem otherwise), emits a one-character token for it, and advances.
/// </summary>
public void Must(char c, TokenCategory token)
{
    if (CurrentChar() != c)
    {
        Problem($"Character {c} expected.");
    }
    var emitted = new Token(token, c.ToString(), _position, _position + 1);
    Put(emitted);
    _position++;
}
// Walks forward from currentLine, summing the lengths of leading tokens on
// following lines that still belong to the same multi-line token category
// (e.g. an unterminated block comment or string). Stops at the first line
// whose first token has a different category. Lines are tokenized lazily and
// cached in _tokenCache, threading the lexer state from line to line.
// NOTE(review): lines with zero tokens do not break the scan — presumably
// blank lines inside the multi-line token; confirm against the tokenizer.
private int GetTrailingMultiLineTokens(JSScanner JSScanner, ITextSnapshot snapshot, TokenCategory tokenCategory, int currentLine, object state) {
    int nextLine = currentLine + 1;
    var prevState = state;
    int length = 0;
    while (nextLine < snapshot.LineCount) {
        LineTokenization nextLineTokenization;
        if (!_tokenCache.TryGetTokenization(nextLine, out nextLineTokenization)) {
            // Cache miss: tokenize this line starting from the previous line's end state.
            nextLineTokenization = TokenizeLine(JSScanner, snapshot, prevState, nextLine);
            prevState = nextLineTokenization.State;
            _tokenCache[nextLine] = nextLineTokenization;
        }
        if (nextLineTokenization.Tokens.Length != 0) {
            if (nextLineTokenization.Tokens[0].Category != tokenCategory) {
                break;
            }
            length += nextLineTokenization.Tokens[0].SourceSpan.Length;
        }
        nextLine++;
    }
    return length;
}
// Walks backward from currentLine, summing the lengths of trailing tokens on
// preceding lines that belong to the same multi-line token category, and
// capturing the earliest such token in startToken. validPrevLine is set to the
// first (earliest) line known to participate. The scan stops when a preceding
// line's last token has a different category, or when a line holds more than
// one token (the multi-line token must have started on it).
private int GetLeadingMultiLineTokens(JSScanner JSScanner, ITextSnapshot snapshot, TokenCategory tokenCategory, int firstLine, int currentLine, out int validPrevLine, ref TokenInfo startToken) {
    validPrevLine = currentLine;
    int prevLine = currentLine - 1;
    int length = 0;
    while (prevLine >= 0) {
        LineTokenization prevLineTokenization = GetPreviousTokenization(JSScanner, snapshot, firstLine, prevLine);
        if (prevLineTokenization.Tokens.Length != 0) {
            if (prevLineTokenization.Tokens[prevLineTokenization.Tokens.Length - 1].Category != tokenCategory) {
                break;
            }
            // Earliest matching token seen so far becomes the start token.
            startToken = prevLineTokenization.Tokens[prevLineTokenization.Tokens.Length - 1];
            length += startToken.SourceSpan.Length;
        }
        validPrevLine = prevLine;
        prevLine--;
        if (prevLineTokenization.Tokens.Length > 1) {
            // http://pytools.codeplex.com/workitem/749
            // if there are multiple tokens on this line then our multi-line string
            // is terminated.
            break;
        }
    }
    return length;
}
/// <summary>Creates a terminal with the given name and tags it with a token category.</summary>
public Terminal(string name, TokenCategory category) : this(name)
{
    Category = category;
}
/// <summary>Pairs a raw lexeme value with its classified category.</summary>
public Token(TokenCategory category, string value)
{
    Value = value;
    Category = category;
}
/// <summary>
/// Conditionally consumes the current character: when it equals <paramref name="c"/>,
/// emits a one-character token of the given category, advances, and returns true;
/// otherwise leaves the position untouched and returns false.
/// </summary>
public bool Can(char c, TokenCategory category)
{
    if (CurrentChar() != c)
    {
        return false;
    }
    Put(new Token(category, c.ToString(), _position, _position + 1));
    _position++;
    return true;
}