/// <summary>
/// Initializes a new instance of the <see cref="RegexLexerContext"/> class.
/// </summary>
/// <param name="position">The position into the source file.</param>
/// <param name="match">The regular expression match data.</param>
/// <param name="stateStack">The stack of lexer states.</param>
/// <param name="ruleTokenType">The token type the rule specified to emit.</param>
public RegexLexerContext(int position, Match match, Stack<string> stateStack, TokenType ruleTokenType) { Position = position; Match = match; StateStack = stateStack; RuleTokenType = ruleTokenType; }
/// <summary>
/// Maps a <see cref="TokenType"/> to its diagnostic name (e.g. "TOK_DATA").
/// Unrecognized values trip a debug assertion and yield an empty string.
/// </summary>
private static string TokenTypeString(TokenType t)
{
    if (t == TokenType.OpenBracket) return "TOK_OPEN_BRACKET";
    if (t == TokenType.CloseBracket) return "TOK_CLOSE_BRACKET";
    if (t == TokenType.Data) return "TOK_DATA";
    if (t == TokenType.Comma) return "TOK_COMMA";
    if (t == TokenType.Key) return "TOK_KEY";
    if (t == TokenType.BinaryData) return "TOK_BINARY_DATA";
    Debug.Assert(false);
    return "";
}
/// <summary>
/// Initializes a token with its type and two textual forms; keyword defaults to NotAKeyword.
/// NOTE(review): the parameter named "token" is stored in <c>utoken</c> and "ltoken"
/// in <c>token</c> — presumably upper-/lower-cased variants of the same text, but the
/// crossed assignment looks easy to get wrong; confirm against callers.
/// </summary>
public Token(TokenType type, string ltoken, string token) { this.type = type; this.utoken = token; this.token = ltoken; this.keyword = Keywords.NotAKeyword; }
/// <summary>
/// Splits a token chain into sub-lists at each token of type <paramref name="splitter"/>,
/// ignoring splitters nested inside (), [] or {} (tracked with simple depth counters).
/// Each splitter token is emitted as its own single-element list.
/// NOTE(review): each splitter list is appended BEFORE the segment that preceded it,
/// so the output order is [splitter, segment, splitter, segment, ..., last-segment]
/// rather than segment-first — confirm callers expect this ordering. The final segment
/// is always appended, even when no splitter was found.
/// </summary>
/// <param name="tokens">The token array to scan.</param>
/// <param name="index">Start index of the range to split.</param>
/// <param name="length">Number of tokens to examine.</param>
/// <param name="splitter">The token type that separates segments.</param>
/// <returns>The segments (and single-splitter lists) in the order produced.</returns>
public static List<List<Token>> SplitTokenChain(Token[] tokens, int index, int length, TokenType splitter) { var round = 0; var square = 0; var curly = 0; var listList = new List<List<Token>>(); var list = new List<Token>(); for (var i = index; i < index + length; i++) { var token = tokens[i]; if (token.Type == TokenType.RoundOpen) round++; if (token.Type == TokenType.SquareOpen) square++; if (token.Type == TokenType.CurlyOpen) curly++; if (token.Type == TokenType.RoundClose) round--; if (token.Type == TokenType.SquareClose) square--; if (token.Type == TokenType.CurlyClose) curly--; if (round <= 0 && square <= 0 && curly <= 0 && token.Type == splitter) { listList.Add(new List<Token>() { token}); // splitter tokens are kept as their own one-element list listList.Add(list); list = new List<Token>(); } else { list.Add(token); } } listList.Add(list); // always append the final (possibly empty) segment return listList; }
/// <summary>
/// Creates a SQL token carrying its type, name and the whitespace that preceded it.
/// Trailing whitespace starts out empty and is filled in later.
/// </summary>
public VxSqlToken(TokenType t, string n, string l)
{
    trailing_space = "";
    leading_space = l;
    name = n;
    type = t;
}
/// <summary>
/// Creates a token over a span of source text.
/// </summary>
/// <param name="type">The lexical category of the token.</param>
/// <param name="text">The source text buffer.</param>
/// <param name="offset">Offset of the token within the text.</param>
/// <param name="length">Length of the token in characters.</param>
public Token(TokenType type, string text, int offset, int length)
{
    m_Length = length;
    m_Offset = offset;
    m_Text = text;
    m_Type = type;
}
/// <summary>
/// Displays a message box describing a card read (card type, id, zones, terminals
/// and — when the lookup via QuicheProvider finds one — the user's name), but only
/// when <c>displayCardData</c> is set; the flag is cleared after a single display.
/// When called from a non-UI thread the call is marshalled back via BeginInvoke.
/// </summary>
public void OnCardRead(TokenType type, uint pin, List<ushort> terminals, List<ushort> zones) { if(!this.InvokeRequired) { if(this.displayCardData) { string tokenType = "User Card"; this.displayCardData = false; switch (type) { case TokenType.Proxy: tokenType = "User Proxy Card"; break; case TokenType.AdminToggle: tokenType = "Control Card (Admin Permissions Toggle)"; break; case TokenType.Enrol: tokenType = "Control Card (Enrol)"; break; case TokenType.Revoke: tokenType = "Control Card (Revoke)"; break; case TokenType.Verify: tokenType = "Control Card (Verify)"; break; case TokenType.Access: tokenType = "Control Card (Access Permissions Set)"; break; default: break; } string text = string.Format("Card information: \nType: {0}\nCard ID: {1}\nZones: {2}\nTerminals :{3}", tokenType, pin.ToString(), zones!= null && zones.Count > 0 ? string.Join(", ", zones.ToArray()) : "None", terminals != null && terminals.Count > 0 ? string.Join(", ", terminals.ToArray()) : "None"); User u = QuicheProvider.Instance.Client.Get<User>(pin.ToString()); if (u!=null && u.Name!= null && u.Name!="") text = string.Format("{0}\nUser: {1}", text, u.Name); MessageBox.Show(text, "QuiRing: Card Info", MessageBoxButtons.OK, MessageBoxIcon.Information); } } else this.BeginInvoke(new Action<TokenType, uint, List<ushort>, List<ushort>>(this.OnCardRead), type, pin, terminals, zones); }
/// <summary>
/// Tests this token against a (type, value) pair using ordinal string comparison.
/// Both arguments are validated with debug assertions only.
/// </summary>
public bool Equals(TokenType tokenType, string value)
{
    Debug.Assert(Enum.IsDefined(typeof(TokenType), tokenType));
    Debug.Assert(value != null);
    bool sameType = this.type == tokenType;
    return sameType && string.Equals(this.value, value, StringComparison.Ordinal);
}
/// <summary>
/// Creates a token with its text and the line/position where it was scanned.
/// </summary>
public Token(TokenType tokenType, string tokenString, int lineNr, int linePosition)
{
    m_linePosition = linePosition;
    m_lineNr = lineNr;
    m_tokenString = tokenString;
    m_tokenType = tokenType;
}
/// <summary>
/// Creates a token covering [start, start + length) of the given text.
/// </summary>
public Token(TokenType type, string text, int start, int length)
{
    this.length = length;
    this.start = start;
    this.text = text;
    this.type = type;
}
/// <summary>
/// Scans the next token, records it on the context and as the last token type,
/// and returns it.
/// </summary>
public TokenType Scan()
{
    var scanned = ScanInternal();
    _lastTokenType = scanned;
    _context.TokenType = scanned;
    return scanned;
}
/// <summary>
/// Classifies an operator token: unary minus/plus/not are UNARY; everything else
/// is treated as BINARY.
/// </summary>
internal static OperatorType GetOperatorType(TokenType type)
{
    switch (type)
    {
        case TokenType.OP_UMINUS:
        case TokenType.OP_UPLUS:
        case TokenType.OP_UNOT:
            return OperatorType.UNARY;
        default:
            return OperatorType.BINARY;
    }
}
/// <summary>
/// Initializes the lexer to an empty expression at position zero, with the
/// previous token primed to end-of-line.
/// </summary>
public CLexer()
{
    m_currToken = new Token();
    m_PreviousTokenType = TokenType.T_EOL;
    m_nNextPos = 0;
    m_strExpr = "";
}
/// <summary>
/// Creates a unary expression; a non-null operand is attached as a child in the
/// operand role.
/// </summary>
public UnaryExpression(TokenType op, Expression operand)
{
    Operator = op;
    if (operand != null)
    {
        AddChild(operand, OperandRole);
    }
}
/// <summary>
/// Defines the well-known XML delimiter tokens (comments and element closers).
/// </summary>
static XmlTokens()
{
    ComplexCloseElement = new TokenType("ComplexCloseElement", "</");
    CloseElement = new TokenType("CloseElement", "/>");
    EndComment = new TokenType("EndComment", "-->");
    StartComment = new TokenType("StartComment", "<!--");
}
/// <summary>
/// Creates a valueless token of the given type, tagged with a debug index.
/// </summary>
public Token(TokenType type, int debugIndex) : this()
{
    this.DebugIndex = debugIndex;
    this.Value = null;
    this.Type = type;
}
/// <summary>
/// Creates a binary expression node at the given source location, holding the
/// left operand, operator token and right operand.
/// </summary>
public BinaryExpression(int line, int column, Expression lhs, TokenType op, Expression rhs) : base(line, column)
{
    this.rhs = rhs;
    this.op = op;
    this.lhs = lhs;
}
/// <summary>
/// Creates a token of the given type and value, tagged with a debug index.
/// </summary>
public Token(TokenType type, string value, int debugIndex) : this()
{
    this.DebugIndex = debugIndex;
    this.Value = value;
    this.Type = type;
}
/// <summary>
/// Creates a token with its value and the [position, position + length) span it
/// occupies in the source.
/// </summary>
public Token(TokenType type, string value, int position, int length)
{
    this.Length = length;
    this.Position = position;
    this.Value = value;
    this.Type = type;
}
/// <summary>
/// Creates a tree node under <paramref name="parent"/>; a null text is stored
/// as the empty string and the child list starts empty.
/// </summary>
private Node(Node parent, TokenType tokenType, string text = "")
{
    Children = new List<Node>();
    Text = text == null ? "" : text;
    TokenType = tokenType;
    Parent = parent;
}
/// <summary>
/// Implements table operators. Only '+' is handled here; any other token type is
/// deferred to <c>base.Compute</c>. '+' merges this table with another ScriptTable
/// into a fresh table: entries of this table are cloned in first, then entries of
/// the right-hand table (so right-hand values win on key collisions). Cloned
/// non-static script functions are re-bound to the result table via SetTable.
/// Throws ExecutionException when the right operand is not a ScriptTable.
/// </summary>
public override ScriptObject Compute(TokenType type, ScriptObject value) { if (type != TokenType.Plus) { return base.Compute(type, value); } ScriptTable table = value as ScriptTable; if (table == null) throw new ExecutionException(m_Script, this, "table [+] 操作只限两个[table]之间,传入数据类型:" + value.Type); ScriptTable ret = m_Script.CreateTable(); ScriptObject obj = null; ScriptScriptFunction func = null; foreach (KeyValuePair<object, ScriptObject> pair in m_listObject) { obj = pair.Value.Clone(); if (obj is ScriptScriptFunction) { func = (ScriptScriptFunction)obj; if (!func.IsStaticFunction) func.SetTable(ret); } ret.m_listObject[pair.Key] = obj; } foreach (KeyValuePair<object, ScriptObject> pair in table.m_listObject) { obj = pair.Value.Clone(); if (obj is ScriptScriptFunction) { func = (ScriptScriptFunction)obj; if (!func.IsStaticFunction) func.SetTable(ret); } ret.m_listObject[pair.Key] = obj; } return ret; }
/// <summary>
/// Creates a token covering [startOffset, startOffset + length), optionally
/// carrying an arbitrary payload in <paramref name="data"/>.
/// </summary>
public Token(TokenType type, int startOffset, int length, object data = null)
{
    this.data = data;
    this.length = length;
    this.startOffset = startOffset;
    this.type = type;
}
/// <summary>
/// Creates a one-operand expression at the given text position and attaches the
/// operand as a child node.
/// </summary>
protected MonadicExpression(TextPosition tp, TokenType op, Element exp) : base(tp)
{
    Exp = exp;
    Operator = op;
    AppendChild(Exp);
}
/// <summary>
/// Creates a token with its text and the line/column where it appears.
/// </summary>
public Token(string name, TokenType t, int line, int column)
{
    this.Column = column;
    this.Line = line;
    this.Type = t;
    this.Text = name;
}
/// <summary>
/// Creates a token definition backed by a regex matcher. Defaults: the token is
/// treated as an operator and is kept (not discarded) by the tokenizer.
/// </summary>
public Token(Regex match, TokenType tokenType, OperationType operationType = OperationType.Operator, TokenDiscardPolicy discardPolicy = TokenDiscardPolicy.Keep)
{
    m_Regex = match;
    m_OperationType = operationType;
    m_DiscardPolicy = discardPolicy;
    m_TokenType = tokenType;
}
/// <summary>
/// Creates a token record with its creation timestamp, identifier, display name
/// and type.
/// </summary>
public Token(DateTimeOffset created, Guid tokenGuid, string tokenName, TokenType tokenType)
{
    this.TokenType = tokenType;
    this.TokenName = tokenName;
    this.TokenGuid = tokenGuid;
    this.Created = created;
}
/// <summary>
/// Reads the next token and returns its value, requiring it to be of the given type.
/// </summary>
/// <param name="type">The type of token to retrieve.</param>
/// <returns>The token's value.</returns>
/// <exception cref="InvalidDataException">The next token has a different type.</exception>
private string _get(TokenType type)
{
    var next = _read();
    if (next.Type == type)
        return next.Value;
    throw new InvalidDataException(next.Type.ToString());
}
/// <summary>
/// Creates a token with its term text and the [start, start + length) span it
/// covers in the input.
/// </summary>
public Token(TokenType type, string term, int start, int length)
{
    Type = type;
    Term = term;
    Start = start;
    Length = length;
}
/// <summary>
/// P/Invoke: advapi32 DuplicateTokenEx — duplicates an access token into a new
/// primary or impersonation token. Returns false on failure; on success
/// <paramref name="hNewToken"/> receives a raw handle the caller must close.
/// NOTE(review): the DllImport attribute is not visible in this chunk — confirm
/// SetLastError = true is set at the declaration site so GetLastWin32Error works.
/// </summary>
public static extern bool DuplicateTokenEx( SafeFileHandle hExistingToken, uint dwDesiredAccess, SecurityAttributes lpTokenAttributes, SecurityImpersonationLevel impersonationLevel, TokenType tokenType, out IntPtr hNewToken);
/// <summary>
/// Discards the lookahead token after verifying it has the expected type;
/// otherwise throws a parser exception naming the expected and actual tokens.
/// </summary>
private void DiscardToken(TokenType tokenType)
{
    if (_lookaheadFirst.TokenType == tokenType)
    {
        DiscardToken();
        return;
    }
    throw new LqlParserException(string.Format("Expected {0} but found: {1}", tokenType.ToString().ToUpper(), _lookaheadFirst.Value));
}
/// <summary>
/// Wraps an already-parsed value: forwards it to the base token and also exposes
/// it as the strongly-typed ParsedValue.
/// </summary>
public ParsedToken(T value, TokenType tokenType) : base(value, tokenType)
{
    ParsedValue = value;
}
// Process [link] and ![image] directives.
// Handles, in order: footnote references ([^id], extra mode only), inline links
// [text](url "title"){attrs}, and reference links [text][id] / implicit [text][].
// Returns null whenever the syntax fails to complete, rewinding the position
// where the grammar allows retrying as plain text. Square brackets inside the
// link text may nest; escapable characters are skipped while scanning for the
// closing ']'.
Token ProcessLinkOrImageOrFootnote() { // Link or image? TokenType token_type = SkipChar('!') ? TokenType.img : TokenType.link; // Opening '[' if (!SkipChar('[')) { return(null); } // Is it a foonote? var savepos = Position; if (m_Markdown.ExtraMode && token_type == TokenType.link && SkipChar('^')) { SkipLinespace(); // Parse it string id; if (SkipFootnoteID(out id) && SkipChar(']')) { // Look it up and create footnote reference token int footnote_index = m_Markdown.ClaimFootnote(id); if (footnote_index >= 0) { // Yes it's a footnote return(CreateToken(TokenType.footnote, new FootnoteReference(footnote_index, id))); } } // Rewind Position = savepos; } if (DisableLinks && token_type == TokenType.link) { return(null); } bool extraMode = m_Markdown.ExtraMode; // Find the closing square bracket, allowing for nesting, watching for // escapable characters Mark(); int depth = 1; while (!Eof) { char ch = Current; if (ch == '[') { depth++; } else if (ch == ']') { depth--; if (depth == 0) { break; } } this.SkipEscapableChar(extraMode); } // Quit if end if (Eof) { return(null); } // Get the link text and unescape it string link_text = Utils.UnescapeString(Extract(), extraMode); // The closing ']' SkipForward(1); // Save position in case we need to rewind savepos = Position; // Inline links must follow immediately if (SkipChar('(')) { // Extract the url and title var link_def = LinkDefinition.ParseLinkTarget(this, null, m_Markdown.ExtraMode); if (link_def == null) { return(null); } // Closing ')' SkipWhitespace(); if (!SkipChar(')')) { return(null); } List <string> specialAttributes = null; if (extraMode && DoesMatch('{')) { int end; specialAttributes = Utils.StripSpecialAttributes(this.Input, this.Position, out end); if (specialAttributes != null) { Position = end; } } // Create the token return(CreateToken(token_type, new LinkInfo(link_def, link_text, specialAttributes))); } // Optional space or tab if (!SkipChar(' ')) { 
SkipChar('\t'); } // If there's line end, we're allow it and as must line space as we want // before the link id. if (Eol) { SkipEol(); SkipLinespace(); } // Reference link? string link_id = null; if (Current == '[') { // Skip the opening '[' SkipForward(1); // Find the start/end of the id Mark(); if (!Find(']')) { return(null); } // Extract the id link_id = Extract(); // Skip closing ']' SkipForward(1); } else { // Rewind to just after the closing ']' Position = savepos; } // Link id not specified? if (string.IsNullOrEmpty(link_id)) { // Use the link text (implicit reference link) link_id = Utils.NormalizeLineEnds(link_text); // If the link text has carriage returns, normalize // to spaces if (!object.ReferenceEquals(link_id, link_text)) { while (link_id.Contains(" \n")) { link_id = link_id.Replace(" \n", "\n"); } link_id = link_id.Replace("\n", " "); } } // Find the link definition abort if not defined var def = m_Markdown.GetLinkDefinition(link_id); if (def == null) { return(null); } // Create a token. // [FB]: Currently not supported: special attributes on reference links. return(CreateToken(token_type, new LinkInfo(def, link_text, null))); }
/// <summary>
/// Creates a token with arbitrary content and an optional (row, col) location,
/// defaulting to (0, 0).
/// </summary>
public Token(TokenType type, object content, int row = 0, int col = 0)
{
    Location = new Position(row, col);
    Content = content;
    TokenType = type;
}
/// <summary>
/// Looks up the infix precedence for a lookahead token via direct table indexing
/// (the table is indexed by the token type's underlying integer value).
/// </summary>
static Precedence InfixPrecedenceOf(TokenType la)
{
    var slot = (int)la;
    return _infixPrecedenceTable[slot];
}
/// <summary>
/// Builds a token of the given type from the source span [_start, _index),
/// stamping it with the current file/line/column position.
/// </summary>
private Token MakeToken(TokenType type)
{
    var lexeme = _source.Substring(_start, _index - _start);
    var where = new TokenPosition(_start, _line, _column, _fname);
    return new Token(type, lexeme, where);
}
/// <summary>
/// Associates a component selector with a token type, replacing any existing
/// registration for that type.
/// </summary>
public void Register(ComponentSelector selector, TokenType type)
{
    selectors[type] = selector;
}
/// <summary>
/// Registers a component for a token type unconditionally, by wrapping it in an
/// UnconditionalSelector and delegating to the selector overload.
/// </summary>
public void Register(CompilerComponent component, TokenType type)
{
    var selector = new UnconditionalSelector(component);
    Register(selector, type);
}
/// <summary>
/// Creates a token with its value and the line number it came from.
/// </summary>
public Token(TokenType type, string val, int lineNumber)
{
    this.LineNumber = lineNumber;
    this.Value = val;
    this.Type = type;
}
/// <summary>
/// Creates a token with its text; the start position is stored in Position.
/// </summary>
public Token(TokenType type, string text, int startPosition)
{
    Position = startPosition;
    Text = text;
    Type = type;
}
// Parses one statement, dispatching on one/two/three-token lookahead:
// function calls, i++/i--/i+=/i-= self-expressions, variable declarations,
// return, if/while/for, function declarations, this, try/catch — otherwise
// throws ParserException for an unhandled token type.
// NOTE(review): in the self-expression branch the parentheses close early —
// the condition parses as (type==VARIABLE && (i++ || i-- clauses)) || (i+=/i-= clause),
// so the compound-assignment test is NOT guarded by type==VARIABLE. That looks
// unintended; confirm against the grammar before changing, since fixing it
// alters which token sequences reach Expression().
private Node Statement() { // | (ID LPAREN) => functionCall END_STATEMENT // | VARIABLE! ASSIGN! expression END_STATEMENT // | RETURN expression END_STATEMENT // | IF | WHILE | FOR_EACH TokenType type = LookAhead(1); if (type == TokenType.VARIABLE && LookAhead(2) == TokenType.LPAREN) { Node funcCall = FunctionCall(); Match(TokenType.END_STATEMENT); return(funcCall); } else if (type == TokenType.VARIABLE && ((LookAhead(2) == TokenType.PLUS && LookAhead(3) == TokenType.PLUS) || // i++; (LookAhead(2) == TokenType.MINUS) && LookAhead(3) == TokenType.MINUS) || // i--; ((LookAhead(2) == TokenType.PLUS || LookAhead(2) == TokenType.MINUS) && // i+=n; or i-=n; LookAhead(3) == TokenType.EQUAL)) { Node selfExpression = Expression(); Match(TokenType.END_STATEMENT); return(selfExpression); } else if (type == TokenType.VARIABLE || type == TokenType.VAR) { return(VariableDeclaration()); } else if (type == TokenType.RETURN) { SourcePosition pos = Match(TokenType.RETURN).Position; Node expression = Expression(); Match(TokenType.END_STATEMENT); return(new ReturnNode(pos, expression)); } else if (type == TokenType.IF) { return(If()); } else if (type == TokenType.WHILE) { return(While()); } else if (type == TokenType.FOR) { return(For()); } else if (type == TokenType.FUNCTION) { return(FunctionDeclaration()); } else if (type == TokenType.THIS) { return(This()); } else if (type == TokenType.TRY) { return(TryCatch()); } else { // We only get here if there is token from the lexer // that is not handled by parser yet. throw new ParserException("Unknown token type " + type); } }
/// <summary>
/// Creates a token holding its type and string value.
/// </summary>
public Token(TokenType type, string s)
{
    sval = s;
    tokenType = type;
}
/// <summary>
/// Convenience overload: fetches the value at a fixed index by delegating to the
/// ref-index overload with a throwaway local cursor.
/// </summary>
protected object GetValue(ParseTree tree, TokenType type, int index)
{
    int cursor = index;
    return GetValue(tree, type, ref cursor);
}
/// <summary>
/// Hash code derives solely from TokenType.
/// NOTE(review): confirm the corresponding Equals also compares only TokenType,
/// otherwise equal objects could hash differently.
/// </summary>
public override int GetHashCode()
{
    var typeHash = TokenType.GetHashCode();
    return typeHash;
}
/// <summary>
/// Creates a token holding an arbitrary value and its token type.
/// </summary>
public Token(object value, TokenType tokenType)
{
    TokenType = tokenType;
    Value = value;
}
/// <summary>
/// Single-character convenience constructor: wraps the character in a one-char
/// string and delegates to the string-based constructor.
/// </summary>
public CssToken(TokenType tokenType, char ch, CssContext context)
    : this(tokenType, new string(ch, 1), context)
{
}
/// <summary>
/// Raises an ApplicationException reporting an unexpected end of file and the
/// token type that was expected instead.
/// </summary>
void UnexpectedEof(TokenType expected)
{
    var message = string.Format("Unexpected End of File; expected {0}", expected);
    throw new ApplicationException(message);
}
/// <summary>
/// Convenience overload: scans and checks the expected token type, passing
/// false for the second argument of the two-argument overload.
/// </summary>
Token CheckScan(TokenType expected)
{
    return CheckScan(expected, false);
}
/// <summary>
/// Initializes a token group over the given token sequence.
/// Fix: the original body read <c>Values = Values;</c> — a self-assignment
/// (compiler warning CS1717) that left the property at whatever the base
/// constructor produced; it now stores the <paramref name="values"/> argument.
/// </summary>
public TokenGroup(IEnumerable <Token> values, TokenType tokenType) : base(values, tokenType)
{
    Values = values;
}
/// <summary>
/// True when the next (peeked) token has the given type; does not consume it.
/// </summary>
public bool Pass(TokenType type)
{
    var upcoming = Peek();
    return upcoming.Type == type;
}
/// <summary>
/// Raises an ApplicationException describing an unexpected token, its source
/// location, and the token type that was expected.
/// </summary>
void UnexpectedToken(TokenType expected, Token got)
{
    var message = string.Format("Unexpected token {0} at [{1}:{2}]; expected {3}", got.Type, got.SourceLine, got.SourceColumn, expected);
    throw new ApplicationException(message);
}
/// <summary>
/// Creates a token holding its type and string value.
/// </summary>
public FPTToken(TokenType tokenType, string value)
{
    this.value = value;
    this.tokenType = tokenType;
}
/// <summary>
/// True when the next (peeked) token matches both the given type and value;
/// the token is not consumed. Short-circuits, so the value is only inspected
/// when the type matches.
/// </summary>
public bool Pass(TokenType type, string value)
{
    bool typeMatches = Peek().Type == type;
    return typeMatches && Peek().Value == value;
}
/// <summary>
/// Classifies the character at the current index as an operator/punctuation
/// token, peeking one character ahead for two-character operators (&lt;=, &gt;=, !=).
/// Side effect: sets <c>simbolWidth</c> to the consumed width (2, 1, or 0 when
/// unrecognized) and returns TokenType.UNKNOW for unmatched characters.
/// A lone '!' throws TokenMismatchException.
/// NOTE(review): ',' maps to TokenType.SEMICOLON — presumably a naming quirk of
/// this token set; confirm against the parser before renaming anything.
/// </summary>
private TokenType IsSymbol(int simbol) { simbolWidth = 0; TokenType token = TokenType.UNKNOW; int nextSimbol = -1; if ((index + 1) < len) { nextSimbol = _string[index + 1]; } switch (simbol) { case '{': token = TokenType.LCURLY; simbolWidth = 1; break; case '}': token = TokenType.RCURLY; simbolWidth = 1; break; case '[': token = TokenType.LBRACK; simbolWidth = 1; break; case ']': token = TokenType.RBRACK; simbolWidth = 1; break; case ':': token = TokenType.COLON; simbolWidth = 1; break; case ',': token = TokenType.SEMICOLON; simbolWidth = 1; break; case '.': token = TokenType.DOT; simbolWidth = 1; break; case '<': if (nextSimbol == '=') { token = TokenType.LEQUAL; simbolWidth = 2; } else { token = TokenType.LESS; simbolWidth = 1; } break; case '>': if (nextSimbol == '=') { token = TokenType.GEQUAL; simbolWidth = 2; } else { token = TokenType.GREATER; simbolWidth = 1; } break; case '=': token = TokenType.EQUAL; simbolWidth = 1; break; case '!': if (nextSimbol == '=') { token = TokenType.NEQUAL; simbolWidth = 2; } else { throw new TokenMismatchException("only ! found ,expected !="); } break; case '(': token = TokenType.LPARENT; simbolWidth = 1; break; case ')': token = TokenType.RPARENT; simbolWidth = 1; break; case '-': token = TokenType.SUB; simbolWidth = 1; break; case '+': token = TokenType.PLUS; simbolWidth = 1; break; case '*': token = TokenType.MUL; simbolWidth = 1; break; case '/': token = TokenType.DIV; simbolWidth = 1; break; case '?': token = TokenType.QUESTION; simbolWidth = 1; break; } return(token); }
// Parses a block of statements (the content between { and }).
// Statements are parsed until a token of type 'finished' is consumed. When
// readLeftBrace is true but the next token is not '{', a single statement is
// parsed instead (brace-less block form). Parsing is bracketed by
// Begin/EndExecutable and the finished ScriptExecutable is returned.
private ScriptExecutable ParseStatementBlock(Executable_Block block, bool readLeftBrace, TokenType finished) { BeginExecutable(block); if (readLeftBrace && PeekToken().Type != TokenType.LeftBrace) { ParseStatement(); } else { if (readLeftBrace) { ReadLeftBrace(); } TokenType tokenType; while (HasMoreTokens()) { tokenType = ReadToken().Type; if (tokenType == finished) { break; } UndoToken(); ParseStatement(); } } ScriptExecutable ret = m_scriptExecutable; ret.EndScriptInstruction(); EndExecutable(); return(ret); }
/// <summary>
/// Wraps the DuplicateTokenEx P/Invoke: on failure the out handle is forced to
/// IntPtr.Zero instead of throwing, so callers must check the handle.
/// NOTE(review): the Win32 last error is discarded here — consider surfacing it
/// if failures need diagnosing.
/// </summary>
public static void DuplicateToken(IntPtr token, TokenAccessFlags tokenAccess, SecurityImpersonationLevel se, TokenType type, out IntPtr duplicated) { if (!DuplicateTokenEx(token, tokenAccess, IntPtr.Zero, se, type, out duplicated)) { duplicated = IntPtr.Zero; } }
/// <summary>
/// Creates a value-less token: the value is initialized to the empty string.
/// </summary>
public FPTToken(TokenType tokenType)
{
    value = string.Empty;
    this.tokenType = tokenType;
}
/// <summary>
/// Creates a CSS token with its text; the context is defensively cloned so the
/// token is not affected by later mutations of the caller's context.
/// </summary>
public CssToken(TokenType tokenType, [Localizable(false)] string text, CssContext context)
{
    m_context = context.Clone();
    m_text = text;
    m_tokenType = tokenType;
}
/// <summary>
/// Produces the next token from the character stream, or null at end of input.
/// Order of attempts: operator/punctuation (via IsSymbol, consuming simbolWidth
/// characters), single-quoted STRING, numeric literal (INT, or DOUBLE when an
/// integer is immediately followed by '.' and more digits — any other adjacent
/// symbol ends the INT; a non-symbol, non-space character after digits throws
/// TokenMismatchException), then identifier/keyword (leading '_' or letter;
/// keywords are resolved via GetKeyword, otherwise ID). Anything else yields null.
/// </summary>
protected Token GetNextTokenFromStream() { SkipSpaces(); int simbol = Peek(); if (simbol == -1) { return(null); } String value; TokenType tokenType = IsSymbol(simbol); if (tokenType != TokenType.UNKNOW) { value = _string.Substring(index, simbolWidth); Skip(simbolWidth); return(new Token(tokenType, value)); } if (simbol == '\'') { value = ReadString(); return(new Token(TokenType.STRING, value)); } else if (char.IsDigit((char)simbol)) { value = ReadInteger(); simbol = Peek(); if (simbol == -1 || IsWhiteSpace(simbol)) { return(new Token(TokenType.INT, value)); } TokenType token = IsSymbol(simbol); if (token == TokenType.DOT) { Read(); return(new Token(TokenType.DOUBLE, value + "." + ReadInteger())); } else if (token != TokenType.UNKNOW) { return(new Token(TokenType.INT, value)); } else { throw new TokenMismatchException(String.Format("expecting number at COL: {0} ROW: {1}", Col, Row)); } } else if (simbol == '_' || Char.IsLetter((char)simbol)) { value = ReadWord(simbol); TokenType keyword = GetKeyword(value); if (keyword != TokenType.UNKNOW) { return(new Token(keyword, value)); } return(new Token(TokenType.ID, value)); } return(null); }
/// <summary>
/// True when the token at the current position exists and matches both the
/// given token class and value.
/// Fix: the original called <c>ToString()</c> on the string parameter itself —
/// a no-op that additionally threw NullReferenceException for a null value.
/// The comparison now treats null like any other ordinal comparison operand.
/// </summary>
public bool MatchToken(TokenType clazz, string value)
{
    if (position >= tokens.Count)
        return false;
    var candidate = tokens[position];
    return candidate.TokenClass == clazz && candidate.Value.ToString() == value;
}
/// <summary>
/// P/Invoke: advapi32 DuplicateTokenEx using SafeTokenHandle wrappers — duplicates
/// an access token into a new primary or impersonation token. Returns false on
/// failure; the duplicate handle is passed by ref and owns its lifetime via the
/// SafeHandle. NOTE(review): the DllImport attribute is not visible in this chunk —
/// confirm SetLastError = true at the declaration site.
/// </summary>
internal static extern bool DuplicateTokenEx([In] SafeTokenHandle ExistingTokenHandle, [In] TokenAccessLevels DesiredAccess, [In] IntPtr TokenAttributes, [In] SecurityImpersonationLevel ImpersonationLevel, [In] TokenType TokenType, [In][Out] ref SafeTokenHandle DuplicateTokenHandle);