/// <summary>
/// Initializes a new instance of the <see cref="BinaryExpression" /> class
/// from its location, both operands and the operator token type.
/// </summary>
/// <param name="location">The AST location of the expression.</param>
/// <param name="left">The left-hand operand.</param>
/// <param name="right">The right-hand operand.</param>
/// <param name="type">The operator token type.</param>
public BinaryExpression(IASTLocation location, IExpression left, IExpression right, ITokenType type)
{
    Location = location;
    Left = left;
    Right = right;
    Type = type;
}
/// <summary>
/// Initializes a new instance of the <see cref="Syntax" /> class.
/// </summary>
/// <param name="tokenTypes">The token types.</param>
/// <param name="rightToLeftParsing">Indicates if the parsing should be done from the right to the left direction.</param>
/// <param name="perfectMatchOnly">Indicates if the syntax requires that all input should be consumed in order to satisfy it.</param>
/// <param name="culture">The syntax culture.</param>
/// <exception cref="ArgumentNullException"><paramref name="tokenTypes"/> or <paramref name="culture"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="tokenTypes"/> is empty.</exception>
public Syntax(ITokenType[] tokenTypes, bool rightToLeftParsing, bool perfectMatchOnly, CultureInfo culture)
{
    if (tokenTypes == null)
        throw new ArgumentNullException(nameof(tokenTypes));
    if (tokenTypes.Length == 0)
        // Fixed grammar of the error message ("must contains" -> "must contain").
        throw new ArgumentException("A syntax must contain at least one token type", nameof(tokenTypes));
    if (culture == null)
        throw new ArgumentNullException(nameof(culture));

    TokenTypes = tokenTypes;
    RightToLeftParsing = rightToLeftParsing;
    PerfectMatchOnly = perfectMatchOnly;
    Culture = culture;
}
/// <summary>
/// Constructs a Token object with given <see cref="Source"/> and <see cref="ITokenType"/>.
/// </summary>
/// <param name="source">The source of the token.</param>
/// <param name="tokenType">The type of the token.</param>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
public Token([NotNull] Source source, [NotNull] ITokenType tokenType)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }
    if (tokenType == null)
    {
        throw new ArgumentNullException(nameof(tokenType));
    }

    Source = source;
    TokenType = tokenType;
    LineNumber = source.LineNumber;
    Position = source.CurrentPosition;

    // ReSharper disable once VirtualMemberCallInConstructor => You should follow LSP when deriving and then it will be ok.
    // Virtual on purpose: derived tokens may extract a different amount of characters from the source.
    Extract();
}
/// <summary>
/// Verifies that a fixed-source token type can be lexed in full: exactly one
/// token is produced, with the right type, value, and source span.
/// </summary>
/// <param name="tokenType">The fixed-source token type under test.</param>
public void CheckCanLexFixedTokens(ITokenType tokenType)
{
    // Arrange: a lexer positioned at the start of a synthetic "test" source.
    var startPosition = new SourcePosition("test", 1, 0, 0);
    var lexer = new Lex.Lexer();

    // Act. Reuse startPosition instead of constructing a second, identical
    // SourcePosition (the original built the same value twice).
    var results = lexer.Lex(tokenType.FixedSource, startPosition).ToList();

    // Assert: one token that spans the whole fixed source.
    Assert.That(results.Count == 1);
    Assert.That(results[0].Type == tokenType);
    Assert.That(results[0].Value == tokenType.FixedSource);
    Assert.AreEqual(results[0].Location.Begin, startPosition);
    // The end position is the start advanced over every character of the source.
    Assert.AreEqual(
        results[0].Location.End,
        tokenType.FixedSource.Aggregate(startPosition, (pos, character) => pos.Advance(character, lexer.tabsize)));
}
/// <summary>
/// Finds the index of the last token in <paramref name="tokens"/> whose type has
/// the same runtime type as <paramref name="tokenType"/>.
/// </summary>
/// <param name="tokens">The tokens to scan.</param>
/// <param name="tokenType">The token type to look for.</param>
/// <returns>The index of the last match, or -1 when none exists.</returns>
private int GetIndexOfLastOpenedAndNotClosed(Token[] tokens, ITokenType tokenType)
{
    // Hoist the target runtime type out of the loop; scan from the end backwards.
    var targetType = tokenType.GetType();
    for (var index = tokens.Length - 1; index >= 0; index--)
    {
        if (tokens[index].Type.GetType() == targetType)
        {
            return index;
        }
    }
    return -1;
}
/// <summary>
/// Matches <paramref name="input"/> at <c>Offset</c>: against the supplied
/// <paramref name="tokenType"/> when it is non-null, otherwise against the first
/// of <c>TokenTypes</c> that yields a non-empty match.
/// </summary>
/// <param name="input">The text being lexed.</param>
/// <param name="tokenType">A specific token type to try, or null to scan all known types.</param>
/// <param name="match">Receives the matched text, or the empty string when nothing matched.</param>
/// <returns>The token type that matched, or null when none did.</returns>
protected override ITokenType Match(string input, ITokenType tokenType, out string match)
{
    string found = null;
    if (tokenType != null)
    {
        // NOTE(review): assumes ITokenType.Match never returns null — a null result
        // would throw NullReferenceException on .Length here; confirm the contract.
        tokenType = (found = tokenType.Match(input, Offset)).Length > 0 ? tokenType : null;
    }
    else
    {
        // The lambda deliberately writes through to 'found' so the winning
        // (or last attempted) match text survives FirstOrDefault.
        tokenType = TokenTypes.FirstOrDefault(item => (found = item.Match(input, Offset)).Length > 0);
    }
    match = found ?? string.Empty;
    return(tokenType);
}
/// <summary>
/// Consumes the current token when its type equals <paramref name="type"/>;
/// otherwise sets the error flag. Always returns this stream for fluent chaining,
/// and becomes a no-op once an error has been recorded.
/// </summary>
/// <param name="type">The expected token type.</param>
/// <returns>This stream.</returns>
public TokenStream Is(ITokenType type)
{
    if (_error)
    {
        return this;
    }

    // Short-circuit keeps the indexer from running when we are past the end.
    var mismatch = Position >= _tokens.Count || _tokens[Position].Type != type;
    if (mismatch)
    {
        _error = true;
        return this;
    }

    _lastElement = _tokens[Position];
    Position++;
    return this;
}
/// <summary>
/// Decides whether <paramref name="ch"/> closes one of <paramref name="possibleTypes"/>,
/// judged against the characters immediately left and right of the reader position.
/// On success, records the closing state and the closing type in the instance fields.
/// </summary>
/// <param name="ch">The candidate closing character.</param>
/// <param name="possibleTypes">The token types that could be closed here.</param>
/// <returns>True when some type accepts the character as closing; otherwise false.</returns>
private bool CheckIfClosing(char ch, IEnumerable <ITokenType> possibleTypes)
{
    // A space stands in for the left neighbour at the very start of the text.
    var left = reader.Position == 0 ? ' ' : MdText[reader.Position - 1];
    var right = GetRightSide(reader.Position + 1);

    foreach (var tokenType in possibleTypes)
    {
        if (tokenType.CheckIfClosing(ch, left, right))
        {
            currentUnderscoreIsClosing = true;
            currentUnderscoreType = tokenType;
            return true;
        }
    }

    return false;
}
/// <summary>
/// Converts to a <see cref="Syntax" /> instance, using the provided token types.
/// </summary>
/// <param name="tokenTypeTypeDictionary">The token type dictionary.</param>
/// <param name="culture">The syntax culture.</param>
/// <returns>The syntax built from this definition's tokens.</returns>
/// <exception cref="InvalidOperationException">A token name appears more than once in the syntax definition.</exception>
internal Syntax GetSyntax(IDictionary <string, Type> tokenTypeTypeDictionary, CultureInfo culture)
{
    // Hoist the count: Tokens.Count() is a LINQ call that the original
    // re-evaluated on every loop iteration.
    var tokenCount = Tokens.Count();
    var tokenTypes = new ITokenType[tokenCount];
    // Names of already-created token types; makes the duplicate check O(1)
    // per token instead of rescanning the array (O(n^2) overall).
    var createdNames = new HashSet<string>();
    for (var i = 0; i < tokenCount; i++)
    {
        var syntaxToken = Tokens[i];
        if (createdNames.Contains(syntaxToken.Name))
        {
            throw new InvalidOperationException(
                $"The token name '{syntaxToken.Name}' is duplicated in the syntax definition");
        }
        tokenTypes[i] = syntaxToken.ToTokenType(tokenTypeTypeDictionary);
        // Record the created type's name, as the original compared against it.
        createdNames.Add(tokenTypes[i].Name);
    }
    var syntax = new Syntax(tokenTypes, RightToLeftParsing, PerfectMatchOnly, culture);
    return(syntax);
}
/// <summary>
/// Initializes a <see cref="TokenCreator"/> that pairs a token type with the
/// regular expression used to recognize it.
/// </summary>
/// <param name="tokenType">The token type this creator produces.</param>
/// <param name="regex">The pattern that matches this token's text.</param>
public TokenCreator(ITokenType tokenType, Regex regex)
{
    TokenType = tokenType;
    Regex = regex;
}
/// <summary>
/// Builds a syntax-error exception for the end-of-input case, i.e. when there is
/// no offending token to report. Delegates to the four-argument overload.
/// </summary>
/// <param name="parameterName">The name of the offending parameter.</param>
/// <param name="lexer">The lexer that produced the error.</param>
/// <param name="expectedTokenType">The token type that was expected.</param>
/// <returns>The exception describing the syntax error.</returns>
protected static Exception SyntaxError(string parameterName, ILexer lexer, ITokenType expectedTokenType)
{
    return SyntaxError(parameterName, lexer, null, expectedTokenType);
}
/// <summary>
/// Builds an <see cref="ArgumentException" /> describing a syntax error at the
/// given token. A null token or expected type falls back to EOF.
/// </summary>
/// <param name="parameterName">The name of the offending parameter.</param>
/// <param name="lexer">The lexer that produced the error.</param>
/// <param name="token">The offending token, or null for end of input.</param>
/// <param name="expectedTokenType">The token type that was expected, or null for EOF.</param>
/// <returns>The exception describing the syntax error.</returns>
protected static Exception SyntaxError(string parameterName, ILexer lexer, IToken token, ITokenType expectedTokenType)
{
    if (parameterName == null)
    {
        throw new ArgumentNullException(nameof(parameterName));
    }
    if (lexer == null)
    {
        throw new ArgumentNullException(nameof(lexer));
    }

    var offendingToken = token ?? lexer.NewToken(TokenType.EOF, string.Empty);
    var expected = expectedTokenType ?? TokenType.EOF;
    var message = $"syntax error: {expected} expected but found {offendingToken} at {offendingToken.SourceInfo}";
    return new ArgumentException(message, parameterName);
}
/// <summary>
/// Converts to a <see cref="Syntax" /> instance, using the provided token types.
/// </summary>
/// <param name="tokenTypeTypeDictionary">The token type dictionary.</param>
/// <param name="culture">The syntax culture.</param>
/// <returns>The syntax built from this definition's tokens.</returns>
/// <exception cref="InvalidOperationException">A token name appears more than once in the syntax definition.</exception>
internal Syntax GetSyntax(IDictionary<string, Type> tokenTypeTypeDictionary, CultureInfo culture)
{
    // Hoist the count: Tokens.Count() is a LINQ call that the original
    // re-evaluated on every loop iteration.
    var tokenCount = Tokens.Count();
    var tokenTypes = new ITokenType[tokenCount];
    // Names of already-created token types; makes the duplicate check O(1)
    // per token instead of rescanning the array (O(n^2) overall).
    var createdNames = new HashSet<string>();
    for (var i = 0; i < tokenCount; i++)
    {
        var syntaxToken = Tokens[i];
        if (createdNames.Contains(syntaxToken.Name))
        {
            throw new InvalidOperationException(
                $"The token name '{syntaxToken.Name}' is duplicated in the syntax definition");
        }
        tokenTypes[i] = syntaxToken.ToTokenType(tokenTypeTypeDictionary);
        // Record the created type's name, as the original compared against it.
        createdNames.Add(tokenTypes[i].Name);
    }
    var syntax = new Syntax(tokenTypes, RightToLeftParsing, PerfectMatchOnly, culture);
    return syntax;
}
/// <summary>
/// Initializes a new instance of the <see cref="Token" /> class.
/// </summary>
/// <param name="value">The value carried by the token.</param>
/// <param name="source">The source the token came from.</param>
/// <param name="type">The token type.</param>
public Token(object value, TokenSource source, ITokenType type)
{
    Value = value;
    Source = source;
    Type = type;
}
/// <summary>
/// Initializes a token from its type and text.
/// </summary>
/// <param name="type">The token type.</param>
/// <param name="text">The token text; must not be null.</param>
/// <exception cref="ArgumentNullException"><paramref name="text"/> is null.</exception>
public Token(ITokenType type, string text)
{
    if (text == null)
    {
        throw new ArgumentNullException(nameof(text));
    }

    Type = type;
    Text = text;
}
/// <summary>
/// Initializes a token from its type, text value and position index.
/// </summary>
/// <param name="type">The token type.</param>
/// <param name="value">The token text.</param>
/// <param name="index">The index of the token in the input.</param>
public Token(ITokenType type, string value, int index)
{
    TokenType = type;
    Value = value;
    Index = index;
}
/// <summary>
/// Matches a bounded sequence: the <paramref name="left"/> delimiter, then a token
/// of the given <paramref name="token"/> type, then the <paramref name="right"/> delimiter.
/// </summary>
/// <param name="left">The opening delimiter text.</param>
/// <param name="token">The token type expected between the delimiters.</param>
/// <param name="right">The closing delimiter text.</param>
/// <returns>This stream, for fluent chaining.</returns>
public TokenStream Bounds(string left, ITokenType token, string right)
{
    var afterLeft = Is(left);
    var afterToken = afterLeft.Is(token);
    return afterToken.Is(right);
}
/// <summary>
/// Splits <paramref name="compressedCode"/> into tokens with a maximal-munch scan:
/// characters are appended to a growing word while some registered token type still
/// matches; when the match breaks, the longest matching prefix is emitted and the
/// breaking character is rescanned as the start of a new word. Space and newline
/// tokens are consumed (newlines advance the line counter) but not emitted.
/// </summary>
/// <param name="compressedCode">The source text to tokenize.</param>
/// <returns>The recognized tokens, each tagged with its line number.</returns>
/// <exception cref="LexicalAnalizeException">The trailing text matches no token type.</exception>
private static List <Token> DivideToTokens(this string compressedCode)
{
    var result = new List <Token>();
    var word = new StringBuilder();
    int position = 0;
    var types = TokenTypePool.FastResolvingTypes;
    bool match = false;
    ITokenType lastType = null;
    int lineNumber = 1;
    while (position < compressedCode.Length)
    {
        word.Append(compressedCode[position]);
        match = false;
        // Does the word (including the newly appended character) still match a type?
        foreach (var type in types)
        {
            if (type.Is(word.ToString()))
            {
                lastType = type;
                match = true;
                break;
            }
        }
        if (!match && lastType != null)
        {
            // The new character broke the match: emit the previous word (the Remove
            // strips that breaking character) unless it was whitespace/newline.
            if (lastType != TokenTypePool.Space && lastType != TokenTypePool.NextLine)
            {
                result.Add(new Token(lastType, word.Remove(word.Length - 1, 1).ToString(), lineNumber));
            }
            if (lastType == TokenTypePool.NextLine)
            {
                lineNumber++;
            }
            lastType = null;
            word.Clear();
            // NOTE: position is intentionally NOT advanced here, so the breaking
            // character is re-processed as the first character of the next word.
        }
        else
        {
            position++;
        }
    }
    // End of input: the remaining word must itself be a known token.
    if (lastType == null)
    {
        string message = "";
        if (result.Count == 0)
        {
            message = "Unknown token in code start position.";
        }
        else
        {
            var token = result.Last();
            message = $"Unknown token after element {token.Value} at line {token.Line}.";
        }
        throw new LexicalAnalizeException(message);
    }
    // Emit the final word, again skipping whitespace/newline tokens.
    if (lastType != TokenTypePool.Space && lastType != TokenTypePool.NextLine)
    {
        result.Add(new Token(lastType, word.ToString(), lineNumber));
    }
    return(result);
}
/// <summary>
/// Tokenizes the markdown text by scanning up to each opening/closing marker while
/// tracking currently-open token types on a stack. After the scan, any types that
/// were opened but never closed are merged back into their preceding token.
/// </summary>
/// <returns>
/// The recognized tokens. NOTE(review): the merge pass nulls out merged slots and
/// the final ToArray does not filter them, so the result may contain null entries —
/// confirm callers tolerate this.
/// </returns>
public Token[] GetTokens()
{
    var tokens = new List <Token>();
    while (reader.Position < MdText.Length - 1)
    {
        currentUnderscoreType = null;
        // The innermost open type (if any) restricts which types may open here.
        var previousType = typesNesting.Any() ? typesNesting.Peek() : null;
        var openTypes = previousType == null ? allTokenTypes : previousType.SupportedInnerTypes();
        // CheckIfOpen/CheckIfClosing set currentUnderscoreType/currentUnderscoreIsClosing
        // as side effects while ReadUntil scans.
        var token = reader.ReadUntil(ch => CheckIfOpen(ch, openTypes) || CheckIfClosing(ch, typesNesting));
        token.Type = previousType;
        if (!previousUnderscoreIsClosing)
        {
            token.Opened = true;
        }
        if (currentUnderscoreIsClosing)
        {
            token.Closed = true;
        }
        if (!currentUnderscoreIsClosing)
        {
            // An opening marker: push the newly opened type and keep the token.
            if (currentUnderscoreType != null)
            {
                typesNesting.Push(currentUnderscoreType);
            }
            tokens.Add(token);
        }
        else
        {
            // A closing marker. When it closes an outer type (mismatched nesting),
            // fold this token into the previous one and pop the intervening types.
            if (currentUnderscoreType.GetType() != typesNesting.Peek().GetType())
            {
                tokens[tokens.Count - 1].Concat(token);
                while (typesNesting.Peek().GetType() != currentUnderscoreType.GetType())
                {
                    typesNesting.Pop();
                }
            }
            else
            {
                tokens.Add(token);
            }
            typesNesting.Pop();
        }
        // Carry the closing flag into the next iteration, then reset it.
        previousUnderscoreIsClosing = currentUnderscoreIsClosing;
        currentUnderscoreIsClosing = false;
        if (currentUnderscoreType != null)
        {
            // Skip over the marker characters themselves.
            reader.Skip(currentUnderscoreType.GetMarker().Length);
        }
    }
    if (!typesNesting.Any() || typesNesting.Peek() == null)
    {
        return(tokens.ToArray());
    }
    // Some types were opened but never closed: merge each orphaned opener into
    // its preceding token and blank the merged slot.
    while (typesNesting.Count != 0)
    {
        var indexOfLastOpenedAndNotClosed = GetIndexOfLastOpenedAndNotClosed(tokens.Where(token => token != null).ToArray(), typesNesting.Pop());
        tokens[indexOfLastOpenedAndNotClosed - 1].Concat(tokens[indexOfLastOpenedAndNotClosed]);
        tokens[indexOfLastOpenedAndNotClosed] = null;
    }
    return(tokens.ToArray());
}
/// <summary>
/// Initializes a lexed token from its source location, type and textual value.
/// </summary>
/// <param name="loc">The source range the token spans.</param>
/// <param name="type">The token type.</param>
/// <param name="value">The token text.</param>
public LexedToken(SourceRange loc, ITokenType type, string value)
{
    Location = loc;
    Type = type;
    Value = value;
}
/// <summary>
/// Initializes a token from its type, text value and source line number.
/// </summary>
/// <param name="type">The token type.</param>
/// <param name="value">The token text.</param>
/// <param name="line">The 1-based line the token appears on.</param>
public Token(ITokenType type, string value, int line)
{
    Line = line;
    Type = type;
    Value = value;
}
/// <summary>
/// Initializes a token from its text, type and starting offset.
/// </summary>
/// <param name="text">The token text.</param>
/// <param name="type">The token type.</param>
/// <param name="start">The offset of the token's first character in the input.</param>
public Token(string text, ITokenType type, int start)
{
    Start = start;
    Text = text;
    Type = type;
}
/// <summary>
/// Hand-written scanner step: classifies the text at <c>Offset</c> in
/// <paramref name="input"/> as a parenthesis, a whitespace run, an identifier,
/// a double-quoted text literal, a number, or a single unexpected character.
/// </summary>
/// <param name="input">The text being lexed; must have a character at <c>Offset</c>.</param>
/// <param name="tokenType">Ignored by this implementation; classification is driven by the character class.</param>
/// <param name="match">Receives the matched text (empty for an unterminated quoted literal).</param>
/// <returns>The matched token type, or null for an unterminated quoted literal.</returns>
protected override ITokenType Match(string input, ITokenType tokenType, out string match)
{
    var at = Offset;
    var ch = input[at];
    var from = at;
    switch (ch)
    {
        case '(':
        case ')':
            // Single-character parenthesis token.
            match = new string(new[] { ch });
            return(ch == '(' ? Opening : Closing);
        default:
            if (char.IsWhiteSpace(ch))
            {
                // Consume a maximal run of whitespace.
                while ((++at < input.Length) && char.IsWhiteSpace(input[at])) { ; }
                match = input.Substring(from, at - from);
                return(WhiteSpace);
            }
            else if ((ch == '_') || (('A' <= ch) && (ch <= 'Z')) || (('a' <= ch) && (ch <= 'z')))
            {
                // Identifier: [_A-Za-z][_A-Za-z0-9]* (ASCII letters only).
                while (
                    (++at < input.Length)
                    &&
                    (
                        ((ch = input[at]) == '_')
                        || (('A' <= ch) && (ch <= 'Z'))
                        || (('a' <= ch) && (ch <= 'z'))
                        || (('0' <= ch) && (ch <= '9'))
                    )
                ) { ; }
                match = input.Substring(from, at - from);
                return(Identifier);
            }
            else if (ch == '"')
            {
                // Quoted text: scan to the closing quote. No escape sequences are
                // recognized; ch is reset so the loop's last read decides termination.
                ch = (char)0;
                while ((++at < input.Length) && ((ch = input[at]) != '"')) { ; }
                // An unterminated literal yields an empty match and a null type.
                match = ch == '"' ? input.Substring(from, at - from + 1) : string.Empty;
                return(match.Length > 0 ? Text : null);
            }
            else if (('0' <= ch) && (ch <= '9'))
            {
                // Number: a maximal run of ASCII digits (unsigned integer only).
                while (
                    (++at < input.Length)
                    && ('0' <= (ch = input[at]))
                    && (ch <= '9')
                ) { ; }
                match = input.Substring(from, at - from);
                return(Number);
            }
            else
            {
                // Anything else is reported as a single "unexpected" character.
                match = new string(new[] { ch });
                return(Unexpected);
            }
    }
}