/// <summary>
/// Checks whether the current token starts a code block (statement, expression or feature)
/// and, if so, parses the whole block and appends it as a child of the parent element.
/// </summary>
/// <param name="tokenNodeType">The current token type; may be <c>null</c>.</param>
/// <param name="parentElement">The parent element that receives the parsed code block.</param>
/// <returns><c>true</c> if a code block has been parsed, <c>false</c> otherwise.</returns>
private bool TryParseCodeBlock([CanBeNull] T4TokenNodeType tokenNodeType, [NotNull] CompositeElement parentElement) {
	if (tokenNodeType == null)
		return false;

	// Map the start token (<# / <#= / <#+) to the matching kind of code block.
	T4CodeBlock codeBlock = null;
	if (tokenNodeType == T4TokenNodeTypes.StatementStart)
		codeBlock = new T4StatementBlock();
	else if (tokenNodeType == T4TokenNodeTypes.ExpressionStart)
		codeBlock = new T4ExpressionBlock();
	else if (tokenNodeType == T4TokenNodeTypes.FeatureStart)
		codeBlock = new T4FeatureBlock();

	// Not a code block start token: nothing parsed.
	if (codeBlock == null)
		return false;

	AppendNewChild(parentElement, ParseCodeBlock(tokenNodeType, codeBlock));
	return true;
}
/// <summary>
/// Starts lexing the whole buffer from its beginning, in text scan mode.
/// </summary>
public void Start() {
	_currentTokenType = null;
	_scanMode = ScanMode.Text;
	_pos = 0;
	_length = _buffer.Length;
}
/// <summary>
/// Parses the buffer into the given parent element: directives and plain tokens are
/// consumed here, code-block tokens stop the loop (they are handled by the caller).
/// </summary>
/// <param name="parentElement">The root element receiving the parsed children.</param>
private void Parse([NotNull] CompositeElement parentElement) {
	// Consume tokens until a code block start token (or EOF) is reached.
	for (T4TokenNodeType tokenType = GetNonCodeBlockTokenType(parentElement);
		tokenType != null;
		tokenType = GetNonCodeBlockTokenType(parentElement)) {
		if (tokenType == T4TokenNodeTypes.DirectiveStart)
			ParseDirective(parentElement);
		else {
			AppendNewChild(parentElement, tokenType);
			Advance();
		}
	}

	// Flush any tokens the builder lexer skipped while matching.
	if (_builderLexer.HasSkippedTokens)
		_builderLexer.AppendSkippedTokens(parentElement);

	// Repair whitespace around directives that were never closed.
	if (_notClosedDirectives != null)
		FixTopLevelSpace(parentElement, _notClosedDirectives);
}
/// <summary>
/// Ensures the current token type has been computed, scanning the next token when necessary.
/// </summary>
private void LocateToken() {
	if (_currentTokenType != null)
		return;
	_currentTokenType = Scan();
}
/// <summary>
/// Starts lexing a sub-range of the buffer with a given lexer state.
/// </summary>
/// <param name="startOffset">The offset where lexing starts.</param>
/// <param name="endOffset">The offset where lexing stops.</param>
/// <param name="state">The scan mode of the lexer, as a raw state value.</param>
public void Start(int startOffset, int endOffset, uint state) {
	_currentTokenType = null;
	_scanMode = (ScanMode)state;
	_pos = startOffset;
	_length = endOffset;
}
/// <summary>
/// Parses a code block: appends the start token, then every following token
/// until a block end token (also appended) or EOF is reached.
/// </summary>
/// <param name="codeStartTokenNodeType">The token that opened the block (&lt;#/&lt;#+/&lt;#=).</param>
/// <param name="codeBlock">The code block element being filled.</param>
/// <returns>The filled <paramref name="codeBlock"/>.</returns>
private T4CodeBlock ParseCodeBlock([NotNull] T4TokenNodeType codeStartTokenNodeType, [NotNull] T4CodeBlock codeBlock) {
	// Append the code start token to the block.
	AppendNewChild(codeBlock, codeStartTokenNodeType);

	while (true) {
		T4TokenNodeType tokenType = Advance();

		// EOF: the block was never closed; record a missing BlockEnd and stop.
		if (tokenType == null) {
			AppendMissingToken(codeBlock, MissingTokenType.BlockEnd);
			break;
		}

		// A token that is neither Code nor BlockEnd means the closing tag is missing;
		// record it, but the unexpected token is still appended to the block below.
		if (tokenType != T4TokenNodeTypes.Code && tokenType != T4TokenNodeTypes.BlockEnd)
			AppendMissingToken(codeBlock, MissingTokenType.BlockEnd);

		AppendNewChild(codeBlock, tokenType);

		if (tokenType == T4TokenNodeTypes.BlockEnd)
			break;
	}
	return codeBlock;
}
/// <summary>
/// Scans the next token, recording its start and end offsets around the core scan.
/// </summary>
/// <returns>The scanned token type, as produced by <c>ScanCore</c>.</returns>
private T4TokenNodeType Scan() {
	_tokenStart = _pos;
	T4TokenNodeType result = ScanCore();
	_tokenEnd = _pos;
	return result;
}
/// <summary>
/// Parses a directive (&lt;#@ name attr="value" #&gt;) and appends it to the parent element.
/// Unclosed directives are remembered for later whitespace fix-up; include directives
/// additionally trigger parsing of the included file.
/// </summary>
/// <param name="parentElement">The parent element that receives the directive.</param>
private void ParseDirective([NotNull] CompositeElement parentElement) {
	var directive = new T4Directive();

	// Append the directive start token (<#@).
	AppendNewChild(directive, T4TokenNodeTypes.DirectiveStart);
	Advance();

	// Build the directive (name and attributes) until a tag token or EOF is reached.
	var builder = new DirectiveBuilder(this);
	T4TokenNodeType currentType = GetTokenType();
	while (currentType != null && !currentType.IsTag) {
		if (currentType == T4TokenNodeTypes.Name)
			builder.AddName();
		else if (currentType == T4TokenNodeTypes.Equal)
			builder.AddEqual();
		else if (currentType == T4TokenNodeTypes.Quote)
			builder.AddQuote();
		else if (currentType == T4TokenNodeTypes.Value)
			builder.AddValue();
		currentType = Advance();
	}
	builder.Finish(directive);

	// Append the block end token when available; otherwise record the directive as not closed.
	if (currentType == T4TokenNodeTypes.BlockEnd) {
		AppendNewChild(directive, T4TokenNodeTypes.BlockEnd);
		Advance();
	}
	else {
		AppendMissingToken(directive, MissingTokenType.BlockEnd);
		if (_notClosedDirectives == null)
			_notClosedDirectives = new List<T4Directive>();
		_notClosedDirectives.Add(directive);
	}
	AppendNewChild(parentElement, directive);

	// An include directive pulls another file into the parse.
	if (directive.IsSpecificDirective(_directiveInfoManager.Include))
		HandleIncludeDirective(directive, parentElement);
}
/// <summary>
/// Creates a leaf tree element for the token the builder lexer is currently positioned on.
/// </summary>
/// <returns>The created token element; asserts that a current token type exists.</returns>
private LeafElementBase CreateCurrentToken() {
	T4TokenNodeType tokenType = GetTokenType();
	Assertion.AssertNotNull(tokenType, "tokenType == null");
	return tokenType.Create(
		_builderLexer.Buffer,
		new TreeOffset(_builderLexer.TokenStart),
		new TreeOffset(_builderLexer.TokenEnd));
}
/// <summary>
/// Restores the lexer state from a previously captured snapshot; a <c>null</c> snapshot is ignored.
/// </summary>
/// <param name="state">An instance of <see cref="State"/>, or <c>null</c>.</param>
private void RestoreState([CanBeNull] State state) {
	if (state != null) {
		_pos = state.Pos;
		_tokenStart = state.TokenStart;
		_tokenEnd = state.TokenEnd;
		_scanMode = state.ScanMode;
		_currentTokenType = state.CurrentTokenType;
	}
}
/// <summary>
/// Creates a token element for the builder lexer's current token range and appends it to the tree.
/// </summary>
/// <param name="parentElement">The parent element.</param>
/// <param name="tokenNodeType">Type of the token node to create and add.</param>
private void AppendNewChild([NotNull] CompositeElement parentElement, [NotNull] T4TokenNodeType tokenNodeType) {
	var startOffset = new TreeOffset(_builderLexer.TokenStart);
	var endOffset = new TreeOffset(_builderLexer.TokenEnd);
	LeafElementBase token = tokenNodeType.Create(_builderLexer.Buffer, startOffset, endOffset);
	_builderLexer.AppendNewChild(parentElement, token);
}
/// <summary>
/// Advances the lexer to the next token.
/// </summary>
public void Advance() {
	// Ensure the current token has been scanned — LocateToken calls Scan() when
	// _currentTokenType is null, which presumably moves _pos past the token
	// (NOTE(review): ScanCore is not visible here — confirm it advances _pos).
	LocateToken();
	// Drop the cached token so the next LocateToken/Scan produces a fresh one.
	_currentTokenType = null;
}
/// <summary>
/// Restores every piece of lexer state (position, token range, scan mode, current token)
/// from the given snapshot. Does nothing when the snapshot is <c>null</c>.
/// </summary>
/// <param name="state">An instance of <see cref="State"/>, or <c>null</c>.</param>
private void RestoreState([CanBeNull] State state) {
	if (state == null) {
		return;
	}

	_pos = state.Pos;
	_tokenStart = state.TokenStart;
	_tokenEnd = state.TokenEnd;
	_scanMode = state.ScanMode;
	_currentTokenType = state.CurrentTokenType;
}
/// <summary>
/// Starts lexing a sub-range of the buffer, restoring the scan mode from a raw state value.
/// </summary>
/// <param name="startOffset">The starting offset.</param>
/// <param name="endOffset">The ending offset.</param>
/// <param name="state">The scan mode of the lexer.</param>
public void Start(int startOffset, int endOffset, uint state) {
	_currentTokenType = null;
	_pos = startOffset;
	_length = endOffset;
	_scanMode = (ScanMode)state;
}
/// <summary>
/// Initializes a new instance of the <see cref="T4Token"/> class.
/// All arguments are forwarded to the base leaf-element constructor; no extra state is kept here.
/// </summary>
/// <param name="nodeType">The token type.</param>
/// <param name="buffer">The buffer holding the token text.</param>
/// <param name="startOffset">The token starting offset in <paramref name="buffer"/>.</param>
/// <param name="endOffset">The token ending offset in <paramref name="buffer"/>.</param>
public T4Token(T4TokenNodeType nodeType, IBuffer buffer, TreeOffset startOffset, TreeOffset endOffset)
	: base(nodeType, buffer, startOffset, endOffset) {
}
/// <summary>
/// Ensures the current token type has been computed; scans only when no token is cached.
/// </summary>
private void LocateToken() {
	// Null-coalescing: reassigning the cached non-null value is a no-op,
	// Scan() is only invoked when the cache is empty.
	_currentTokenType = _currentTokenType ?? Scan();
}