/// <summary>
/// Captures the conditional definition, its token set, and the trie used to match
/// those tokens. A conditional with an empty id is always on initially; otherwise
/// the supplied initial state is used.
/// </summary>
public Impl(Conditional definition, IReadOnlyList<IToken> tokens, ITokenTrie trie, string id, bool initialState)
{
    _definition = definition;
    _trie = trie;
    _id = id;
    Tokens = tokens;
    IsInitialStateOn = string.IsNullOrEmpty(id) || initialState;
}
/// <summary>
/// Captures the include's single trigger token, its source, and the matcher that
/// locates the end of the include. An include with an empty id starts enabled;
/// otherwise the supplied initial state is used.
/// </summary>
public Impl(IToken token, ITokenTrie endTokenMatcher, Include source, string id, bool initialState)
{
    _source = source;
    _endTokenMatcher = endTokenMatcher;
    _id = id;
    Tokens = new[] { token };
    IsInitialStateOn = string.IsNullOrEmpty(id) || initialState;
}
/// <summary>
/// Captures the inline markup conditional's definition, tokens, the tries used for
/// structure/close-condition/scan-back matching, and the markup token mapping.
/// A conditional with an empty id is always on initially; otherwise the supplied
/// initial state is used.
/// </summary>
public Impl(InlineMarkupConditional definition, IReadOnlyList<IToken> tokens, ITokenTrie structureTrie, ITokenTrie closeConditionTrie, ITokenTrie scanBackTrie, MarkupTokenMapping mapping, string id, bool initialState)
{
    _definition = definition;
    _mapping = mapping;
    _structureTrie = structureTrie;
    _closeConditionTrie = closeConditionTrie;
    _scanBackTrie = scanBackTrie;
    Id = id;
    Tokens = tokens;
    IsInitialStateOn = string.IsNullOrEmpty(id) || initialState;
}
/// <summary>
/// Materializes each token config and places the resulting token into
/// <paramref name="tokenMasterList"/> at indexes congruent to
/// <paramref name="typeRemainder"/> mod TokenTypeModulus, registering every
/// token with <paramref name="trie"/> under that same remainder.
/// </summary>
/// <param name="trie">Trie that receives each materialized token.</param>
/// <param name="tokenMasterList">Pre-sized master list written at strided indexes.</param>
/// <param name="tokensOfType">Configs for all tokens of this type.</param>
/// <param name="typeRemainder">Starting index / trie token value for this type.</param>
/// <param name="encoding">Encoding used to materialize each token.</param>
private void AddTokensOfTypeToTokenListAndTrie(ITokenTrie trie, List<IToken> tokenMasterList, IReadOnlyList<ITokenConfig> tokensOfType, int typeRemainder, Encoding encoding)
{
    int slot = typeRemainder;

    foreach (ITokenConfig config in tokensOfType)
    {
        IToken token = config.ToToken(encoding);
        tokenMasterList[slot] = token;
        trie.AddToken(token, typeRemainder);
        slot += TokenTypeModulus;
    }
}
/// <summary>
/// Scans backward through the target stream, window by window, for the occurrence of
/// any token in <paramref name="match"/> closest to the current position, then
/// truncates the stream there. If no token is found before the BOM, the stream is
/// truncated to just the BOM.
/// </summary>
/// <param name="match">Trie of tokens to search for while walking backward.</param>
/// <param name="consume">When true, truncate at the start of the matched token
/// (removing it); when false, truncate just after its end (keeping it).</param>
public void SeekBackUntil(ITokenTrie match, bool consume)
{
    byte[] buffer = new byte[match.MaxLength];
    while (_target.Position > _bomSize)
    {
        // Step back one window (clamped at the BOM) so the read below re-covers
        // the bytes immediately before the current position.
        if (_target.Position - _bomSize < buffer.Length)
        {
            _target.Position = _bomSize;
        }
        else
        {
            _target.Position -= buffer.Length;
        }

        int nRead = ReadExactBytes(_target, buffer, 0, buffer.Length);
        int best = -1;
        int bestPos = -1;
        // Try every start offset in the window; GetOperation advances ic past a
        // matched token, so keeping the largest ic keeps the latest-ending match.
        for (int i = nRead - match.MinLength; i >= 0; --i)
        {
            int token;
            int ic = i;
            if (match.GetOperation(buffer, nRead, ref ic, out token) && ic >= bestPos)
            {
                bestPos = ic;
                best = token;
            }
        }

        if (best != -1)
        {
            // Rewind to the token's end (or its start when consuming) and cut
            // the stream at that point.
            _target.Position -= nRead - bestPos + (consume ? match.TokenLength[best] : 0);
            _target.SetLength(_target.Position);
            return;
        }

        //Back up the amount we already read to get a new window of data in
        if (_target.Position - _bomSize < buffer.Length)
        {
            _target.Position = _bomSize;
        }
        else
        {
            _target.Position -= buffer.Length;
        }
    }

    // Walked all the way back to the BOM without a match: keep only the BOM.
    if (_target.Position == _bomSize)
    {
        _target.SetLength(_bomSize);
    }
}
/// <summary>
/// Truncates the target stream backward for as long as the bytes at the end of each
/// window form a token from <paramref name="match"/>. Stops (and truncates at the
/// current position) as soon as a window's trailing bytes are not a matching token,
/// or when the BOM is reached.
/// </summary>
/// <param name="match">Trie of tokens to strip from the end of the stream.</param>
public void SeekBackWhile(ITokenTrie match)
{
    byte[] buffer = new byte[match.MaxLength];
    while (_target.Position > _bomSize)
    {
        // Step back one window (clamped at the BOM) and refill the buffer.
        if (_target.Position - _bomSize < buffer.Length)
        {
            _target.Position = _bomSize;
        }
        else
        {
            _target.Position -= buffer.Length;
        }

        int nRead = ReadExactBytes(_target, buffer, 0, buffer.Length);
        bool anyMatch = false;
        int token = -1;
        int i = nRead - match.MinLength;
        for (; i >= 0; --i)
        {
            if (match.GetOperation(buffer, nRead, ref i, out token))
            {
                // GetOperation advanced i past the matched token; rewind to its start.
                i -= match.TokenLength[token];
                anyMatch = true;
                break;
            }
        }

        // Stop when nothing matched, or when the match does not end exactly at the
        // end of the window (i.e. the trailing bytes are not themselves a token).
        if (!anyMatch || (token != -1 && i + match.TokenLength[token] != nRead))
        {
            _target.SetLength(_target.Position);
            return;
        }

        //Back up the amount we already read to get a new window of data in
        if (_target.Position - _bomSize < buffer.Length)
        {
            _target.Position = _bomSize;
        }
        else
        {
            _target.Position -= buffer.Length;
        }
    }

    // Consumed everything back to the BOM: keep only the BOM.
    if (_target.Position == _bomSize)
    {
        _target.SetLength(_bomSize);
    }
}
/// <summary>
/// Shared implementation for forward seeks: advances <paramref name="currentBufferPosition"/>
/// until a token from <paramref name="match"/> is found, refilling the buffer as needed.
/// On a hit, leaves the position just after the token (when <paramref name="consumeToken"/>
/// is true) or at its start. If no token is ever matched, consumes to the end of input.
/// </summary>
/// <param name="match">Trie of tokens that terminate the seek.</param>
/// <param name="bufferLength">Current buffer length; updated as the buffer is refilled.</param>
/// <param name="currentBufferPosition">Current position; updated to the stop point.</param>
/// <param name="consumeToken">Whether to leave the position after the matched token.</param>
private void BaseSeekForward(ITokenTrie match, ref int bufferLength, ref int currentBufferPosition, bool consumeToken)
{
    while (bufferLength >= match.MinLength)
    {
        //Try to get at least the max length of the tree into the buffer
        if (bufferLength - currentBufferPosition < match.MaxLength)
        {
            AdvanceBuffer(currentBufferPosition);
            currentBufferPosition = CurrentBufferPosition;
            bufferLength = CurrentBufferLength;
        }

        // While the buffer is completely full (more data may follow), stop scanning
        // MaxLength-1 bytes early so a token can't straddle a refill boundary; on the
        // final short buffer, scan to within MinLength of the end.
        int sz = bufferLength == CurrentBuffer.Length ? match.MaxLength : match.MinLength;

        for (; currentBufferPosition < bufferLength - sz + 1; ++currentBufferPosition)
        {
            if (bufferLength == 0)
            {
                currentBufferPosition = 0;
                return;
            }

            int token;
            if (match.GetOperation(CurrentBuffer, bufferLength, ref currentBufferPosition, out token))
            {
                if (!consumeToken)
                {
                    // GetOperation advanced past the token; rewind to its start.
                    currentBufferPosition -= match.Tokens[token].Length;
                }

                return;
            }
        }
    }

    //Ran out of places to check and haven't reached the actual match, consume all the way to the end
    currentBufferPosition = bufferLength;
}
/// <summary>
/// Advances <paramref name="currentBufferPosition"/> past consecutive tokens from
/// <paramref name="trie"/>, stopping at the first position where no token matches.
/// Refills the buffer and continues when the scannable window is exhausted while
/// still matching.
/// </summary>
/// <param name="trie">Trie of tokens to skip over.</param>
/// <param name="bufferLength">Current buffer length; updated on refill.</param>
/// <param name="currentBufferPosition">Current position; updated to the first non-match.</param>
public void SeekForwardWhile(ITokenTrie trie, ref int bufferLength, ref int currentBufferPosition)
{
    // NOTE(review): this uses '>' where BaseSeekForward uses '>=' — a final buffer of
    // exactly MinLength bytes is never examined here; confirm that is intended.
    while (bufferLength > trie.MinLength)
    {
        while (currentBufferPosition < bufferLength - trie.MinLength + 1)
        {
            if (bufferLength == 0)
            {
                currentBufferPosition = 0;
                return;
            }

            int token;
            if (!trie.GetOperation(CurrentBuffer, bufferLength, ref currentBufferPosition, out token))
            {
                // Bytes at the current position are not a token: stop here.
                return;
            }
        }

        // Window exhausted while still matching: pull in more data and continue.
        AdvanceBuffer(currentBufferPosition);
        currentBufferPosition = CurrentBufferPosition;
        bufferLength = CurrentBufferLength;
    }
}
/// <summary>Forward seeking is not supported by this implementation.</summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public void SeekForwardWhile(ITokenTrie trie, ref int bufferLength, ref int currentBufferPosition)
    => throw new NotImplementedException();
/// <summary>Backward seeking is not supported by this implementation.</summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public void SeekBackWhile(ITokenTrie match)
    => throw new NotImplementedException();
/// <summary>Backward seeking is not supported by this implementation.</summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public void SeekBackUntil(ITokenTrie match, bool consume)
    => throw new NotImplementedException();
/// <summary>
/// Seeks forward to the first token from <paramref name="trie"/>, leaving the
/// position at the start of the matched token (the token is not consumed).
/// </summary>
public void SeekForwardUntil(ITokenTrie trie, ref int bufferLength, ref int currentBufferPosition)
    => BaseSeekForward(trie, ref bufferLength, ref currentBufferPosition, false);
/// <summary>
/// Seeks forward through the first token from <paramref name="trie"/>, leaving the
/// position just past the matched token (the token is consumed).
/// </summary>
public void SeekForwardThrough(ITokenTrie trie, ref int bufferLength, ref int currentBufferPosition)
    => BaseSeekForward(trie, ref bufferLength, ref currentBufferPosition, true);
/// <summary>
/// Seeks backward to the last token from <paramref name="match"/> without consuming it.
/// Equivalent to <c>SeekBackUntil(match, consume: false)</c>.
/// </summary>
public void SeekBackUntil(ITokenTrie match)
    => SeekBackUntil(match, false);
/// <summary>
/// Creates a <see cref="ScopeBuilder{TOperator, TToken}"/> bound to the given
/// processor state, token trie, and operator map.
/// </summary>
/// <param name="processor">Processor state the scope builder operates over.</param>
/// <param name="tokens">Trie of tokens recognized while building scopes.</param>
/// <param name="operatorMap">Mapping from tokens to operators.</param>
/// <param name="dereferenceInLiterals">Whether variable dereferencing applies inside literals.</param>
public static ScopeBuilder<TOperator, TToken> ScopeBuilder<TOperator, TToken>(this IProcessorState processor, ITokenTrie tokens, IOperatorMap<TOperator, TToken> operatorMap, bool dereferenceInLiterals = false)
    where TOperator : struct
    where TToken : struct
{
    return new ScopeBuilder<TOperator, TToken>(processor, tokens, operatorMap, dereferenceInLiterals);
}