public void VerifyTokenTrieNotEnoughBufferLeft()
{
    // Trie holding two overlapping tokens: "hello" (index 0) and "hello!" (index 1).
    TokenTrie trie = new TokenTrie();
    trie.AddToken(Encoding.UTF8.GetBytes("hello"));
    trie.AddToken(Encoding.UTF8.GetBytes("hello!"));

    byte[] tooShort = Encoding.UTF8.GetBytes("hi");
    byte[] padded = Encoding.UTF8.GetBytes(" hello");

    // "hi" is shorter than any token: no match, token reported as -1.
    int position = 0;
    int match;
    Assert.False(trie.GetOperation(tooShort, tooShort.Length, ref position, out match));
    Assert.Equal(-1, match);

    // Starting at index 1 of " hello" leaves exactly "hello": matches token 0.
    position = 1;
    Assert.True(trie.GetOperation(padded, padded.Length, ref position, out match));
    Assert.Equal(0, match);

    // Starting at index 2 leaves only "ello": not enough buffer for any token.
    position = 2;
    Assert.False(trie.GetOperation(padded, padded.Length, ref position, out match));
    Assert.Equal(-1, match);
}
public void VerifyTokenTrieCombine()
{
    // BUG FIX: the second pair of tokens was added to "t" instead of "t2",
    // and the assertions exercised "t" instead of "combined", so the
    // Append/combine path this test is named for was never actually tested.
    byte[] hello = Encoding.UTF8.GetBytes("hello");
    byte[] helloBang = Encoding.UTF8.GetBytes("hello!");
    byte[] hi = Encoding.UTF8.GetBytes("hi");
    byte[] there = Encoding.UTF8.GetBytes("there!");

    TokenTrie t = new TokenTrie();
    t.AddToken(hello);
    t.AddToken(helloBang);

    TokenTrie t2 = new TokenTrie();
    t2.AddToken(hi);
    t2.AddToken(there);

    // Appending t then t2 should yield tokens indexed 0..3 in that order.
    TokenTrie combined = new TokenTrie();
    combined.Append(t);
    combined.Append(t2);

    byte[] source1 = Encoding.UTF8.GetBytes("hello there");
    byte[] source2 = Encoding.UTF8.GetBytes("hello! there");
    byte[] source3 = Encoding.UTF8.GetBytes("hi there");
    byte[] source4 = Encoding.UTF8.GetBytes("there!");

    int token;
    int pos = 0;
    Assert.True(combined.GetOperation(source1, source1.Length, ref pos, out token));
    Assert.Equal(0, token);
    pos = 0;
    Assert.True(combined.GetOperation(source2, source2.Length, ref pos, out token));
    Assert.Equal(1, token);
    pos = 0;
    Assert.True(combined.GetOperation(source3, source3.Length, ref pos, out token));
    Assert.Equal(2, token);
    pos = 0;
    Assert.True(combined.GetOperation(source4, source4.Length, ref pos, out token));
    Assert.Equal(3, token);
}
/// <summary>
/// Scans forward until a close-condition token (from _closeConditionTrie) is found,
/// accumulating everything before the token into <paramref name="conditionBytes"/>.
/// Advances the processor's buffer as needed; on success leaves
/// <paramref name="currentBufferPosition"/> just past the matched token.
/// If no close token is found, consumes to the end of the buffer.
/// </summary>
private void ScanToCloseCondition(IProcessorState processorState, List<byte> conditionBytes, ref int bufferLength, ref int currentBufferPosition)
{
    // Start of the span not yet copied into conditionBytes.
    int previousPosition = currentBufferPosition;
    while (bufferLength >= _closeConditionTrie.MinLength)
    {
        //Try to get at least the max length of the tree into the buffer
        if (bufferLength - currentBufferPosition < _closeConditionTrie.MaxLength)
        {
            // Flush the pending span before AdvanceBuffer invalidates our offsets.
            conditionBytes.AddRange(processorState.CurrentBuffer.Skip(previousPosition).Take(currentBufferPosition - previousPosition));
            processorState.AdvanceBuffer(currentBufferPosition);
            currentBufferPosition = processorState.CurrentBufferPosition;
            bufferLength = processorState.CurrentBufferLength;
            previousPosition = 0;
        }

        // If the buffer is completely full we may be mid-stream, so require room for the
        // longest token; otherwise (end of stream) only require the shortest.
        int sz = bufferLength == processorState.CurrentBuffer.Length ? _closeConditionTrie.MaxLength : _closeConditionTrie.MinLength;
        for (; currentBufferPosition < bufferLength - sz + 1; ++currentBufferPosition)
        {
            if (bufferLength == 0)
            {
                currentBufferPosition = 0;
                return;
            }

            int token;
            if (_closeConditionTrie.GetOperation(processorState.CurrentBuffer, bufferLength, ref currentBufferPosition, out token))
            {
                // Copy everything up to (but excluding) the matched close token.
                // GetOperation advanced currentBufferPosition past the token, so
                // subtract the token's length to find where the condition text ends.
                conditionBytes.AddRange(processorState.CurrentBuffer.Skip(previousPosition).Take(currentBufferPosition - previousPosition - _closeConditionTrie.Tokens[token].Length));
                return;
            }
        }
    }

    //Ran out of places to check and haven't reached the actual match, consume all the way to the end
    currentBufferPosition = bufferLength;
}
public void VerifyTokenTrieAtBegin()
{
    // Trie with overlapping tokens: "hello" (0), "hello!" (1), "hi" (2).
    TokenTrie trie = new TokenTrie();
    trie.AddToken(Encoding.UTF8.GetBytes("hello"));
    trie.AddToken(Encoding.UTF8.GetBytes("hello!"));
    trie.AddToken(Encoding.UTF8.GetBytes("hi"));

    // Local helper: attempt a match at position 0 of the given text and
    // verify both the success flag and the reported token index.
    void Check(string text, bool expectMatch, int expectedToken)
    {
        byte[] bytes = Encoding.UTF8.GetBytes(text);
        int position = 0;
        Assert.Equal(expectMatch, trie.GetOperation(bytes, bytes.Length, ref position, out int token));
        Assert.Equal(expectedToken, token);
    }

    Check("hello there", true, 0);   // exact token followed by other text
    Check("hello1 there", true, 0);  // "hello" matches; "hello!" does not
    Check("hello! there", true, 1);  // longer token wins when it matches
    Check("hi there", true, 2);
    Check("hi", true, 2);            // token ending exactly at buffer end still matches
    Check("he", false, -1);          // bare prefix: no match
}
public void VerifyTokenTrieLookArounds(string original, int checkPosition, int expectedPosition, bool success, string after, string value, string before)
{
    byte[] buffer = Encoding.UTF8.GetBytes(original);

    // Build a token config for `value`, optionally constrained by
    // look-behind ("after") and look-ahead ("before") text.
    TokenConfig config = (value ?? "").TokenConfigBuilder();
    if (!string.IsNullOrEmpty(after))
    {
        config = config.OnlyIfAfter(after);
    }
    if (!string.IsNullOrEmpty(before))
    {
        config = config.OnlyIfBefore(before);
    }

    TokenTrie trie = new TokenTrie();
    trie.AddToken(config.ToToken(Encoding.UTF8));

    // The match must succeed (or fail) as expected and leave the cursor
    // at the expected position.
    int position = checkPosition;
    Assert.Equal(success, trie.GetOperation(buffer, buffer.Length, ref position, out int token));
    Assert.Equal(expectedPosition, position);
}
/// <summary>
/// Tokenizes a condition expression from the processor's buffer (operators, braces,
/// whitespace, EOLs, quoted literals, and configured variables) and evaluates it via
/// EvaluateCondition. Evaluation stops at the first EOL outside braces.
/// </summary>
/// <param name="processor">Source of the buffer, encoding and variable configuration.</param>
/// <param name="bufferLength">Current valid length of the buffer; updated as the buffer advances.</param>
/// <param name="currentBufferPosition">Read cursor; updated as tokens are consumed.</param>
/// <param name="faulted">NOTE(review): set to false here and never set to true in this
/// method — confirm whether callers rely on it being updated elsewhere.</param>
public static bool Evaluate(IProcessorState processor, ref int bufferLength, ref int currentBufferPosition, out bool faulted)
{
    faulted = false;
    TokenTrie trie = new TokenTrie();
    //Logic
    trie.AddToken(processor.Encoding.GetBytes("&&"), 0);
    trie.AddToken(processor.Encoding.GetBytes("||"), 1);
    trie.AddToken(processor.Encoding.GetBytes("^"), 2);
    trie.AddToken(processor.Encoding.GetBytes("!"), 3);
    trie.AddToken(processor.Encoding.GetBytes(">"), 4);
    trie.AddToken(processor.Encoding.GetBytes(">="), 5);
    trie.AddToken(processor.Encoding.GetBytes("<"), 6);
    trie.AddToken(processor.Encoding.GetBytes("<="), 7);
    trie.AddToken(processor.Encoding.GetBytes("=="), 8);
    trie.AddToken(processor.Encoding.GetBytes("="), 9);
    trie.AddToken(processor.Encoding.GetBytes("!="), 10);
    //Bitwise
    trie.AddToken(processor.Encoding.GetBytes("&"), 11);
    trie.AddToken(processor.Encoding.GetBytes("|"), 12);
    trie.AddToken(processor.Encoding.GetBytes("<<"), 13);
    trie.AddToken(processor.Encoding.GetBytes(">>"), 14);
    //Braces
    trie.AddToken(processor.Encoding.GetBytes("("), 15);
    trie.AddToken(processor.Encoding.GetBytes(")"), 16);
    //Whitespace
    trie.AddToken(processor.Encoding.GetBytes(" "), 17);
    trie.AddToken(processor.Encoding.GetBytes("\t"), 18);
    //EOLs
    trie.AddToken(processor.Encoding.GetBytes("\r\n"), 19);
    trie.AddToken(processor.Encoding.GetBytes("\n"), 20);
    trie.AddToken(processor.Encoding.GetBytes("\r"), 21);
    // quotes
    trie.AddToken(processor.Encoding.GetBytes("\""), 22);
    trie.AddToken(processor.Encoding.GetBytes("'"), 23);

    //Tokens - variable references come after the reserved operator tokens
    trie.Append(processor.EncodingConfig.Variables);

    //Run forward to EOL and collect args
    TokenFamily currentTokenFamily;
    List<byte> currentTokenBytes = new List<byte>();
    List<TokenRef> tokens = new List<TokenRef>();

    // Classify the very first token: literal byte, variable reference, or operator.
    if (!trie.GetOperation(processor.CurrentBuffer, bufferLength, ref currentBufferPosition, out int token))
    {
        currentTokenFamily = TokenFamily.Literal;
        currentTokenBytes.Add(processor.CurrentBuffer[currentBufferPosition++]);
    }
    else if (token > ReservedTokenMaxIndex)
    {
        // Token indexes above the reserved range are variable references.
        currentTokenFamily = TokenFamily.Reference | (TokenFamily)token;
        tokens.Add(new TokenRef { Family = currentTokenFamily });
    }
    else
    {
        currentTokenFamily = (TokenFamily)token;
        if (currentTokenFamily != TokenFamily.WindowsEOL && currentTokenFamily != TokenFamily.LegacyMacEOL && currentTokenFamily != TokenFamily.UnixEOL)
        {
            tokens.Add(new TokenRef { Family = currentTokenFamily });
        }
        else
        {
            // EOL immediately: evaluate whatever (nothing) was collected.
            return (EvaluateCondition(tokens, processor.EncodingConfig.VariableValues));
        }
    }

    int braceDepth = 0;
    if (tokens[0].Family == TokenFamily.OpenBrace)
    {
        ++braceDepth;
    }

    bool first = true;
    QuotedRegionKind inQuoteType = QuotedRegionKind.None;

    // Keep consuming while we are on the first term or inside unbalanced braces.
    while ((first || braceDepth > 0) && bufferLength > 0)
    {
        int targetLen = Math.Min(bufferLength, trie.MaxLength);
        for (; currentBufferPosition < bufferLength - targetLen + 1;)
        {
            int oldBufferPos = currentBufferPosition;
            if (trie.GetOperation(processor.CurrentBuffer, bufferLength, ref currentBufferPosition, out token))
            {
                if (braceDepth == 0)
                {
                    // Outside braces, decide whether the expression ends here based on
                    // what the previous token was: after whitespace/close-brace/EOL an
                    // EOL rolls back; anything else ends the expression.
                    switch (tokens[tokens.Count - 1].Family)
                    {
                        case TokenFamily.Whitespace:
                        case TokenFamily.Tab:
                        case TokenFamily.CloseBrace:
                        case TokenFamily.WindowsEOL:
                        case TokenFamily.UnixEOL:
                        case TokenFamily.LegacyMacEOL:
                            TokenFamily thisFamily = (TokenFamily)token;
                            if (thisFamily == TokenFamily.WindowsEOL || thisFamily == TokenFamily.UnixEOL || thisFamily == TokenFamily.LegacyMacEOL)
                            {
                                currentBufferPosition = oldBufferPos;
                            }
                            break;
                        default:
                            currentBufferPosition = oldBufferPos;
                            first = false;
                            break;
                    }
                    if (!first)
                    {
                        break;
                    }
                }

                // We matched an item, so whatever this is, it's not a literal.
                // if the current token is a literal, end it.
                if (currentTokenFamily == TokenFamily.Literal)
                {
                    string literal = processor.Encoding.GetString(currentTokenBytes.ToArray());
                    tokens.Add(new TokenRef { Family = TokenFamily.Literal, Literal = literal });
                    currentTokenBytes.Clear();
                }

                TokenFamily foundTokenFamily = (TokenFamily)token;

                if (foundTokenFamily == TokenFamily.QuotedLiteral || foundTokenFamily == TokenFamily.SingleQuotedLiteral)
                {
                    // Map the quote token to its region kind so we can pair open/close quotes.
                    QuotedRegionKind incomingQuoteKind;
                    switch (foundTokenFamily)
                    {
                        case TokenFamily.QuotedLiteral:
                            incomingQuoteKind = QuotedRegionKind.DoubleQuoteRegion;
                            break;
                        case TokenFamily.SingleQuotedLiteral:
                            incomingQuoteKind = QuotedRegionKind.SingleQuoteRegion;
                            break;
                        default:
                            incomingQuoteKind = QuotedRegionKind.None;
                            break;
                    }

                    if (inQuoteType == QuotedRegionKind.None)
                    {
                        // starting quote found
                        currentTokenBytes.AddRange(trie.Tokens[token].Value);
                        inQuoteType = incomingQuoteKind;
                    }
                    else if (incomingQuoteKind == inQuoteType)
                    {
                        // end quote found - emit the whole quoted run as one literal token
                        currentTokenBytes.AddRange(trie.Tokens[token].Value);
                        tokens.Add(new TokenRef { Family = TokenFamily.Literal, Literal = processor.Encoding.GetString(currentTokenBytes.ToArray()) });
                        currentTokenBytes.Clear();
                        inQuoteType = QuotedRegionKind.None;
                    }
                    else
                    {
                        // this is a different quote type. Treat it like a non-match, just add the token to the currentTokenBytes
                        currentTokenBytes.AddRange(trie.Tokens[token].Value);
                    }
                }
                else if (inQuoteType != QuotedRegionKind.None)
                {
                    // we're inside a quoted literal, the token found by the trie should not be processed, just included with the literal
                    currentTokenBytes.AddRange(trie.Tokens[token].Value);
                }
                else if (token > ReservedTokenMaxIndex)
                {
                    // Variable reference token.
                    currentTokenFamily = TokenFamily.Reference | (TokenFamily)token;
                    tokens.Add(new TokenRef { Family = currentTokenFamily });
                }
                else
                {
                    //If we have a normal token...
                    currentTokenFamily = (TokenFamily)token;
                    if (currentTokenFamily != TokenFamily.WindowsEOL && currentTokenFamily != TokenFamily.LegacyMacEOL && currentTokenFamily != TokenFamily.UnixEOL)
                    {
                        // Track brace depth so EOLs inside parens don't end the expression.
                        switch (currentTokenFamily)
                        {
                            case TokenFamily.OpenBrace:
                                ++braceDepth;
                                break;
                            case TokenFamily.CloseBrace:
                                --braceDepth;
                                break;
                        }
                        tokens.Add(new TokenRef { Family = currentTokenFamily });
                    }
                    else
                    {
                        // EOL outside a quoted region ends the expression: evaluate now.
                        return (EvaluateCondition(tokens, processor.EncodingConfig.VariableValues));
                    }
                }
            }
            else if (inQuoteType != QuotedRegionKind.None)
            {
                // we're in a quoted literal but did not match a token at the current position.
                // so just add the current byte to the currentTokenBytes
                currentTokenBytes.Add(processor.CurrentBuffer[currentBufferPosition++]);
            }
            else if (braceDepth > 0)
            {
                // Unrecognized byte inside braces: accumulate as literal text.
                currentTokenFamily = TokenFamily.Literal;
                currentTokenBytes.Add(processor.CurrentBuffer[currentBufferPosition++]);
            }
            else
            {
                // Unrecognized byte outside braces ends the expression.
                first = false;
                break;
            }
        }

        // Refill the buffer and continue scanning from the processor's new position.
        processor.AdvanceBuffer(currentBufferPosition);
        currentBufferPosition = processor.CurrentBufferPosition;
        bufferLength = processor.CurrentBufferLength;
    }

#if DEBUG
    Debug.Assert(
        inQuoteType == QuotedRegionKind.None,
        $"Malformed predicate due to unmatched quotes. InitialBuffer = {processor.Encoding.GetString(processor.CurrentBuffer)} currentTokenFamily = {currentTokenFamily} | TokenFamily.QuotedLiteral = {TokenFamily.QuotedLiteral} | TokenFamily.SingleQuotedLiteral = {TokenFamily.SingleQuotedLiteral}");
#endif

    return (EvaluateCondition(tokens, processor.EncodingConfig.VariableValues));
}
/// <summary>
/// Tokenizes a C++-preprocessor-style condition expression from the processor's buffer
/// (operators, braces, whitespace, EOLs, and configured variables — no quote handling)
/// and evaluates it via EvaluateCondition. Evaluation stops at the first EOL outside braces.
/// </summary>
/// <param name="processor">Source of the buffer, encoding and variable configuration.</param>
/// <param name="bufferLength">Current valid length of the buffer; updated as the buffer advances.</param>
/// <param name="currentBufferPosition">Read cursor; updated as tokens are consumed.</param>
public static bool CppStyleEvaluator(IProcessorState processor, ref int bufferLength, ref int currentBufferPosition)
{
    TokenTrie trie = new TokenTrie();
    //Logic
    trie.AddToken(processor.Encoding.GetBytes("&&"), 0);
    trie.AddToken(processor.Encoding.GetBytes("||"), 1);
    trie.AddToken(processor.Encoding.GetBytes("^"), 2);
    trie.AddToken(processor.Encoding.GetBytes("!"), 3);
    trie.AddToken(processor.Encoding.GetBytes(">"), 4);
    trie.AddToken(processor.Encoding.GetBytes(">="), 5);
    trie.AddToken(processor.Encoding.GetBytes("<"), 6);
    trie.AddToken(processor.Encoding.GetBytes("<="), 7);
    trie.AddToken(processor.Encoding.GetBytes("=="), 8);
    trie.AddToken(processor.Encoding.GetBytes("="), 9);
    trie.AddToken(processor.Encoding.GetBytes("!="), 10);
    //Bitwise
    trie.AddToken(processor.Encoding.GetBytes("&"), 11);
    trie.AddToken(processor.Encoding.GetBytes("|"), 12);
    trie.AddToken(processor.Encoding.GetBytes("<<"), 13);
    trie.AddToken(processor.Encoding.GetBytes(">>"), 14);
    //Braces
    trie.AddToken(processor.Encoding.GetBytes("("), 15);
    trie.AddToken(processor.Encoding.GetBytes(")"), 16);
    //Whitespace
    trie.AddToken(processor.Encoding.GetBytes(" "), 17);
    trie.AddToken(processor.Encoding.GetBytes("\t"), 18);
    //EOLs
    trie.AddToken(processor.Encoding.GetBytes("\r\n"), 19);
    trie.AddToken(processor.Encoding.GetBytes("\n"), 20);
    trie.AddToken(processor.Encoding.GetBytes("\r"), 21);

    //Tokens - variable references come after the reserved operator tokens
    trie.Append(processor.EncodingConfig.Variables);

    //Run forward to EOL and collect args
    TokenFamily currentTokenFamily;
    List<byte> currentTokenBytes = new List<byte>();
    List<TokenRef> tokens = new List<TokenRef>();
    int token;

    // Classify the very first token: literal byte, variable reference, or operator.
    if (!trie.GetOperation(processor.CurrentBuffer, bufferLength, ref currentBufferPosition, out token))
    {
        currentTokenFamily = TokenFamily.Literal;
        currentTokenBytes.Add(processor.CurrentBuffer[currentBufferPosition++]);
    }
    else if (token > ReservedTokenMaxIndex)
    {
        // Token indexes above the reserved range are variable references.
        currentTokenFamily = TokenFamily.Reference | (TokenFamily)token;
        tokens.Add(new TokenRef { Family = currentTokenFamily });
    }
    else
    {
        currentTokenFamily = (TokenFamily)token;
        if (currentTokenFamily != TokenFamily.WindowsEOL && currentTokenFamily != TokenFamily.LegacyMacEOL && currentTokenFamily != TokenFamily.UnixEOL)
        {
            tokens.Add(new TokenRef { Family = currentTokenFamily });
        }
        else
        {
            // EOL immediately: evaluate whatever (nothing) was collected.
            return (EvaluateCondition(tokens, processor.EncodingConfig.VariableValues));
        }
    }

    int braceDepth = 0;
    if (tokens[0].Family == TokenFamily.OpenBrace)
    {
        ++braceDepth;
    }

    bool first = true;

    // Keep consuming while we are on the first term or inside unbalanced braces.
    while ((first || braceDepth > 0) && bufferLength > 0)
    {
        int targetLen = Math.Min(bufferLength, trie.MaxLength);
        for (; currentBufferPosition < bufferLength - targetLen + 1;)
        {
            int oldBufferPos = currentBufferPosition;
            if (trie.GetOperation(processor.CurrentBuffer, bufferLength, ref currentBufferPosition, out token))
            {
                if (braceDepth == 0)
                {
                    // Outside braces, decide whether the expression ends here based on
                    // what the previous token was: after whitespace/close-brace/EOL an
                    // EOL rolls back; anything else ends the expression.
                    switch (tokens[tokens.Count - 1].Family)
                    {
                        case TokenFamily.Whitespace:
                        case TokenFamily.Tab:
                        case TokenFamily.CloseBrace:
                        case TokenFamily.WindowsEOL:
                        case TokenFamily.UnixEOL:
                        case TokenFamily.LegacyMacEOL:
                            TokenFamily thisFamily = (TokenFamily)token;
                            if (thisFamily == TokenFamily.WindowsEOL || thisFamily == TokenFamily.UnixEOL || thisFamily == TokenFamily.LegacyMacEOL)
                            {
                                currentBufferPosition = oldBufferPos;
                            }
                            break;
                        default:
                            currentBufferPosition = oldBufferPos;
                            first = false;
                            break;
                    }
                    if (!first)
                    {
                        break;
                    }
                }

                //We matched an item, so whatever this is, it's not a literal, end the current literal if that's
                // what we currently have
                if (currentTokenFamily == TokenFamily.Literal)
                {
                    string literal = processor.Encoding.GetString(currentTokenBytes.ToArray());
                    tokens.Add(new TokenRef { Family = TokenFamily.Literal, Literal = literal });
                    currentTokenBytes.Clear();
                }

                //If we have a token from the args...
                if (token > ReservedTokenMaxIndex)
                {
                    // NOTE(review): if the current family is still Literal here, the
                    // reference is folded back into the previous literal token instead
                    // of becoming a reference — looks like it merges adjacent
                    // literal+variable text; confirm against the trie's token values.
                    if (currentTokenFamily == TokenFamily.Literal)
                    {
                        TokenRef previous = tokens[tokens.Count - 1];
                        previous.Literal += processor.Encoding.GetString(currentTokenBytes.ToArray());
                        currentTokenBytes = processor.Encoding.GetBytes(previous.Literal).ToList();
                        tokens.RemoveAt(tokens.Count - 1);
                    }
                    else
                    {
                        currentTokenFamily = TokenFamily.Reference | (TokenFamily)token;
                        tokens.Add(new TokenRef { Family = currentTokenFamily });
                    }
                }
                //If we have a normal token...
                else
                {
                    currentTokenFamily = (TokenFamily)token;
                    if (currentTokenFamily != TokenFamily.WindowsEOL && currentTokenFamily != TokenFamily.LegacyMacEOL && currentTokenFamily != TokenFamily.UnixEOL)
                    {
                        // Track brace depth so EOLs inside parens don't end the expression.
                        if (currentTokenFamily == TokenFamily.OpenBrace)
                        {
                            ++braceDepth;
                        }
                        else if (currentTokenFamily == TokenFamily.CloseBrace)
                        {
                            --braceDepth;
                        }
                        tokens.Add(new TokenRef { Family = currentTokenFamily });
                    }
                    else
                    {
                        // EOL ends the expression: evaluate now.
                        return (EvaluateCondition(tokens, processor.EncodingConfig.VariableValues));
                    }
                }
            }
            else if (braceDepth > 0)
            {
                // Unrecognized byte inside braces: accumulate as literal text.
                currentTokenFamily = TokenFamily.Literal;
                currentTokenBytes.Add(processor.CurrentBuffer[currentBufferPosition++]);
            }
            else
            {
                // Unrecognized byte outside braces ends the expression.
                first = false;
                break;
            }
        }

        // Refill the buffer and continue scanning from the processor's new position.
        processor.AdvanceBuffer(currentBufferPosition);
        currentBufferPosition = processor.CurrentBufferPosition;
        bufferLength = processor.CurrentBufferLength;
    }

    return (EvaluateCondition(tokens, processor.EncodingConfig.VariableValues));
}
/// <summary>
/// Handles an "include" token match: reads the path that follows the token (up to the
/// end-token), opens that source, converts it to the processor's encoding if needed,
/// and writes its contents (without BOM) to <paramref name="target"/>.
/// When the include flag is explicitly disabled, echoes the token verbatim instead.
/// </summary>
public int HandleMatch(IProcessorState processor, int bufferLength, ref int currentBufferPosition, int token, Stream target)
{
    bool flag;
    if (processor.Config.Flags.TryGetValue(Include.OperationName, out flag) && !flag)
    {
        // Include disabled: write the raw token back out unchanged.
        byte[] tokenValue = Tokens[token];
        target.Write(tokenValue, 0, tokenValue.Length);
        return (tokenValue.Length);
    }

    // Collect every byte up to the end-token; those bytes are the include path.
    List<byte> pathBytes = new List<byte>();
    while (!_endTokenMatcher.GetOperation(processor.CurrentBuffer, bufferLength, ref currentBufferPosition, out token))
    {
        pathBytes.Add(processor.CurrentBuffer[currentBufferPosition++]);
        if (bufferLength - currentBufferPosition < _endTokenMatcher.MinLength)
        {
            processor.AdvanceBuffer(currentBufferPosition);
            bufferLength = processor.CurrentBufferLength;
            currentBufferPosition = 0;
            if (bufferLength == 0)
            {
                break;
            }
        }
    }

    byte[] pathBytesArray = pathBytes.ToArray();
    string sourceLocation = processor.Encoding.GetString(pathBytesArray).Trim();

    const int pageSize = 65536;

    //Start off with a 64K buffer, we'll keep adding chunks to this
    byte[] composite = new byte[pageSize];
    int totalLength;

    using (Stream data = _source.SourceStreamOpener(sourceLocation))
    {
        int index = composite.Length - pageSize;
        int nRead = data.Read(composite, index, pageSize);

        //As long as we're reading whole pages, keep allocating more space ahead
        // NOTE(review): this assumes Read fills the whole page unless at EOF;
        // Stream.Read may legally return fewer bytes mid-stream — confirm the
        // streams used here guarantee full reads.
        while (nRead == pageSize)
        {
            byte[] newBuffer = new byte[composite.Length + pageSize];
            Buffer.BlockCopy(composite, 0, newBuffer, 0, composite.Length);
            composite = newBuffer;

            // BUG FIX: advance the write index to the start of the newly
            // allocated page. Previously `index` was computed once before the
            // loop, so every subsequent page was read over the top of the
            // first page, corrupting included files larger than one page.
            index = composite.Length - pageSize;
            nRead = data.Read(composite, index, pageSize);
        }

        totalLength = composite.Length - (pageSize - nRead);
    }

    byte[] bom;
    Encoding realEncoding = EncodingUtil.Detect(composite, totalLength, out bom);

    if (!Equals(realEncoding, processor.Encoding))
    {
        // Conversion starts after the BOM, so the converted buffer contains no BOM;
        // clear `bom` so the write below doesn't subtract its length a second time.
        composite = Encoding.Convert(realEncoding, processor.Encoding, composite, bom.Length, totalLength - bom.Length);
        totalLength = composite.Length;
        bom = new byte[0];
    }

    // BUG FIX: skip the BOM at the front of the buffer rather than writing it
    // and truncating the same number of bytes off the end of the content.
    target.Write(composite, bom.Length, totalLength - bom.Length);

    // NOTE(review): returns the allocated buffer size, not the byte count written —
    // preserved from the original; confirm what callers do with this value.
    return (composite.Length);
}
/// <summary>
/// Scans forward through element-structure tokens (from _structureTrie) until the
/// element that was open on entry is closed, tracking nesting depth as new elements
/// open and close. On success leaves <paramref name="currentBufferPosition"/> just
/// past the closing token; if no close is found, consumes to the end of the buffer.
/// Starts with depth 1 and "inside an element tag" (inElement = true).
/// </summary>
private void FindEnd(IProcessorState processorState, ref int bufferLength, ref int currentBufferPosition)
{
    int depth = 1;
    bool inElement = true;

    while (bufferLength >= _structureTrie.MinLength)
    {
        //Try to get at least the max length of the tree into the buffer
        if (bufferLength - currentBufferPosition < _structureTrie.MaxLength)
        {
            processorState.AdvanceBuffer(currentBufferPosition);
            currentBufferPosition = processorState.CurrentBufferPosition;
            bufferLength = processorState.CurrentBufferLength;
        }

        // If the buffer is completely full we may be mid-stream, so require room for the
        // longest token; otherwise (end of stream) only require the shortest.
        int sz = bufferLength == processorState.CurrentBuffer.Length ? _structureTrie.MaxLength : _structureTrie.MinLength;
        for (; currentBufferPosition < bufferLength - sz + 1; ++currentBufferPosition)
        {
            if (bufferLength == 0)
            {
                currentBufferPosition = 0;
                return;
            }

            int token;
            if (_structureTrie.GetOperation(processorState.CurrentBuffer, bufferLength, ref currentBufferPosition, out token))
            {
                if (token == _mapping.OpenOpenElementToken)
                {
                    // A new child element tag is opening: one level deeper.
                    ++depth;
                    inElement = true;
                }
                else if (token == _mapping.SelfClosingElementEndToken)
                {
                    // Self-closing tag ends the element it opened: back up one level.
                    --depth;
                    inElement = false;
                }
                else if (token == _mapping.CloseElementTagToken)
                {
                    if (inElement)
                    {
                        // End of the open tag itself ("...>") — still inside the element's content.
                        inElement = false;
                    }
                    else
                    {
                        // End of a closing tag ("</...>") — the element is done.
                        --depth;
                    }
                }
                else if (token == _mapping.OpenCloseElementToken)
                {
                    // Start of a closing tag ("</"); the matching CloseElementTagToken
                    // will decrement depth.
                    inElement = false;
                }

                if (depth == 0)
                {
                    // The element we started in has closed; stop here.
                    return;
                }
            }
        }
    }

    //Ran out of places to check and haven't reached the actual match, consume all the way to the end
    currentBufferPosition = bufferLength;
}
public GlobbingPatternMatcher(string pattern)
{
    Pattern = pattern;
    byte[] patternBytes = Encoding.UTF8.GetBytes(pattern);

    // Tokenize the pattern: each entry pairs a byte offset with the token kind
    // found there. Bytes the trie doesn't recognize become Literal entries.
    List<Tuple<int, GlobbingPatternToken>> parsed = new List<Tuple<int, GlobbingPatternToken>>();
    int position = 0;
    while (position != patternBytes.Length)
    {
        int start = position;
        if (Trie.GetOperation(patternBytes, patternBytes.Length, ref position, out int token))
        {
            parsed.Add(Tuple.Create(start, (GlobbingPatternToken)token));
        }
        else
        {
            // No token here: record this byte as a literal and step past it.
            parsed.Add(Tuple.Create(position, GlobbingPatternToken.Literal));
            ++position;
        }
    }

    // Translate the token stream into an anchored regular expression.
    // Consecutive Literal entries are batched into a single escaped run.
    StringBuilder regexText = new StringBuilder("^");
    int literalBegin = 0;
    GlobbingPatternToken previous = GlobbingPatternToken.AnyNumberOfPathParts;

    for (int i = 0; i < parsed.Count; ++i)
    {
        GlobbingPatternToken current = parsed[i].Item2;

        // A literal run ends here: escape and emit it.
        if (previous == GlobbingPatternToken.Literal && current != GlobbingPatternToken.Literal)
        {
            regexText.Append(Regex.Escape(Encoding.UTF8.GetString(patternBytes, literalBegin, parsed[i].Item1 - literalBegin)));
        }

        switch (current)
        {
            case GlobbingPatternToken.Literal:
                // Remember where a new literal run starts.
                if (previous != GlobbingPatternToken.Literal)
                {
                    literalBegin = parsed[i].Item1;
                }
                break;
            case GlobbingPatternToken.AnyNumberOfPathParts:
                regexText.Append(@"(?:[^\\/]*[\\/])*");
                break;
            case GlobbingPatternToken.CloseCharSet:
                regexText.Append("]");
                break;
            case GlobbingPatternToken.OpenCharSet:
                regexText.Append("[");
                break;
            case GlobbingPatternToken.OnePathPart:
                regexText.Append(@"[^\\/]*");
                break;
            case GlobbingPatternToken.Wildcard:
                regexText.Append(@"[^\\/]?");
                break;
            case GlobbingPatternToken.SeparatorChar:
            case GlobbingPatternToken.SeparatorChar2:
                regexText.Append(@"[\\/]");
                break;
        }

        previous = current;
    }

    // Flush a literal run that reaches the end of the pattern.
    if (previous == GlobbingPatternToken.Literal)
    {
        regexText.Append(Regex.Escape(Encoding.UTF8.GetString(patternBytes, literalBegin, patternBytes.Length - literalBegin)));
    }

    _regex = new Regex(regexText.ToString(), RegexOptions.Compiled);
}