public void Implementation()
{
    // Tokenize the configured input path into a stack-allocated scratch buffer.
    // Mirrors the matcher's hot path: no heap allocation for the segment list.
    var requestPath = Input;
    Span<PathSegment> segmentBuffer = stackalloc PathSegment[MaxCount];
    FastPathTokenizer.Tokenize(requestPath, segmentBuffer);
}
public void Tokenize_RootPath()
{
    // Arrange - the root path "/" contains no segments, so a buffer of one is plenty.
    Span<PathSegment> buffer = stackalloc PathSegment[1];

    // Act
    var segmentCount = FastPathTokenizer.Tokenize("/", buffer);

    // Assert - a bare root path yields zero segments.
    Assert.Equal(0, segmentCount);
}
private (HttpContext context, string path, int count) CreateMatchingContext(string requestPath, PathSegment[] buffer)
{
    // Build an HttpContext pointed at the requested path.
    var httpContext = CreateContext();
    httpContext.Request.Path = requestPath;

    // Pre-tokenize the path into the caller-supplied segment buffer,
    // reporting how many segments were produced.
    var segmentCount = FastPathTokenizer.Tokenize(requestPath, buffer);

    return (httpContext, requestPath, segmentCount);
}
public void Tokenize_SingleSegment()
{
    // Arrange
    Span<PathSegment> buffer = stackalloc PathSegment[1];

    // Act
    var segmentCount = FastPathTokenizer.Tokenize("/abc", buffer);

    // Assert - one segment: "abc" starting after the leading '/', length 3.
    Assert.Equal(1, segmentCount);
    Assert.Equal(new PathSegment(1, 3), buffer[0]);
}
public void Tokenize_EmptySegments()
{
    // Arrange
    Span<PathSegment> buffer = stackalloc PathSegment[3];

    // Act - consecutive slashes produce zero-length segments.
    var segmentCount = FastPathTokenizer.Tokenize("///c", buffer);

    // Assert - two empty segments followed by the single-character "c".
    Assert.Equal(3, segmentCount);
    Assert.Equal(new PathSegment(1, 0), buffer[0]);
    Assert.Equal(new PathSegment(2, 0), buffer[1]);
    Assert.Equal(new PathSegment(3, 1), buffer[2]);
}
public void Tokenize_LongerSegments()
{
    // Arrange
    Span<PathSegment> buffer = stackalloc PathSegment[3];

    // Act
    var segmentCount = FastPathTokenizer.Tokenize("/aaa/bb/ccccc", buffer);

    // Assert - each (start, length) pair points into the original string:
    // "aaa" at 1, "bb" at 5, "ccccc" at 8.
    Assert.Equal(3, segmentCount);
    Assert.Equal(new PathSegment(1, 3), buffer[0]);
    Assert.Equal(new PathSegment(5, 2), buffer[1]);
    Assert.Equal(new PathSegment(8, 5), buffer[2]);
}
[Fact] // Empty trailing / is ignored
public void Tokenize_WithSomeSegments_TrailingSlash()
{
    // Arrange
    Span<PathSegment> buffer = stackalloc PathSegment[3];

    // Act - the trailing '/' does not add a fourth (empty) segment.
    var segmentCount = FastPathTokenizer.Tokenize("/a/b/c/", buffer);

    // Assert
    Assert.Equal(3, segmentCount);
    Assert.Equal(new PathSegment(1, 1), buffer[0]);
    Assert.Equal(new PathSegment(3, 1), buffer[1]);
    Assert.Equal(new PathSegment(5, 1), buffer[2]);
}
public void Tokenize_TooManySegments()
{
    // Arrange - buffer holds 3 segments but the path has 4.
    Span<PathSegment> buffer = stackalloc PathSegment[3];

    // Act - tokenization stops once the buffer is full rather than overflowing.
    var segmentCount = FastPathTokenizer.Tokenize("/a/b/c/d", buffer);

    // Assert - only the first three segments are recorded; "d" is dropped.
    Assert.Equal(3, segmentCount);
    Assert.Equal(new PathSegment(1, 1), buffer[0]);
    Assert.Equal(new PathSegment(3, 1), buffer[1]);
    Assert.Equal(new PathSegment(5, 1), buffer[2]);
}
public sealed override Task MatchAsync(HttpContext httpContext) { if (httpContext == null) { throw new ArgumentNullException(nameof(httpContext)); } // All of the logging we do here is at level debug, so we can get away with doing a single check. var log = _logger.IsEnabled(LogLevel.Debug); // The sequence of actions we take is optimized to avoid doing expensive work // like creating substrings, creating route value dictionaries, and calling // into policies like versioning. var path = httpContext.Request.Path.Value !; // First tokenize the path into series of segments. Span <PathSegment> buffer = stackalloc PathSegment[_maxSegmentCount]; var count = FastPathTokenizer.Tokenize(path, buffer); var segments = buffer.Slice(0, count); // FindCandidateSet will process the DFA and return a candidate set. This does // some preliminary matching of the URL (mostly the literal segments). var(candidates, policies) = FindCandidateSet(httpContext, path, segments); var candidateCount = candidates.Length; if (candidateCount == 0) { if (log) { Log.CandidatesNotFound(_logger, path); } return(Task.CompletedTask); } if (log) { Log.CandidatesFound(_logger, path, candidates); } var policyCount = policies.Length; // This is a fast path for single candidate, 0 policies and default selector if (candidateCount == 1 && policyCount == 0 && _isDefaultEndpointSelector) { ref readonly var candidate = ref candidates[0];