/// <summary>
/// Splits a dot-separated token into segments, avoiding a final Slice by
/// comparing the running position against the index of the last dot.
/// Writes up to five <c>TokenSegment</c> values into <paramref name="segments"/>
/// and returns the segment count (e.g. 3 for a JWS-shaped token, 5 for a
/// JWE-shaped one), or 0 when the token does not terminate on its last dot
/// within the allowed number of segments.
/// </summary>
internal static int Tokenize_AvoidLastSliceV3(ReadOnlySpan <byte> token, Span <TokenSegment> segments)
{
    int count = 0;
    int start = 0;
    int end;
    int last = token.LastIndexOf(ByteDot);
    var span = token;

    // At most 4 dots may precede the final segment. The previous bound of
    // `count < 5` permitted a fifth iteration whose last-dot branch wrote
    // segments[5] — one past the expected 5-element buffer — and returned 6
    // for any token containing exactly five dots.
    while ((end = span.IndexOf(ByteDot)) >= 0 && count < 4)
    {
        segments[count++] = new TokenSegment(start, end);
        start += end + 1;
        if (last == start - 1)
        {
            // The dot just consumed is the last one: the remainder is the
            // final segment, so no further Slice/IndexOf is needed.
            segments[count++] = new TokenSegment(last + 1, token.Length - last - 1);
            goto Found;
        }

        span = token.Slice(start);
    }

    return 0;

Found:
    return count;
}
/// <summary>
/// Unrolled, pointer-based tokenizer. Writes 3 segments and returns 3 for a
/// JWS-shaped token (two dots, the second being the last), writes 5 segments
/// and returns 5 for a JWE-shaped token (four dots, the fourth being the
/// last), and returns 0 for anything else. The caller must supply a buffer
/// of at least five elements behind <paramref name="pSegments"/>.
/// </summary>
internal unsafe static int Tokenize_Unroll_Unsafe_Goto(ReadOnlySpan <byte> token, TokenSegment *pSegments)
{
    var remaining = token;
    int lastDot = remaining.LastIndexOf(ByteDot);

    // Segment 0: everything before the first dot.
    int dot = remaining.IndexOf(ByteDot);
    if (dot < 0)
    {
        return 0;
    }

    pSegments[0] = new TokenSegment(0, dot);
    int offset = dot + 1;
    remaining = token.Slice(offset);

    // Segment 1.
    dot = remaining.IndexOf(ByteDot);
    if (dot < 0)
    {
        return 0;
    }

    pSegments[1] = new TokenSegment(offset, dot);
    offset += dot + 1;
    if (lastDot == offset - 1)
    {
        // The second dot is also the last one: JWS shape, three segments.
        pSegments[2] = new TokenSegment(lastDot + 1, token.Length - lastDot - 1);
        return 3;
    }

    remaining = token.Slice(offset);

    // Segment 2.
    dot = remaining.IndexOf(ByteDot);
    if (dot < 0)
    {
        return 0;
    }

    pSegments[2] = new TokenSegment(offset, dot);
    offset += dot + 1;
    remaining = token.Slice(offset);

    // Segment 3.
    dot = remaining.IndexOf(ByteDot);
    if (dot < 0)
    {
        return 0;
    }

    pSegments[3] = new TokenSegment(offset, dot);
    offset += dot + 1;
    if (lastDot == offset - 1)
    {
        // The fourth dot is the last one: JWE shape, five segments.
        pSegments[4] = new TokenSegment(lastDot + 1, token.Length - lastDot - 1);
        return 5;
    }

    return 0;
}
/// <summary>
/// Appends a new segment holding <paramref name="mem"/> after this one and
/// returns it. The new segment's running index continues from the end of
/// this segment's memory.
/// </summary>
public TokenSegment <T> Add(ReadOnlyMemory <T> mem)
{
    var appended = new TokenSegment <T>(mem)
    {
        RunningIndex = RunningIndex + Memory.Length,
    };
    Next = appended;
    return appended;
}
/// <summary>
/// Splits a JWT into its dot-separated segments. Writes up to five
/// <c>TokenSegment</c> values starting at <paramref name="segments"/> via
/// <c>Unsafe.Add</c> (no bounds checking — the caller must supply a buffer
/// of at least five elements). Returns <c>Constants.JwsSegmentCount</c>
/// after writing three segments (token whose second dot is its last),
/// <c>Constants.JweSegmentCount</c> after writing five segments (token
/// whose fourth dot is its last), or 0 when the token matches neither shape.
/// The last-dot comparison lets the final segment be taken directly from
/// the original span without a trailing Slice/IndexOf.
/// </summary>
public static int Tokenize(ReadOnlySpan <byte> token, ref TokenSegment segments) { int start; var span = token; int last = span.LastIndexOf(Constants.ByteDot); int end = span.IndexOf(Constants.ByteDot); if (end < 0) { return(0); } segments = new TokenSegment(0, end); start = end + 1; span = token.Slice(start); end = span.IndexOf(Constants.ByteDot); if (end < 0) { return(0); } Unsafe.Add(ref segments, 1) = new TokenSegment(start, end); start += end + 1; if (last == start - 1) { Unsafe.Add(ref segments, 2) = new TokenSegment(last + 1, token.Length - last - 1); return(Constants.JwsSegmentCount); } span = token.Slice(start); end = span.IndexOf(Constants.ByteDot); if (end < 0) { return(0); } Unsafe.Add(ref segments, 2) = new TokenSegment(start, end); start += end + 1; span = token.Slice(start); end = span.IndexOf(Constants.ByteDot); if (end < 0) { return(0); } Unsafe.Add(ref segments, 3) = new TokenSegment(start, end); start += end + 1; if (last == start - 1) { Unsafe.Add(ref segments, 4) = new TokenSegment(last + 1, token.Length - last - 1); return(Constants.JweSegmentCount); } return(0); }
/// <summary>
/// Unrolled by-ref tokenizer: writes 3 segments and returns 3 for a
/// JWS-shaped token, writes 5 segments and returns 5 for a JWE-shaped
/// token, and returns 0 otherwise. Segments are written through
/// <c>Unsafe.Add</c> with no bounds check; the caller must provide at
/// least five elements behind <paramref name="segments"/>.
/// </summary>
internal unsafe static int Tokenize_Unroll_ByRef(ReadOnlySpan <byte> token, ref TokenSegment segments)
{
    var rest = token;
    int lastDot = rest.LastIndexOf(ByteDot);

    // Segment 0.
    int dotIndex = rest.IndexOf(ByteDot);
    if (dotIndex < 0)
    {
        return 0;
    }

    segments = new TokenSegment(0, dotIndex);
    int offset = dotIndex + 1;

    // Segment 1.
    rest = token.Slice(offset);
    dotIndex = rest.IndexOf(ByteDot);
    if (dotIndex < 0)
    {
        return 0;
    }

    Unsafe.Add(ref segments, 1) = new TokenSegment(offset, dotIndex);
    offset += dotIndex + 1;
    if (lastDot == offset - 1)
    {
        // JWS: the second dot is the last one; the remainder is segment 2.
        Unsafe.Add(ref segments, 2) = new TokenSegment(lastDot + 1, token.Length - lastDot - 1);
        return 3;
    }

    // Segment 2.
    rest = token.Slice(offset);
    dotIndex = rest.IndexOf(ByteDot);
    if (dotIndex < 0)
    {
        return 0;
    }

    Unsafe.Add(ref segments, 2) = new TokenSegment(offset, dotIndex);
    offset += dotIndex + 1;

    // Segment 3.
    rest = token.Slice(offset);
    dotIndex = rest.IndexOf(ByteDot);
    if (dotIndex < 0)
    {
        return 0;
    }

    Unsafe.Add(ref segments, 3) = new TokenSegment(offset, dotIndex);
    offset += dotIndex + 1;
    if (lastDot == offset - 1)
    {
        // JWE: the fourth dot is the last one; the remainder is segment 4.
        Unsafe.Add(ref segments, 4) = new TokenSegment(lastDot + 1, token.Length - lastDot - 1);
        return 5;
    }

    return 0;
}
/// <summary>
/// Unrolled span-based tokenizer: writes 3 segments and returns 3 for a
/// JWS-shaped token, writes 5 segments and returns 5 for a JWE-shaped
/// token, and returns 0 for anything else.
/// </summary>
internal static int Tokenize_Unroll(ReadOnlySpan <byte> token, Span <TokenSegment> segments)
{
    var rest = token;
    int lastDot = rest.LastIndexOf(ByteDot);

    // Segment 0: everything before the first dot.
    int dot = rest.IndexOf(ByteDot);
    if (dot < 0)
    {
        return 0;
    }

    segments[0] = new TokenSegment(0, dot);
    int offset = dot + 1;

    // Segment 1.
    rest = token.Slice(offset);
    dot = rest.IndexOf(ByteDot);
    if (dot < 0)
    {
        return 0;
    }

    segments[1] = new TokenSegment(offset, dot);
    offset += dot + 1;
    if (lastDot == offset - 1)
    {
        // JWS: the second dot is the last one; remainder is segment 2.
        segments[2] = new TokenSegment(lastDot + 1, token.Length - lastDot - 1);
        return 3;
    }

    // Segment 2.
    rest = token.Slice(offset);
    dot = rest.IndexOf(ByteDot);
    if (dot < 0)
    {
        return 0;
    }

    segments[2] = new TokenSegment(offset, dot);
    offset += dot + 1;

    // Segment 3.
    rest = token.Slice(offset);
    dot = rest.IndexOf(ByteDot);
    if (dot < 0)
    {
        return 0;
    }

    segments[3] = new TokenSegment(offset, dot);
    offset += dot + 1;
    if (lastDot == offset - 1)
    {
        // JWE: the fourth dot is the last one; remainder is segment 4.
        segments[4] = new TokenSegment(lastDot + 1, token.Length - lastDot - 1);
        return 5;
    }

    return 0;
}
/// <summary>
/// Generic by-ref tokenizer: records one segment per dot (up to five
/// segments total), then records whatever follows the final dot as the
/// trailing segment. Returns the number of segments written. Segments are
/// written via <c>Unsafe.Add</c> with no bounds check; the caller must
/// provide at least five elements behind <paramref name="segments"/>.
/// </summary>
internal unsafe static int Tokenize_ByRef(ReadOnlySpan <byte> token, ref TokenSegment segments)
{
    int written = 0;
    int offset = 0;
    var rest = token;

    for (int dot = rest.IndexOf(ByteDot); dot >= 0 && written < 5; dot = rest.IndexOf(ByteDot))
    {
        Unsafe.Add(ref segments, written++) = new TokenSegment(offset, dot);
        offset += dot + 1;
        rest = token.Slice(offset);
    }

    // Residue: the dotless remainder (or the whole token when it contains
    // no dot at all), unless five segments were already written.
    if (written < 5)
    {
        Unsafe.Add(ref segments, written++) = new TokenSegment(offset, rest.Length);
    }

    return written;
}
/// <summary>
/// Generic span-based tokenizer: records one segment per dot (up to five
/// segments total), then records whatever follows the final dot as the
/// trailing segment. Returns the number of segments written.
/// </summary>
internal static int Tokenize(ReadOnlySpan <byte> token, Span <TokenSegment> segments)
{
    int n = 0;
    int pos = 0;
    var tail = token;

    while (n < 5)
    {
        int dot = tail.IndexOf(ByteDot);
        if (dot < 0)
        {
            break;
        }

        segments[n++] = new TokenSegment(pos, dot);
        pos += dot + 1;
        tail = token.Slice(pos);
    }

    // Residue: the dotless remainder becomes the final segment, unless
    // five segments have already been recorded.
    if (n < 5)
    {
        segments[n++] = new TokenSegment(pos, tail.Length);
    }

    return n;
}
/// <summary>
/// Pointer-based generic tokenizer: records one segment per dot (up to five
/// segments total), then records whatever follows the final dot as the
/// trailing segment. Returns the number of segments written. Segments are
/// written through <paramref name="pSegments"/> with no bounds checking —
/// the caller is responsible for supplying a buffer of at least five
/// elements.
/// </summary>
public unsafe static int Tokenize_Unsafe(ReadOnlySpan <byte> token, TokenSegment *pSegments) { int count = 0; int start = 0; int end; var span = token; while ((end = span.IndexOf(ByteDot)) >= 0 && count < 5) { *(pSegments + count++) = new TokenSegment(start, end); start += end + 1; span = token.Slice(start); } // Residue if (count < 5) { *(pSegments + count++) = new TokenSegment(start, span.Length); } return(count); }
/// <summary>
/// Pointer-based tokenizer that avoids the final Slice by comparing the
/// running position against the index of the last dot. Writes up to five
/// <c>TokenSegment</c> values behind <paramref name="pSegments"/> (no
/// bounds checking — the caller must provide at least five elements) and
/// returns the number of segments written.
/// </summary>
internal static unsafe int Tokenize_AvoidLastSlice_Unsafe(ReadOnlySpan <byte> token, TokenSegment *pSegments)
{
    int count = 0;
    int start = 0;
    int end;
    int last = token.LastIndexOf(ByteDot);
    var span = token;

    // At most 4 dots may precede the final segment. The previous bound of
    // `count < 5` permitted a fifth iteration whose last-dot branch wrote
    // pSegments[5] — one past the expected 5-element buffer — which, through
    // a raw pointer, is a silent out-of-bounds memory write for any token
    // containing exactly five dots.
    while ((end = span.IndexOf(ByteDot)) >= 0 && count < 4)
    {
        *(pSegments + count++) = new TokenSegment(start, end);
        start += end + 1;
        if (last == start - 1)
        {
            // The dot just consumed is the last one: take the remainder as
            // the final segment without another Slice/IndexOf.
            *(pSegments + count++) = new TokenSegment(last + 1, token.Length - last - 1);
            break;
        }

        span = token.Slice(start);
    }

    return count;
}
/// <summary>
/// By-ref tokenizer that avoids the final Slice by comparing the running
/// position against the index of the last dot. Writes up to five
/// <c>TokenSegment</c> values via <c>Unsafe.Add</c> (no bounds checking —
/// the caller must provide at least five elements) and returns the segment
/// count, or 0 when the token does not terminate on its last dot within
/// the allowed number of segments.
/// </summary>
internal static int Tokenize_AvoidLastSlice_ByRef(ReadOnlySpan <byte> token, ref TokenSegment segments)
{
    int count = 0;
    int start = 0;
    int end;
    int last = token.LastIndexOf(ByteDot);
    var span = token;

    // At most 4 dots may precede the final segment. The previous bound of
    // `count < 5` permitted a fifth iteration whose last-dot branch wrote
    // Unsafe.Add(ref segments, 5) — one past the expected 5-element buffer,
    // an unchecked out-of-bounds write — for any token containing exactly
    // five dots.
    while ((end = span.IndexOf(ByteDot)) >= 0 && count < 4)
    {
        Unsafe.Add(ref segments, count++) = new TokenSegment(start, end);
        start += end + 1;
        if (last == start - 1)
        {
            // The dot just consumed is the last one: take the remainder as
            // the final segment without another Slice/IndexOf.
            Unsafe.Add(ref segments, count++) = new TokenSegment(last + 1, token.Length - last - 1);
            return count;
        }

        span = token.Slice(start);
    }

    return 0;
}
/// <summary>
/// Unrolled span-based tokenizer: writes 3 segments and returns 3 for a
/// JWS-shaped token (second dot is the last), writes 5 segments and returns
/// 5 for a JWE-shaped token (fourth dot is the last), and returns 0 for
/// anything else.
/// </summary>
public static int Tokenize_Unroll_Goto(ReadOnlySpan <byte> token, Span <TokenSegment> segments)
{
    var remainder = token;
    int finalDot = remainder.LastIndexOf(ByteDot);

    // Segment 0: everything before the first dot.
    int idx = remainder.IndexOf(ByteDot);
    if (idx < 0)
    {
        return 0;
    }

    segments[0] = new TokenSegment(0, idx);
    int cursor = idx + 1;

    // Segment 1.
    remainder = token.Slice(cursor);
    idx = remainder.IndexOf(ByteDot);
    if (idx < 0)
    {
        return 0;
    }

    segments[1] = new TokenSegment(cursor, idx);
    cursor += idx + 1;
    if (finalDot == cursor - 1)
    {
        // JWS: the remainder after the last dot is segment 2.
        segments[2] = new TokenSegment(finalDot + 1, token.Length - finalDot - 1);
        return 3;
    }

    // Segment 2.
    remainder = token.Slice(cursor);
    idx = remainder.IndexOf(ByteDot);
    if (idx < 0)
    {
        return 0;
    }

    segments[2] = new TokenSegment(cursor, idx);
    cursor += idx + 1;

    // Segment 3.
    remainder = token.Slice(cursor);
    idx = remainder.IndexOf(ByteDot);
    if (idx < 0)
    {
        return 0;
    }

    segments[3] = new TokenSegment(cursor, idx);
    cursor += idx + 1;
    if (finalDot == cursor - 1)
    {
        // JWE: the remainder after the last dot is segment 4.
        segments[4] = new TokenSegment(finalDot + 1, token.Length - finalDot - 1);
        return 5;
    }

    return 0;
}