public async Task UpdateCacheAsync(
    Uri uri, LSP.SemanticTokens tokens, CancellationToken cancellationToken)
{
    Contract.ThrowIfNull(tokens.ResultId);

    using (await _semaphore.DisposableWaitAsync(cancellationToken).ConfigureAwait(false))
    {
        // Case 1: Document does not currently have any token sets cached. Create a cache
        // for the document and return.
        if (!_tokens.TryGetValue(uri, out var tokenSets))
        {
            _tokens.Add(uri, new List<LSP.SemanticTokens> { tokens });
            return;
        }

        // Case 2: Document already has the maximum number of token sets cached. Remove the
        // oldest token set from the cache, and then add the new token set (see case 3).
        if (tokenSets.Count >= _maxCachesPerDoc)
        {
            tokenSets.RemoveAt(0);
        }

        // Case 3: Document has less than the maximum number of token sets cached.
        // Add new token set to cache.
        tokenSets.Add(tokens);
    }
}
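// A minimal sketch of the per-instance state UpdateCacheAsync assumes, inferred from the
// usage above rather than copied from the real SemanticTokensCache: a semaphore guarding a
// dictionary that maps each document URI to its most recently computed token sets, capped at
// _maxCachesPerDoc sets per document. The field names match the method body; the exact types,
// initializers, and cap value are assumptions (DisposableWaitAsync is taken to be Roslyn's
// SemaphoreSlim extension).
private readonly SemaphoreSlim _semaphore = new SemaphoreSlim(initialCount: 1);
private readonly Dictionary<Uri, List<LSP.SemanticTokens>> _tokens = new Dictionary<Uri, List<LSP.SemanticTokens>>();
private readonly int _maxCachesPerDoc = 5;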
public async Task TestGetSemanticTokensRange_StringLiteralAsync()
{
    var markup =
@"{|caret:|}class C
{
    void M()
    {
        var x = @""one
two """"
three"";
    }
}
";
    using var testLspServer = await CreateTestLspServerAsync(markup);
    var range = new LSP.Range { Start = new Position(0, 0), End = new Position(9, 0) };

    var results = await RunGetSemanticTokensRangeAsync(testLspServer, testLspServer.GetLocations("caret").First(), range);

    var expectedResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            4, 8, 3, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Keyword], 0,               // 'var'
            1, 4, 2, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.StringEscapeCharacter], 0, // '""'
        },
    };

    await VerifyNoMultiLineTokens(testLspServer, results.Data!).ConfigureAwait(false);
    Assert.Equal(expectedResults.Data, results.Data);
}
public async Task TestGetSemanticTokensRangeAsync()
{
    var markup =
@"{|caret:|}// Comment
static class C { }
";
    using var testLspServer = CreateTestLspServer(markup, out var locations);
    var range = new LSP.Range { Start = new Position(1, 0), End = new Position(2, 0) };

    var results = await RunGetSemanticTokensRangeAsync(testLspServer, locations["caret"].First(), range);

    var expectedResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            1, 0, 6, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'static'
            0, 7, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'class'
            0, 6, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
        },
        ResultId = "1"
    };

    await VerifyNoMultiLineTokens(testLspServer, results.Data!).ConfigureAwait(false);
    Assert.Equal(expectedResults.Data, results.Data);
    Assert.Equal(expectedResults.ResultId, results.ResultId);
}
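// The expected Data arrays in these tests use the LSP semantic tokens wire format: flat
// quintuples of (deltaLine, deltaStartChar, length, tokenTypeIndex, tokenModifiersBitset),
// where deltaLine is relative to the previous token and deltaStartChar is relative to the
// previous token's start character whenever deltaLine is 0. The helper below is an
// illustrative sketch, not part of the test infrastructure (it assumes System.Collections.Generic
// is imported); it decodes such an array back into absolute positions, which can make the
// expected values above easier to check by hand.
private static IEnumerable<(int Line, int Character, int Length, int TokenTypeIndex, int Modifiers)> DecodeTokens(int[] data)
{
    var line = 0;
    var character = 0;
    for (var i = 0; i < data.Length; i += 5)
    {
        // Delta line; a non-zero value resets the character origin to the new line.
        line += data[i];
        character = data[i] == 0 ? character + data[i + 1] : data[i + 1];
        yield return (line, character, data[i + 2], data[i + 3], data[i + 4]);
    }
}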
public async Task TestGetSemanticTokensRange_PartialDoc_RazorAsync()
{
    // Razor docs should be returning semantic + syntactic results.
    var markup =
@"{|caret:|}// Comment
static class C { }
";
    using var testLspServer = await CreateTestLspServerAsync(markup);
    var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
    var range = new LSP.Range { Start = new Position(1, 0), End = new Position(2, 0) };
    var options = ClassificationOptions.Default;

    var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
        document, SemanticTokensHelpers.TokenTypeToIndex, range, options, includeSyntacticClassifications: true, CancellationToken.None);

    var expectedResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            1, 0, 6, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'static'
            0, 7, 5, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'class'
            0, 6, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C'
            0, 2, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
            0, 2, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
        },
    };

    await VerifyNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
    Assert.Equal(expectedResults.Data, results);
}
public async Task TestGetSemanticTokensAsync()
{
    var markup =
@"{|caret:|}// Comment
static class C { }";
    using var testLspServer = await CreateTestLspServerAsync(markup);

    var results = await RunGetSemanticTokensAsync(testLspServer, testLspServer.GetLocations("caret").First());

    var expectedResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            0, 0, 10, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0,     // '// Comment'
            1, 0, 6, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'static'
            0, 7, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'class'
            0, 6, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
        },
        ResultId = "1"
    };

    await VerifyNoMultiLineTokens(testLspServer, results.Data!).ConfigureAwait(false);
    Assert.Equal(expectedResults.Data, results.Data);
    Assert.Equal(expectedResults.ResultId, results.ResultId);
}
public async Task TestGetSemanticTokensMultiLineCommentAsync()
{
    var markup =
@"{|caret:|}class C { /* one
two
three */ }
";
    using var testLspServer = CreateTestLspServer(markup, out var locations);

    var results = await RunGetSemanticTokensAsync(testLspServer, locations["caret"].First());

    var expectedResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            0, 0, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'class'
            0, 6, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Class], 0,        // 'C'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
            0, 2, 6, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0,      // '/* one'
            1, 0, 3, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0,      // 'two'
            1, 0, 8, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0,      // 'three */'
            0, 9, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
        },
        ResultId = "1"
    };

    await VerifyNoMultiLineTokens(testLspServer, results.Data!).ConfigureAwait(false);
    Assert.Equal(expectedResults.Data, results.Data);
    Assert.Equal(expectedResults.ResultId, results.ResultId);
}
public async Task TestGetSemanticTokensRange_MultiLineCommentAsync()
{
    var markup =
@"{|caret:|}class C { /* one
two
three */ }
";
    var range = new LSP.Range { Start = new Position(0, 0), End = new Position(3, 0) };
    using var testLspServer = await CreateTestLspServerAsync(markup);

    var results = await RunGetSemanticTokensRangeAsync(testLspServer, testLspServer.GetLocations("caret").First(), range);

    var expectedResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            0, 0, 5, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'class'
            0, 6, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.ClassName], 0,   // 'C'
            0, 2, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
            0, 2, 6, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0,      // '/* one'
            1, 0, 3, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0,      // 'two'
            1, 0, 8, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0,      // 'three */'
            0, 9, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
        },
    };

    await VerifyNoMultiLineTokens(testLspServer, results.Data!).ConfigureAwait(false);
    Assert.Equal(expectedResults.Data, results.Data);
}
public async Task TestGetSemanticTokensAsync()
{
    var markup =
@"{|caret:|}// Comment
static class C { }";
    using var workspace = CreateTestWorkspace(markup, out var locations);

    var results = await RunGetSemanticTokensAsync(workspace.CurrentSolution, locations["caret"].First());

    var expectedResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            0, 0, 10, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0,     // '// Comment'
            1, 0, 6, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'static'
            0, 7, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'class'
            0, 6, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
        },
        ResultId = "1"
    };

    Assert.Equal(expectedResults.Data, results.Data);
    Assert.Equal(expectedResults.ResultId, results.ResultId);
}
public async Task TestGetSemanticTokensRange_StringLiteral_RazorAsync()
{
    var markup =
@"{|caret:|}class C
{
    void M()
    {
        var x = @""one
two """"
three"";
    }
}
";
    using var testLspServer = await CreateTestLspServerAsync(markup);
    var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
    var range = new LSP.Range { Start = new Position(0, 0), End = new Position(9, 0) };
    var options = ClassificationOptions.Default;

    var (results, _) = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
        document, SemanticTokensHelpers.TokenTypeToIndex, range, options, includeSyntacticClassifications: true, CancellationToken.None);

    var expectedResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            0, 0, 5, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,                // 'class'
            0, 6, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.ClassName], 0,             // 'C'
            1, 0, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '{'
            1, 4, 4, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,                // 'void'
            0, 5, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.MethodName], 0,            // 'M'
            0, 1, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '('
            0, 1, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // ')'
            1, 4, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '{'
            1, 8, 3, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Keyword], 0,               // 'var'
            0, 4, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.LocalName], 0,             // 'x'
            0, 2, 1, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0,               // '='
            0, 2, 5, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '@"one'
            1, 0, 6, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'two'
            0, 4, 2, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.StringEscapeCharacter], 0, // '""'
            1, 0, 6, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'three"'
            0, 6, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // ';'
            1, 4, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '}'
            1, 0, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '}'
        },
    };

    await VerifyNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
    Assert.Equal(expectedResults.Data, results);
}
public async Task TestGetSemanticTokensRange_StringLiteralAsync()
{
    var markup =
@"{|caret:|}class C
{
    void M()
    {
        var x = @""one
two """"
three"";
    }
}
";
    using var testLspServer = await CreateTestLspServerAsync(markup);
    var range = new LSP.Range { Start = new Position(0, 0), End = new Position(9, 0) };

    var results = await RunGetSemanticTokensRangeAsync(testLspServer, testLspServer.GetLocations("caret").First(), range);

    var expectedResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            0, 0, 5, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,                // 'class'
            0, 6, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.ClassName], 0,             // 'C'
            1, 0, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '{'
            1, 4, 4, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,                // 'void'
            0, 5, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.MethodName], 0,            // 'M'
            0, 1, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '('
            0, 1, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // ')'
            1, 4, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '{'
            1, 8, 3, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Keyword], 0,               // 'var'
            0, 4, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.LocalName], 0,             // 'x'
            0, 2, 1, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0,               // '='
            0, 2, 5, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '@"one'
            1, 0, 6, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'two'
            0, 4, 2, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.StringEscapeCharacter], 0, // '""'
            1, 0, 6, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'three"'
            0, 6, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // ';'
            1, 4, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '}'
            1, 0, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '}'
        },
    };

    await VerifyNoMultiLineTokens(testLspServer, results.Data!).ConfigureAwait(false);
    Assert.Equal(expectedResults.Data, results.Data);
}
public async Task TestGetSemanticTokensStringLiteralAsync()
{
    var markup =
@"{|caret:|}class C
{
    void M()
    {
        var x = @""one
two """"
three"";
    }
}
";
    using var testLspServer = CreateTestLspServer(markup, out var locations);

    var results = await RunGetSemanticTokensAsync(testLspServer, locations["caret"].First());

    var expectedResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            0, 0, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,                // 'class'
            0, 6, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Class], 0,                  // 'C'
            1, 0, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '{'
            1, 4, 4, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,                // 'void'
            0, 5, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.MethodName], 0,            // 'M'
            0, 1, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '('
            0, 1, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // ')'
            1, 4, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '{'
            1, 8, 3, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Keyword], 0,               // 'var'
            0, 4, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.LocalName], 0,             // 'x'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0,               // '='
            0, 2, 5, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '@"one'
            1, 0, 6, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'two'
            0, 4, 2, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.StringEscapeCharacter], 0, // '""'
            1, 0, 6, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'three"'
            0, 6, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // ';'
            1, 4, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '}'
            1, 0, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,           // '}'
        },
        ResultId = "1"
    };

    await VerifyNoMultiLineTokens(testLspServer, results.Data!).ConfigureAwait(false);
    Assert.Equal(expectedResults.Data, results.Data);
    Assert.Equal(expectedResults.ResultId, results.ResultId);
}
public async Task TestGetSemanticTokensRange_MultiLineComment_RazorAsync()
{
    // Testing as a Razor doc so we get both syntactic + semantic results; otherwise the results would be empty.
    var markup =
@"{|caret:|}class C { /* one

two
three */ }
";
    using var testLspServer = await CreateTestLspServerAsync(markup);
    var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
    var range = new LSP.Range { Start = new Position(0, 0), End = new Position(4, 0) };
    var options = ClassificationOptions.Default;

    var (results, _) = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
        document, SemanticTokensHelpers.TokenTypeToIndex, range, options, includeSyntacticClassifications: true, CancellationToken.None);

    var expectedResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            0, 0, 5, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'class'
            0, 6, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.ClassName], 0,   // 'C'
            0, 2, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
            0, 2, 6, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0,      // '/* one'
            2, 0, 3, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0,      // 'two'
            1, 0, 8, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0,      // 'three */'
            0, 9, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
        },
    };

    await VerifyNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
    Assert.Equal(expectedResults.Data, results);
}
public async Task TestGetSemanticTokensRange_FullDocAsync()
{
    var markup =
@"{|caret:|}// Comment
static class C { }
";
    using var testLspServer = await CreateTestLspServerAsync(markup);
    var range = new LSP.Range { Start = new Position(0, 0), End = new Position(2, 0) };

    var results = await RunGetSemanticTokensRangeAsync(testLspServer, testLspServer.GetLocations("caret").First(), range);

    // Everything is colorized syntactically, so we shouldn't be returning any semantic results.
    var expectedResults = new LSP.SemanticTokens
    {
        Data = Array.Empty<int>()
    };

    Assert.Equal(expectedResults.Data, results.Data);
}
public async Task TestAllHandlersAsync()
{
    var markup =
@"{|caret:|}// Comment
static class C { }
";
    using var testLspServer = CreateTestLspServer(markup, out var locations);
    var caretLocation = locations["caret"].First();

    // 1. Range handler
    var range = new LSP.Range { Start = new Position(1, 0), End = new Position(2, 0) };
    var rangeResults = await RunGetSemanticTokensRangeAsync(testLspServer, caretLocation, range);
    var expectedRangeResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            1, 0, 6, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'static'
            0, 7, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'class'
            0, 6, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
        },
        ResultId = "1"
    };

    await VerifyNoMultiLineTokens(testLspServer, rangeResults.Data!).ConfigureAwait(false);
    Assert.Equal(expectedRangeResults.Data, rangeResults.Data);
    Assert.Equal(expectedRangeResults.ResultId, rangeResults.ResultId);

    // 2. Whole document handler
    var wholeDocResults = await RunGetSemanticTokensAsync(testLspServer, caretLocation);
    var expectedWholeDocResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            0, 0, 10, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0,     // '// Comment'
            1, 0, 6, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'static'
            0, 7, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'class'
            0, 6, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
        },
        ResultId = "2"
    };

    await VerifyNoMultiLineTokens(testLspServer, wholeDocResults.Data!).ConfigureAwait(false);
    Assert.Equal(expectedWholeDocResults.Data, wholeDocResults.Data);
    Assert.Equal(expectedWholeDocResults.ResultId, wholeDocResults.ResultId);

    // 3. Edits handler - insert newline at beginning of file
    var newMarkup =
@"
// Comment
static class C { }
";
    UpdateDocumentText(newMarkup, testLspServer.TestWorkspace);

    var editResults = await RunGetSemanticTokensEditsAsync(testLspServer, caretLocation, previousResultId: "2");

    var expectedEdit = SemanticTokensEditsHandler.GenerateEdit(0, 1, new int[] { 1 });

    Assert.Equal(expectedEdit, ((LSP.SemanticTokensEdits)editResults).Edits.First());
    Assert.Equal("3", ((LSP.SemanticTokensEdits)editResults).ResultId);

    // 4. Re-request whole document handler (may happen if LSP runs into an error)
    var wholeDocResults2 = await RunGetSemanticTokensAsync(testLspServer, caretLocation);
    var expectedWholeDocResults2 = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            1, 0, 10, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0,     // '// Comment'
            1, 0, 6, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'static'
            0, 7, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,      // 'class'
            0, 6, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
            0, 2, 1, SemanticTokensCache.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
        },
        ResultId = "4"
    };

    await VerifyNoMultiLineTokens(testLspServer, wholeDocResults2.Data!).ConfigureAwait(false);
    Assert.Equal(expectedWholeDocResults2.Data, wholeDocResults2.Data);
    Assert.Equal(expectedWholeDocResults2.ResultId, wholeDocResults2.ResultId);
}
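// Why the expected edit in step 3 is GenerateEdit(0, 1, new int[] { 1 }): inserting a blank
// line at the top of the file only moves the first token ('// Comment') down one line, so in
// the delta-encoded data only the very first integer (that token's deltaLine) changes from 0
// to 1; every other value is unchanged. Reading GenerateEdit's arguments as (start,
// deleteCount, insertedData) is inferred from this call site. Below is a minimal sketch
// (hypothetical helper, not part of the test infrastructure) of applying such an edit to a
// client's cached data.
private static int[] ApplyEdit(int[] cachedData, int start, int deleteCount, int[] insertedData)
{
    // Splice the inserted values over the deleted range, leaving the rest of the array intact.
    var result = new List<int>(cachedData);
    result.RemoveRange(start, deleteCount);
    result.InsertRange(start, insertedData);
    return result.ToArray();
}
// ApplyEdit(expectedWholeDocResults.Data, 0, 1, new[] { 1 }) yields the same token data as
// expectedWholeDocResults2 above.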
public async Task TestGetSemanticTokensRange_Regex_RazorAsync()
{
    var markup =
@"{|caret:|}using System.Text.RegularExpressions;

class C
{
	void M()
	{
		var x = new Regex(""(abc)*"");
    }
}
";
    using var testLspServer = await CreateTestLspServerAsync(markup);
    var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
    var range = new LSP.Range { Start = new Position(0, 0), End = new Position(9, 0) };
    var options = ClassificationOptions.Default;

    var (results, _) = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
        document, SemanticTokensHelpers.TokenTypeToIndex, range, options, includeSyntacticClassifications: true, CancellationToken.None);

    var expectedResults = new LSP.SemanticTokens
    {
        Data = new int[]
        {
            // Line | Char | Len | Token type | Modifier
            0, 0, 5, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,          // 'using'
            0, 6, 6, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0,   // 'System'
            0, 6, 1, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0,         // '.'
            0, 1, 4, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0,   // 'Text'
            0, 4, 1, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0,         // '.'
            0, 1, 18, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0,  // 'RegularExpressions'
            0, 18, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,    // ';'
            2, 0, 5, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,          // 'class'
            0, 6, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.ClassName], 0,       // 'C'
            1, 0, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,     // '{'
            1, 1, 4, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,          // 'void'
            0, 5, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.MethodName], 0,      // 'M'
            0, 1, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,     // '('
            0, 1, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,     // ')'
            1, 1, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,     // '{'
            1, 2, 3, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Keyword], 0,         // 'var'
            0, 4, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.LocalName], 0,       // 'x'
            0, 2, 1, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0,         // '='
            0, 2, 3, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,          // 'new'
            0, 4, 5, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.ClassName], 0,       // 'Regex'
            0, 5, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,     // '('
            0, 1, 8, SemanticTokensHelpers.TokenTypeToIndex[LSP.SemanticTokenTypes.String], 0,           // '"(abc)*"'
            0, 1, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0,   // '('
            0, 1, 3, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.RegexText], 0,       // 'abc'
            0, 3, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0,   // ')'
            0, 1, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.RegexQuantifier], 0, // '*'
            0, 2, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,     // ')'
            0, 1, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,     // ';'
            1, 4, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,     // '}'
            1, 0, 1, SemanticTokensHelpers.TokenTypeToIndex[ClassificationTypeNames.Punctuation], 0,     // '}'
        },
    };

    await VerifyNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
    Assert.Equal(expectedResults.Data, results);
}