Example #1
        private void Tokenize(string document, SemanticTokensBuilder builder)
        {
            var faker = new Faker<TokenizationValue>()
                        .RuleFor(z => z.type,
                                 f => f.PickRandom(SemanticTokenType.Defaults).OrNull(f, 0.2f) ?? new SemanticTokenType("none")
                                 )
                        .RuleFor(x => x.Modifiers,
                                 f => Enumerable.Range(0, f.Random.Int(0, 3))
                                 .Select(z =>
                                         f.PickRandom(SemanticTokenModifier.Defaults).OrNull(f, 0.2f) ??
                                         new SemanticTokenModifier("none")
                                         )
                                 .ToArray()
                                 .OrNull(f, 0.2f)
                                 );

            foreach (var (line, text) in document.Split('\n').Select((text, line) => (line, text)))
            {
                var parts = text.TrimEnd().Split(';', ' ', '.', '"', '(', ')');
                var index = 0;
                foreach (var part in parts)
                {
                    faker.UseSeed(part.Length * line * text.Length);
                    if (string.IsNullOrWhiteSpace(part))
                    {
                        continue;
                    }
                    // _logger.LogWarning("Index for part before {Index}: {Text}", index, part);
                    index = text.IndexOf(part, index, StringComparison.Ordinal);
                    // _logger.LogInformation("Index for part after {Index}: {Text}", index, part);
                    var item = faker.Generate();
                    builder.Push(line, index, part.Length, item.type, item.Modifiers);
                    // advance past this part so a repeated word on the same line is not
                    // matched at the same position again
                    index += part.Length;
                }
            }
        }
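
The TokenizationValue type bound by the Faker rules above is not shown in this example. A minimal sketch of its presumed shape, inferred from the z.type and x.Modifiers lambdas (everything here is an assumption):

        class TokenizationValue
        {
            public SemanticTokenType type { get; set; }
            public SemanticTokenModifier[] Modifiers { get; set; }
        }
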
Example #2
        protected override async Task Tokenize(SemanticTokensBuilder builder, ITextDocumentIdentifierParams identifier,
                                               CancellationToken cancellationToken)
        {
            using var typesEnumerator     = RotateEnum(SemanticTokenType.Defaults).GetEnumerator();
            using var modifiersEnumerator = RotateEnum(SemanticTokenModifier.Defaults).GetEnumerator();
            // you would normally get this from a common source that is managed by current open editor, current active editor, etc.
            var content = await File.ReadAllTextAsync(DocumentUri.GetFileSystemPath(identifier), cancellationToken);

            await Task.Yield();

            foreach (var (line, text) in content.Split('\n').Select((text, line) => (line, text)))
            {
                var parts = text.TrimEnd().Split(';', ' ', '.', '"', '(', ')');
                var index = 0;
                foreach (var part in parts)
                {
                    typesEnumerator.MoveNext();
                    modifiersEnumerator.MoveNext();
                    if (string.IsNullOrWhiteSpace(part))
                    {
                        continue;
                    }
                    index = text.IndexOf(part, index, StringComparison.Ordinal);
                    builder.Push(line, index, part.Length, typesEnumerator.Current, modifiersEnumerator.Current);
                    // advance past this part so a repeated word on the same line is not
                    // matched at the same position again
                    index += part.Length;
                }
            }
        }
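
RotateEnum is a helper this example relies on but does not show; it presumably cycles through the given values endlessly so the enumerators above never run out. A sketch under that assumption:

        private static IEnumerable<T> RotateEnum<T>(IEnumerable<T> values)
        {
            // loop forever, yielding the sequence again each time it is exhausted
            while (true)
            {
                foreach (var value in values)
                {
                    yield return value;
                }
            }
        }
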
Example #3
        protected override async Task Tokenize(SemanticTokensBuilder builder,
                                               ITextDocumentIdentifierParams identifier,
                                               CancellationToken cancellationToken)
        {
            var ast =
                _bufferManager.GetAstFor(identifier.TextDocument.Uri.ToString()).Expressions.ToDotList();

            var symbols = ExtractTypes<DotSymbol>(ast);

            // _logger.LogInformation(
            //     $"-----------------extracted Symbols:\n{symbols.PrettyPrint()}");
            // TODO: Make this parallel
            foreach (var symbol in symbols)
            {
                // _logger.LogInformation(
                //     $"({symbol.Line}:{symbol.Column}) symbol: {symbol.Name}");
                builder.Push(symbol.Line - 1, symbol.Column,
                             symbol.Name.Length,
                             SemanticTokenType.Function, SemanticTokenModifier.Static,
                             SemanticTokenModifier.Documentation);
            }

            var strings = ExtractTypes<DotString>(ast);

            // _logger.LogInformation(
            //     $"-----------------extracted Strings:\n{strings.PrettyPrint()}");
            foreach (var str in strings)
            {
                // _logger.LogInformation(
                //     $"({str.Line}:{str.Column}) string: {str.Value}");
                builder.Push(str.Line - 1, str.Column, str.Value.Length + 2,
                             SemanticTokenType.Class, SemanticTokenModifier.Static,
                             SemanticTokenModifier.Readonly);
            }
        }
Example #4
        protected override Task Tokenize(SemanticTokensBuilder builder, ITextDocumentIdentifierParams identifier,
                                         CancellationToken cancellationToken)
        {
            ScriptFile file = _workspaceService.GetFile(identifier.TextDocument.Uri);

            foreach (Token token in file.ScriptTokens)
            {
                PushToken(token, builder);
            }
            return Task.CompletedTask;
        }
Example #5
        protected override Task Tokenize(SemanticTokensBuilder builder, ITextDocumentIdentifierParams identifier, CancellationToken cancellationToken)
        {
            var compilationContext = this.compilationManager.GetCompilation(identifier.TextDocument.Uri);

            if (compilationContext != null)
            {
                SemanticTokenVisitor.BuildSemanticTokens(builder, compilationContext);
            }

            return Task.CompletedTask;
        }
Example #6
 private static void PushToken(Token token, SemanticTokensBuilder builder)
 {
     foreach (SemanticToken sToken in ConvertToSemanticTokens(token))
     {
         builder.Push(
             sToken.Line,
             sToken.Column,
             length: sToken.Text.Length,
             sToken.Type,
             tokenModifiers: sToken.TokenModifiers);
     }
 }
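
The SemanticToken DTO consumed by PushToken is not shown in the example; its shape can be inferred from the Push call. A hypothetical sketch (all names are assumptions):

 internal record SemanticToken(
     string Text, SemanticTokenType Type,
     int Line, int Column,
     IEnumerable<SemanticTokenModifier> TokenModifiers);
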
Example #7
        protected override async Task Tokenize(SemanticTokensBuilder builder,
                                               ITextDocumentIdentifierParams identifier,
                                               CancellationToken cancellationToken)
        {
            var doc = _lspHost.Documents[identifier.TextDocument.Uri];

            if (doc.Compilation == null)
            {
                return;
            }

            foreach (var syntaxTree in doc.Compilation.SyntaxTrees)
            {
                TokenizeTokenTree(builder, syntaxTree.Root, cancellationToken);
            }
        }
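
The TokenizeTokenTree helper this handler calls is presumably the recursive visitor shown in Example #9 below; the signatures match.
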
Example #8
        protected override async Task Tokenize(
            SemanticTokensBuilder builder, ITextDocumentIdentifierParams identifier,
            CancellationToken cancellationToken
            )
        {
            var hash = Hash.StringHash(identifier.TextDocument.Uri.GetFileSystemPath());

            var    now            = DateTime.Now;
            IntPtr tokensPtr      = IntPtr.Zero;
            int    count          = 0;
            long   internalMicros = TreeSitter.GetTokens(hash, out tokensPtr, out count); // this can be async because it actually does work.
            var    then           = DateTime.Now;
            var    elapsed        = then - now;

            _logger.LogInformation("Elapsed time for C++ tokens: " + elapsed.TotalMilliseconds + " native time: " + internalMicros);

            List<Diagnostic> diagnostics = new List<Diagnostic>();

            unsafe
            {
                SemanticToken* ptr = (SemanticToken*)tokensPtr;
                for (int i = 0; i < count; i++)
                {
                    if ((int)ptr[i].type == 255)
                    {
                        Diagnostic diag = new Diagnostic();
                        diag.Severity    = DiagnosticSeverity.Error;
                        diag.Range       = new OmniSharp.Extensions.LanguageServer.Protocol.Models.Range();
                        diag.Range.Start = new Position(ptr[i].line, ptr[i].col);
                        diag.Range.End   = new Position(ptr[i].line, ptr[i].col + ptr[i].length);
                        diag.Message     = "undeclared identifier";
                        diagnostics.Add(diag);

                        continue;
                    }

                    builder.Push(ptr[i].line, ptr[i].col, ptr[i].length, (int)ptr[i].type, (int)ptr[i].modifier);
                }
            }

            diagnoser.Add(identifier.TextDocument.Uri, 0, diagnostics);
            diagnoser.Publish(identifier.TextDocument.Uri);
        }
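
The unsafe block above assumes the native tree-sitter wrapper returns a contiguous array of blittable structs. A plausible layout (field names and widths are inferred from the usage; requires System.Runtime.InteropServices):

        [StructLayout(LayoutKind.Sequential)]
        struct SemanticToken
        {
            public int line;
            public int col;
            public int length;
            public int type;     // 255 appears to be a sentinel for "undeclared identifier"
            public int modifier;
        }
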
Example #9
        private void TokenizeTokenTree(SemanticTokensBuilder builder, SyntaxNode token, CancellationToken cancellationToken)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                return;
            }

            switch (token.Kind)
            {
            case SyntaxKind.SingleLineCommentTrivia:
            case SyntaxKind.MultiLineCommentTrivia:
                _logger.LogInformation($"Token: {token.Kind}. Line: {token.Location.StartLine}. Char: {token.Span.Start}. Len: {token.Span.Length}");
                builder.Push(token.Location.StartLine, token.Location.StartCharacter, token.Span.Length, SemanticTokenType.Comment, SemanticTokenModifier.Defaults);
                break;

            case SyntaxKind.NumberToken:
                _logger.LogInformation($"Token: {token.Kind}. Line: {token.Location.StartLine}. Char: {token.Span.Start}. Len: {token.Span.Length}");
                builder.Push(token.Location.StartLine, token.Location.StartCharacter, token.Span.Length, SemanticTokenType.Number, SemanticTokenModifier.Defaults);
                break;

            case SyntaxKind.StringToken:
                _logger.LogInformation($"Token: {token.Kind}. Line: {token.Location.StartLine}. Char: {token.Span.Start}. Len: {token.Span.Length}");
                builder.Push(token.Location.StartLine, token.Location.StartCharacter, token.Span.Length, SemanticTokenType.String, SemanticTokenModifier.Defaults);
                break;

            case SyntaxKind.PlusToken:
            case SyntaxKind.PlusEqualsToken:
            case SyntaxKind.MinusToken:
            case SyntaxKind.MinusEqualsToken:
            case SyntaxKind.StarToken:
            case SyntaxKind.StarEqualsToken:
            case SyntaxKind.SlashToken:
            case SyntaxKind.SlashEqualsToken:
            case SyntaxKind.BangToken:
            case SyntaxKind.EqualsToken:
            case SyntaxKind.TildeToken:
            case SyntaxKind.HatToken:
            case SyntaxKind.HatEqualsToken:
            case SyntaxKind.AmpersandToken:
            case SyntaxKind.AmpersandAmpersandToken:
            case SyntaxKind.AmpersandEqualsToken:
            case SyntaxKind.PipeToken:
            case SyntaxKind.PipeEqualsToken:
            case SyntaxKind.PipePipeToken:
            case SyntaxKind.EqualsEqualsToken:
            case SyntaxKind.BangEqualsToken:
            case SyntaxKind.LessToken:
            case SyntaxKind.LessOrEqualsToken:
            case SyntaxKind.GreaterToken:
            case SyntaxKind.GreaterOrEqualsToken:
                _logger.LogInformation($"Token: {token.Kind}. Line: {token.Location.StartLine}. Char: {token.Span.Start}. Len: {token.Span.Length}");
                builder.Push(token.Location.StartLine, token.Location.StartCharacter, token.Span.Length, SemanticTokenType.Operator, SemanticTokenModifier.Defaults);
                break;

            case SyntaxKind.IdentifierToken:
                _logger.LogInformation($"Token: {token.Kind}. Line: {token.Location.StartLine}. Char: {token.Span.Start}. Len: {token.Span.Length}");
                // TODO: Be more specific
                builder.Push(token.Location.StartLine, token.Location.StartCharacter, token.Span.Length, SemanticTokenType.Member, SemanticTokenModifier.Defaults);
                break;

            case SyntaxKind.BreakKeyword:
            case SyntaxKind.ContinueKeyword:
            case SyntaxKind.ElseKeyword:
            case SyntaxKind.FalseKeyword:
            case SyntaxKind.ForKeyword:
            case SyntaxKind.FunctionKeyword:
            case SyntaxKind.IfKeyword:
            case SyntaxKind.LetKeyword:
            case SyntaxKind.ReturnKeyword:
            case SyntaxKind.ToKeyword:
            case SyntaxKind.TrueKeyword:
            case SyntaxKind.VarKeyword:
            case SyntaxKind.WhileKeyword:
            case SyntaxKind.DoKeyword:
            case SyntaxKind.StructKeyword:
                _logger.LogInformation($"Token: {token.Kind}. Line: {token.Location.StartLine}. Char: {token.Span.Start}. Len: {token.Span.Length}");
                builder.Push(token.Location.StartLine, token.Location.StartCharacter, token.Span.Length, SemanticTokenType.Keyword, SemanticTokenModifier.Defaults);
                break;

            case SyntaxKind.Parameter:
                _logger.LogInformation($"Token: {token.Kind}. Line: {token.Location.StartLine}. Char: {token.Span.Start}. Len: {token.Span.Length}");
                builder.Push(token.Location.StartLine, token.Location.StartCharacter, token.Span.Length, SemanticTokenType.Parameter, SemanticTokenModifier.Defaults);
                break;
            }

            // PERF: Don't do recursion
            foreach (var child in token.GetChildren())
            {
                TokenizeTokenTree(builder, child, cancellationToken);
            }
        }
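
One way to act on the PERF note above is to replace the recursion with an explicit stack, so that deeply nested syntax trees cannot overflow the call stack; a sketch, not part of the original example:

        private void TokenizeTokenTreeIterative(SemanticTokensBuilder builder, SyntaxNode root, CancellationToken cancellationToken)
        {
            var stack = new Stack<SyntaxNode>();
            stack.Push(root);
            while (stack.Count > 0 && !cancellationToken.IsCancellationRequested)
            {
                var node = stack.Pop();
                // ... same switch over node.Kind as in TokenizeTokenTree above ...
                // note: a stack visits children in reverse; push them in reverse
                // order if the original traversal order matters
                foreach (var child in node.GetChildren())
                {
                    stack.Push(child);
                }
            }
        }
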
Example #10
 protected override Task Tokenize(
     SemanticTokensBuilder builder, ITextDocumentIdentifierParams identifier,
     CancellationToken cancellationToken
     ) =>
     _tokenize(builder, identifier, _capability, cancellationToken);
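
This variant simply forwards to a delegate supplied at construction time. A plausible field declaration, assuming _capability is the handler's SemanticTokensCapability (neither declaration is shown in the example):

 private readonly Func<SemanticTokensBuilder, ITextDocumentIdentifierParams, SemanticTokensCapability, CancellationToken, Task> _tokenize;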