/// <summary>
/// Classifies escape sequences inside string-like tokens whose raw kinds this
/// classifier registered for via <c>SyntaxTokenKinds</c>.
/// </summary>
public override void AddClassifications(
    SyntaxToken token,
    SemanticModel semanticModel,
    ClassificationOptions options,
    ArrayBuilder<ClassifiedSpan> result,
    CancellationToken cancellationToken)
{
    // Ignore tokens of kinds this classifier was not registered for.
    if (!SyntaxTokenKinds.Contains(token.RawKind))
    {
        return;
    }

    var chars = _info.VirtualCharService.TryConvertToVirtualChars(token);
    if (chars.IsDefaultOrEmpty)
    {
        return;
    }

    // Fast path: if every virtual char mapped 1:1 to a source character, the
    // token contains no escape sequences at all.
    if (chars.Length == token.Text.Length)
    {
        return;
    }

    foreach (var ch in chars)
    {
        // An escape sequence occupies more than one character of source text.
        if (ch.Span.Length > 1)
        {
            result.Add(new ClassifiedSpan(ClassificationTypeNames.StringEscapeCharacter, ch.Span));
        }
    }
}
/// <summary>
/// Parses a string-literal token as a regex pattern and, when parsing succeeds,
/// walks the resulting tree to emit regex classifications.
/// </summary>
public override void AddClassifications(
    SyntaxToken token,
    SemanticModel semanticModel,
    ClassificationOptions options,
    ArrayBuilder<ClassifiedSpan> result,
    CancellationToken cancellationToken)
{
    // Nothing to do unless this is a string literal and regex colorization is on.
    if (!_info.IsAnyStringLiteral(token.RawKind) || !options.ColorizeRegexPatterns)
    {
        return;
    }

    var tree = RegexLanguageDetector
        .GetOrCreate(semanticModel.Compilation, _info)
        .TryParseString(token, semanticModel, cancellationToken);
    if (tree is null)
    {
        return;
    }

    var walker = s_visitorPool.Allocate();
    try
    {
        walker.Result = result;
        AddClassifications(tree.Root, walker, result);
    }
    finally
    {
        // Detach the builder before returning the visitor to the pool so the
        // pooled instance does not root the result array.
        walker.Result = null;
        s_visitorPool.Free(walker);
    }
}
/// <summary>
/// Classifies escape sequences inside char, string, and interpolated-string text tokens.
/// </summary>
public override void AddClassifications(
    SyntaxToken token,
    SemanticModel semanticModel,
    ClassificationOptions options,
    ArrayBuilder<ClassifiedSpan> result,
    CancellationToken cancellationToken)
{
    // Escapes can only appear in char, string, or interpolated-text tokens.
    var kind = token.RawKind;
    var isTextToken =
        kind == _info.CharLiteralTokenKind ||
        kind == _info.StringLiteralTokenKind ||
        kind == _info.InterpolatedTextTokenKind;
    if (!isTextToken)
    {
        return;
    }

    var chars = _info.VirtualCharService.TryConvertToVirtualChars(token);
    if (chars.IsDefaultOrEmpty)
    {
        return;
    }

    foreach (var ch in chars)
    {
        // A virtual char spanning more than one source character is an escape sequence.
        if (ch.Span.Length > 1)
        {
            result.Add(new ClassifiedSpan(ClassificationTypeNames.StringEscapeCharacter, ch.Span));
        }
    }
}
/// <summary>
/// Initializes the adapter, capturing the find-usages context, the definition item
/// the literal references belong to, and the classification options to use.
/// </summary>
public FindLiteralsProgressAdapter(
    IFindUsagesContext context, DefinitionItem definition, ClassificationOptions classificationOptions)
{
    _context = context;
    _definition = definition;
    _classificationOptions = classificationOptions;
}
/// <summary>
/// Returns the semantic tokens data for a given document with an optional range.
/// </summary>
/// <param name="tokenTypesToIndex">Maps classification type names to LSP token-type indices.</param>
/// <param name="range">When null, tokens are computed for the whole document.</param>
internal static async Task<int[]> ComputeSemanticTokensDataAsync(
    Document document,
    Dictionary<string, int> tokenTypesToIndex,
    LSP.Range? range,
    ClassificationOptions options,
    bool includeSyntacticClassifications,
    CancellationToken cancellationToken)
{
    var root = await document.GetRequiredSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
    var text = await document.GetTextAsync(cancellationToken).ConfigureAwait(false);

    // By default we calculate the tokens for the full document span, although the user
    // can pass in a range if they wish.
    var textSpan = range is null ? root.FullSpan : ProtocolConversions.RangeToTextSpan(range, text);

    // If the full compilation is not yet available, we'll try getting a partial one. It may contain inaccurate
    // results but will speed up how quickly we can respond to the client's request.
    document = document.WithFrozenPartialSemantics(cancellationToken);
    // Propagate the frozen-semantics decision to any cross-process (OOP) operations as well.
    options = options with { ForceFrozenPartialSemanticsForCrossProcessOperations = true };

    var classifiedSpans = await GetClassifiedSpansForDocumentAsync(
        document, textSpan, options, includeSyntacticClassifications, cancellationToken).ConfigureAwait(false);

    // Multi-line tokens are not supported by VS (tracked by https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1265495).
    // Roslyn's classifier however can return multi-line classified spans, so we must break these up into single-line spans.
    var updatedClassifiedSpans = ConvertMultiLineToSingleLineSpans(text, classifiedSpans);

    // TO-DO: We should implement support for streaming if LSP adds support for it:
    // https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1276300
    return (ComputeTokens(text.Lines, updatedClassifiedSpans, tokenTypesToIndex));
}
/// <summary>
/// Classifies '_' discards (designations, patterns, discard parameters, and
/// discard identifier references) as keywords.
/// </summary>
public override void AddClassifications(
    SyntaxNode syntax,
    SemanticModel semanticModel,
    ClassificationOptions options,
    ArrayBuilder<ClassifiedSpan> result,
    CancellationToken cancellationToken)
{
    // In a designation or pattern position, '_' is syntactically guaranteed to be a discard.
    if (syntax.IsKind(SyntaxKind.DiscardDesignation) || syntax.IsKind(SyntaxKind.DiscardPattern))
    {
        result.Add(new ClassifiedSpan(syntax.Span, ClassificationTypeNames.Keyword));
        return;
    }

    if (syntax is ParameterSyntax parameter && parameter.Identifier.Text == "_")
    {
        // A parameter literally named '_' is only a discard if the compiler bound it as one.
        if (semanticModel.GetDeclaredSymbol(parameter, cancellationToken)?.IsDiscard == true)
        {
            result.Add(new ClassifiedSpan(parameter.Identifier.Span, ClassificationTypeNames.Keyword));
        }
    }
    else if (syntax is IdentifierNameSyntax identifierName && identifierName.Identifier.Text == "_")
    {
        // Likewise for a bare '_' reference: classify only when it binds to a discard symbol.
        if (semanticModel.GetSymbolInfo(identifierName, cancellationToken).Symbol?.Kind == SymbolKind.Discard)
        {
            result.Add(new ClassifiedSpan(syntax.Span, ClassificationTypeNames.Keyword));
        }
    }
}
/// <summary>
/// Adds semantic classifications for <paramref name="textSpan"/> into
/// <paramref name="classifiedSpans"/>, consulting the persisted classification cache
/// while the workspace has not yet fully loaded.
/// </summary>
private static async Task AddSemanticClassificationsAsync(
    Document document,
    TextSpan textSpan,
    IClassificationService classificationService,
    ArrayBuilder<ClassifiedSpan> classifiedSpans,
    CancellationToken cancellationToken)
{
    var workspaceStatusService = document.Project.Solution.Workspace.Services.GetRequiredService<IWorkspaceStatusService>();

    // Importantly, we do not await/wait on the fullyLoadedStateTask. We do not want to ever be waiting on work
    // that may end up touching the UI thread (As we can deadlock if GetTagsSynchronous waits on us). Instead,
    // we only check if the Task is completed. Prior to that we will assume we are still loading. Once this
    // task is completed, we know that the WaitUntilFullyLoadedAsync call will have actually finished and we're
    // fully loaded.
    var isFullyLoadedTask = workspaceStatusService.IsFullyLoadedAsync(cancellationToken);
    var isFullyLoaded = isFullyLoadedTask.IsCompleted && isFullyLoadedTask.GetAwaiter().GetResult();

    // If we're not fully loaded try to read from the cache instead so that classifications appear up to date.
    // New code will not be semantically classified, but will eventually when the project fully loads.
    if (await TryAddSemanticClassificationsFromCacheAsync(document, textSpan, classifiedSpans, isFullyLoaded, cancellationToken).ConfigureAwait(false))
    {
        return;
    }

    // Cache miss (or fully loaded): compute classifications for real.
    var options = ClassificationOptions.From(document.Project);
    await classificationService.AddSemanticClassificationsAsync(
        document, textSpan, options, classifiedSpans, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Thin forwarding overload: delegates tag production to
/// <c>ClassificationUtilities.ProduceTagsAsync</c>, supplying the owner's type map.
/// </summary>
private Task ProduceTagsAsync(
    TaggerContext<IClassificationTag> context,
    DocumentSnapshotSpan snapshotSpan,
    IClassificationService classificationService,
    ClassificationOptions options,
    ClassificationType type,
    CancellationToken cancellationToken)
    => ClassificationUtilities.ProduceTagsAsync(
        context, snapshotSpan, classificationService, _owner._typeMap, options, type, cancellationToken);
/// <summary>
/// OOP entry point: computes semantic classifications for <paramref name="span"/> of the
/// given document and returns them in serializable form. When the host reports the solution
/// is fully loaded, also drops the in-memory load-time cache and enqueues the document for
/// full background classification/caching.
/// </summary>
public ValueTask<SerializableClassifiedSpans> GetSemanticClassificationsAsync(
    PinnedSolutionInfo solutionInfo,
    DocumentId documentId,
    TextSpan span,
    ClassificationOptions options,
    StorageDatabase database,
    bool isFullyLoaded,
    CancellationToken cancellationToken)
{
    // NOTE: the lambda parameter intentionally shadows the outer cancellationToken;
    // RunServiceAsync supplies the token the body observes.
    return (RunServiceAsync(async cancellationToken =>
    {
        var solution = await GetSolutionAsync(solutionInfo, cancellationToken).ConfigureAwait(false);
        var document = solution.GetDocument(documentId) ?? await solution.GetSourceGeneratedDocumentAsync(documentId, cancellationToken).ConfigureAwait(false);
        Contract.ThrowIfNull(document);

        using var _ = ArrayBuilder<ClassifiedSpan>.GetInstance(out var temp);
        await AbstractClassificationService.AddSemanticClassificationsInCurrentProcessAsync(
            document, span, options, temp, cancellationToken).ConfigureAwait(false);

        if (isFullyLoaded)
        {
            // Once fully loaded, there's no need for us to keep around any of the data we cached in-memory
            // during the time the solution was loading.
            lock (_cachedData)
                _cachedData.Clear();

            // Enqueue this document into our work queue to fully classify and cache.
            _workQueue.AddWork((document, options, database));
        }

        return SerializableClassifiedSpans.Dehydrate(temp.ToImmutable());
    }, cancellationToken));
}
/// <summary>
/// Computes the LSP semantic-token integer encoding for <paramref name="document"/>,
/// covering the whole file, or only <paramref name="range"/> when one is supplied.
/// </summary>
internal static async Task<int[]> ComputeSemanticTokensDataAsync(
    Document document,
    Dictionary<string, int> tokenTypesToIndex,
    LSP.Range? range,
    ClassificationOptions options,
    bool includeSyntacticClassifications,
    CancellationToken cancellationToken)
{
    var syntaxRoot = await document.GetRequiredSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
    var sourceText = await document.GetTextAsync(cancellationToken).ConfigureAwait(false);

    // A missing range means the caller wants tokens for the entire document.
    var span = range is null
        ? syntaxRoot.FullSpan
        : ProtocolConversions.RangeToTextSpan(range, sourceText);

    var spans = await GetClassifiedSpansForDocumentAsync(
        document, span, options, includeSyntacticClassifications, cancellationToken).ConfigureAwait(false);

    // VS cannot render multi-line semantic tokens (tracked by
    // https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1265495), while Roslyn's
    // classifier can produce multi-line spans — split them into per-line pieces first.
    var singleLineSpans = ConvertMultiLineToSingleLineSpans(sourceText, spans);

    // TO-DO: We should implement support for streaming if LSP adds support for it:
    // https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1276300
    return ComputeTokens(sourceText.Lines, singleLineSpans, tokenTypesToIndex);
}
public override void AddClassifications( SyntaxToken lessThanToken, SemanticModel semanticModel, ClassificationOptions options, ArrayBuilder <ClassifiedSpan> result, CancellationToken cancellationToken) { var syntaxTree = semanticModel.SyntaxTree; if (syntaxTree.IsInPartiallyWrittenGeneric(lessThanToken.Span.End, cancellationToken, out var identifier)) { // IsInPartiallyWrittenGeneric will return true for things that could be // partially generic method calls (as opposed to partially written types). // // For example: X?.Y< // // In this case, this could never be a type, and we do not want to try to // resolve it as such as it can lead to inappropriate classifications. if (CouldBeGenericType(identifier)) { var types = semanticModel.LookupTypeRegardlessOfArity(identifier, cancellationToken); if (types.Any(s_shouldInclude)) { #nullable disable // Can 'GetClassificationForType(types.First()' be null here? result.Add(new ClassifiedSpan(identifier.Span, GetClassificationForType(types.First()))); #nullable enable } } }
/// <summary>
/// Verifies that (a) directly keeping the top-scored peptide per spectrum and
/// (b) first keeping only un-conflicted peptides, binning, then keeping the top peptide
/// within each bin, yield bins with identical spectrum counts when spectra are classified
/// by charge, missed cleavage, modification (S/T/Y), and number of protease termini.
/// </summary>
public void TestSameEngineDifferentParameters()
{
    var co = new ClassificationOptions
    {
        ClassifyByCharge = true,
        ClassifyByMissCleavage = true,
        ClassifyByModification = true,
        ModifiedAminoacids = "STY",
        ClassifyByNumProteaseTermini = true
    };

    // Load two result sets from the same engine run with different parameters and tag
    // each spectrum with its source so the bins can be distinguished later.
    var s1 = new MascotPeptideTextFormat().ReadFromFile(TestContext.CurrentContext.TestDirectory + "/../../../data/deisotopic.peptides");
    IdentifiedSpectrumUtils.RemoveSpectrumWithAmbigiousAssignment(s1);
    s1.ForEach(m => m.Tag = "deisotopic");

    var s2 = new MascotPeptideTextFormat().ReadFromFile(TestContext.CurrentContext.TestDirectory + "/../../../data/deisotopic-top10.peptides");
    IdentifiedSpectrumUtils.RemoveSpectrumWithAmbigiousAssignment(s2);
    s2.ForEach(m => m.Tag = "deisotopic-top");

    var all = s1.Union(s2).ToList();

    // Path 1: keep the top peptide per spectrum, then bin.
    var p1 = new List<IIdentifiedSpectrum>(all);
    IdentifiedSpectrumUtils.KeepTopPeptideFromSameEngineDifferentParameters(p1, new ScoreFunction());
    p1.ForEach(m => m.ClassificationTag = "deisotopic/deisotopic-top");
    var bin1 = co.BuildSpectrumBin(p1);

    // Path 2: drop conflicting assignments, bin, then keep the top peptide in each bin.
    var p2 = new List<IIdentifiedSpectrum>(all);
    IdentifiedSpectrumUtils.KeepUnconflictPeptidesFromSameEngineDifferentParameters(p2, new ScoreFunction());
    p2.ForEach(m => m.ClassificationTag = "deisotopic/deisotopic-top");
    var bin2 = co.BuildSpectrumBin(p2);

    // Both paths must arrive at the same spectrum count per classification bin.
    bin2.ForEach(m =>
    {
        IdentifiedSpectrumUtils.KeepTopPeptideFromSameEngineDifferentParameters(m.Spectra, new ScoreFunction());
        var n = bin1.Find(a => a.Condition.ToString().Equals(m.Condition.ToString()));
        Assert.AreEqual(m.Spectra.Count, n.Spectra.Count);
    });
}
/// <summary>
/// Test helper: runs semantic classification over <paramref name="span"/> of
/// <paramref name="document"/> and returns the classified spans that were produced.
/// </summary>
protected static async Task<ImmutableArray<ClassifiedSpan>> GetSemanticClassificationsAsync(Document document, TextSpan span)
{
    var classificationService = document.GetRequiredLanguageService<IClassificationService>();
    var classificationOptions = ClassificationOptions.From(document.Project);

    using var _ = ArrayBuilder<ClassifiedSpan>.GetInstance(out var spans);
    await classificationService.AddSemanticClassificationsAsync(document, span, classificationOptions, spans, CancellationToken.None);
    return spans.ToImmutable();
}
/// <summary>
/// Dispatches using-directive nodes to <c>ClassifyUsingDirectiveSyntax</c>; all other
/// node kinds are ignored.
/// </summary>
public override void AddClassifications(
    SyntaxNode syntax,
    SemanticModel semanticModel,
    ClassificationOptions options,
    ArrayBuilder<ClassifiedSpan> result,
    CancellationToken cancellationToken)
{
    // Only using-directives are interesting to this classifier.
    if (syntax is not UsingDirectiveSyntax usingDirective)
    {
        return;
    }

    ClassifyUsingDirectiveSyntax(usingDirective, semanticModel, result, cancellationToken);
}
/// <summary>
/// Dispatches name nodes to <c>ClassifyTypeSyntax</c>; all other node kinds are ignored.
/// </summary>
public override void AddClassifications(
    SyntaxNode syntax,
    SemanticModel semanticModel,
    ClassificationOptions options,
    ArrayBuilder<ClassifiedSpan> result,
    CancellationToken cancellationToken)
{
    // Only name syntax can classify as a type reference.
    if (syntax is not NameSyntax name)
    {
        return;
    }

    ClassifyTypeSyntax(name, semanticModel, result, cancellationToken);
}
/// <summary>
/// Bundles the document, classification options, and (optional, possibly-null) host
/// services needed while building an IntelliSense quick-info item.
/// </summary>
public IntellisenseQuickInfoBuilderContext(
    Document document,
    ClassificationOptions classificationOptions,
    IThreadingContext? threadingContext,
    IUIThreadOperationExecutor? operationExecutor,
    IAsynchronousOperationListener? asynchronousOperationListener,
    Lazy<IStreamingFindUsagesPresenter>? streamingPresenter)
{
    Document = document;
    ClassificationOptions = classificationOptions;
    ThreadingContext = threadingContext;
    OperationExecutor = operationExecutor;
    StreamingPresenter = streamingPresenter;
    AsynchronousOperationListener = asynchronousOperationListener;
}
/// <summary>
/// Builds the interactive quick-info content for <paramref name="quickInfoItem"/> and
/// wraps it, together with <paramref name="trackingSpan"/>, in an
/// <c>IntellisenseQuickInfoItem</c>.
/// </summary>
internal static async Task<IntellisenseQuickInfoItem> BuildItemAsync(
    ITrackingSpan trackingSpan,
    CodeAnalysisQuickInfoItem quickInfoItem,
    Document document,
    ClassificationOptions classificationOptions,
    IThreadingContext threadingContext,
    IUIThreadOperationExecutor operationExecutor,
    IAsynchronousOperationListener asyncListener,
    Lazy<IStreamingFindUsagesPresenter> streamingPresenter,
    CancellationToken cancellationToken)
{
    // Package everything the content builder needs into a single context object.
    var builderContext = new IntellisenseQuickInfoBuilderContext(
        document, classificationOptions, threadingContext, operationExecutor, asyncListener, streamingPresenter);

    var interactiveContent = await BuildInteractiveContentAsync(quickInfoItem, builderContext, cancellationToken).ConfigureAwait(false);
    return new IntellisenseQuickInfoItem(trackingSpan, interactiveContent);
}
/// <summary>
/// Captures everything a single embedded-language classification pass needs: the owning
/// service, the model/span to classify, the options, the output builder, a reusable
/// classifier buffer, and the cancellation token.
/// </summary>
public Worker(
    AbstractEmbeddedLanguageClassificationService service,
    SemanticModel semanticModel,
    TextSpan textSpan,
    ClassificationOptions options,
    ArrayBuilder<ClassifiedSpan> result,
    ArrayBuilder<IEmbeddedLanguageClassifier> classifierBuffer,
    CancellationToken cancellationToken)
{
    _service = service;
    _semanticModel = semanticModel;
    _textSpan = textSpan;
    _options = options;
    _result = result;
    _classifierBuffer = classifierBuffer;
    _cancellationToken = cancellationToken;
}
/// <summary>
/// Collects the classified spans for <paramref name="textSpan"/>, optionally including
/// syntactic classifications (needed for generated Razor documents), and returns them
/// sorted by position.
/// </summary>
private static async Task<ClassifiedSpan[]> GetClassifiedSpansForDocumentAsync(
    Document document,
    TextSpan textSpan,
    ClassificationOptions options,
    bool includeSyntacticClassifications,
    CancellationToken cancellationToken)
{
    var classificationService = document.GetRequiredLanguageService<IClassificationService>();
    using var _ = ArrayBuilder<ClassifiedSpan>.GetInstance(out var classifiedSpans);

    // Case 1 - Generated Razor documents:
    //     In Razor, the C# syntax classifier does not run on the client. This means we need to return both
    //     syntactic and semantic classifications.
    // Case 2 - C# and VB documents:
    //     In C#/VB, the syntax classifier runs on the client. This means we only need to return semantic
    //     classifications.
    //
    // Ideally, Razor will eventually run the classifier on their end so we can get rid of this special
    // casing: https://github.com/dotnet/razor-tooling/issues/5850
    if (includeSyntacticClassifications)
    {
        // `removeAdditiveSpans` will remove token modifiers such as 'static', which we want to include in LSP.
        // `fillInClassifiedSpanGaps` includes whitespace in the results, which we don't care about in LSP.
        // Therefore, we set both optional parameters to false.
        var spans = await ClassifierHelper.GetClassifiedSpansAsync(
            document, textSpan, options, cancellationToken, removeAdditiveSpans: false, fillInClassifiedSpanGaps: false).ConfigureAwait(false);

        // The spans returned to us may include some empty spans, which we don't care about.
        var nonEmptySpans = spans.Where(s => !s.TextSpan.IsEmpty);
        classifiedSpans.AddRange(nonEmptySpans);
    }
    else
    {
        await classificationService.AddSemanticClassificationsAsync(
            document, textSpan, options, classifiedSpans, cancellationToken).ConfigureAwait(false);
        await classificationService.AddEmbeddedLanguageClassificationsAsync(
            document, textSpan, options, classifiedSpans, cancellationToken).ConfigureAwait(false);
    }

    // Classified spans are not guaranteed to be returned in a certain order so we sort them to be safe.
    classifiedSpans.Sort(ClassifiedSpanComparer.Instance);
    return (classifiedSpans.ToArray());
}
/// <summary>
/// Classifies uses of user-defined operators with the 'operator overloaded'
/// classification on the operator token's span.
/// </summary>
public override void AddClassifications(
    SyntaxNode syntax,
    SemanticModel semanticModel,
    ClassificationOptions options,
    ArrayBuilder<ClassifiedSpan> result,
    CancellationToken cancellationToken)
{
    // Only nodes that bind to a user-defined operator method are interesting.
    if (semanticModel.GetSymbolInfo(syntax, cancellationToken).Symbol
            is not IMethodSymbol { MethodKind: MethodKind.UserDefinedOperator })
    {
        return;
    }

    var operatorSpan = GetOperatorTokenSpan(syntax);
    if (operatorSpan.IsEmpty)
    {
        return;
    }

    result.Add(new ClassifiedSpan(operatorSpan, ClassificationTypeNames.OperatorOverloaded));
}
/// <summary>
/// Produces semantic classification tags for <paramref name="spanToTag"/>, using frozen
/// partial semantics for responsiveness and preferring the containing-member fast path
/// before falling back to classifying the full requested span.
/// </summary>
public static async Task ProduceTagsAsync(
    TaggerContext<IClassificationTag> context,
    DocumentSnapshotSpan spanToTag,
    IClassificationService classificationService,
    ClassificationTypeMap typeMap,
    CancellationToken cancellationToken)
{
    var document = spanToTag.Document;
    if (document == null)
    {
        return;
    }

    var options = ClassificationOptions.From(document.Project);

    // Don't block getting classifications on building the full compilation. This may take a significant amount
    // of time and can cause a very latency sensitive operation (copying) to block the user while we wait on this
    // work to happen.
    //
    // It's also a better experience to get classifications to the user faster versus waiting a potentially
    // large amount of time waiting for all the compilation information to be built. For example, we can
    // classify types that we've parsed in other files, or partially loaded from metadata, even if we're still
    // parsing/loading. For cross language projects, this also produces semantic classifications more quickly
    // as we do not have to wait on skeletons to be built.
    document = document.WithFrozenPartialSemantics(cancellationToken);
    spanToTag = new DocumentSnapshotSpan(document, spanToTag.SnapshotSpan);

    // Fast path: classify just the containing member when possible.
    var classified = await TryClassifyContainingMemberSpanAsync(
        context, spanToTag, classificationService, typeMap, options, cancellationToken).ConfigureAwait(false);
    if (classified)
    {
        return;
    }

    // We weren't able to use our specialized codepaths for semantic classifying.
    // Fall back to classifying the full span that was asked for.
    await ClassifySpansAsync(
        context, spanToTag, classificationService, typeMap, options, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Classifies the full requested snapshot span, converts each classified span into a tag
/// on the tagger context, and records the dependent semantic version as the context state.
/// </summary>
private static async Task ClassifySpansAsync(
    TaggerContext<IClassificationTag> context,
    DocumentSnapshotSpan spanToTag,
    IClassificationService classificationService,
    ClassificationTypeMap typeMap,
    ClassificationOptions options,
    CancellationToken cancellationToken)
{
    try
    {
        var document = spanToTag.Document;
        var snapshotSpan = spanToTag.SnapshotSpan;
        var snapshot = snapshotSpan.Snapshot;

        using (Logger.LogBlock(FunctionId.Tagger_SemanticClassification_TagProducer_ProduceTags, cancellationToken))
        {
            using var _ = ArrayBuilder<ClassifiedSpan>.GetInstance(out var classifiedSpans);
            await classificationService.AddSemanticClassificationsAsync(
                document, snapshotSpan.Span.ToTextSpan(), options, classifiedSpans, cancellationToken).ConfigureAwait(false);

            foreach (var span in classifiedSpans)
                context.AddTag(ClassificationUtilities.Convert(typeMap, snapshotSpan.Snapshot, span));

            var version = await document.Project.GetDependentSemanticVersionAsync(cancellationToken).ConfigureAwait(false);

            // Let the context know that this was the span we actually tried to tag.
            context.SetSpansTagged(SpecializedCollections.SingletonEnumerable(spanToTag));
            context.State = version;
        }
    }
    catch (Exception e) when (FatalError.ReportAndPropagateUnlessCanceled(e, cancellationToken))
    {
        // The filter reports non-cancellation failures; rethrow to preserve fail-fast behavior.
        throw ExceptionUtilities.Unreachable;
    }
}
/// <summary>
/// Parses a string-literal token as a regex pattern (after cheap syntactic pre-filtering)
/// and walks the parsed tree to emit regex classifications.
/// </summary>
public override void AddClassifications(
    SyntaxToken token,
    SemanticModel semanticModel,
    ClassificationOptions options,
    ArrayBuilder<ClassifiedSpan> result,
    CancellationToken cancellationToken)
{
    // Only string literals can hold regex patterns, and only when the feature is enabled.
    if (_info.StringLiteralTokenKind != token.RawKind || !options.ColorizeRegexPatterns)
    {
        return;
    }

    // Do some quick syntactic checks before doing any complex work.
    if (!RegexPatternDetector.IsPossiblyPatternToken(token, _info.SyntaxFacts))
    {
        return;
    }

    var patternTree = RegexPatternDetector
        .TryGetOrCreate(semanticModel.Compilation, _info)
        ?.TryParseRegexPattern(token, semanticModel, cancellationToken);
    if (patternTree is null)
    {
        return;
    }

    var walker = s_visitorPool.Allocate();
    try
    {
        walker.Result = result;
        AddClassifications(patternTree.Root, walker, result);
    }
    finally
    {
        // Detach the result builder before returning the visitor to the pool.
        walker.Result = null;
        s_visitorPool.Free(walker);
    }
}
/// <summary>
/// Round-trips <c>ClassificationOptions</c> through an XML element and verifies every
/// persisted setting survives the save/load cycle.
/// </summary>
public void Test()
{
    var original = new ClassificationOptions()
    {
        ClassifyByCharge = true,
        ClassifyByMissCleavage = true,
        ClassifyByModification = true,
        ModifiedAminoacids = "STY"
    };

    var root = new XElement("Root");
    original.Save(root);

    var restored = new ClassificationOptions();
    restored.Load(root);

    Assert.AreEqual(original.ClassifyByCharge, restored.ClassifyByCharge);
    Assert.AreEqual(original.ClassifyByMissCleavage, restored.ClassifyByMissCleavage);
    Assert.AreEqual(original.ClassifyByModification, restored.ClassifyByModification);
    Assert.AreEqual(original.ModifiedAminoacids, restored.ModifiedAminoacids);
}
/// <summary>
/// Builds a <c>ValueTrackedItem</c> for <paramref name="symbol"/> at
/// <paramref name="textSpan"/>, taking the display text from the document excerpt service
/// when available and otherwise classifying the document span directly.
/// </summary>
public static async Task<ValueTrackedItem?> TryCreateAsync(Document document, TextSpan textSpan, ISymbol symbol, ValueTrackedItem? parent = null, CancellationToken cancellationToken = default)
{
    var excerptService = document.Services.GetService<IDocumentExcerptService>();
    SourceText? sourceText = null;
    ImmutableArray<ClassifiedSpan> classifiedSpans = default;

    if (excerptService != null)
    {
        var result = await excerptService.TryExcerptAsync(document, textSpan, ExcerptMode.SingleLine, cancellationToken).ConfigureAwait(false);
        if (result.HasValue)
        {
            // NOTE(review): only Content is captured here, so classifiedSpans stays
            // 'default' on this path — confirm whether the excerpt's own classified
            // spans were meant to be used as well.
            var value = result.Value;
            sourceText = value.Content;
        }
    }

    if (sourceText is null)
    {
        // No excerpt available: classify the span ourselves.
        var options = ClassificationOptions.From(document.Project);
        var documentSpan = await ClassifiedSpansAndHighlightSpanFactory.GetClassifiedDocumentSpanAsync(document, textSpan, options, cancellationToken).ConfigureAwait(false);
        var classificationResult = await ClassifiedSpansAndHighlightSpanFactory.ClassifyAsync(documentSpan, options, cancellationToken).ConfigureAwait(false);
        classifiedSpans = classificationResult.ClassifiedSpans;
        var syntaxTree = await document.GetRequiredSyntaxTreeAsync(cancellationToken).ConfigureAwait(false);
        sourceText = await syntaxTree.GetTextAsync(cancellationToken).ConfigureAwait(false);
    }

    return (new ValueTrackedItem(
        SymbolKey.Create(symbol, cancellationToken), sourceText, classifiedSpans, textSpan, document.Id, symbol.GetGlyph(), parent: parent));
}
/// <summary>
/// OOP entry point: computes classifications of the requested <paramref name="type"/> for
/// <paramref name="span"/> and returns them in serializable form. Honors the
/// frozen-partial-semantics option and, once the solution is fully loaded, clears the
/// load-time cache and enqueues the document for full background classification.
/// </summary>
public ValueTask<SerializableClassifiedSpans> GetClassificationsAsync(
    Checksum solutionChecksum,
    DocumentId documentId,
    TextSpan span,
    ClassificationType type,
    ClassificationOptions options,
    bool isFullyLoaded,
    CancellationToken cancellationToken)
{
    return (RunServiceAsync(solutionChecksum, async solution =>
    {
        var document = solution.GetDocument(documentId) ?? await solution.GetSourceGeneratedDocumentAsync(documentId, cancellationToken).ConfigureAwait(false);
        Contract.ThrowIfNull(document);

        if (options.ForceFrozenPartialSemanticsForCrossProcessOperations)
        {
            // Frozen partial semantics is not automatically passed to OOP, so enable it explicitly when desired
            document = document.WithFrozenPartialSemantics(cancellationToken);
        }

        using var _ = ArrayBuilder<ClassifiedSpan>.GetInstance(out var temp);
        await AbstractClassificationService.AddClassificationsInCurrentProcessAsync(
            document, span, type, options, temp, cancellationToken).ConfigureAwait(false);

        if (isFullyLoaded)
        {
            // Once fully loaded, there's no need for us to keep around any of the data we cached in-memory
            // during the time the solution was loading.
            lock (_cachedData)
                _cachedData.Clear();

            // Enqueue this document into our work queue to fully classify and cache.
            _workQueue.AddWork((document, type, options));
        }

        return SerializableClassifiedSpans.Dehydrate(temp.ToImmutable());
    }, cancellationToken));
}
/// <summary>
/// Test helper: runs the default syntax classifiers' semantic classification pass over
/// <paramref name="span"/> and returns the resulting classified spans.
/// </summary>
protected static async Task<ImmutableArray<ClassifiedSpan>> GetSemanticClassificationsAsync(Document document, TextSpan span)
{
    var tree = await document.GetSyntaxTreeAsync();

    var service = document.GetLanguageService<ISyntaxClassificationService>();
    var classifiers = service.GetDefaultSyntaxClassifiers();
    var extensionManager = document.Project.Solution.Workspace.Services.GetService<IExtensionManager>();

    var results = ArrayBuilder<ClassifiedSpan>.GetInstance();
    var options = ClassificationOptions.From(document.Project);

    // Run all default classifiers, dispatched by node type and token kind.
    await service.AddSemanticClassificationsAsync(
        document, span, options,
        extensionManager.CreateNodeExtensionGetter(classifiers, c => c.SyntaxNodeTypes),
        extensionManager.CreateTokenExtensionGetter(classifiers, c => c.SyntaxTokenKinds),
        results,
        CancellationToken.None);

    return (results.ToImmutableAndFree());
}
/// <summary>
/// Runs the requested classification pass (semantic or embedded-language) over the
/// snapshot span, appending results to <paramref name="classifiedSpans"/>.
/// </summary>
private static async Task AddClassificationsAsync(
    IClassificationService classificationService,
    ClassificationOptions options,
    Document document,
    SnapshotSpan snapshotSpan,
    ArrayBuilder<ClassifiedSpan> classifiedSpans,
    ClassificationType type,
    CancellationToken cancellationToken)
{
    // Dispatch on the requested pass; any other value is a caller bug.
    switch (type)
    {
        case ClassificationType.Semantic:
            await classificationService.AddSemanticClassificationsAsync(
                document, snapshotSpan.Span.ToTextSpan(), options, classifiedSpans, cancellationToken).ConfigureAwait(false);
            break;

        case ClassificationType.EmbeddedLanguage:
            await classificationService.AddEmbeddedLanguageClassificationsAsync(
                document, snapshotSpan.Span.ToTextSpan(), options, classifiedSpans, cancellationToken).ConfigureAwait(false);
            break;

        default:
            throw ExceptionUtilities.UnexpectedValue(type);
    }
}
/// <summary>
/// Returns the semantic tokens data for a given document with an optional range.
/// </summary>
/// <returns>
/// The token data, plus <c>isFinalized</c>: true when the frozen semantic model matched the
/// project's fully-built compilation (i.e. the results are not partial).
/// </returns>
internal static async Task<(int[], bool isFinalized)> ComputeSemanticTokensDataAsync(
    Document document,
    Dictionary<string, int> tokenTypesToIndex,
    LSP.Range? range,
    CancellationToken cancellationToken)
{
    var root = await document.GetRequiredSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
    var text = await document.GetTextAsync(cancellationToken).ConfigureAwait(false);

    // By default we calculate the tokens for the full document span, although the user
    // can pass in a range if they wish.
    var textSpan = range is null ? root.FullSpan : ProtocolConversions.RangeToTextSpan(range, text);

    // If the full compilation is not yet available, we'll try getting a partial one. It may contain inaccurate
    // results but will speed up how quickly we can respond to the client's request.
    var frozenDocument = document.WithFrozenPartialSemantics(cancellationToken);
    var semanticModel = await frozenDocument.GetRequiredSemanticModelAsync(cancellationToken).ConfigureAwait(false);
    // Results are "finalized" only if the frozen model is backed by the real, fully-built compilation.
    var isFinalized = document.Project.TryGetCompilation(out var compilation) && compilation == semanticModel.Compilation;
    document = frozenDocument;

    var options = ClassificationOptions.From(document.Project);
    var classifiedSpans = Classifier.GetClassifiedSpans(document.Project.Solution.Workspace.Services, semanticModel, textSpan, options, cancellationToken);
    Contract.ThrowIfNull(classifiedSpans, "classifiedSpans is null");

    // Multi-line tokens are not supported by VS (tracked by https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1265495).
    // Roslyn's classifier however can return multi-line classified spans, so we must break these up into single-line spans.
    var updatedClassifiedSpans = ConvertMultiLineToSingleLineSpans(text, classifiedSpans.ToArray());

    // TO-DO: We should implement support for streaming if LSP adds support for it:
    // https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1276300
    return (ComputeTokens(text.Lines, updatedClassifiedSpans, tokenTypesToIndex), isFinalized);
}
/// <summary>
/// Verifies that (a) directly keeping the top-scored peptide per spectrum and
/// (b) first keeping only un-conflicted peptides, binning, then keeping the top peptide
/// within each bin, yield bins with identical spectrum counts when spectra are classified
/// by charge, missed cleavage, modification (S/T/Y), and number of protease termini.
/// </summary>
public void TestSameEngineDifferentParameters()
{
    var co = new ClassificationOptions
    {
        ClassifyByCharge = true,
        ClassifyByMissCleavage = true,
        ClassifyByModification = true,
        ModifiedAminoacids = "STY",
        ClassifyByNumProteaseTermini = true
    };

    // Resolve data files relative to the test assembly rather than the current working
    // directory, so the test does not depend on where the runner was launched from.
    var s1 = new MascotPeptideTextFormat().ReadFromFile(TestContext.CurrentContext.TestDirectory + "/../../../data/deisotopic.peptides");
    IdentifiedSpectrumUtils.RemoveSpectrumWithAmbigiousAssignment(s1);
    s1.ForEach(m => m.Tag = "deisotopic");

    var s2 = new MascotPeptideTextFormat().ReadFromFile(TestContext.CurrentContext.TestDirectory + "/../../../data/deisotopic-top10.peptides");
    IdentifiedSpectrumUtils.RemoveSpectrumWithAmbigiousAssignment(s2);
    s2.ForEach(m => m.Tag = "deisotopic-top");

    var all = s1.Union(s2).ToList();

    // Path 1: keep the top peptide per spectrum, then bin.
    var p1 = new List<IIdentifiedSpectrum>(all);
    IdentifiedSpectrumUtils.KeepTopPeptideFromSameEngineDifferentParameters(p1, new ScoreFunction());
    p1.ForEach(m => m.ClassificationTag = "deisotopic/deisotopic-top");
    var bin1 = co.BuildSpectrumBin(p1);

    // Path 2: drop conflicting assignments, bin, then keep the top peptide in each bin.
    var p2 = new List<IIdentifiedSpectrum>(all);
    IdentifiedSpectrumUtils.KeepUnconflictPeptidesFromSameEngineDifferentParameters(p2, new ScoreFunction());
    p2.ForEach(m => m.ClassificationTag = "deisotopic/deisotopic-top");
    var bin2 = co.BuildSpectrumBin(p2);

    // Both paths must arrive at the same spectrum count per classification bin.
    bin2.ForEach(m =>
    {
        IdentifiedSpectrumUtils.KeepTopPeptideFromSameEngineDifferentParameters(m.Spectra, new ScoreFunction());
        var n = bin1.Find(a => a.Condition.ToString().Equals(m.Condition.ToString()));
        Assert.AreEqual(m.Spectra.Count, n.Spectra.Count);
    });
}
/// <summary>
/// Default no-op token overload: derived classifiers override this to add classified
/// spans for the given token. The base implementation intentionally does nothing.
/// </summary>
public virtual void AddClassifications(SyntaxToken syntax, SemanticModel semanticModel, ClassificationOptions options, ArrayBuilder<ClassifiedSpan> result, CancellationToken cancellationToken)
{
}