private void Rescan(ITokenizerResult tokenizerResult, RescanReason rescanReason, CancellationToken cancellationToken)
{
    // Skip snapshots that already have an analysis request in flight
    if (_resultsRequests.ContainsKey(tokenizerResult.Snapshot))
    {
        return;
    }

    _resultsRequests.Add(tokenizerResult.Snapshot, RunAnalysisAsync(tokenizerResult, rescanReason, cancellationToken));
}
private void TokenizerUpdated(ITokenizerResult result, CancellationToken ct)
{
    var tokens = result.UpdatedTokens;
    if (!tokens.Any())
    {
        return;
    }

    _currentResult = result;

    // Raise TagsChanged for the span covering all updated tokens
    var start = tokens.First().GetStart(result.Snapshot);
    var end = tokens.Last().GetEnd(result.Snapshot);
    TagsChanged?.Invoke(this, new SnapshotSpanEventArgs(new SnapshotSpan(result.Snapshot, new Span(start, end - start))));
}
private async Task<IAnalysisResult> RunParserAsync(ITokenizerResult tokenizerResult, RescanReason reason, CancellationToken cancellationToken)
{
    // TODO: for the future "GoTo include" feature
    var includes = new List<IDocument>();
    try
    {
        var parserResult = await _parser.RunAsync(_document, tokenizerResult.Snapshot, tokenizerResult.Tokens, cancellationToken);
        var analysisResult = new AnalysisResult(_document, parserResult, includes, tokenizerResult.Snapshot);

        CurrentResult = analysisResult;
        AnalysisUpdated?.Invoke(analysisResult, reason, cancellationToken);
        return analysisResult;
    }
    catch (AggregateException)
    {
        // The tokenizer changed but PLINQ has not checked the CancellationToken yet
        throw new OperationCanceledException();
    }
}
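// Hedged usage sketch (not from the original source): a consumer subscribing to the
// AnalysisUpdated event raised in RunParserAsync above. The delegate shape
// (result, reason, cancellation token) is inferred from the Invoke call; the
// "analyzer" instance and the RefreshTags handler are hypothetical.
analyzer.AnalysisUpdated += (analysisResult, reason, cancellationToken) =>
{
    if (reason == RescanReason.ContentChanged)
        RefreshTags(analysisResult, cancellationToken); // hypothetical consumer callback
};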
private static string TryGetDescriptionAbove(IDocumentTokenizer tokenizer, ITokenizerResult tokenizerResult, SnapshotPoint tokenEnd)
{
    var lines = new LinkedList<string>();
    var currentLineNumber = tokenEnd.GetContainingLine().LineNumber - 1;
    var snapshot = tokenEnd.Snapshot;

    // Walk upwards, collecting lines that consist of a single comment token
    while (currentLineNumber >= 0)
    {
        var currentLine = snapshot.GetLineFromLineNumber(currentLineNumber);
        var currentLineComment = tokenizerResult
            .GetTokens(new Span(currentLine.Start, currentLine.Length))
            .ToList();

        // If there are some other tokens, then the comment block has ended
        if (currentLineComment.Any(t => !tokenizer.IsTypeOf(t.Type, RadAsmTokenType.Comment)
                                     && !tokenizer.IsTypeOf(t.Type, RadAsmTokenType.Whitespace)))
        {
            break;
        }

        currentLineComment.RemoveAll(t => tokenizer.IsTypeOf(t.Type, RadAsmTokenType.Whitespace));
        if (currentLineComment.Count != 1)
        {
            break;
        }

        var trackingToken = currentLineComment.First();
        var tokenSpan = new SnapshotSpan(snapshot, trackingToken.GetSpan(snapshot));
        var tokenText = GetCommentText(tokenSpan.GetText());

        lines.AddFirst(tokenText);
        // A multi-line comment token may begin above the current line
        currentLineNumber = tokenSpan.Start.GetContainingLine().LineNumber - 1;
    }

    return lines.Count != 0 ? string.Join(System.Environment.NewLine, lines) : null;
}
private Task<IAnalysisResult> RunAnalysisAsync(ITokenizerResult tokenizerResult, RescanReason reason, CancellationToken cancellationToken) =>
    Task.Run(() => RunParserAsync(tokenizerResult, reason, cancellationToken), cancellationToken);
private void ForceRescan(ITokenizerResult tokenizerResult, RescanReason rescanReason, CancellationToken cancellationToken)
{
    // Drop any cached request for this snapshot and start a fresh analysis
    _resultsRequests.Remove(tokenizerResult.Snapshot);
    _resultsRequests.Add(tokenizerResult.Snapshot, RunAnalysisAsync(tokenizerResult, rescanReason, cancellationToken));
}
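// Hedged sketch of the state assumed by Rescan/ForceRescan above: analysis tasks are
// cached per text snapshot so repeated requests for the same snapshot reuse the
// in-flight task. The field type and the TryGetRequest helper are assumptions for
// illustration, not taken from the original source.
private readonly Dictionary<ITextSnapshot, Task<IAnalysisResult>> _resultsRequests =
    new Dictionary<ITextSnapshot, Task<IAnalysisResult>>();

private Task<IAnalysisResult> TryGetRequest(ITextSnapshot snapshot) =>
    _resultsRequests.TryGetValue(snapshot, out var request) ? request : null;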
private void TokenizerUpdated(ITokenizerResult tokenizerResult, CancellationToken cancellationToken) =>
    Rescan(tokenizerResult, RescanReason.ContentChanged, cancellationToken);
private static string TryGetDescriptionToTheRight(IDocumentTokenizer tokenizer, ITokenizerResult tokenizerResult, SnapshotPoint tokenEnd)
{
    var currentLine = tokenEnd.GetContainingLine();

    // Comment tokens between the end of the token and the end of its line
    var currentLineComment = tokenizerResult
        .GetTokens(new Span(tokenEnd, currentLine.End - tokenEnd))
        .Where(t => tokenizer.IsTypeOf(t.Type, RadAsmTokenType.Comment))
        .ToList();

    if (currentLineComment.Count == 0)
    {
        return null;
    }

    var text = currentLineComment
        .First()
        .GetText(tokenEnd.Snapshot);

    return GetCommentText(text);
}
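// Hedged usage sketch: resolving a token's documentation by preferring a trailing
// comment on the same line and falling back to the comment block above it.
// GetDescription is a hypothetical wrapper around the two helpers shown above, not
// part of the original source.
private static string GetDescription(IDocumentTokenizer tokenizer, ITokenizerResult tokenizerResult, SnapshotPoint tokenEnd) =>
    TryGetDescriptionToTheRight(tokenizer, tokenizerResult, tokenEnd)
        ?? TryGetDescriptionAbove(tokenizer, tokenizerResult, tokenEnd);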