/// <summary>
/// Collects the spans that must be reclassified (redrawn) after the given text changes.
/// Multi-line constructs (verbatim strings, recursive strings, multi-line comments) are
/// redrawn in full; a touched preprocessor directive invalidates everything from its
/// start to the end of the snapshot, since it can alter the meaning of all text after it.
/// </summary>
private static List<Span> SearchClassificationChanges(ClassificationParseResult parseResult, IEnumerable<Span> changes)
{
    var spansToRedraw = new List<Span>();
    foreach (var change in changes)
    {
        foreach (var info in ParseUtil.GetTokenSpans(parseResult, change))
        {
            if (info.Type == SpanType.VerbatimString
                || info.Type == SpanType.RecursiveString
                || info.Type == SpanType.MultiLineComment)
            {
                // The whole multi-line token needs redrawing, not just the edited part.
                spansToRedraw.Add(info.Span);
            }
            else if (info.Type == SpanType.PreprocessorKeyword)
            {
                // A directive changes classification of everything that follows it.
                spansToRedraw.Add(new Span(info.Span.Start, parseResult.Snapshot.Length - info.Span.Start));
            }
        }
    }
    return spansToRedraw;
}
/// <summary>
/// Returns classification spans for the requested snapshot span, (re)parsing the
/// buffer on demand and raising ClassificationChanged for any multi-line constructs
/// affected by text changes recorded since the last call.
/// </summary>
public IList<ClassificationSpan> GetClassificationSpans(SnapshotSpan span)
{
    // Snapshot the field once: TextBuffer_Changed resets _lastParseResult to null on
    // every buffer change, so all later reads must go through this local.
    var parseResult = _lastParseResult;
    if (parseResult == null)
    {
        if (ParseUtil.TryParse(_textBuffer, out parseResult))
            _lastParseResult = parseResult;
        else
            return ClassifierUtils.EmptyClassifications;
    }

    var textChanges = _lastTextChanges;
    if (textChanges != null)
    {
        _lastTextChanges = null;
        var handler = ClassificationChanged;
        if (handler != null)
        {
            foreach (var s in SearchClassificationChanges(parseResult, textChanges.Select(c => c.NewSpan)))
            {
                // Only notify for spans not already covered by the requested span.
                // NOTE(review): the containment test uses a strict '<' on the start
                // bound but '<=' on the end bound — looks asymmetric; confirm intent.
                if (!(span.Span.Start < s.Start && s.End <= span.Span.End))
                    handler(this, new ClassificationChangedEventArgs(new SnapshotSpan(parseResult.Snapshot, s)));
            }
        }
    }

    // BUG FIX: use the local parseResult instead of re-reading _lastParseResult.
    // The ClassificationChanged handler above can reenter TextBuffer_Changed, which
    // nulls the field — re-reading it here risked a NullReferenceException.
    var tokenSpans = ParseUtil.GetTokenSpans(parseResult, span.Span);
    var result = new ClassificationSpan[tokenSpans.Count];
    for (var i = 0; i < tokenSpans.Count; ++i)
    {
        var spanInfo = tokenSpans[i];
        result[i] = new ClassificationSpan(new SnapshotSpan(parseResult.Snapshot, spanInfo.Span), _classificationTypes[(int)spanInfo.Type]);
    }
    return result;
}
/// <summary>
/// Builds the ordered list of classification spans intersecting <paramref name="span"/>:
/// comments (with TODO/BUG/HACK marker text highlighted separately), preprocessor
/// directives, and finally the regular tokens via WalkTokens.
/// </summary>
public static List<SyntaxClassifier.SpanInfo> GetTokenSpans(ClassificationParseResult parseResult, Span span)
{
    var result = new List<SyntaxClassifier.SpanInfo>(16);

    // Comments come sorted by position; stop once we are past the requested span.
    foreach (var comment in parseResult.Comments)
    {
        if (comment.Position > span.End)
            break;

        var commentSpan = new Span(comment.Position, comment.Length);
        if (!span.IntersectsWith(commentSpan))
            continue;

        var commentKind = comment.IsMultiline
            ? SyntaxClassifier.SpanType.MultiLineComment
            : SyntaxClassifier.SpanType.SingleLineComment;
        InsertClassification(result, new SyntaxClassifier.SpanInfo(commentSpan, commentKind));

        if (comment.Type == CommentType.Normal)
            continue;

        // Highlight the TODO/BUG/HACK text inside the comment with its own span type.
        SyntaxClassifier.SpanType textKind;
        if (comment.Type == CommentType.ToDo)
            textKind = SyntaxClassifier.SpanType.ToDoCommentText;
        else if (comment.Type == CommentType.Bug)
            textKind = SyntaxClassifier.SpanType.BugCommentText;
        else if (comment.Type == CommentType.Hack)
            textKind = SyntaxClassifier.SpanType.HackCommentText;
        else
        {
            textKind = default(SyntaxClassifier.SpanType);
            Trace.Assert(false);
        }
        InsertClassification(result, new SyntaxClassifier.SpanInfo(new Span(comment.TextPosition, comment.TextLength), textKind));
    }

    // Directives are likewise sorted by position.
    foreach (var directive in parseResult.Directives)
    {
        if (directive.Position > span.End)
            break;

        var directiveSpan = new Span(directive.Position, directive.Length);
        if (span.IntersectsWith(directiveSpan))
            InsertClassification(result, new SyntaxClassifier.SpanInfo(directiveSpan, SyntaxClassifier.SpanType.PreprocessorKeyword));
    }

    List<Span> splices = null; // splice collection is not needed by this caller
    WalkTokens(parseResult.Tokens, parseResult.Snapshot, span, result, false, ref splices);
    return result;
}
/// <summary>
/// Returns classification spans for the requested snapshot span, (re)parsing the
/// buffer on demand and raising ClassificationChanged for any multi-line constructs
/// affected by text changes recorded since the last call.
/// </summary>
public IList<ClassificationSpan> GetClassificationSpans(SnapshotSpan span)
{
    // Read the cached parse result once; TextBuffer_Changed nulls the field on
    // every buffer change, so every later use must go through this local copy.
    var parseResult = _lastParseResult;
    if (parseResult == null)
    {
        if (ParseUtil.TryParse(_textBuffer, out parseResult))
        {
            _lastParseResult = parseResult;
        }
        else
        {
            return ClassifierUtils.EmptyClassifications;
        }
    }

    var textChanges = _lastTextChanges;
    if (textChanges != null)
    {
        _lastTextChanges = null;
        var handler = ClassificationChanged;
        if (handler != null)
        {
            foreach (var s in SearchClassificationChanges(parseResult, textChanges.Select(c => c.NewSpan)))
            {
                // Notify only for spans not already covered by the requested span.
                // NOTE(review): strict '<' on the start bound vs. '<=' on the end
                // bound looks asymmetric for a containment check — confirm intent.
                if (!(span.Span.Start < s.Start && s.End <= span.Span.End))
                {
                    handler(this, new ClassificationChangedEventArgs(new SnapshotSpan(parseResult.Snapshot, s)));
                }
            }
        }
    }

    // BUG FIX: use the local parseResult rather than re-reading _lastParseResult.
    // Raising ClassificationChanged above can reenter TextBuffer_Changed, which
    // nulls the field — re-reading it here risked a NullReferenceException.
    var tokenSpans = ParseUtil.GetTokenSpans(parseResult, span.Span);
    var result = new ClassificationSpan[tokenSpans.Count];
    for (var i = 0; i < tokenSpans.Count; ++i)
    {
        var spanInfo = tokenSpans[i];
        result[i] = new ClassificationSpan(new SnapshotSpan(parseResult.Snapshot, spanInfo.Span), _classificationTypes[(int)spanInfo.Type]);
    }
    return result;
}
/// <summary>
/// Buffer-change handler: raises ClassificationChanged for multi-line constructs
/// touched by the edit, then invalidates the cached parse result and stashes the
/// change set for the next GetClassificationSpans call.
/// </summary>
private void TextBuffer_Changed(object sender, TextContentChangedEventArgs e)
{
    var cachedResult = _lastParseResult;
    if (cachedResult != null)
    {
        var changedHandler = ClassificationChanged;
        if (changedHandler != null)
        {
            // Use the OLD spans — they are positions in the snapshot we parsed.
            var oldSpans = e.Changes.Select(c => c.OldSpan);
            foreach (var redrawSpan in SearchClassificationChanges(cachedResult, oldSpans))
            {
                changedHandler(this, new ClassificationChangedEventArgs(new SnapshotSpan(cachedResult.Snapshot, redrawSpan)));
            }
        }
    }
    _lastParseResult = null;
    _lastTextChanges = e.Changes;
}
/// <summary>
/// Builds the ordered list of classification spans intersecting <paramref name="span"/>:
/// comments (with TODO/BUG/HACK marker text highlighted separately), preprocessor
/// directives, and finally the regular tokens via WalkTokens.
/// </summary>
public static List<SyntaxClassifier.SpanInfo> GetTokenSpans(ClassificationParseResult parseResult, Span span)
{
    var spans = new List<SyntaxClassifier.SpanInfo>(16);

    // Comments arrive sorted by position, so we can stop past the span's end.
    foreach (var comment in parseResult.Comments)
    {
        if (comment.Position > span.End)
            break;

        var commentSpan = new Span(comment.Position, comment.Length);
        if (!span.IntersectsWith(commentSpan))
            continue;

        InsertClassification(spans, new SyntaxClassifier.SpanInfo(
            commentSpan,
            comment.IsMultiline ? SyntaxClassifier.SpanType.MultiLineComment : SyntaxClassifier.SpanType.SingleLineComment));

        if (comment.Type == CommentType.Normal)
            continue;

        // Give the TODO/BUG/HACK marker text its own classification span.
        SyntaxClassifier.SpanType markerType;
        switch (comment.Type)
        {
            case CommentType.ToDo:
                markerType = SyntaxClassifier.SpanType.ToDoCommentText;
                break;
            case CommentType.Bug:
                markerType = SyntaxClassifier.SpanType.BugCommentText;
                break;
            case CommentType.Hack:
                markerType = SyntaxClassifier.SpanType.HackCommentText;
                break;
            default:
                markerType = default(SyntaxClassifier.SpanType);
                Trace.Assert(false);
                break;
        }
        InsertClassification(spans, new SyntaxClassifier.SpanInfo(new Span(comment.TextPosition, comment.TextLength), markerType));
    }

    // Directives are likewise position-sorted.
    foreach (var directive in parseResult.Directives)
    {
        if (directive.Position > span.End)
            break;

        var directiveSpan = new Span(directive.Position, directive.Length);
        if (span.IntersectsWith(directiveSpan))
            InsertClassification(spans, new SyntaxClassifier.SpanInfo(directiveSpan, SyntaxClassifier.SpanType.PreprocessorKeyword));
    }

    List<Span> splices = null; // splice output is not needed by this caller
    WalkTokens(parseResult.Tokens, parseResult.Snapshot, span, spans, false, ref splices);
    return spans;
}
/// <summary>
/// Produces (or retrieves from the buffer's property bag) a classification parse
/// result for the buffer's current snapshot. Returns false when the Nemerle source
/// or its engine is not yet available; on success the result is cached on the buffer
/// keyed by snapshot version.
/// </summary>
public static bool TryParse(ITextBuffer textBuffer, out ClassificationParseResult parseResult)
{
    var snapshot = textBuffer.CurrentSnapshot;

    // Fast path: reuse the cached result if it matches the current snapshot version.
    ClassificationParseResult cached;
    if (textBuffer.Properties.TryGetProperty(typeof(ClassificationParseResult), out cached)
        && cached.Snapshot.Version == snapshot.Version)
    {
        parseResult = cached;
        return true;
    }

    NemerleSource source;
    if (!textBuffer.Properties.TryGetProperty(typeof(NemerleSource), out source))
    {
        parseResult = null;
        return false;
    }

    var engine = source.GetEngine();
    if (!engine.RequestOnInitEngine())
    {
        parseResult = null;
        return false;
    }

    var timer = Stopwatch.StartNew();

    var code = snapshot.GetText();
    var lexer = new HighlightingLexer(engine, code);
    var preParser = new PreParser(lexer);
    var topDeclarations = preParser.ParseTopLevel();
    var rawComments = lexer.GetComments();
    var directives = lexer.GetDirectives();

    // Classify each comment as Normal/ToDo/Bug/Hack by matching the marker regexes.
    // Regex groups 1..3 correspond to ToDo, Bug, Hack respectively; first match wins.
    var markerTypes = new[] { CommentType.ToDo, CommentType.Bug, CommentType.Hack };
    var comments = new Comment[rawComments.Length];
    for (var i = 0; i < rawComments.Length; ++i)
    {
        var raw = rawComments[i];
        var type = CommentType.Normal;
        var pos = 0;
        var commentParser = raw.IsMultiline ? _multiLineCommentParser : _singleLineCommentParser;
        var match = commentParser.Match(code, raw.Position, raw.Length);
        if (match.Success)
        {
            for (var g = 1; g <= 3; ++g)
            {
                if (match.Groups[g].Success)
                {
                    pos = match.Groups[g].Index;
                    type = markerTypes[g - 1];
                    break;
                }
            }
        }
        comments[i] = new Comment(raw, type, pos);
    }

    timer.Stop();
    Debug.Print("SyntaxClassifier.TryParse: {0}", timer.Elapsed);

    parseResult = new ClassificationParseResult(snapshot, topDeclarations.Child, comments, directives);
    textBuffer.Properties[typeof(ClassificationParseResult)] = parseResult;
    return true;
}
/// <summary>
/// Determines which spans need reclassification after the given changes.
/// Whole verbatim/recursive strings and multi-line comments are invalidated;
/// a touched preprocessor directive invalidates the rest of the snapshot,
/// because it can change the interpretation of everything after it.
/// </summary>
private static List<Span> SearchClassificationChanges(ClassificationParseResult parseResult, IEnumerable<Span> changes)
{
    var invalidated = new List<Span>();
    foreach (var changedSpan in changes)
    {
        var touched = ParseUtil.GetTokenSpans(parseResult, changedSpan);
        foreach (var tokenInfo in touched)
        {
            switch (tokenInfo.Type)
            {
                case SpanType.VerbatimString:
                case SpanType.RecursiveString:
                case SpanType.MultiLineComment:
                    // Redraw the whole multi-line token, not just the edited piece.
                    invalidated.Add(tokenInfo.Span);
                    break;

                case SpanType.PreprocessorKeyword:
                    // Everything after a directive may reclassify.
                    var tail = parseResult.Snapshot.Length - tokenInfo.Span.Start;
                    invalidated.Add(new Span(tokenInfo.Span.Start, tail));
                    break;
            }
        }
    }
    return invalidated;
}
/// <summary>
/// Buffer-change handler: notifies listeners about multi-line constructs affected
/// by the edit (using old-snapshot coordinates, which match the cached parse),
/// then drops the stale parse result and records the changes for later processing.
/// </summary>
private void TextBuffer_Changed(object sender, TextContentChangedEventArgs e)
{
    var staleResult = _lastParseResult;
    if (staleResult != null)
    {
        var listeners = ClassificationChanged;
        if (listeners != null)
        {
            foreach (var affected in SearchClassificationChanges(staleResult, e.Changes.Select(c => c.OldSpan)))
                listeners(this, new ClassificationChangedEventArgs(new SnapshotSpan(staleResult.Snapshot, affected)));
        }
    }
    _lastParseResult = null;
    _lastTextChanges = e.Changes;
}
/// <summary>
/// Parses the buffer's current snapshot for classification, reusing a cached result
/// (stored in the buffer's property bag, keyed by snapshot version) when possible.
/// Returns false if the Nemerle source or engine is not available yet.
/// </summary>
public static bool TryParse(ITextBuffer textBuffer, out ClassificationParseResult parseResult)
{
    var currentSnapshot = textBuffer.CurrentSnapshot;

    // Reuse the cached result when it was built from this exact snapshot version.
    ClassificationParseResult previous;
    var haveCached = textBuffer.Properties.TryGetProperty(typeof(ClassificationParseResult), out previous);
    if (haveCached && previous.Snapshot.Version == currentSnapshot.Version)
    {
        parseResult = previous;
        return true;
    }

    NemerleSource nemerleSource;
    if (!textBuffer.Properties.TryGetProperty(typeof(NemerleSource), out nemerleSource))
    {
        parseResult = null;
        return false;
    }

    var engine = nemerleSource.GetEngine();
    if (!engine.RequestOnInitEngine())
    {
        parseResult = null;
        return false;
    }

    var stopwatch = Stopwatch.StartNew();

    var sourceText = currentSnapshot.GetText();
    var lexer = new HighlightingLexer(engine, sourceText);
    var preParser = new PreParser(lexer);
    var topLevel = preParser.ParseTopLevel();
    var lexedComments = lexer.GetComments();
    var directives = lexer.GetDirectives();

    // Tag each comment as Normal/ToDo/Bug/Hack. The marker regexes expose three
    // groups: 1 = TODO, 2 = BUG, 3 = HACK; the first successful group wins.
    var taggedComments = new Comment[lexedComments.Length];
    for (var index = 0; index < lexedComments.Length; ++index)
    {
        var lexed = lexedComments[index];
        var commentType = CommentType.Normal;
        var markerPos = 0;
        var parser = lexed.IsMultiline ? _multiLineCommentParser : _singleLineCommentParser;
        var markerMatch = parser.Match(sourceText, lexed.Position, lexed.Length);
        if (markerMatch.Success)
        {
            if (markerMatch.Groups[1].Success)
            {
                markerPos = markerMatch.Groups[1].Index;
                commentType = CommentType.ToDo;
            }
            else if (markerMatch.Groups[2].Success)
            {
                markerPos = markerMatch.Groups[2].Index;
                commentType = CommentType.Bug;
            }
            else if (markerMatch.Groups[3].Success)
            {
                markerPos = markerMatch.Groups[3].Index;
                commentType = CommentType.Hack;
            }
        }
        taggedComments[index] = new Comment(lexed, commentType, markerPos);
    }

    stopwatch.Stop();
    Debug.Print("SyntaxClassifier.TryParse: {0}", stopwatch.Elapsed);

    parseResult = new ClassificationParseResult(currentSnapshot, topLevel.Child, taggedComments, directives);
    textBuffer.Properties[typeof(ClassificationParseResult)] = parseResult;
    return true;
}