/// <summary>
/// Perform a simple lexical styling of the given range of content. This will *not*
/// correctly style many constructs, and it's not smart enough to understand
/// context. But it's fast enough that it can style most content while you type, and
/// it's accurate enough that it's good enough for a first pass. Various background
/// threads will be responsible for running the full parser and re-styling the content
/// more accurately afterward.
/// </summary>
/// <param name="editor">The editor that holds the content.</param>
/// <param name="owner">The text-editor control that owns the editor; used to clear
/// and set per-line error/warning marks.</param>
/// <param name="startPos">The starting character position within the text to style.</param>
/// <param name="endPos">The ending character position within the text to style.</param>
/// <param name="filename">The name of the file being styled (for generating errors)</param>
public static void StyleRange(Scintilla editor, TextEditorControl owner, int startPos, int endPos, string filename)
{
    try
    {
        // Ensure startPos/endPos actually refer to the document.
        ValidateStartAndEndPos(editor, ref startPos, ref endPos);

        // Get the real range of lines to style.
        int startLine = editor.LineFromPosition(startPos);
        int endLine = editor.LineFromPosition(endPos);

        // Remove all the error/warning marks in this range.
        owner.ClearLineMarks(startLine + 1, endLine - startLine + 1);

        // Now find the actual document start and end of the range we're going to color:
        // we always widen the range to whole lines.
        int trueStartPos = editor.Lines[startLine].Position;
        int trueEndPos = editor.Lines[endLine].EndPosition;

        // Get the text itself in that range.
        string textSlice = editor.GetTextRange(trueStartPos, trueEndPos - trueStartPos);

        // Ask the real Smile Lexer to begin lexical analysis on it.
        // Line numbers reported by the lexer are 1-based, hence startLine + 1.
        SmileLibInterop.Lexer lexer = new SmileLibInterop.Lexer(textSlice, 0, textSlice.Length,
            filename, startLine + 1, 1, true);

        editor.StartStyling(trueStartPos);

        Token token;
        int lastPos = 0;          // Position (relative to textSlice) styled so far.
        Token previousToken = null;

        while ((token = lexer.Next()).Kind != TokenKind.EOI)
        {
            LexerPosition lexerPosition = token.Position;

            // Convert the lexer's line-start/column pair into an offset in textSlice.
            int tokenStartPos = lexerPosition.LineStart + lexerPosition.Column - 1;
            int tokenLength = lexerPosition.Length;

            if (tokenStartPos > lastPos)
            {
                // Token somehow skipped over some content, so style it plain.
                editor.SetStyling(tokenStartPos - lastPos, (int)StyleKind.Default);
                editor.IndicatorClearRange(trueStartPos + lastPos, tokenStartPos - lastPos);
                System.Diagnostics.Debug.WriteLine($"Warning: Lexer skipped {tokenStartPos - lastPos} characters at {lastPos}.");
            }
            else if (tokenStartPos < lastPos)
            {
                // For some reason, the previous token had too many characters in it,
                // so shorten this one by a bit to make up for that mistake.
                tokenLength -= lastPos - tokenStartPos;
                System.Diagnostics.Debug.WriteLine($"Warning: Lexer grabbed too many characters ({lastPos - tokenStartPos} extra) at {lastPos}.");
            }

            if (tokenLength > 0)
            {
                StyleKind styleKind = GetDefaultStyleKindForToken(previousToken, token);
                editor.SetStyling(tokenLength, (int)styleKind);

                if (styleKind == StyleKind.Meta_Error)
                {
                    // Underline the bad token and mark its line in the margin.
                    editor.IndicatorCurrent = (int)IndicatorKind.Error;
                    editor.IndicatorFillRange(trueStartPos + tokenStartPos, tokenLength);
                    owner.SetLineMark(lexerPosition.Line, IndicatorKind.Error);
                }
                else
                {
                    editor.IndicatorClearRange(trueStartPos + tokenStartPos, tokenLength);
                }
            }

            lastPos = tokenStartPos + tokenLength;

            // Only remember tokens that carry semantic weight, so styling decisions
            // that depend on the previous token skip over trivia.
            previousToken = IsSemanticToken(token) ? token : previousToken;
        }
    }
    catch (Exception e)
    {
        // Should never get here, but just in case, we swallow errors
        // and hope a later pass will restyle the content better.
        System.Diagnostics.Debug.WriteLine("Warning: LexicalStyler.StyleRange() crashed: " + e.Message);
    }
}
public void Restyle()
{
    // Snapshot the filename and full text from the editor on the UI thread.
    string text = null;
    string filename = null;
    RunOnWinFormsThread(() =>
    {
        if (ShouldCancel)
        {
            return;
        }
        filename = _owner.Filename;
        text = _editor.Text;
    });
    if (ShouldCancel)
    {
        return;
    }

    // First, do a fast easy pass over the input to find its maximum width,
    // so that we can keep the horizontal scrollbar up-to-date.
    MaximumInfo maximumInfo = CalculateMaximumInfo(text, _configurator.TextEditorConfig.Indentation);
    RunOnWinFormsThread(() =>
    {
        if (ShouldCancel)
        {
            return;
        }
        _owner.SetMaximumInfo(maximumInfo);
        _owner.ClearLineMarks();
    });

    // Create a Smile Lexer to begin lexical analysis on it.
    SmileLibInterop.Lexer lexer = new SmileLibInterop.Lexer(text, 0, text.Length, filename, 1, 1, true);

    // Spin over the input, collect tokens in approximately ~64K chunks (or ~1024 tokens),
    // and pass them to the LexicalStyler class to style.
    const int ChunkSizeLimit = 64 * 1024;
    const int ApproxTokenWeight = 8 * 8;    // Approximate guess for how much memory a Token object uses.

    List<Token> pendingTokens = new List<Token>();
    int pendingSize = 0;

    Token token;
    while ((token = lexer.Next()).Kind != TokenKind.EOI)
    {
        pendingTokens.Add(token);
        pendingSize += token.Position.Length + ApproxTokenWeight;

        if (pendingSize < ChunkSizeLimit)
        {
            continue;
        }

        // Chunk is full: hand it off to the UI thread for styling, then start fresh.
        if (ShouldCancel)
        {
            break;
        }
        RunOnWinFormsThread(() =>
        {
            if (ShouldCancel)
            {
                return;
            }
            LexicalStyler.StyleTokens(_editor, _owner, pendingTokens);
        });
        pendingTokens.Clear();
        pendingSize = 0;
        if (ShouldCancel)
        {
            break;
        }
    }

    // Flush whatever partial chunk remains.
    if (pendingTokens.Count > 0)
    {
        RunOnWinFormsThread(() =>
        {
            if (ShouldCancel)
            {
                return;
            }
            LexicalStyler.StyleTokens(_editor, _owner, pendingTokens);
        });
    }
}