/// <summary>
/// Attempts to incrementally re-parse this chameleon node after a text edit,
/// instead of re-parsing the whole file.
/// </summary>
/// <param name="cachingLexer">Lexer over the updated document text.</param>
/// <param name="changedRange">The changed range, in tree offsets; must lie inside this node.</param>
/// <param name="insertedTextLen">Length of the text inserted in place of <paramref name="changedRange"/>.</param>
/// <returns>
/// The re-parsed node when re-sync succeeds, or <c>null</c> to signal that the
/// caller must fall back to a full re-parse.
/// </returns>
public IChameleonNode ReSync(CachingLexer cachingLexer, TreeTextRange changedRange, int insertedTextLen)
{
    TreeOffset currStartOffset = GetTreeStartOffset();
    int currLength = GetTextLength();
    Logger.Assert(changedRange.StartOffset >= currStartOffset && changedRange.EndOffset <= (currStartOffset + currLength),
        "changedRange.StartOffset >= currStartOffset && changedRange.EndOffset <= (currStartOffset+currLength)");

    // Length this node will have once the edit is applied.
    int newLength = currLength - changedRange.Length + insertedTextLen;

    LanguageService languageService = Language.LanguageService();
    if (languageService == null)
        return null;

    // Re-parse only the projected range covering this node's new extent.
    var parser = (IPsiParser)languageService.CreateParser(
        new ProjectedLexer(cachingLexer, new TextRange(currStartOffset.Offset, currStartOffset.Offset + newLength)),
        GetPsiModule(),
        GetSourceFile());

    TreeElement newElement = parser.ParseStatement();
    if (newElement.GetTextLength() == 0)
        return null;

    // Accept the re-parse only when it consumed exactly the new range and the
    // statement is properly terminated by ';' (otherwise the edit may have
    // merged this node with its neighbours — require a full re-parse).
    string newText = newElement.GetText();
    if (newElement.GetTextLength() == newLength && newText[newText.Length - 1] == ';')
    {
        var psiFile = GetContainingNode<PsiFile>();
        if (psiFile != null)
        {
            // Cached symbol tables are stale after an in-place re-parse.
            psiFile.ClearTables();
        }
        // NOTE(review): a successful parse that is not an IRuleDeclaration
        // yields null here, which also forces the full re-parse fallback.
        return newElement as IRuleDeclaration;
    }

    return null;
}
// Synchronizes the lexer with the given leaf: materializes any tokens the
// lexer sees before the leaf starts, then advances the lexer past the leaf.
protected override void ProcessLeafElement(TreeElement leafElement)
{
    int leafStart = GetLeafOffset(leafElement).Offset;

    if (myLexer.TokenType != null && myLexer.TokenStart < leafStart)
    {
        // Walk up while this element is the first child of its parent, so the
        // missing tokens are inserted before the outermost such ancestor.
        var insertionAnchor = leafElement;
        var insertionParent = insertionAnchor.parent;
        while (insertionAnchor == insertionParent.FirstChild && insertionParent.parent != null)
        {
            insertionAnchor = insertionParent;
            insertionParent = insertionParent.parent;
        }

        // Create missing tokens until the lexer catches up with the leaf start.
        while (myLexer.TokenType != null && myLexer.TokenStart < leafStart)
        {
            var missingToken = CreateMissingToken();
            insertionParent.AddChildBefore(missingToken, insertionAnchor);

            // A created token may span several lexer tokens — skip them all.
            var skipTo = myLexer.TokenStart + missingToken.GetTextLength();
            while (myLexer.TokenType != null && myLexer.TokenStart < skipTo)
            {
                myLexer.Advance();
            }
        }
    }

    // Consume every lexer token lying inside the leaf itself.
    var leafEnd = leafStart + leafElement.GetTextLength();
    while (myLexer.TokenType != null && myLexer.TokenStart < leafEnd)
    {
        myLexer.Advance();
    }
}
// Synchronizes the lexer with the given leaf: inserts tokens missed before the
// leaf, then skips the lexer past the leaf's own extent. Closed chameleon
// bodies are skipped in a single jump when a caching lexer is available.
protected override void ProcessLeafElement(TreeElement element)
{
    int leafStartOffset = GetLeafOffset(element).Offset;

    if (myLexer.TokenType != null && myLexer.TokenStart < leafStartOffset)
    {
        // Climb to the topmost ancestor that starts with this element; the
        // missing tokens become siblings inserted directly in front of it.
        TreeElement insertAnchor = element;
        CompositeElement insertParent = insertAnchor.parent;
        while (insertAnchor == insertParent.firstChild && insertParent.parent != null)
        {
            insertAnchor = insertParent;
            insertParent = insertParent.parent;
        }

        // Emit one missing token per lexer token until the leaf start is reached.
        while (myLexer.TokenType != null && myLexer.TokenStart < leafStartOffset)
        {
            LeafElementBase missingToken = CreateMissingToken();
            insertParent.AddChildBefore(missingToken, insertAnchor);
            myLexer.Advance();
        }
    }

    int leafEndOffset = leafStartOffset + element.GetTextLength();
    var cachingLexer = myLexer as CachingLexer;
    if (cachingLexer != null && element is IClosedChameleonBody)
    {
        // Fast path: reposition the caching lexer at the leaf end in one step.
        cachingLexer.FindTokenAt(leafEndOffset);
    }
    else
    {
        // Slow path: advance token by token through the leaf's range.
        while (myLexer.TokenType != null && myLexer.TokenStart < leafEndOffset)
        {
            myLexer.Advance();
        }
    }
}