private ParseItemList Parse(ITextProvider text, IItemFactory itemFactory, ITokenStream stream, IParsingExecutionContext context)
{
    var results = new ParseItemList();

    while (!context.IsCancellationRequested && stream.Current.Type != TokenType.EndOfFile)
    {
        int position = stream.Position;
        ParseItem item;

        if (!itemFactory.TryCreate(null, text, stream, out item))
            break;

        if (item.Parse(itemFactory, text, stream))
            results.Add(item);

        // guard against infinite loop (in case token couldn't be handled)
        if (stream.Position == position)
            stream.Advance();
    }

    // freeze everything
    if (!context.IsCancellationRequested)
        foreach (var item in results)
            item.Freeze();

    return results;
}

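The stream.Position == position check is what keeps this loop terminating when a token cannot be turned into an item: if nothing was consumed, the stream is forced forward one token. A minimal, self-contained sketch of the same guard on a toy stream (ToyStream, GuardDemo, and the "?" sentinel are illustration-only names, not part of the CSS parser API):

using System;

class ToyStream
{
    private readonly string[] _tokens;
    public int Position { get; private set; }

    public ToyStream(string[] tokens) { _tokens = tokens; }

    public bool AtEnd => Position >= _tokens.Length;
    public string Current => _tokens[Position];
    public void Advance() { Position++; }
}

static class GuardDemo
{
    static int CountHandledTokens(ToyStream stream)
    {
        int handled = 0;

        while (!stream.AtEnd)
        {
            int before = stream.Position;

            // A handler that, like a failed TryCreate/Parse, may consume nothing.
            if (stream.Current != "?")
            {
                handled++;
                stream.Advance();
            }

            // Without this guard, an unhandled token would spin forever.
            if (stream.Position == before)
                stream.Advance();
        }

        return handled;
    }

    static void Main()
    {
        Console.WriteLine(CountHandledTokens(new ToyStream(new[] { "a", "?", "b" }))); // 2
    }
}
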
internal CssItemsChangedEventArgs(
    ParseItemList deletedItems,
    ParseItemList insertedItems,
    ParseItemList errorsChangedItems)
{
    DeletedItems = deletedItems ?? new ParseItemList();
    InsertedItems = insertedItems ?? new ParseItemList();
    ErrorsChangedItems = errorsChangedItems ?? new ParseItemList();
}

private static void Update(ParseItem rule, ITextBuffer buffer)
{
    CssErrorTagger tagger = CssErrorTagger.FromTextBuffer(buffer);
    ParseItemList list = new ParseItemList() { rule };
    tagger.RecheckItems(list);
}

private static void Update(ParseItem rule, ITextBuffer buffer)
{
    try
    {
        CssErrorTagger tagger = CssErrorTagger.FromTextBuffer(buffer);
        ParseItemList list = new ParseItemList() { rule };
        tagger.RecheckItems(list);
    }
    catch
    {
        // Couldn't get error tagger. Ignore.
    }
}

private static void Update(ParseItem rule, CssTree tree)
{
    BindingFlags flags = BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.InvokeMethod;

    object[] parameters = new object[3];
    parameters[0] = new ParseItemList();
    parameters[1] = new ParseItemList();
    parameters[2] = new ParseItemList() { rule };

    typeof(CssTree).InvokeMember("FireOnItemsChanged", flags, null, tree, parameters);
}

protected virtual void Parse(IIntellisenseContainer container, ParseItemList items, ITextProvider text)
{
    if (items.Count == 0)
    {
        return;
    }

    foreach (var item in items)
    {
        container.Add(item, text);
    }

    Containers.Add(container);
}

static IEnumerable<ParseItem> GetItems(ParseItemList items, SnapshotSpan span)
{
    foreach (var item in items)
    {
        if (item.Start <= span.End && item.End >= span.Start)
        {
            yield return item;

            var container = item as IParseItemContainer;
            if (container != null)
            {
                foreach (var child in GetItems(container.Children, span))
                    yield return child;
            }
        }
    }
}

private void DumpTree(ParseItemList items, int depth)
{
    //var indent = new string(' ', depth);
    var indent = "";
    for (int i = 0; i < depth; i++)
    {
        indent += "| ";
    }
    //indent += "| ";

    foreach (var item in items)
    {
        //if (item is BlockItem)
        //{
        //    var block = item as BlockItem;
        //    if (block.CloseCurlyBrace == null)
        //    {
        //        Logger.Log(string.Format("{0} of {1} on line {2}", block.GetType().Name, block.Parent.GetType().Name, Tree.SourceText.GetLineFromPosition(block.OpenCurlyBrace.Start).LineNumber));
        //    }
        //}

        OutputLogger.Log(string.Format("{0} {1}", indent, item.GetType().Name));

        //string content = string.Empty;
        //if (item is TokenItem && ((item as TokenItem).SourceType == TokenType.String || (item as TokenItem).SourceType == TokenType.BadString))
        //    content = snapshot.GetText(item.Start, item.Length).Replace("\r", "\\r").Replace("\n", "\\n");
        //if (!string.IsNullOrEmpty(content))
        //    Logger.Log(string.Format("{0} {1} - {2}", indent, item.GetType().Name, content));

        var complex = item as ComplexItem;
        if (complex != null)
        {
            DumpTree(complex.Children, depth + 1);
        }

        var simplex = item as SimplexItem;
        if (simplex != null)
        {
            DumpTree(simplex.Children, depth + 1);
        }
    }
}

static IEnumerable<ParseItem> GetItems(ParseItemList items, SnapshotSpan span)
{
    foreach (var item in items)
    {
        if (item.Start <= span.End && item.End >= span.Start)
        {
            yield return item;

            var container = item as IParseItemContainer;
            if (container != null)
            {
                foreach (var child in GetItems(container.Children, span))
                {
                    yield return child;
                }
            }
        }
    }
}

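Both GetItems variants above use the same interval-overlap test (item.Start <= span.End && item.End >= span.Start) and only then recurse into a container's children. A self-contained sketch of that pattern on plain integer ranges (Range, GetOverlapping, and OverlapDemo are hypothetical illustration names):

using System;
using System.Collections.Generic;

class Range
{
    public int Start;
    public int End;
    public List<Range> Children = new List<Range>();
}

static class OverlapDemo
{
    // A range intersects [spanStart, spanEnd] when it starts before the span
    // ends and ends after the span starts; children are only visited when
    // their parent already overlaps.
    static IEnumerable<Range> GetOverlapping(IEnumerable<Range> items, int spanStart, int spanEnd)
    {
        foreach (var item in items)
        {
            if (item.Start <= spanEnd && item.End >= spanStart)
            {
                yield return item;

                foreach (var child in GetOverlapping(item.Children, spanStart, spanEnd))
                    yield return child;
            }
        }
    }

    static void Main()
    {
        var root = new Range { Start = 0, End = 100 };
        root.Children.Add(new Range { Start = 10, End = 20 });
        root.Children.Add(new Range { Start = 60, End = 90 });

        foreach (var r in GetOverlapping(new[] { root }, 15, 30))
            Console.WriteLine($"{r.Start}-{r.End}"); // prints 0-100, then 10-20
    }
}
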
private bool IsMassiveChange(ParseItemList deletedItems, ParseItemList insertedItems)
{
    // If over half of the root stylesheet changed, then the change is massive.
    // That should catch formatting or deleting the whole document.
    // Massive changes inside of @directives aren't detected.

    if (insertedItems.Count > 0 &&
        insertedItems[0].Parent == StyleSheet &&
        insertedItems.Count > StyleSheet.Children.Count / 2)
    {
        return true;
    }

    if (deletedItems.Count > 0 &&
        deletedItems[0].Parent == StyleSheet &&
        deletedItems.Count > StyleSheet.Children.Count)
    {
        return true;
    }

    return false;
}

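As a worked example of the threshold: with 40 root-level children, an edit that inserts 21 items whose parent is the stylesheet itself counts as massive (21 > 40 / 2), so FireOnItemsChanged below raises TreeUpdated instead of ItemsChanged.
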
IEnumerable<BlockItem> GetBlocks(ParseItemList items, int start, int end)
{
    foreach (var item in items)
    {
        if (item.Start <= end && item.End >= start)
        {
            var container = item as ComplexItem;
            if (container != null)
            {
                if (container is BlockItem)
                {
                    yield return container as BlockItem;
                }

                foreach (var child in GetBlocks(container.Children, start, end))
                {
                    yield return child;
                }
            }
        }
    }
}

private static void RunPseudoTest(IEnumerable<PseudoTest> tests)
{
    foreach (PseudoTest test in tests)
    {
        ITextProvider text = new StringTextProvider(test.SelectorText);
        TokenStream tokens = Helpers.MakeTokenStream(text);
        Selector selector = new Selector();

        Assert.IsTrue(selector.Parse(new ItemFactory(text, null), text, tokens));
        Assert.IsTrue(selector.SimpleSelectors.Count > 0);

        ParseItemList subs = selector.SimpleSelectors[0].SubSelectors;
        Assert.IsTrue(subs.Count > 0);
        Assert.IsInstanceOfType(subs[0], test.ParseType);
        Assert.AreEqual(test.IsValid, selector.IsValid);
        Assert.AreEqual(test.IsValid, subs[0].IsValid);

        if (!test.IsValid)
        {
            int errorCount = 0;
            Assert.IsTrue(subs[0].ContainsParseErrors);

            IEnumerable<ParseError> aggregateErrors = subs[0].HasParseErrors
                ? subs[0].ParseErrors
                : ((ComplexItem)subs[0]).Children.SelectMany(s => s.ParseErrors);

            foreach (ParseError error in aggregateErrors)
            {
                errorCount++;
                Assert.AreEqual(test.Error, error.ErrorType);
            }

            Assert.AreEqual(1, errorCount);
        }
    }
}

private void DumpTree(ParseItemList items, int depth)
{
    //var indent = new string(' ', depth);
    var indent = "";
    for (int i = 0; i < depth; i++)
    {
        indent += "| ";
    }
    //indent += "| ";

    foreach (var item in items)
    {
        //if (item is BlockItem)
        //{
        //    var block = item as BlockItem;
        //    if (block.CloseCurlyBrace == null)
        //    {
        //        Logger.Log(string.Format("{0} of {1} on line {2}", block.GetType().Name, block.Parent.GetType().Name, Tree.SourceText.GetLineFromPosition(block.OpenCurlyBrace.Start).LineNumber));
        //    }
        //}

        OutputLogger.Log(string.Format("{0} {1}", indent, item.GetType().Name));

        //string content = string.Empty;
        //if (item is TokenItem && ((item as TokenItem).SourceType == TokenType.String || (item as TokenItem).SourceType == TokenType.BadString))
        //    content = snapshot.GetText(item.Start, item.Length).Replace("\r", "\\r").Replace("\n", "\\n");
        //if (!string.IsNullOrEmpty(content))
        //    Logger.Log(string.Format("{0} {1} - {2}", indent, item.GetType().Name, content));

        var complex = item as ComplexItem;
        if (complex != null)
            DumpTree(complex.Children, depth + 1);

        var simplex = item as SimplexItem;
        if (simplex != null)
            DumpTree(simplex.Children, depth + 1);
    }
}

private void FireOnItemsChanged(
    ParseItemList deletedItems,
    ParseItemList insertedItems,
    ParseItemList errorsChangedItems)
{
#if DEBUG
    Debug.Assert(IsOwnerThread);

    for (int i = 0; i + 1 < insertedItems.Count; i++)
    {
        // Inserted items must not overlap (no need to add children as well as their parent)
        Debug.Assert(insertedItems[i].AfterEnd <= insertedItems[i + 1].Start);
    }
#endif

    if (IsMassiveChange(deletedItems, insertedItems))
    {
        TreeUpdated?.Invoke(this, new CssTreeUpdateEventArgs(this));
    }
    else
    {
        ItemsChanged?.Invoke(this, new CssItemsChangedEventArgs(deletedItems, insertedItems, errorsChangedItems));
    }
}

private static void Update(ParseItem rule, CssTree tree)
{
    const BindingFlags flags = BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.InvokeMethod;

    object[] parameters = new object[3];
    parameters[0] = new ParseItemList();
    parameters[1] = new ParseItemList();
    parameters[2] = new ParseItemList() { rule };

    typeof(CssTree).InvokeMember("FireOnItemsChanged", flags, null, tree, parameters);
}

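Both Update(ParseItem, CssTree) overloads raise the non-public CssTree.FireOnItemsChanged through reflection. A self-contained sketch of that InvokeMember pattern on a made-up class (Counter and Bump are hypothetical names standing in for CssTree and FireOnItemsChanged):

using System;
using System.Reflection;

class Counter
{
    public int Value { get; private set; }

    // Non-public method, analogous to CssTree.FireOnItemsChanged.
    private void Bump(int by) { Value += by; }
}

static class ReflectionDemo
{
    static void Main()
    {
        const BindingFlags flags = BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.InvokeMethod;

        var counter = new Counter();

        // InvokeMember finds the private method by name and calls it on the target instance.
        typeof(Counter).InvokeMember("Bump", flags, null, counter, new object[] { 3 });

        Console.WriteLine(counter.Value); // 3
    }
}
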
protected virtual void Parse(IIntellisenseContainer container, ParseItemList items, ITextProvider text)
{
    if (items.Count == 0)
        return;

    foreach (var item in items)
        container.Add(item, text);

    Containers.Add(container);
}

public VariableDefinition(ExpresionMode mode = ExpresionMode.None)
{
    Mode = mode;
    Values = new ParseItemList();
}

public MediaQueryExpression()
{
    FeatureValues = new ParseItemList();
}

public PropertyName()
{
    Fragments = new ParseItemList();
}

public MediaExpression()
{
    Values = new ParseItemList();
}

public FunctionArgument()
{
    ArgumentItems = new ParseItemList();
}

public FunctionArgument()
{
    Values = new ParseItemList();
}

public ConditionalControlDirective()
{
    ConditionStatements = new ParseItemList();
}

public Declaration()
{
    Context = CssClassifierContextCache.FromTypeEnum(CssClassifierContextType.PropertyDeclaration);
    Values = new ParseItemList();
}

public Function()
{
    Arguments = new ParseItemList();
    Context = CssClassifierContextCache.FromTypeEnum(CssClassifierContextType.FunctionName);
}

public PropertyDeclaration()
{
    Values = new ParseItemList();
}

public ComplexItem()
{
    Children = new ParseItemList();
    Children.AutoParent = this;
}

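Setting Children.AutoParent = this suggests the list assigns each added child's Parent automatically. A minimal sketch of such a self-parenting collection, assuming items expose a settable Parent property (Node, NodeList, and AutoParentDemo are illustration-only types, not the ParseItemList implementation):

using System;
using System.Collections.ObjectModel;

class Node
{
    public Node Parent { get; set; }
}

// A list that, when AutoParent is set, stamps each added node with its owner.
class NodeList : Collection<Node>
{
    public Node AutoParent { get; set; }

    protected override void InsertItem(int index, Node item)
    {
        if (AutoParent != null)
            item.Parent = AutoParent;

        base.InsertItem(index, item);
    }
}

static class AutoParentDemo
{
    static void Main()
    {
        var owner = new Node();
        var children = new NodeList { AutoParent = owner };

        var child = new Node();
        children.Add(child);

        Console.WriteLine(child.Parent == owner); // True
    }
}
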
public void OnTextChange(ITextProvider fullNewText, int changeStart, int deletedLength, int insertedLength)
{
    Debug.Assert(IsOwnerThread, "CssTree.OnTextChange must be called on the main thread");

    if (StyleSheet == null || !IsOwnerThread)
    {
        return;
    }

#if DEBUG_INCREMENTAL_PARSE
    DateTime startTime = DateTime.UtcNow;
#endif

    // Figure out which tokens changed
    ICssParser parser = _parserFactory.CreateParser();

    IncrementalTokenizer.Result tokenResult = IncrementalTokenizer.TokenizeChange(
        parser.TokenizerFactory, Tokens, TextProvider, fullNewText, changeStart, deletedLength, insertedLength);

    // Adjust the input to match what was actually tokenized
    changeStart = tokenResult.TokenizationStart;
    deletedLength = tokenResult.TextDeletedLength;
    insertedLength = tokenResult.TextInsertedLength;

    // Figure out where to start incrementally parsing
    IIncrementalParseItem parentItem = GetIncrementalParseParent(changeStart, deletedLength);
    ComplexItem parentComplexItem = (ComplexItem)parentItem;
    int firstReparseChild = FindFirstChildToReparse(parentComplexItem, tokenResult.OldTokenStart);
    int firstCleanChild = FindFirstCleanChild(parentComplexItem, tokenResult.OldTokenStart + tokenResult.OldTokenCount);
    int firstCleanTokenAfterParent = FindFirstCleanTokenAfterParent(parentComplexItem);

    using (CreateWriteLock())
    {
        // Update the tokens and text
        IncrementalTokenizer.ApplyResult(tokenResult);
        firstCleanTokenAfterParent += tokenResult.NewTokens.Count - tokenResult.OldTokenCount;

        // Init the token stream for parsing
        TokenStream tokenStream = new TokenStream(Tokens);
        int streamPositionStart = FindTokenToStartParsing(parentComplexItem, firstReparseChild);
        tokenStream.Position = streamPositionStart;
        Debug.Assert(tokenStream.Position <= tokenResult.OldTokenStart);

        // Init parsing
        ItemFactory itemFactory = new ItemFactory(parser.ExternalItemFactory, fullNewText, tokenStream);
        tokenStream.SkipComments = true; // must be set after extracting comments

        // Init the old and new child lists
        ParseItemList oldChildren = parentComplexItem.Children;
        ParseItemList newChildren = new ParseItemList();
        ParseItemList deletedChildren = new ParseItemList();
        ParseItemList errorsChangedItems = new ParseItemList();
        int deleteChildCount = oldChildren.Count - firstReparseChild;

        // CreateNextChild needs to know the previous child for context
        ParseItem prevChild = (firstReparseChild > 0) ? oldChildren[firstReparseChild - 1] : null;

        while (true)
        {
            ParseItem newChild = parentItem.CreateNextChild(prevChild, itemFactory, fullNewText, tokenStream);

            if (newChild != null)
            {
                // Are we done parsing yet?
                if (newChild.Start >= changeStart + insertedLength)
                {
                    // See if this new child exactly matches an old child
                    int oldChildIndex = oldChildren.FindInsertIndex(newChild.Start, beforeExisting: true);
                    ParseItem oldChild = (oldChildIndex < oldChildren.Count) ? oldChildren[oldChildIndex] : null;

                    if (oldChild != null &&
                        oldChildIndex >= firstCleanChild &&
                        oldChild.Start == newChild.Start &&
                        oldChild.Length == newChild.Length &&
                        oldChild.GetType() == newChild.GetType())
                    {
                        // Found a match, stop parsing
                        deleteChildCount = oldChildIndex - firstReparseChild;
                        break;
                    }
                }

                newChildren.Add(newChild);
                prevChild = newChild;
            }
            else if (tokenStream.Position != firstCleanTokenAfterParent)
            {
                // When the parse doesn't stop exactly on the first clean token after the parent,
                // then the tree structure changed too much. Just fall back to a full parse.
                ParseNewStyleSheet(fullNewText, Tokens);
                //Debug.WriteLine("CSS: Full parse:{0}ms", (DateTime.UtcNow - startTime).TotalMilliseconds);

                return;
            }
            else
            {
                break;
            }
        }

        // Replace items in the parent (saving the deleted items for later)
        oldChildren.RemoveRange(firstReparseChild, deleteChildCount, deletedChildren);
        oldChildren.AddRange(newChildren);

        if (oldChildren.Count == 0)
        {
            // The parent was deleted, currently can't deal with that as an incremental change
            ParseNewStyleSheet(fullNewText, Tokens);
            return;
        }

        // Collect comments over the parsed region
        tokenStream.SkipComments = false;
        int tokenCount = tokenStream.Position - streamPositionStart;
        tokenStream.Position = streamPositionStart;
        IList<Comment> comments = parser.ExtractComments(fullNewText, Tokens, tokenStream.Position, tokenCount);

        // All done parsing and updating the tree, so now update caches and fire the "on changed" event
        StyleSheet.TextProvider = fullNewText;
        parentItem.UpdateCachedChildren();

        if (parentItem.UpdateParseErrors())
        {
            errorsChangedItems.Add(parentComplexItem);
        }

        InsertComments(comments, newChildren);

#if DEBUG_INCREMENTAL_PARSE
        Debug.WriteLine("CSS: Inc parse:{0}ms. Deleted:{1}, Inserted:{2}",
            (DateTime.UtcNow - startTime).TotalMilliseconds, deletedChildren.Count, newChildren.Count);
        VerifyTokensAfterIncrementalChange(parser.TokenizerFactory, fullNewText, Tokens);
        VerifyTreeAfterIncrementalParse(fullNewText, Tokens, StyleSheet);
#endif

        FireOnItemsChanged(deletedChildren, newChildren, errorsChangedItems);

        // Clean up the deleted items (must be after the event is fired)
        foreach (ParseItem deletedItem in deletedChildren)
        {
            deletedItem.Parent = null;
        }
    }
}

// +, >, ~, /id/
public SimpleSelector()
{
    SubSelectors = new ParseItemList();
}