//copied from MoonlightEditorExtension
#region Document outline

/// <summary>
/// Repopulates the outline tree store from the XML tree of the freshly parsed document.
/// Does nothing when the document has no XML tree yet.
/// </summary>
protected override void RefillOutlineStore (ParsedDocument doc, Gtk.TreeStore store)
{
	var xmlTree = ((MonoDevelop.Xml.Editor.XmlParsedDocument)doc).XDocument;
	if (xmlTree == null)
		return;

	// Gtk.TreeIter iter = outlineTreeStore.AppendValues (System.IO.Path.GetFileName (CU.Document.FilePath), p);
	BuildTreeChildren (store, Gtk.TreeIter.Zero, xmlTree);
}
/// <summary>
/// Releases this extension's resources: unhooks the parse-notification handler,
/// cancels any pending outline refill and clears the cached compilation unit.
/// Safe to call multiple times.
/// </summary>
public override void Dispose()
{
	// Guard against double-dispose.
	if (disposed)
		return;
	disposed = true;

	if (Document != null)
		Document.DocumentParsed -= UpdateDocumentOutline;

	RemoveRefillOutlineStoreTimeout ();
	lastCU = null;
	base.Dispose ();
}
// Shared arrange step: builds the summarization container, a parsed document
// holding the container's single sentence, and a stubbed content analyzer that
// hands back the container's pre-scored text units and sentences.
public void before_each_test_setup()
{
	TargetContainer = new SummarizationInformationContainer();

	TargetParsedDocument = new ParsedDocument
	{
		Sentences = new List<Sentence> { TargetContainer.sentence }
	};

	TargetContentAnalyzer = Substitute.For<IContentAnalyzer>();
	TargetContentAnalyzer
		.GetImportantTextUnits(Arg.Any<List<Sentence>>())
		.Returns(TargetContainer.scoredTextUnits);
	TargetContentAnalyzer
		.ScoreSentences(Arg.Any<List<Sentence>>(), Arg.Any<List<TextUnitScore>>())
		.Returns(TargetContainer.scoredSentences);
}
/// <summary>
/// Sets up the extension: builds the parser-backed document state tracker and
/// keeps the cached parse result (lastCU) in sync with document re-parses.
/// </summary>
public override void Initialize ()
{
	base.Initialize ();

	var parser = new Parser (CreateRootState (), false);
	tracker = new DocumentStateTracker<Parser> (parser, Editor);

	// Refresh the cached compilation unit on every re-parse.
	Document.DocumentParsed += delegate {
		lastCU = Document.ParsedDocument;
		OnParsedDocumentUpdated ();
	};

	// A parse result may already exist; pick it up immediately.
	if (Document.ParsedDocument != null) {
		lastCU = Document.ParsedDocument;
		OnParsedDocumentUpdated ();
	}
}
// Finds all references to 'member' within the generated (build) document.
// Currently a stub: it always yields nothing. The old NRefactory-based
// implementation below was disabled pending the type system conversion and is
// kept for reference only (note it also contains a ".Lenhth" typo).
IEnumerable<DomRegion> SearchMember (IEntity member, ITypeResolveContext dom, FilePath fileName, Mono.TextEditor.TextEditorData editor, Mono.TextEditor.TextDocument buildDocument, List<LocalDocumentInfo.OffsetInfo> offsetInfos, ParsedDocument parsedDocument)
{
	// TODO: Type system conversion.
	yield break;
	// var resolver = new NRefactoryResolver (dom, parsedDocument.CompilationUnit, ICSharpCode.OldNRefactory.SupportedLanguage.CSharp, editor, fileName);
	//
	// var visitor = new FindMemberAstVisitor (buildDocument, member);
	// visitor.IncludeXmlDocumentation = IncludeDocumentation;
	// visitor.RunVisitor (resolver);
	//
	// foreach (var result in visitor.FoundReferences) {
	// var offsetInfo = offsetInfos.FirstOrDefault (info => info.ToOffset <= result.Position && result.Position < info.ToOffset + info.Length);
	// if (offsetInfo == null)
	// continue;
	// var offset = offsetInfo.FromOffset + result.Position - offsetInfo.ToOffset;
	// var loc = editor.OffsetToLocation (offset);
	// yield return new DomRegion (fileName, loc.Line, loc.Column, loc.Line, loc.Column + result.Name.Lenhth);
	// }
}
/// <summary>
/// Sets up the extension: resolves owner projects, builds the parser-backed
/// document state tracker, keeps the cached parse result current, and listens
/// for workspace file membership changes.
/// </summary>
public override void Initialize ()
{
	base.Initialize ();
	UpdateOwnerProjects ();

	var parser = new Parser (CreateRootState (), false);
	tracker = new DocumentStateTracker<Parser> (parser, Editor);

	// Refresh the cached compilation unit on every re-parse.
	Document.DocumentParsed += delegate {
		lastCU = Document.ParsedDocument;
		OnParsedDocumentUpdated ();
	};

	// A parse result may already exist; pick it up immediately.
	if (Document.ParsedDocument != null) {
		lastCU = Document.ParsedDocument;
		OnParsedDocumentUpdated ();
	}

	// Track project membership changes so owner projects stay up to date.
	var workspace = IdeApp.Workspace;
	if (workspace != null) {
		workspace.FileAddedToProject += HandleProjectChanged;
		workspace.FileRemovedFromProject += HandleProjectChanged;
	}
}
// IOutlinedDocument implementation: lazily builds the Document Outline pad
// widget (a sorted tree view inside a scrolled window) and returns it.
// NOTE(review): on the first call this returns the CompactScrolledWindow, but on
// subsequent calls it returns the cached bare tree view — looks inconsistent;
// confirm against the pad's caller before changing.
Widget MonoDevelop.DesignerSupport.IOutlinedDocument.GetOutlineWidget ()
{
	if (outlineTreeView != null)
		return outlineTreeView;

	// One object column; sorting is done through a TreeModelSort wrapper driven
	// by the user-configurable outline settings.
	outlineTreeStore = new TreeStore (typeof(object));
	outlineTreeModelSort = new TreeModelSort (outlineTreeStore);
	settings = ClassOutlineSettings.Load ();
	comparer = new ClassOutlineNodeComparer (GetAmbience (), settings, outlineTreeModelSort);
	outlineTreeModelSort.SetSortFunc (0, comparer.CompareNodes);
	outlineTreeModelSort.SetSortColumnId (0, SortType.Ascending);

	outlineTreeView = new MonoDevelop.Ide.Gui.Components.PadTreeView (outlineTreeStore);

	// Single column rendering an icon followed by the node's text; both
	// renderers use custom data funcs to derive content from the stored object.
	var pixRenderer = new CellRendererPixbuf ();
	pixRenderer.Xpad = 0;
	pixRenderer.Ypad = 0;
	outlineTreeView.TextRenderer.Xpad = 0;
	outlineTreeView.TextRenderer.Ypad = 0;
	TreeViewColumn treeCol = new TreeViewColumn ();
	treeCol.PackStart (pixRenderer, false);
	treeCol.SetCellDataFunc (pixRenderer, new TreeCellDataFunc (OutlineTreeIconFunc));
	treeCol.PackStart (outlineTreeView.TextRenderer, true);
	treeCol.SetCellDataFunc (outlineTreeView.TextRenderer, new TreeCellDataFunc (OutlineTreeTextFunc));
	outlineTreeView.AppendColumn (treeCol);
	outlineTreeView.HeadersVisible = false;

	// Selection follows the caret (no focus steal); activation focuses the editor.
	outlineTreeView.Selection.Changed += delegate { JumpToDeclaration (false); };
	outlineTreeView.RowActivated += delegate { JumpToDeclaration (true); };

	this.lastCU = Document.ParsedDocument;

	// Defer the first fill until the widget is actually realized on screen.
	outlineTreeView.Realized += delegate { RefillOutlineStore (); };
	UpdateSorting ();

	var sw = new CompactScrolledWindow ();
	sw.Add (outlineTreeView);
	sw.ShowAll ();
	return sw;
}
/// <summary>
/// Parses diff-style content into a ParsedDocument: each file header becomes a
/// DomType in the compilation unit and each chunk header becomes a DomProperty
/// inside the current file, with body regions running up to the next header.
/// </summary>
/// <param name="dom">Project dom (unused by this parser).</param>
/// <param name="fileName">Name used for the resulting document/compilation unit.</param>
/// <param name="content">Raw diff text to scan.</param>
/// <returns>The populated ParsedDocument; never null.</returns>
public override ParsedDocument Parse(ProjectDom dom, string fileName, string content)
{
	ParsedDocument doc = new ParsedDocument(fileName);
	if (null == doc.CompilationUnit)
	{
		doc.CompilationUnit = new CompilationUnit(fileName);
	}
	CompilationUnit cu = (CompilationUnit)doc.CompilationUnit;
	DomType currentFile = null;
	DomProperty currentRegion = null;

	// Detect the content's own line ending; fall back to the platform default.
	// (Regex.Match never returns null, so only Success needs checking.)
	string eol = Environment.NewLine;
	Match eolMatch = eolExpression.Match(content);
	if (eolMatch.Success)
	{
		eol = eolMatch.Groups["eol"].Value;
	}

	string[] lines = content.Split(new string[] { eol }, StringSplitOptions.None);
	int linenum = 1;
	Match lineMatch;
	foreach (string line in lines)
	{
		lineMatch = fileHeaderExpression.Match(line.Trim());
		if (lineMatch.Success)
		{
			if (currentFile != null) // Close out previous file region
			{
				currentFile.BodyRegion = new DomRegion(currentFile.BodyRegion.Start.Line,
				                                       currentFile.BodyRegion.Start.Column,
				                                       linenum - 1, int.MaxValue);
			}
			if (currentRegion != null) // Close out previous chunk region
			{
				currentRegion.BodyRegion = new DomRegion(currentRegion.BodyRegion.Start.Line,
				                                         currentRegion.BodyRegion.Start.Column,
				                                         linenum - 1, int.MaxValue);
				// BUGFIX: forget the closed chunk, otherwise the trailing
				// close below would re-extend it to the end of the document.
				currentRegion = null;
			}

			// Create new file region
			currentFile = new DomType(cu, ClassType.Unknown, Modifiers.None,
			                          lastToken(lineMatch.Groups["filepath"].Value),
			                          new DomLocation(linenum, 1), string.Empty,
			                          new DomRegion(linenum, line.Length + 1, linenum, int.MaxValue));
			cu.Add(currentFile);
		}
		else
		{
			lineMatch = chunkExpression.Match(line);
			// BUGFIX: a chunk header appearing before any file header used to
			// throw a NullReferenceException on currentFile.Add below; such
			// orphan chunks are now ignored.
			if (lineMatch.Success && currentFile != null)
			{
				if (currentRegion != null) // Close out previous chunk region
				{
					currentRegion.BodyRegion = new DomRegion(currentRegion.BodyRegion.Start.Line,
					                                         currentRegion.BodyRegion.Start.Column,
					                                         linenum - 1, int.MaxValue);
				}

				// Create new chunk region
				currentRegion = new DomProperty(lineMatch.Groups["chunk"].Value, Modifiers.None,
				                                new DomLocation(linenum, 1),
				                                new DomRegion(linenum, line.Length + 1, linenum, int.MaxValue), null);
				currentFile.Add(currentRegion);
			}
		}
		++linenum;
	}

	// Close out trailing regions.
	// NOTE(review): linenum - 2 (vs. linenum - 1 used mid-loop) presumably
	// compensates for the trailing empty element produced by Split — confirm.
	if (currentFile != null)
	{
		currentFile.BodyRegion = new DomRegion(currentFile.BodyRegion.Start.Line,
		                                       currentFile.BodyRegion.Start.Column,
		                                       Math.Max(1, linenum - 2), int.MaxValue);
	}
	if (currentRegion != null)
	{
		currentRegion.BodyRegion = new DomRegion(currentRegion.BodyRegion.Start.Line,
		                                         currentRegion.BodyRegion.Start.Column,
		                                         Math.Max(1, linenum - 2), int.MaxValue);
	}
	return(doc);
}
/// <summary>
/// Determines the highlight style for a contextual keyword ("value", "var", ...)
/// at the given chunk using the document's NRefactory AST. Returns null to keep
/// the default colouring; otherwise advances endOffset to the end of the node
/// and returns the current span colour (or "text").
/// (The disabled semantic field/type detection that used to live in the
/// non-keyword branch has been removed; that branch always returned null.)
/// </summary>
string GetSemanticStyle(ParsedDocument parsedDocument, Chunk chunk, ref int endOffset)
{
	var unit = parsedDocument.LanguageAST as ICSharpCode.NRefactory.CSharp.CompilationUnit;
	if (unit == null)
		return null;

	var loc = doc.OffsetToLocation(chunk.Offset);
	if (!contextualKeywords.Contains(wordbuilder.ToString()))
		return null;

	var node = unit.GetNodeAt(loc.Line, loc.Column);
	var identifier = node as Identifier;
	if (identifier != null) {
		if (identifier.Name == "value") {
			// highlight 'value' in property setters and event add/remove
			for (var n = node.Parent; n != null; n = n.Parent) {
				if (n is Accessor && n.Role != PropertyDeclaration.GetterRole)
					return null;
			}
		} else if (identifier.Name == "var") {
			if (node.Parent != null) {
				var grandParent = node.Parent.Parent;
				var vds = grandParent as VariableDeclarationStatement;
				var fes = grandParent as ForeachStatement;
				// 'var' used as an inferred type keyword keeps default colouring.
				if ((fes != null && fes.VariableType.StartLocation == node.StartLocation) ||
				    (vds != null && node.StartLocation == vds.Type.StartLocation))
					return null;
			}
		}
	}

	if (node is CSharpTokenNode)
		return null;

	// NOTE(review): node may be null here when no AST node covers the location —
	// behavior preserved from the original implementation.
	endOffset = doc.LocationToOffset(node.EndLocation.Line, node.EndLocation.Column);
	return spanParser.CurSpan != null ? spanParser.CurSpan.Color : "text";
}
/// <summary>
/// Stores a freshly parsed document and triggers the parse-information update,
/// either on the background worker or synchronously on the calling thread.
/// No-op when the document is null or the worker has been torn down.
/// </summary>
internal void SetParsedDocument (ParsedDocument newDocument, bool runInThread)
{
	this.parsedDocument = newDocument;
	if (parsedDocument == null || parseInformationUpdaterWorkerThread == null)
		return;

	// Cancel any update still running for the previous parse result.
	StopParseInfoThread ();

	if (!runInThread) {
		HandleParseInformationUpdaterWorkerThreadDoWork (null, new DoWorkEventArgs (parsedDocument));
		return;
	}

	parseInformationUpdaterWorkerThread.RunWorkerAsync (parsedDocument);
}
// Used by unit tests: injects a parsed document directly, bypassing the parser.
public void SetParsedDocument(ParsedDocument document)
{
	this.parsedDocument = document;
}
/// <summary>
/// Stores a freshly parsed document and runs the parse-information update,
/// either on a background task (observing the current cancellation token) or
/// synchronously on the calling thread.
/// </summary>
internal void SetParsedDocument (ParsedDocument newDocument, bool runInThread)
{
	this.parsedDocument = newDocument;
	if (parsedDocument == null)
		return;

	// Cancel any update still running for the previous parse result.
	StopParseInfoThread ();

	if (!runInThread) {
		HandleParseInformationUpdaterWorkerThreadDoWork (true, parsedDocument);
		return;
	}

	// Capture the token before scheduling so a later cancellation source swap
	// does not affect this run.
	var token = parserInformationUpdateSrc.Token;
	System.Threading.Tasks.Task.Factory.StartNew (
		() => HandleParseInformationUpdaterWorkerThreadDoWork (false, parsedDocument, token));
}
/// <summary>Convenience accessor for a parsed document's compilation unit.</summary>
public static ICompilationUnit GetCompilationUnit(ParsedDocument input)
{
	return input.CompilationUnit;
}
// Creates a tracker that records specials into the given parse result.
public SpecialTracker (ParsedDocument result)
{
	this.result = result;
}
// Appends the members of 'cls' under 'parent' in the outline tree, grouping
// members that fall inside a user #region under a node for that region.
// Nested types recurse. Members and regions are both processed in source order,
// with a single forward pass over the region enumerator.
static void AddTreeClassContents(TreeStore store, TreeIter parent, ParsedDocument parsedDocument, IType cls)
{
	List <object> items = new List <object> ();
	foreach (object o in cls.Members) {
		items.Add(o);
	}
	// Sort members by their source region so the tree follows file order.
	items.Sort(delegate(object x, object y) {
		DomRegion r1 = GetRegion(x), r2 = GetRegion(y);
		return(r1.CompareTo(r2));
	});

	List <FoldingRegion> regions = new List <FoldingRegion> ();
	foreach (FoldingRegion fr in parsedDocument.UserRegions) {
		//check regions inside class
		if (cls.BodyRegion.Contains(fr.Region)) {
			regions.Add(fr);
		}
	}
	regions.Sort(delegate(FoldingRegion x, FoldingRegion y) {
		return(x.Region.CompareTo(y.Region));
	});

	// A null enumerator means "no regions left" throughout the loop below.
	IEnumerator <FoldingRegion> regionEnumerator = regions.GetEnumerator();
	if (!regionEnumerator.MoveNext()) {
		regionEnumerator = null;
	}

	FoldingRegion currentRegion = null;
	TreeIter currentParent = parent;
	foreach (object item in items) {
		//no regions left; quick exit
		if (regionEnumerator != null) {
			DomRegion itemRegion = GetRegion(item);
			//advance to a region that could potentially contain this member
			while (regionEnumerator != null && !OuterEndsAfterInner(regionEnumerator.Current.Region, itemRegion)) {
				if (!regionEnumerator.MoveNext()) {
					regionEnumerator = null;
				}
			}
			//if member is within region, make sure it's the current parent.
			//If not, move target iter back to class parent
			if (regionEnumerator != null && regionEnumerator.Current.Region.Contains(itemRegion)) {
				if (currentRegion != regionEnumerator.Current) {
					// First member in this region: create the region node lazily.
					currentParent = store.AppendValues(parent, regionEnumerator.Current);
					currentRegion = regionEnumerator.Current;
				}
			} else {
				currentParent = parent;
			}
		}
		TreeIter childIter = store.AppendValues(currentParent, item);
		// Recurse into nested types.
		if (item is IType) {
			AddTreeClassContents(store, childIter, parsedDocument, (IType)item);
		}
	}
}
/// <summary>
/// Returns the xml node which corresponds to the For Update tag, or null when
/// the parsed document contains no such tag.
/// </summary>
private XmlNode GetForUpdateTagXmlNode()
{
	// XPath shape: <root>/<tag>[@type='<ForUpdate tag name>'] built from the
	// shared node/attribute name constants.
	string xpath = string.Format(@"{0}/{1}[@{2}='{3}']",
		cRootXmlNodeName,
		cTagXmlNodeName,
		cTagTypeXmlAttributeName,
		ForUpdateTag.cTagName);

	return ParsedDocument.SelectSingleNode(xpath);
}
/// <summary>
/// Handles Enter inside a comment: when the caret sits in a splittable comment
/// on a single line, splits the comment across two lines inside an undoable,
/// caret-preserving transaction. Returns false (letting the editor handle the
/// key normally) whenever any precondition fails.
/// </summary>
public bool ExecuteCommand(ReturnKeyCommandArgs args, CommandExecutionContext context)
{
	var textView = args.TextView;
	var subjectBuffer = args.SubjectBuffer;
	var spans = textView.Selection.GetSnapshotSpansOnBuffer(subjectBuffer);

	// Don't do anything special if there is multi-selection. It's not clear what sort of semantics that should have.
	if (spans.Count != 1) {
		return(false);
	}

	var snapshot = subjectBuffer.CurrentSnapshot;
	var document = snapshot.GetOpenDocumentInCurrentContextWithChanges();
	if (document == null) {
		return(false);
	}

	// Respect the per-language split-comment option.
	if (!_globalOptions.GetOption(SplitCommentOptions.Enabled, document.Project.Language)) {
		return(false);
	}

	var splitCommentService = document.GetLanguageService <ISplitCommentService>();
	if (splitCommentService == null) {
		return(false);
	}

	// If there is a selection, ensure that it's all on one-line. It's not clear what sort of semantics we
	// would want if this spanned multiple lines.
	var selectionSpan = spans[0].Span;
	var position = selectionSpan.Start;
	var line = subjectBuffer.CurrentSnapshot.GetLineFromPosition(position);
	var endLine = subjectBuffer.CurrentSnapshot.GetLineFromPosition(selectionSpan.End);
	if (line.LineNumber != endLine.LineNumber) {
		return(false);
	}

	// Quick check. If the line doesn't contain a comment in it before the caret,
	// then no point in doing any more expensive synchronous work.
	if (!LineProbablyContainsComment(splitCommentService, new SnapshotPoint(snapshot, position))) {
		return(false);
	}

	using (context.OperationContext.AddScope(allowCancellation: true, EditorFeaturesResources.Split_comment)) {
		var cancellationToken = context.OperationContext.UserCancellationToken;
		// Synchronous parse is acceptable here: we're inside a user-visible,
		// cancellable operation scope.
		var parsedDocument = ParsedDocument.CreateSynchronously(document, cancellationToken);
		var result = SplitComment(parsedDocument, textView, subjectBuffer, new SnapshotSpan(snapshot, selectionSpan));
		if (result == null) {
			return(false);
		}

		// Apply the edit inside a caret-preserving transaction so undo restores
		// both the text and the caret position.
		using var transaction = CaretPreservingEditTransaction.TryCreate(
			EditorFeaturesResources.Split_comment, textView, _undoHistoryRegistry, _editorOperationsFactoryService);
		subjectBuffer.Replace(result.Value.replacementSpan, result.Value.replacementText);
		transaction?.Complete();
		return(true);
	}
}
// Appends the members of 'cls' that belong to the file part 'part' under
// 'parent' in the outline tree, grouping members that fall inside a user
// #region under a node for that region. Nested types recurse with the same
// part filter. Delegates contribute no member rows.
static void AddTreeClassContents(TreeStore store, TreeIter parent, ParsedDocument parsedDocument, ITypeDefinition cls, IUnresolvedTypeDefinition part)
{
	List <object> items = new List <object> ();
	if (cls.Kind != TypeKind.Delegate) {
		// Only members declared in this file part (same file, inside its region).
		foreach (var o in cls.GetMembers(m => part.Region.FileName == m.Region.FileName && part.Region.IsInside(m.Region.Begin))) {
			items.Add(o);
		}
		// Direct nested types only (GetNestedTypes may surface deeper levels).
		foreach (var o in cls.GetNestedTypes(m => part.Region.FileName == m.Region.FileName && part.Region.IsInside(m.Region.Begin))) {
			if (o.DeclaringType == cls) {
				items.Add(o);
			}
		}
		// Constructors are not in GetMembers; skip compiler-generated ones.
		foreach (var o in cls.GetConstructors(m => part.Region.FileName == m.Region.FileName && part.Region.IsInside(m.Region.Begin))) {
			if (o.IsSynthetic) {
				continue;
			}
			items.Add(o);
		}
	}
	// Source order for both members and regions.
	items.Sort(ClassOutlineNodeComparer.CompareRegion);

	List <FoldingRegion> regions = new List <FoldingRegion> ();
	foreach (FoldingRegion fr in parsedDocument.UserRegions) {
		//check regions inside class
		if (cls.BodyRegion.IsInside(fr.Region.Begin) && cls.BodyRegion.IsInside(fr.Region.End)) {
			regions.Add(fr);
		}
	}
	regions.Sort(delegate(FoldingRegion x, FoldingRegion y) {
		return(x.Region.Begin.CompareTo(y.Region.Begin));
	});

	// A null enumerator means "no regions left" throughout the loop below.
	IEnumerator <FoldingRegion> regionEnumerator = regions.GetEnumerator();
	if (!regionEnumerator.MoveNext()) {
		regionEnumerator = null;
	}

	FoldingRegion currentRegion = null;
	TreeIter currentParent = parent;
	foreach (object item in items) {
		//no regions left; quick exit
		if (regionEnumerator != null) {
			DomRegion itemRegion = ClassOutlineNodeComparer.GetRegion(item);
			//advance to a region that could potentially contain this member
			while (regionEnumerator != null && !OuterEndsAfterInner(regionEnumerator.Current.Region, itemRegion)) {
				if (!regionEnumerator.MoveNext()) {
					regionEnumerator = null;
				}
			}
			//if member is within region, make sure it's the current parent.
			//If not, move target iter back to class parent
			if (regionEnumerator != null && regionEnumerator.Current.Region.IsInside(itemRegion.Begin)) {
				if (currentRegion != regionEnumerator.Current) {
					// First member in this region: create the region node lazily.
					currentParent = store.AppendValues(parent, regionEnumerator.Current);
					currentRegion = regionEnumerator.Current;
				}
			} else {
				currentParent = parent;
			}
		}
		TreeIter childIter = store.AppendValues(currentParent, item);
		// Recurse into nested types.
		if (item is ITypeDefinition) {
			AddTreeClassContents(store, childIter, parsedDocument, (ITypeDefinition)item, part);
		}
	}
}
// Parses a D source file into a ParsedDModule: resolves the owning project,
// runs the D parser, builds comment folds and task tags, and refreshes the
// global parse/UFCS caches. Returns null when the caller doesn't want the AST.
public override ParsedDocument Parse(bool storeAst, string file, TextReader content, Project prj = null)
{
	if (!storeAst) {
		return(null);
	}

	// Resolve the owning project and project file if not supplied.
	ProjectFile pf = null;
	if (prj == null) {
		var sln = Ide.IdeApp.ProjectOperations.CurrentSelectedSolution;
		if (sln != null) {
			foreach (var proj in sln.GetAllProjects()) {
				if (proj.IsFileInProject(file)) {
					prj = proj;
					pf = proj.GetProjectFile(file);
					break;
				}
			}
		}
	} else if (prj.IsFileInProject(file)) {
		pf = prj.GetProjectFile(file);
	}

	// HACK(?) The folds are parsed before the document gets loaded
	// - so reuse the last parsed document to save time
	// -- What if multiple docs are opened?
	if (LastParsedMod is ParsedDModule && LastParsedMod.FileName == file) {
		var d = LastParsedMod as ParsedDModule;
		LastParsedMod = null;
		return(d);
	} else {
		LastParsedMod = null;
	}

	var dprj = prj as AbstractDProject;

	// Remove obsolete ast from cache
	GlobalParseCache.RemoveModule(file);

	DModule ast;
	var doc = new ParsedDModule(file);
	var parser = DParser.Create(content);

	// Also put attention on non-ddoc comments; These will be used to generate foldable comment regions then
	parser.Lexer.OnlyEnlistDDocComments = false;

	// Parse the code; on too many errors keep the partial AST the parser built.
	// NOTE(review): the exception variable 'x' is unused.
	try {
		ast = parser.Parse();
	} catch (TooManyErrorsException x) {
		ast = parser.Document;
	}

	// Update project owner information / Build appropriate module name
	if (string.IsNullOrEmpty(ast.ModuleName)) {
		if (pf == null) {
			ast.ModuleName = Path.GetFileNameWithoutExtension(file);
		} else {
			ast.ModuleName = BuildModuleName(pf);
		}
	}
	ast.FileName = file;

	// Assign new ast to the ParsedDDocument object
	doc.DDom = ast;

	// Add parser errors to the parser output
	foreach (var parserError in parser.ParseErrors) {
		doc.ErrorList.Add(new Error(
			ErrorType.Error,
			parserError.Message,
			parserError.Location.Line,
			parserError.Location.Column));
	}

	#region Provide comment fold support by addin them to the IDE document object
	foreach (var cm in parser.TrackerVariables.Comments) {
		var c = new MonoDevelop.Ide.TypeSystem.Comment(cm.CommentText) {
			CommentStartsLine = cm.CommentStartsLine,
			CommentType = (cm.CommentType & D_Parser.Parser.Comment.Type.Block) != 0 ? CommentType.Block : CommentType.SingleLine,
			IsDocumentation = cm.CommentType.HasFlag(D_Parser.Parser.Comment.Type.Documentation),
		};

		// Pick the open/close markers matching the comment flavour.
		if (c.CommentType == CommentType.SingleLine) {
			if (c.IsDocumentation) {
				c.OpenTag = "///";
			} else {
				c.OpenTag = "//";
			}
		} else {
			if (c.IsDocumentation) {
				c.OpenTag = "/**";
				c.ClosingTag = "*/";
			} else {
				c.OpenTag = "/*";
				c.ClosingTag = "*/";
			}
		}
		c.Region = new DomRegion(cm.StartPosition.Line, cm.StartPosition.Column, cm.EndPosition.Line, cm.EndPosition.Column);
		doc.Comments.Add(c);

		// Enlist TODO/FIXME/HACK etc. stuff in the IDE's project task list
		for (int i = CommentTag.SpecialCommentTags.Count - 1; i >= 0; i--) {
			if (c.Text.StartsWith(CommentTag.SpecialCommentTags[i].Tag)) {
				doc.Add(new Tag(CommentTag.SpecialCommentTags[i].Tag, c.Text, c.Region));
				break;
			}
		}
	}
	#endregion

	#region Serialize to NRefactory Dom structure
	/*
	 * var cu = new CompilationUnit(file);
	 * doc.CompilationUnit = cu;
	 *
	 * var global = new DomType(cu, ClassType.Class,
	 * Modifiers.Public | Modifiers.Partial,
	 * "(global)",
	 * new DomLocation(),
	 * ast.ModuleName,
	 * new DomRegion());
	 * cu.Add(global);
	 *
	 * foreach (var n in ast)
	 * {
	 * var ch = ConvertDParserToDomNode(n, doc);
	 *
	 * if (ch is DomField || ch is DomMethod)
	 * global.Add(ch as IMember);
	 * else
	 * cu.Add(ch as IType);
	 * }
	 */
	#endregion

	if (prj != null) {
		// Workaround for tags not being displayed
		var ctnt = TypeSystemService.GetProjectContentWrapper(prj);
		if (ctnt != null) {
			var tags = ctnt.GetExtensionObject <ProjectCommentTags>();
			if (tags != null) {
				tags.UpdateTags(prj, file, doc.TagComments);
			}
		}
	}

	// Update UFCS
	ModulePackage pack;
	if ((pack = GlobalParseCache.GetPackage(ast, false)) != null && (pack = pack.Root) != null) {
		// If the file is not associated with any project,
		// check if the file is located in an imported/included directory
		// and update the respective cache.
		// Note: ParseCache.Remove() also affects the Ufcs cache,
		// but when adding it again, the UfcsCache has to be updated manually
		ParseCacheView pcw;
		bool containsPack = false;
		if (prj != null) {
			pcw = dprj.ParseCache;
			containsPack = true;
		} else {
			// Find out which compiler environment fits most
			pcw = null;
			foreach (var cmp in DCompilerService.Instance.Compilers) {
				pcw = cmp.GenParseCacheView();
				// NOTE(review): this compares each enumerated package against
				// 'pack' itself — presumably checking whether the root package
				// belongs to this compiler's cache view; confirm intent.
				foreach (var r in pack as IEnumerable <ModulePackage> ) {
					if (r == pack) {
						containsPack = true;
						break;
					}
				}
				if (containsPack) {
					break;
				}
			}
		}
		if (containsPack) {
			(pack as RootPackage).UfcsCache.CacheModuleMethods(ast, new ResolutionContext(pcw, null, ast));
		}
	}
	return(doc);
}
// Determines the semantic highlight style for the chunk at the current offset
// by walking the resolved NRefactory AST upward from the innermost matching
// node. Returns a "keyword.semantic.*" style, a contextual-keyword colour, the
// current span colour, or null to keep the default; advances endOffset past the
// highlighted token when a style is returned.
string GetSemanticStyleFromAst(ParsedDocument parsedDocument, Chunk chunk, ref int endOffset)
{
	var unit = csharpSyntaxMode.unit;
	// No AST or resolver yet — nothing to highlight semantically.
	if (unit == null || csharpSyntaxMode.resolver == null) {
		return(null);
	}
	var loc = doc.OffsetToLocation(chunk.Offset);
	var node = unit.GetNodeAt(loc, n => n is Identifier || n is AstType || n is CSharpTokenNode);
	var word = wordbuilder.ToString();
	string color;
	// Walk up from the innermost node until reaching statement/member level.
	while (node != null && !(node is Statement || node is EntityDeclaration)) {
		// Tokens, comments and preprocessor directives end the walk; they may
		// still be contextual keywords (handled after the loop).
		if (node is CSharpTokenNode || node is ICSharpCode.NRefactory.CSharp.Comment || node is PreProcessorDirective) {
			break;
		}
		if (node is SimpleType) {
			var st = (SimpleType)node;
			var result = csharpSyntaxMode.resolver.Resolve(st);
			// Unresolvable name in a project context → error highlight.
			if (result.IsError && csharpSyntaxMode.guiDocument.Project != null) {
				endOffset = chunk.Offset + st.Identifier.Length;
				return("keyword.semantic.error");
			}
			// Resolved type name (but not inside a using directive) → type highlight.
			if (result is TypeResolveResult && st.IdentifierToken.Contains(loc) && unit.GetNodeAt <UsingDeclaration> (loc) == null) {
				endOffset = chunk.Offset + st.Identifier.Length;
				return("keyword.semantic.type");
			}
			return(null);
		}
		if (node is ICSharpCode.NRefactory.CSharp.MemberType) {
			var mt = (ICSharpCode.NRefactory.CSharp.MemberType)node;
			var result = csharpSyntaxMode.resolver.Resolve(mt);
			if (result.IsError && csharpSyntaxMode.guiDocument.Project != null) {
				endOffset = chunk.Offset + mt.MemberName.Length;
				return("keyword.semantic.error");
			}
			if (result is TypeResolveResult && mt.MemberNameToken.Contains(loc) && unit.GetNodeAt <UsingDeclaration> (loc) == null) {
				endOffset = chunk.Offset + mt.MemberName.Length;
				return("keyword.semantic.type");
			}
			return(null);
		}
		if (node is Identifier) {
			// Declaration-site identifiers: type names, property names, fields.
			if (node.Parent is TypeDeclaration && node.Role == Roles.Identifier) {
				endOffset = chunk.Offset + ((Identifier)node).Name.Length;
				return("keyword.semantic.type");
			}
			if (node.Parent is PropertyDeclaration) {
				endOffset = chunk.Offset + ((Identifier)node).Name.Length;
				return("keyword.semantic.property");
			}
			if (node.Parent is VariableInitializer &&
			    node.Parent.Parent is FieldDeclaration) {
				var field = node.Parent.Parent as FieldDeclaration;
				// const / static readonly fields keep the default colouring.
				if (field.Modifiers.HasFlag(Modifiers.Const) || field.Modifiers.HasFlag(Modifiers.Static | Modifiers.Readonly)) {
					return(null);
				}
				endOffset = chunk.Offset + ((Identifier)node).Name.Length;
				return("keyword.semantic.field");
			}
			if (node.Parent is FixedVariableInitializer /*|| node.Parent is EnumMemberDeclaration*/) {
				endOffset = chunk.Offset + ((Identifier)node).Name.Length;
				return("keyword.semantic.field");
			}
		}
		// Usage-site identifiers: resolve to decide field/property/type colour.
		var id = node as IdentifierExpression;
		if (id != null) {
			var result = csharpSyntaxMode.resolver.Resolve(id);
			if (result.IsError && csharpSyntaxMode.guiDocument.Project != null) {
				endOffset = chunk.Offset + id.Identifier.Length;
				return("keyword.semantic.error");
			}
			if (result is MemberResolveResult) {
				var member = ((MemberResolveResult)result).Member;
				if (member is IField) {
					var field = member as IField;
					if (field.IsConst || field.IsStatic && field.IsReadOnly) {
						return(null);
					}
					endOffset = chunk.Offset + id.Identifier.Length;
					return("keyword.semantic.field");
				}
				if (member is IProperty) {
					endOffset = chunk.Offset + id.Identifier.Length;
					return("keyword.semantic.property");
				}
			}
			if (result is TypeResolveResult) {
				if (!result.IsError && csharpSyntaxMode.guiDocument.Project != null) {
					endOffset = chunk.Offset + id.Identifier.Length;
					return("keyword.semantic.type");
				}
			}
		}
		// Member accesses: only act when the caret is on the member name itself.
		var memberReferenceExpression = node as MemberReferenceExpression;
		if (memberReferenceExpression != null) {
			if (!memberReferenceExpression.MemberNameToken.Contains(loc)) {
				return(null);
			}
			var result = csharpSyntaxMode.resolver.Resolve(memberReferenceExpression);
			if (result.IsError && csharpSyntaxMode.guiDocument.Project != null) {
				endOffset = chunk.Offset + memberReferenceExpression.MemberName.Length;
				return("keyword.semantic.error");
			}
			if (result is MemberResolveResult) {
				var member = ((MemberResolveResult)result).Member;
				if (member is IField && !member.IsStatic && !((IField)member).IsConst) {
					endOffset = chunk.Offset + memberReferenceExpression.MemberName.Length;
					return("keyword.semantic.field");
				}
			}
			if (result is TypeResolveResult) {
				if (!result.IsError && csharpSyntaxMode.guiDocument.Project != null) {
					endOffset = chunk.Offset + memberReferenceExpression.MemberName.Length;
					return("keyword.semantic.type");
				}
			}
		}
		node = node.Parent;
	}
	// Contextual keywords that should be highlighted in specific positions.
	if (csharpSyntaxMode.contextualHighlightKeywords.TryGetValue(word, out color)) {
		if (node == null) {
			return(null);
		}
		switch (word) {
		case "value":
			// highlight 'value' in property setters and event add/remove
			var n = node.Parent;
			while (n != null) {
				if (n is Accessor && n.Role == PropertyDeclaration.SetterRole) {
					endOffset = chunk.Offset + "value".Length;
					return(color);
				}
				n = n.Parent;
			}
			return(null);
		}
		endOffset = chunk.Offset + word.Length;
		if (node is CSharpTokenNode) {
			return(color);
		}
		return(spanParser.CurSpan != null ? spanParser.CurSpan.Color : "text");
	}
	// Keywords that must be DE-highlighted when used as ordinary identifiers
	// (e.g. 'var' naming a variable rather than declaring an inferred type).
	if (ContextualDehighlightKeywordList.Contains(word)) {
		if (node == null) {
			return(null);
		}
		if (node is Identifier) {
			switch (((Identifier)node).Name) {
			case "var":
				if (node.Parent != null) {
					var vds = node.Parent.Parent as VariableDeclarationStatement;
					if (node.Parent.Parent is ForeachStatement && ((ForeachStatement)node.Parent.Parent).VariableType.StartLocation == node.StartLocation || vds != null && node.StartLocation == vds.Type.StartLocation) {
						return(null);
					}
				}
				endOffset = chunk.Offset + "var".Length;
				return(spanParser.CurSpan != null ? spanParser.CurSpan.Color : "text");
			}
		} else if (node is CSharpTokenNode) {
			return(color);
		}
		endOffset = chunk.Offset + word.Length;
		return(spanParser.CurSpan != null ? spanParser.CurSpan.Color : "text");
	}
	return(null);
}
/// <summary>
/// Adds one tree node per top-level type in the parsed document (resolved
/// against the current compilation), then recursively fills in each type's
/// members and user regions.
/// </summary>
void BuildTreeChildren (TreeStore store, TreeIter parent, ParsedDocument parsedDocument)
{
	if (parsedDocument == null)
		return;

	foreach (var unresolvedCls in parsedDocument.TopLevelTypeDefinitions) {
		// Resolve the file-local definition against the compilation; skip
		// definitions that no longer resolve.
		var cls = document.Compilation.MainAssembly.GetTypeDefinition (unresolvedCls.FullTypeName);
		if (cls == null)
			continue;

		var childIter = parent.Equals (TreeIter.Zero)
			? store.AppendValues (cls)
			: store.AppendValues (parent, cls);

		AddTreeClassContents (store, childIter, parsedDocument, cls, unresolvedCls);
	}
}
// Converts a node of the D parser AST into the MonoDevelop dom model:
// DMethod → DomMethod, DEnum → DomType(Enum), DClassLike → DomType,
// DVariable → DomField. Children are converted recursively. Returns null for
// node kinds that have no dom counterpart.
public static MonoDevelop.Projects.Dom.INode ConvertDParserToDomNode(D_Parser.Dom.INode n, ParsedDocument doc)
{
	//TODO: DDoc comments!
	if (n is DMethod) {
		var dm = n as DMethod;
		var domMethod = new DomMethod(
			n.Name,
			GetNodeModifiers(dm),
			dm.SpecialType == DMethod.MethodType.Constructor ? MethodModifier.IsConstructor : MethodModifier.None,
			FromCodeLocation(n.StartLocation),
			GetBlockBodyRegion(dm),
			GetReturnType(n));

		foreach (var pn in dm.Parameters) {
			domMethod.Add(new DomParameter(domMethod, pn.Name, GetReturnType(pn)));
		}

		domMethod.AddTypeParameter(GetTypeParameters(dm));

		// Nested declarations (local types etc.) become children of the method.
		foreach (var subNode in dm) {
			domMethod.AddChild(ConvertDParserToDomNode(subNode, doc));
		}

		return(domMethod);
	} else if (n is DEnum) {
		var de = n as DEnum;

		var domType = new DomType(
			doc.CompilationUnit,
			ClassType.Enum,
			GetNodeModifiers(de),
			n.Name,
			FromCodeLocation(n.StartLocation),
			BuildTypeNamespace(n),
			GetBlockBodyRegion(de));

		foreach (var subNode in de) {
			domType.Add(ConvertDParserToDomNode(subNode, doc) as IMember);
		}
		return(domType);
	} else if (n is DClassLike) {
		var dc = n as DClassLike;

		// Map the D aggregate kind onto the dom's class taxonomy; templates
		// are shown as classes, unions as structs.
		ClassType ct = ClassType.Unknown;
		switch (dc.ClassType) {
		case DTokens.Template:
		case DTokens.Class:
			ct = ClassType.Class;
			break;
		case DTokens.Interface:
			ct = ClassType.Interface;
			break;
		case DTokens.Union:
		case DTokens.Struct:
			ct = ClassType.Struct;
			break;
		}

		var domType = new DomType(
			doc.CompilationUnit,
			ct,
			GetNodeModifiers(dc),
			n.Name,
			FromCodeLocation(n.StartLocation),
			BuildTypeNamespace(n),
			GetBlockBodyRegion(dc));

		domType.AddTypeParameter(GetTypeParameters(dc));
		foreach (var subNode in dc) {
			domType.Add(ConvertDParserToDomNode(subNode, doc) as IMember);
		}
		return(domType);
	} else if (n is DVariable) {
		var dv = n as DVariable;
		return(new DomField(n.Name, GetNodeModifiers(dv), FromCodeLocation(n.StartLocation), GetReturnType(n)));
	}
	return(null);
}
/// <summary>
/// Parses a D source file into a ParsedDModule: builds the AST, derives the
/// module name, collects parser errors, and registers comment folds and
/// TODO/FIXME-style task tags with the IDE.
/// </summary>
/// <param name="storeAst">When false, parsing is skipped entirely and null is returned.</param>
/// <param name="file">Path of the parsed file (used for module naming and cache eviction).</param>
/// <param name="content">Reader over the source text.</param>
/// <param name="prj">Owning project, if known; otherwise looked up from the current solution.</param>
public override ParsedDocument Parse(bool storeAst, string file, TextReader content, Project prj = null)
{
	if (!storeAst)
		return null;

	ProjectFile pf = null;

	// Resolve the owning project/project file if the caller didn't supply one.
	if (prj == null)
	{
		var sln = Ide.IdeApp.ProjectOperations.CurrentSelectedSolution;
		if (sln != null)
			foreach (var proj in sln.GetAllProjects())
				if (proj.IsFileInProject(file))
				{
					prj = proj;
					pf = proj.GetProjectFile(file);
					break;
				}
	}
	else if(prj.IsFileInProject(file))
	{
		pf = prj.GetProjectFile(file);
	}

	// HACK(?) The folds are parsed before the document gets loaded
	// - so reuse the last parsed document to save time
	// -- What if multiple docs are opened?
	var d = LastParsedMod as ParsedDModule;
	if (d != null && d.FileName == file)
	{
		LastParsedMod = null;
		return d;
	}
	else
		LastParsedMod = null;

	var dprj = prj as AbstractDProject;

	// Remove obsolete ast from cache
	if(file != null)
		GlobalParseCache.RemoveModule (file);

	DModule ast;
	var doc = new ParsedDModule(file);

	var parser = DParser.Create(content);

	// Also put attention on non-ddoc comments; These will be used to generate foldable comment regions then
	parser.Lexer.OnlyEnlistDDocComments = false;

	// Parse the code. Too many syntax errors aborts with a partial AST;
	// any other parser failure is reported as a single error on the document.
	try
	{
		ast = parser.Parse();
	}
	catch (TooManyErrorsException)
	{
		ast = parser.Document;
	}
	catch(System.Exception ex)
	{
		doc.ErrorList.Add(new Error(ErrorType.Error, ex.Message));
		return doc;
	}

	// parser.Document may be null after an aborted parse.
	if(ast == null)
		return doc;

	// Update project owner information / Build appropriate module name
	if(string.IsNullOrEmpty(ast.ModuleName))
	{
		if(pf == null)
			ast.ModuleName = file != null ? Path.GetFileNameWithoutExtension(file) : string.Empty;
		else
			ast.ModuleName = BuildModuleName(pf);
	}
	ast.FileName = file;

	// Assign new ast to the ParsedDDocument object
	doc.DDom = ast;

	// Add parser errors to the parser output
	foreach (var parserError in parser.ParseErrors)
		doc.ErrorList.Add(new Error(
			ErrorType.Error,
			parserError.Message,
			parserError.Location.Line,
			parserError.Location.Column));

	#region Provide comment fold support by addin them to the IDE document object
	foreach (var cm in parser.Comments)
	{
		var c = new MonoDevelop.Ide.TypeSystem.Comment(cm.CommentText){
			CommentStartsLine = cm.CommentStartsLine,
			CommentType = (cm.CommentType & D_Parser.Parser.Comment.Type.Block) != 0 ? CommentType.Block : CommentType.SingleLine,
			IsDocumentation = cm.CommentType.HasFlag(D_Parser.Parser.Comment.Type.Documentation),
		};

		// Pick the D open/close tag matching the comment's kind so the fold
		// label renders the correct delimiters.
		if (c.CommentType == CommentType.SingleLine)
		{
			if (c.IsDocumentation)
				c.OpenTag = "///";
			else
				c.OpenTag = "//";
		}
		else
		{
			if (c.IsDocumentation)
			{
				c.OpenTag = "/**";
				c.ClosingTag = "*/";
			}
			else
			{
				c.OpenTag = "/*";
				c.ClosingTag = "*/";
			}
		}

		c.Region = new DomRegion(cm.StartPosition.Line, cm.StartPosition.Column, cm.EndPosition.Line, cm.EndPosition.Column);

		doc.Comments.Add(c);

		// Enlist TODO/FIXME/HACK etc. stuff in the IDE's project task list
		for (int i = CommentTag.SpecialCommentTags.Count-1; i >= 0 ; i--)
			if (c.Text.StartsWith(CommentTag.SpecialCommentTags[i].Tag))
			{
				doc.Add(new Tag(CommentTag.SpecialCommentTags[i].Tag, c.Text, c.Region));
				break;
			}
	}
	#endregion

	#region Serialize to NRefactory Dom structure
	/*
	var cu = new CompilationUnit(file);
	doc.CompilationUnit = cu;

	var global = new DomType(cu, ClassType.Class,
		Modifiers.Public | Modifiers.Partial,
		"(global)",
		new DomLocation(),
		ast.ModuleName,
		new DomRegion());
	cu.Add(global);

	foreach (var n in ast)
	{
		var ch = ConvertDParserToDomNode(n, doc);

		if (ch is DomField || ch is DomMethod)
			global.Add(ch as IMember);
		else
			cu.Add(ch as IType);
	}
	*/
	#endregion

	if (prj != null)
	{
		// Workaround for tags not being displayed
		var ctnt = TypeSystemService.GetProjectContentWrapper(prj);
		if (ctnt != null)
		{
			var tags = ctnt.GetExtensionObject<ProjectCommentTags>();
			if (tags != null)
				tags.UpdateTags(prj, file, doc.TagComments);
		}
	}

	return doc;
}
/// <summary>
/// Tracks C# preprocessor directives while visiting a file: maintains the
/// #if/#elif/#else conditional-region stacks, records #define symbols, and
/// emits folding regions for conditional blocks and #region/#endregion pairs.
/// </summary>
void VisitPreprocessorDirective(ParsedDocument result, SpecialsBag.PreProcessorDirective directive)
{
	TextLocation loc = new TextLocation(directive.Line, directive.Col);
	switch (directive.Cmd)
	{
		case Tokenizer.PreprocessorDirective.If:
			conditionalRegions.Push(new ConditionalRegion(directive.Arg));
			ifBlocks.Push(directive);
			ConditionalRegion.Start = loc;
			break;
		case Tokenizer.PreprocessorDirective.Elif:
			CloseConditionBlock(new TextLocation(directive.EndLine, directive.EndCol));
			if (ConditionalRegion != null)
				ConditionalRegion.ConditionBlocks.Add(new ConditionBlock(directive.Arg, loc));
			break;
		case Tokenizer.PreprocessorDirective.Else:
			CloseConditionBlock(new TextLocation(directive.EndLine, directive.EndCol));
			// The else block's end is patched in when the matching #endif arrives.
			if (ConditionalRegion != null)
				ConditionalRegion.ElseBlock = new DomRegion(loc, TextLocation.Empty);
			break;
		case Tokenizer.PreprocessorDirective.Endif:
			TextLocation endLoc = new TextLocation(directive.EndLine, directive.EndCol);
			CloseConditionBlock(endLoc);
			if (ConditionalRegion != null && !ConditionalRegion.ElseBlock.Begin.IsEmpty)
				ConditionalRegion.ElseBlock = new DomRegion(ConditionalRegion.ElseBlock.Begin, endLoc);
			AddCurRegion(result, directive.EndLine, directive.EndCol);
			if (ifBlocks.Count > 0)
			{
				var ifBlock = ifBlocks.Pop();
				var ifRegion = new DomRegion(ifBlock.Line, ifBlock.Col, directive.EndLine, directive.EndCol);
				result.Add(new FoldingRegion("#if " + ifBlock.Arg.Trim(), ifRegion, FoldType.UserRegion, false));
				// NOTE(review): elifBlocks/elseBlock appear to be fields filled
				// elsewhere — confirm they are populated for this #if nesting level.
				foreach (var d in elifBlocks)
				{
					var elIlfRegion = new DomRegion(d.Line, d.Col, directive.EndLine, directive.EndCol);
					// BUG FIX: label each #elif fold with its own condition
					// (d.Arg), not the condition of the enclosing #if.
					result.Add(new FoldingRegion("#elif " + d.Arg.Trim(), elIlfRegion, FoldType.UserRegion, false));
				}
				if (elseBlock != null)
				{
					// NOTE(review): this produces a zero-length region (start == end)
					// — looks like the end should be the #endif location; confirm.
					var elseBlockRegion = new DomRegion(elseBlock.Line, elseBlock.Col, elseBlock.Line, elseBlock.Col);
					result.Add(new FoldingRegion("#else", elseBlockRegion, FoldType.UserRegion, false));
				}
			}
			elseBlock = null;
			break;
		case Tokenizer.PreprocessorDirective.Define:
			result.Add(new PreProcessorDefine(directive.Arg, loc));
			break;
		case Tokenizer.PreprocessorDirective.Region:
			regions.Push(directive);
			break;
		case Tokenizer.PreprocessorDirective.Endregion:
			if (regions.Count > 0)
			{
				var start = regions.Pop();
				DomRegion dr = new DomRegion(start.Line, loc.Column, directive.EndLine, directive.EndCol);
				result.Add(new FoldingRegion(start.Arg, dr, FoldType.UserRegion, true));
			}
			break;
	}
}
// Creates a refactoring context bound to the given project and editor state.
// Reads the project's C# formatting and naming-convention policies so that
// refactorings produced through this context honour the user's settings.
public MDRefactoringContext (DotNetProject project, TextEditorData data, ParsedDocument parsedDocument, CSharpAstResolver resolver, TextLocation loc, CancellationToken cancellationToken = default (CancellationToken)) : base (resolver, cancellationToken)
{
	this.Project = project;
	this.TextEditor = data;
	this.ParsedDocument = parsedDocument;
	this.location = loc;

	// Project must be assigned before its policies are queried below.
	var formattingPolicy = Project.Policies.Get<CSharpFormattingPolicy> ();
	this.formattingOptions = formattingPolicy.CreateOptions ();

	var namingPolicy = Project.Policies.Get<NameConventionPolicy> ();
	Services.AddService (typeof(NamingConventionService), namingPolicy.CreateNRefactoryService ());
}
/// <summary>
/// Parses a D source file into a ParsedDModule: builds the AST, derives the
/// module name, collects parser errors, and registers comment folds and
/// TODO/FIXME-style task tags with the IDE.
/// </summary>
/// <param name="storeAst">When false, parsing is skipped entirely and null is returned.</param>
/// <param name="file">Path of the parsed file (used for module naming and cache eviction).</param>
/// <param name="content">Reader over the source text.</param>
/// <param name="prj">Owning project, if known; otherwise looked up from the current solution.</param>
public override ParsedDocument Parse(bool storeAst, string file, TextReader content, Project prj = null)
{
    if (!storeAst)
    {
        return(null);
    }

    ProjectFile pf = null;

    // Resolve the owning project/project file if the caller didn't supply one.
    if (prj == null)
    {
        var sln = Ide.IdeApp.ProjectOperations.CurrentSelectedSolution;
        if (sln != null)
        {
            foreach (var proj in sln.GetAllProjects())
            {
                if (proj.IsFileInProject(file))
                {
                    prj = proj;
                    pf = proj.GetProjectFile(file);
                    break;
                }
            }
        }
    }
    else if (prj.IsFileInProject(file))
    {
        pf = prj.GetProjectFile(file);
    }

    // HACK(?) The folds are parsed before the document gets loaded
    // - so reuse the last parsed document to save time
    // -- What if multiple docs are opened?
    var d = LastParsedMod as ParsedDModule;
    if (d != null && d.FileName == file)
    {
        LastParsedMod = null;
        return(d);
    }
    else
    {
        LastParsedMod = null;
    }

    var dprj = prj as AbstractDProject;

    // Remove obsolete ast from cache
    if (file != null)
    {
        GlobalParseCache.RemoveModule(file);
    }

    DModule ast;
    var doc = new ParsedDModule(file);

    var parser = DParser.Create(content);

    // Also put attention on non-ddoc comments; These will be used to generate foldable comment regions then
    parser.Lexer.OnlyEnlistDDocComments = false;

    // Parse the code
    try
    {
        ast = parser.Parse();
    }
    catch (TooManyErrorsException)
    {
        ast = parser.Document;
    }
    catch (System.Exception ex)
    {
        // FIX: match the sibling Parse() overload — report unexpected parser
        // failures as document errors instead of letting them propagate.
        doc.ErrorList.Add(new Error(ErrorType.Error, ex.Message));
        return(doc);
    }

    // FIX: parser.Document may be null after an aborted parse; the code below
    // dereferenced ast unconditionally (NullReferenceException). The sibling
    // overload already guards against this.
    if (ast == null)
    {
        return(doc);
    }

    // Update project owner information / Build appropriate module name
    if (string.IsNullOrEmpty(ast.ModuleName))
    {
        if (pf == null)
        {
            ast.ModuleName = file != null ? Path.GetFileNameWithoutExtension(file) : string.Empty;
        }
        else
        {
            ast.ModuleName = BuildModuleName(pf);
        }
    }
    ast.FileName = file;

    // Assign new ast to the ParsedDDocument object
    doc.DDom = ast;

    // Add parser errors to the parser output
    foreach (var parserError in parser.ParseErrors)
    {
        doc.ErrorList.Add(new Error(
                              ErrorType.Error,
                              parserError.Message,
                              parserError.Location.Line,
                              parserError.Location.Column));
    }

    #region Provide comment fold support by addin them to the IDE document object
    foreach (var cm in parser.Comments)
    {
        var c = new MonoDevelop.Ide.TypeSystem.Comment(cm.CommentText)
        {
            CommentStartsLine = cm.CommentStartsLine,
            CommentType = (cm.CommentType & D_Parser.Parser.Comment.Type.Block) != 0 ? CommentType.Block : CommentType.SingleLine,
            IsDocumentation = cm.CommentType.HasFlag(D_Parser.Parser.Comment.Type.Documentation),
        };

        // Pick the D open/close tag matching the comment's kind.
        if (c.CommentType == CommentType.SingleLine)
        {
            if (c.IsDocumentation)
            {
                c.OpenTag = "///";
            }
            else
            {
                c.OpenTag = "//";
            }
        }
        else
        {
            if (c.IsDocumentation)
            {
                c.OpenTag = "/**";
                c.ClosingTag = "*/";
            }
            else
            {
                c.OpenTag = "/*";
                c.ClosingTag = "*/";
            }
        }

        c.Region = new DomRegion(cm.StartPosition.Line, cm.StartPosition.Column, cm.EndPosition.Line, cm.EndPosition.Column);

        doc.Comments.Add(c);

        // Enlist TODO/FIXME/HACK etc. stuff in the IDE's project task list
        for (int i = CommentTag.SpecialCommentTags.Count - 1; i >= 0; i--)
        {
            if (c.Text.StartsWith(CommentTag.SpecialCommentTags[i].Tag))
            {
                doc.Add(new Tag(CommentTag.SpecialCommentTags[i].Tag, c.Text, c.Region));
                break;
            }
        }
    }
    #endregion

    // (Dead NRefactory-DOM serialization code removed; see version control history.)

    if (prj != null)
    {
        // Workaround for tags not being displayed
        var ctnt = TypeSystemService.GetProjectContentWrapper(prj);
        if (ctnt != null)
        {
            var tags = ctnt.GetExtensionObject <ProjectCommentTags>();
            if (tags != null)
            {
                tags.UpdateTags(prj, file, doc.TagComments);
            }
        }
    }
    return(doc);
}
// Builds the editor widget for a SourceEditorView: creates the primary text
// editor, wires focus tracking (used for split-view support), packs it into
// a decorated scrolled window, and starts the background parse-info worker.
// Teardown is hooked on vbox.Destroyed.
public SourceEditorWidget (SourceEditorView view)
{
	this.view = view;
	vbox.SetSizeRequest (32, 32);
	this.lastActiveEditor = this.textEditor = new MonoDevelop.SourceEditor.ExtensibleTextEditor (view);
	// Track which editor last had focus so commands target the right half of a split view.
	this.textEditor.TextArea.FocusInEvent += (o, s) => {
		lastActiveEditor = (ExtensibleTextEditor)((TextArea)o).GetTextEditorData ().Parent;
		view.FireCompletionContextChanged ();
	};
	// Only treat focus as lost when neither editor of a split view has it.
	this.textEditor.TextArea.FocusOutEvent += delegate {
		if (this.splittedTextEditor == null || !splittedTextEditor.TextArea.HasFocus)
			OnLostFocus ();
	};
	mainsw = new DecoratedScrolledWindow (this);
	mainsw.SetTextEditor (textEditor);
	vbox.PackStart (mainsw, true, true, 0);
	textEditorData = textEditor.GetTextEditorData ();
	ResetFocusChain ();
	UpdateLineCol ();
	//			this.IsClassBrowserVisible = this.widget.TextEditor.Options.EnableQuickFinder;
	vbox.BorderWidth = 0;
	vbox.Spacing = 0;
	vbox.Focused += delegate {
		UpdateLineCol ();
	};
	// Teardown: release timers, threads, widgets and references so the view
	// can be garbage collected.
	vbox.Destroyed += delegate {
		isDisposed = true;
		RemoveErrorUndelinesResetTimerId ();
		StopParseInfoThread ();
		KillWidgets ();
		foreach (var provider in quickTaskProvider.ToArray ()) {
			RemoveQuickTaskProvider (provider);
		}
		this.lastActiveEditor = null;
		this.splittedTextEditor = null;
		view = null;
		parsedDocument = null;
		//			IdeApp.Workbench.StatusBar.ClearCaretState ();
		if (parseInformationUpdaterWorkerThread != null) {
			parseInformationUpdaterWorkerThread.Dispose ();
			parseInformationUpdaterWorkerThread = null;
		}
	};
	vbox.ShowAll ();
	// Background worker that refreshes parse information without blocking the UI.
	parseInformationUpdaterWorkerThread = new BackgroundWorker ();
	parseInformationUpdaterWorkerThread.WorkerSupportsCancellation = true;
	parseInformationUpdaterWorkerThread.DoWork += HandleParseInformationUpdaterWorkerThreadDoWork;
}
// Attempts to start a brace-completion session: verifies the opening brace is
// directly before the tracked closing point, asks the language service whether
// completion applies, inserts the completion result, and applies any
// post-completion formatting — all inside one caret-preserving undo transaction.
// Returns false (without side effects) whenever a precondition fails.
private bool TryStart(CancellationToken cancellationToken)
{
    _threadingContext.ThrowIfNotOnUIThread();
    var closingSnapshotPoint = ClosingPoint.GetPoint(SubjectBuffer.CurrentSnapshot);

    if (closingSnapshotPoint.Position < 1)
    {
        Debug.Fail("The closing point was not found at the expected position.");
        return(false);
    }

    var openingSnapshotPoint = closingSnapshotPoint.Subtract(1);

    if (openingSnapshotPoint.GetChar() != OpeningBrace)
    {
        // there is a bug in editor brace completion engine on projection buffer that already fixed in vs_pro. until that is FIed to use
        // I will make this not to assert
        // Debug.Fail("The opening brace was not found at the expected position.");
        return(false);
    }

    OpeningPoint = SubjectBuffer.CurrentSnapshot.CreateTrackingPoint(openingSnapshotPoint, PointTrackingMode.Positive);
    var document = SubjectBuffer.CurrentSnapshot.GetOpenDocumentInCurrentContextWithChanges();

    if (document == null)
    {
        return(false);
    }

    var parsedDocument = ParsedDocument.CreateSynchronously(document, cancellationToken);
    var context = GetBraceCompletionContext(parsedDocument);

    // Note: completes synchronously unless Semantic Model is needed to determine the result:
    if (!_service.HasBraceCompletionAsync(context, document, cancellationToken).WaitAndGetResult(cancellationToken))
    {
        return(false);
    }

    var braceResult = _service.GetBraceCompletion(context);

    using var caretPreservingTransaction = new CaretPreservingEditTransaction(EditorFeaturesResources.Brace_Completion, _undoHistory, _editorOperations);

    // Apply the change to complete the brace.
    ApplyBraceCompletionResult(braceResult);

    // switch the closing point from positive to negative tracking so that the closing point stays against the closing brace
    ClosingPoint = SubjectBuffer.CurrentSnapshot.CreateTrackingPoint(ClosingPoint.GetPoint(SubjectBuffer.CurrentSnapshot), PointTrackingMode.Negative);

    // Re-read the context after the insertion and apply any follow-up edits
    // (e.g. indentation/formatting) the service wants.
    if (TryGetBraceCompletionContext(out var contextAfterStart, cancellationToken))
    {
        var indentationOptions = SubjectBuffer.GetIndentationOptions(_editorOptionsService, contextAfterStart.Document.LanguageServices, explicitFormat: false);
        var changesAfterStart = _service.GetTextChangesAfterCompletion(contextAfterStart, indentationOptions, cancellationToken);
        if (changesAfterStart != null)
        {
            ApplyBraceCompletionResult(changesAfterStart.Value);
        }
    }

    caretPreservingTransaction.Complete();
    return(true);
}
// Completion item for a symbol that requires adding an import/using directive.
// Captures the owning document, the shared symbol cache and the type to offer.
public ImportSymbolCompletionData (MonoDevelop.Ide.Gui.Document doc, ImportSymbolCache cache, IType type)
{
	this.doc = doc;
	this.type = type;
	this.cache = cache;
	this.unit = doc.ParsedDocument;
	// The ambience renders type names according to the document's language.
	this.ambience = AmbienceService.GetAmbience (doc.Editor.MimeType);
}
/// <summary>
/// Synchronous implementation for a command handler.
/// Converts a block-scoped namespace declaration to a file-scoped one and
/// returns the updated text together with the span of the inserted semicolon.
/// </summary>
/// <param name="document">Parsed document containing the declaration.</param>
/// <param name="namespaceDeclaration">The block namespace to convert.</param>
/// <param name="options">Formatting options used to compute indentation.</param>
public static (SourceText text, TextSpan semicolonSpan) ConvertNamespaceDeclaration(ParsedDocument document, NamespaceDeclarationSyntax namespaceDeclaration, SyntaxFormattingOptions options, CancellationToken cancellationToken)
{
    // Replace the block namespace with the file scoped namespace. The annotation
    // lets us find the rewritten namespace node again in the updated tree.
    var annotation = new SyntaxAnnotation();
    var(updatedRoot, semicolonSpan) = ReplaceWithFileScopedNamespace(document, namespaceDeclaration, annotation);
    var updatedDocument = document.WithChangedRoot(updatedRoot, cancellationToken);

    // Determine how much indentation we had inside the original block namespace. We'll attempt to remove
    // that much indentation from each applicable line after we convert the block namespace to a file scoped
    // namespace.
    var indentation = GetIndentation(document, namespaceDeclaration, options, cancellationToken);

    // No indentation to strip — the text after root replacement is already final.
    if (indentation == null)
    {
        return(updatedDocument.Text, semicolonSpan);
    }

    // Now, find the file scoped namespace in the updated doc and go and dedent every line if applicable.
    return(DedentNamespace(updatedDocument, indentation, annotation, cancellationToken));
}
// Event handler: caches the freshly parsed document and notifies listeners
// that the parse result changed.
void UpdateParsedDocument(object sender, EventArgs args)
{
	var parsed = DocumentContext.ParsedDocument;
	lastCU = parsed;
	OnParsedDocumentUpdated();
}
// Paste command handler that fixes up string-literal contents after a paste:
// lets the real paste happen first, then — if the paste landed entirely inside
// a single string expression and produced fixable content — rolls the buffer
// back and re-applies corrected edits inside a single undo transaction.
public void ExecuteCommand(PasteCommandArgs args, Action nextCommandHandler, CommandExecutionContext executionContext)
{
    Contract.ThrowIfFalse(_threadingContext.HasMainThread);

    var textView = args.TextView;
    var subjectBuffer = args.SubjectBuffer;

    var selectionsBeforePaste = textView.Selection.GetSnapshotSpansOnBuffer(subjectBuffer);
    var snapshotBeforePaste = subjectBuffer.CurrentSnapshot;

    // Always let the real paste go through. That way we always have a version of the document that doesn't
    // include our changes that we can undo back to.
    nextCommandHandler();

    // If we don't even see any changes from the paste, there's nothing we can do.
    if (snapshotBeforePaste.Version.Changes is null)
    {
        return;
    }

    // If the user has the option off, then don't bother doing anything once we've sent the paste through.
    if (!_globalOptions.GetOption(FeatureOnOffOptions.AutomaticallyFixStringContentsOnPaste, LanguageNames.CSharp))
    {
        return;
    }

    // if we're not even sure where the user caret/selection is on this buffer, we can't proceed.
    if (selectionsBeforePaste.Count == 0)
    {
        return;
    }

    var snapshotAfterPaste = subjectBuffer.CurrentSnapshot;

    // If there were multiple changes that already happened, then don't make any changes. Some other component
    // already did something advanced.
    if (snapshotAfterPaste.Version != snapshotBeforePaste.Version.Next)
    {
        return;
    }

    // Have to even be in a C# doc to be able to have special space processing here.
    var documentBeforePaste = snapshotBeforePaste.GetOpenDocumentInCurrentContextWithChanges();
    var documentAfterPaste = snapshotAfterPaste.GetOpenDocumentInCurrentContextWithChanges();
    if (documentBeforePaste == null || documentAfterPaste == null)
    {
        return;
    }

    var cancellationToken = executionContext.OperationContext.UserCancellationToken;

    var parsedDocumentBeforePaste = ParsedDocument.CreateSynchronously(documentBeforePaste, cancellationToken);

    // When pasting, only do anything special if the user selections were entirely inside a single string
    // token/expression. Otherwise, we have a multi-selection across token kinds which will be extremely
    // complex to try to reconcile.
    var stringExpressionBeforePaste = TryGetCompatibleContainingStringExpression(parsedDocumentBeforePaste, selectionsBeforePaste);
    if (stringExpressionBeforePaste == null)
    {
        return;
    }

    // Also ensure that all the changes the editor actually applied were inside a single string
    // token/expression. If the editor decided to make changes outside of the string, we definitely do not want
    // to do anything here.
    var stringExpressionBeforePasteFromChanges = TryGetCompatibleContainingStringExpression(
        parsedDocumentBeforePaste,
        new NormalizedSnapshotSpanCollection(snapshotBeforePaste, snapshotBeforePaste.Version.Changes.Select(c => c.OldSpan)));
    if (stringExpressionBeforePaste != stringExpressionBeforePasteFromChanges)
    {
        return;
    }

    var textChanges = GetEdits(cancellationToken);

    // If we didn't get any viable changes back, don't do anything.
    if (textChanges.IsDefaultOrEmpty)
    {
        return;
    }

    var newTextAfterChanges = snapshotBeforePaste.AsText().WithChanges(textChanges);

    // If we end up making the same changes as what the paste did, then no need to proceed.
    if (ContentsAreSame(snapshotBeforePaste, snapshotAfterPaste, stringExpressionBeforePaste, newTextAfterChanges))
    {
        return;
    }

    // Create two edits to make the change. The first restores the buffer to the original snapshot (effectively
    // undoing the first set of changes). Then the second actually applies the change.
    //
    // Do this as direct edits, passing 'EditOptions.None' for the options, as we want to control the edits
    // precisely and don't want any strange interpretation of where the caret should end up. Other options
    // (like DefaultMinimalChange) will attempt to diff/merge edits oddly sometimes which can lead the caret
    // ending up before/after some merged change, which will no longer match the behavior of precise pastes.
    //
    // Wrap this all as a transaction so that these two edits appear to be one single change. This also allows
    // the user to do a single 'undo' that gets them back to the original paste made at the start of this
    // method.
    using var transaction = new CaretPreservingEditTransaction(
        CSharpEditorResources.Fixing_string_literal_after_paste,
        textView, _undoHistoryRegistry, _editorOperationsFactoryService);

    {
        // First edit: revert the raw paste.
        var edit = subjectBuffer.CreateEdit(EditOptions.None, reiteratedVersionNumber: null, editTag: null);
        foreach (var change in snapshotBeforePaste.Version.Changes)
        {
            edit.Replace(change.NewSpan, change.OldText);
        }
        edit.Apply();
    }

    {
        // Second edit: clear the original selections and apply the corrected changes.
        var edit = subjectBuffer.CreateEdit(EditOptions.None, reiteratedVersionNumber: null, editTag: null);
        foreach (var selection in selectionsBeforePaste)
        {
            edit.Replace(selection.Span, "");
        }

        foreach (var change in textChanges)
        {
            edit.Replace(change.Span.ToSpan(), change.NewText);
        }

        edit.Apply();
    }

    transaction.Complete();
    return;

    // Computes the corrected edits for the paste, preferring the known-copy-source
    // fast path and falling back to heuristics for unknown sources.
    ImmutableArray <TextChange> GetEdits(CancellationToken cancellationToken)
    {
        var newLine = textView.Options.GetNewLineCharacter();
        var indentationWhitespace = DetermineIndentationWhitespace(
            parsedDocumentBeforePaste, subjectBuffer, snapshotBeforePaste.AsText(), stringExpressionBeforePaste, cancellationToken);

        // See if this is a paste of the last copy that we heard about.
        var edits = TryGetEditsFromKnownCopySource(newLine, indentationWhitespace);
        if (!edits.IsDefaultOrEmpty)
        {
            return(edits);
        }

        var pasteWasSuccessful = PasteWasSuccessful(
            snapshotBeforePaste, snapshotAfterPaste, documentAfterPaste, stringExpressionBeforePaste, cancellationToken);

        // If not, then just go through the fallback code path that applies more heuristics.
        var unknownPasteProcessor = new UnknownSourcePasteProcessor(
            newLine, indentationWhitespace,
            snapshotBeforePaste, snapshotAfterPaste,
            documentBeforePaste, documentAfterPaste,
            stringExpressionBeforePaste, pasteWasSuccessful);
        return(unknownPasteProcessor.GetEdits());
    }

    ImmutableArray <TextChange> TryGetEditsFromKnownCopySource(
        string newLine, string indentationWhitespace)
    {
        // For simplicity, we only support smart copy/paste when we are pasting into a single contiguous region.
        if (selectionsBeforePaste.Count != 1)
        {
            return(default);
            // NOTE(review): SOURCE appears truncated here — the remainder of this local function is missing.
/// <summary>
/// Builds the breadcrumb path entry describing which user region (if any)
/// contains the given location. Returns null when the document has no regions
/// or region retrieval fails/was cancelled.
/// </summary>
async static Task<PathEntry> GetRegionEntry (ParsedDocument unit, DocumentLocation loc)
{
	// BUG FIX: the null check used to run *after* unit.GetUserRegionsAsync()
	// had already been awaited, so a null unit threw NullReferenceException
	// instead of returning null.
	if (unit == null)
		return null;

	PathEntry entry;
	FoldingRegion reg;
	try {
		var regions = await unit.GetUserRegionsAsync ().ConfigureAwait (false);
		if (!regions.Any ())
			return null;
		// The innermost (last listed) region containing the caret wins.
		reg = regions.LastOrDefault (r => r.Region.Contains (loc));
	} catch (AggregateException) {
		return null;
	} catch (OperationCanceledException) {
		return null;
	}
	if (reg == null) {
		entry = new PathEntry (GettextCatalog.GetString ("No region"));
	} else {
		entry = new PathEntry (CompilationUnitDataProvider.Pixbuf, GLib.Markup.EscapeText (reg.Name));
	}
	entry.Position = EntryPosition.Right;
	return entry;
}
/// <summary>
/// Hook for language-specific adjustment of the formatting range's end point;
/// this implementation keeps the requested end point unchanged.
/// </summary>
protected override int AdjustFormattingEndPoint(ParsedDocument document, int startPoint, int endPoint) => endPoint;
/// <summary>
/// Parses a D source file into a ParsedDModule (older DProject variant):
/// builds the AST, evicts stale entries from the project's local file cache,
/// collects parser errors, and registers comment folds and task tags.
/// </summary>
/// <param name="storeAst">When false, parsing is skipped entirely and null is returned.</param>
/// <param name="file">Path of the parsed file.</param>
/// <param name="content">Reader over the source text.</param>
/// <param name="prj">Owning project, if known; otherwise looked up from the current solution.</param>
public ParsedDocument Parse(bool storeAst, string file, TextReader content, Project prj = null)
{
    if (!storeAst)
    {
        return(null);
    }

    ProjectFile pf = null;
    var modName = "";

    // Resolve the owning project/project file if the caller didn't supply one.
    if (prj == null)
    {
        var sln = Ide.IdeApp.ProjectOperations.CurrentSelectedSolution;
        if (sln != null)
        {
            foreach (var proj in sln.GetAllProjects())
            {
                if (proj.IsFileInProject(file))
                {
                    prj = proj;
                    pf = proj.GetProjectFile(file);
                    modName = BuildModuleName(pf);
                    break;
                }
            }
        }
    }
    else if (prj.IsFileInProject(file))
    {
        pf = prj.GetProjectFile(file);
        modName = BuildModuleName(pf);
    }

    // HACK(?) The folds are parsed before the document gets loaded
    // - so reuse the last parsed document to save time
    // -- What if multiple docs are opened?
    if (LastParsedMod is ParsedDModule && LastParsedMod.FileName == file)
    {
        var d = (ParsedDModule)LastParsedMod;

        // Build appropriate module name
        if (pf != null)
        {
            d.DDom.ModuleName = BuildModuleName(pf);
        }

        LastParsedMod = null;
        return(d);
    }
    else
    {
        LastParsedMod = null;
    }

    var dprj = prj as DProject;

    // Remove obsolete ast from cache (including its UFCS-cache entries).
    IAbstractSyntaxTree ast = null;

    if (dprj != null)
    {
        ast = dprj.LocalFileCache[modName];
        if (ast != null)
        {
            dprj.LocalFileCache.Remove(ast);
            dprj.LocalFileCache.UfcsCache.RemoveModuleItems(ast);
            ast = null;
        }
    }

    var doc = new ParsedDModule(file);

    var parser = DParser.Create(content);

    // Also put attention on non-ddoc comments; These will be used to generate foldable comment regions then
    parser.Lexer.OnlyEnlistDDocComments = false;

    // Parse the code
    // NOTE(review): unlike the newer overloads, there is no try/catch here —
    // a TooManyErrorsException from the parser would propagate. Confirm intended.
    ast = parser.Parse();

    // Update project owner information / Build appropriate module name
    ast.ModuleName = modName;
    ast.FileName = file;

    // Assign new ast to the ParsedDDocument object
    doc.DDom = ast;

    // Add parser errors to the parser output
    foreach (var parserError in parser.ParseErrors)
    {
        doc.ErrorList.Add(new Error(
                              ErrorType.Error,
                              parserError.Message,
                              parserError.Location.Line,
                              parserError.Location.Column));
    }

    #region Provide comment fold support by addin them to the IDE document object
    foreach (var cm in parser.TrackerVariables.Comments)
    {
        var c = new MonoDevelop.Ide.TypeSystem.Comment(cm.CommentText);

        c.CommentType = cm.CommentType.HasFlag(D_Parser.Parser.Comment.Type.Block) ? CommentType.Block : CommentType.SingleLine;
        c.IsDocumentation = cm.CommentType.HasFlag(D_Parser.Parser.Comment.Type.Documentation);

        // Pick the D open/close tag matching the comment's kind.
        if (c.CommentType == CommentType.SingleLine)
        {
            if (c.IsDocumentation)
            {
                c.OpenTag = "///";
            }
            else
            {
                c.OpenTag = "//";
            }
        }
        else
        {
            if (c.IsDocumentation)
            {
                c.OpenTag = "/**";
                c.ClosingTag = "*/";
            }
            else
            {
                c.OpenTag = "/*";
                c.ClosingTag = "*/";
            }
        }

        // NOTE(review): "Column - 2" presumably rewinds over the 2-char comment
        // opener so the fold covers the delimiters — confirm against the lexer.
        c.Region = new DomRegion(cm.StartPosition.Line, cm.StartPosition.Column - 2, cm.EndPosition.Line, cm.EndPosition.Column);

        doc.Comments.Add(c);

        // Enlist TODO/FIXME/HACK etc. stuff in the IDE's project task list
        foreach (var sct in CommentTag.SpecialCommentTags)
        {
            if (c.Text.StartsWith(sct.Tag))
            {
                doc.Add(new Tag(sct.Tag, c.Text, c.Region));
                break;
            }
        }
    }
    #endregion

    #region Serialize to NRefactory Dom structure
    /*
     * var cu = new CompilationUnit(file);
     * doc.CompilationUnit = cu;
     *
     * var global = new DomType(cu, ClassType.Class,
     *                          Modifiers.Public | Modifiers.Partial,
     *                          "(global)",
     *                          new DomLocation(),
     *                          ast.ModuleName,
     *                          new DomRegion());
     * cu.Add(global);
     *
     * foreach (var n in ast)
     * {
     *  var ch = ConvertDParserToDomNode(n, doc);
     *
     *  if (ch is DomField || ch is DomMethod)
     *      global.Add(ch as IMember);
     *  else
     *      cu.Add(ch as IType);
     * }
     */
    #endregion

    return(doc);
}
// Resolves the type bound to the current component (targetObject). If the
// recorded class name no longer resolves (e.g. the class was renamed), tries
// to guess the new name from the classes declared in classFile; on a unique
// match it updates and saves the project, on ambiguity it asks the user, and
// otherwise it shows an error. Returns null when no binding can be established.
public IUnresolvedTypeDefinition GetClass(bool getUserClass)
{
    if (targetObject == null)
    {
        return(null);
    }

    var cls = gproject.FindClass(className, getUserClass);

    if (cls != null)
    {
        return(cls);
    }

    // The class name may have changed. Try to guess the new name.
    var matches = new List <IUnresolvedTypeDefinition> ();
    ParsedDocument unit = null;

    var ctx = gproject.GetParserContext();
    var doc = TypeSystemService.ParseFile(project, classFile);

    if (doc != null)
    {
        unit = doc;
        foreach (var fcls in unit.TopLevelTypeDefinitions)
        {
            // A candidate matches when its resolved form can be bound to the component.
            if (IsValidClass(fcls.Resolve(project), targetObject))
            {
                matches.Add(fcls);
            }
        }
    }

    // If found the class, just return it
    if (matches.Count == 1)
    {
        cls = matches [0];
        className = cls.FullName;
        targetObject.Name = className;
        gproject.Save(true);
        return(cls);
    }

    // If not found, warn the user.
    if (unit != null && unit.TopLevelTypeDefinitions.Count > 0)
    {
        // Ambiguous or no unique rename candidate — let the user pick.
        using (SelectRenamedClassDialog dialog = new SelectRenamedClassDialog(unit.TopLevelTypeDefinitions.Select(c => c.Resolve(project))))
        {
            if (dialog.Run())
            {
                className = dialog.SelectedClass;
                if (className == null)
                {
                    return(null);
                }
                else
                {
                    targetObject.Name = className;
                    gproject.Save(true);
                    return(gproject.FindClass(className));
                }
            }
        }
    }
    else
    {
        MessageService.ShowError(GettextCatalog.GetString("The class bound to the component '{0}' could not be found. This may be due to syntax errors in the source code file.", GetObjectName(targetObject)));
    }

    return(null);
}
// Called whenever the document has been re-parsed: remembers the new parse
// result and schedules an outline refresh, throttled to at most one per 3s.
void UpdateDocumentOutline (object sender, EventArgs args)
{
	lastCU = Document.ParsedDocument;

	// A refresh is already pending — the scheduled timeout will pick up lastCU.
	if (refreshingOutline)
		return;

	refreshingOutline = true;
	refillOutlineStoreId = GLib.Timeout.Add (3000, RefillOutlineStore);
}
/// <summary>
/// Parses a D source file into a ParsedDModule (DProject cache variant):
/// builds the AST, evicts stale entries from the project (or include/compiler)
/// caches, collects parser errors, and registers comment folds and task tags.
/// </summary>
/// <param name="storeAst">When false, parsing is skipped entirely and null is returned.</param>
/// <param name="file">Path of the parsed file.</param>
/// <param name="content">Reader over the source text.</param>
/// <param name="prj">Owning project, if known; otherwise looked up from the current solution.</param>
public override ParsedDocument Parse(bool storeAst, string file, TextReader content, Project prj = null)
{
	if (!storeAst)
		return null;

	ProjectFile pf = null;

	// Resolve the owning project/project file if the caller didn't supply one.
	if (prj == null)
	{
		var sln = Ide.IdeApp.ProjectOperations.CurrentSelectedSolution;
		if (sln != null)
			foreach (var proj in sln.GetAllProjects())
				if (proj.IsFileInProject(file))
				{
					prj = proj;
					pf = proj.GetProjectFile(file);
					break;
				}
	}
	else if(prj.IsFileInProject(file))
	{
		pf = prj.GetProjectFile(file);
	}

	// HACK(?) The folds are parsed before the document gets loaded
	// - so reuse the last parsed document to save time
	// -- What if multiple docs are opened?
	if (LastParsedMod is ParsedDModule && LastParsedMod.FileName == file)
	{
		var d = LastParsedMod as ParsedDModule;
		LastParsedMod = null;
		return d;
	}
	else
		LastParsedMod = null;

	var dprj = prj as DProject;

	// Remove obsolete ast from cache
	DModule ast = null;
	if (dprj != null)
	{
		ast = dprj.LocalFileCache.GetModuleByFileName(file, prj.BaseDirectory) as DModule;
		if (ast != null)
		{
			dprj.LocalFileCache.Remove(ast);
			ast = null;
		}
	}

	var doc = new ParsedDModule(file);

	var parser = DParser.Create(content);

	// Also put attention on non-ddoc comments; These will be used to generate foldable comment regions then
	parser.Lexer.OnlyEnlistDDocComments = false;

	// Parse the code
	try
	{
		ast = parser.Parse();
	}
	// FIX: the exception variable was declared but never used (CS0168).
	catch (TooManyErrorsException)
	{
		ast = parser.Document;
	}

	// FIX: parser.Document may be null after an aborted parse; the code below
	// dereferenced ast unconditionally (NullReferenceException). The sibling
	// overload already guards against this.
	if (ast == null)
		return doc;

	// Update project owner information / Build appropriate module name
	if(string.IsNullOrEmpty(ast.ModuleName))
	{
		if(pf == null)
			ast.ModuleName = Path.GetFileNameWithoutExtension(file);
		else
			ast.ModuleName = BuildModuleName(pf);
	}
	ast.FileName = file;

	// Assign new ast to the ParsedDDocument object
	doc.DDom = ast;

	// Add parser errors to the parser output
	foreach (var parserError in parser.ParseErrors)
		doc.ErrorList.Add(new Error(
			ErrorType.Error,
			parserError.Message,
			parserError.Location.Line,
			parserError.Location.Column));

	#region Provide comment fold support by addin them to the IDE document object
	foreach (var cm in parser.TrackerVariables.Comments)
	{
		var c = new MonoDevelop.Ide.TypeSystem.Comment(cm.CommentText);

		c.CommentType = cm.CommentType.HasFlag(D_Parser.Parser.Comment.Type.Block) ? CommentType.Block : CommentType.SingleLine;
		c.IsDocumentation = cm.CommentType.HasFlag(D_Parser.Parser.Comment.Type.Documentation);

		// Pick the D open/close tag matching the comment's kind.
		if (c.CommentType == CommentType.SingleLine)
		{
			if (c.IsDocumentation)
				c.OpenTag = "///";
			else
				c.OpenTag = "//";
		}
		else
		{
			if (c.IsDocumentation)
			{
				c.OpenTag = "/**";
				c.ClosingTag = "*/";
			}
			else
			{
				c.OpenTag = "/*";
				c.ClosingTag = "*/";
			}
		}

		c.Region = new DomRegion(cm.StartPosition.Line, cm.StartPosition.Column, cm.EndPosition.Line, cm.EndPosition.Column);

		doc.Comments.Add(c);

		// Enlist TODO/FIXME/HACK etc. stuff in the IDE's project task list
		foreach (var sct in CommentTag.SpecialCommentTags)
			if (c.Text.StartsWith(sct.Tag))
			{
				doc.Add(new Tag(sct.Tag, c.Text, c.Region));
				break;
			}
	}

	// Workaround for tags not being displayed
	if (prj != null)
	{
		var ctnt = TypeSystemService.GetProjectContentWrapper(prj);
		if (ctnt != null)
		{
			var tags = ctnt.GetExtensionObject<ProjectCommentTags>();
			if (tags != null)
				tags.UpdateTags(prj, file, doc.TagComments);
		}
	}
	else
	{
		// If the file is not associated with any project,
		// check if the file is located in an imported/included directory
		// and update the respective cache.
		// Note: ParseCache.Remove() also affects the Ufcs cache,
		// but when adding it again, the UfcsCache has to be updated manually
		var caches = new List<ParseCache>();

		foreach(var p in Ide.IdeApp.Workspace.GetAllProjects())
			if (p is DProject)
			{
				dprj = p as DProject;
				if (dprj.LocalIncludeCache.Remove(file))
					caches.Add(dprj.LocalIncludeCache);
				if (dprj.LocalFileCache.Remove(file))
					caches.Add(dprj.LocalFileCache);
			}

		foreach (var cmp in DCompilerService.Instance.Compilers)
		{
			if (cmp.ParseCache.Remove(file))
				caches.Add(cmp.ParseCache);
		}

		if(caches.Count > 0)
		{
			var ctxt = Completion.DCodeCompletionSupport.CreateCurrentContext();
			ctxt.CurrentContext.Set((IBlockNode)null);
			foreach (var cch in caches)
			{
				//FIXME: Adjust the target module name and/or copy the ast head
				cch.AddOrUpdate(ast);
				cch.UfcsCache.CacheModuleMethods(ast, ctxt);
			}
		}
	}
	#endregion

	// (Dead NRefactory-DOM serialization code removed; see version control history.)

	return doc;
}
/// <summary>
/// Recursively fills the outline tree store with the members, nested types and
/// constructors of <paramref name="cls"/> that belong to the file part
/// <paramref name="part"/>, grouping them under any user #region folds that
/// enclose them.
/// </summary>
/// <param name="store">Tree store being populated.</param>
/// <param name="parent">Iter under which this class's children are appended.</param>
/// <param name="parsedDocument">Parse result supplying the user regions.</param>
/// <param name="cls">Resolved type whose contents are listed.</param>
/// <param name="part">Unresolved part used to filter members to this file/region.</param>
static void AddTreeClassContents (TreeStore store, TreeIter parent, ParsedDocument parsedDocument, ITypeDefinition cls, IUnresolvedTypeDefinition part)
{
	List<object> items = new List<object> ();
	// Delegates have no listable body members.
	if (cls.Kind != TypeKind.Delegate) {
		// Only take members declared in this part's file and inside this part's region.
		foreach (var o in cls.GetMembers (m => part.Region.FileName == m.Region.FileName && part.Region.IsInside (m.Region.Begin))) {
			items.Add (o);
		}
		foreach (var o in cls.GetNestedTypes (m => part.Region.FileName == m.Region.FileName && part.Region.IsInside (m.Region.Begin))) {
			// GetNestedTypes may return deeper nesting; keep direct children only.
			if (o.DeclaringType == cls)
				items.Add (o);
		}
		foreach (var o in cls.GetConstructors (m => part.Region.FileName == m.Region.FileName && part.Region.IsInside (m.Region.Begin))) {
			// Skip compiler-generated (synthetic) constructors.
			if (o.IsSynthetic)
				continue;
			items.Add (o);
		}
	}
	items.Sort (ClassOutlineNodeComparer.CompareRegion);
	List<FoldingRegion> regions = new List<FoldingRegion> ();
	foreach (FoldingRegion fr in parsedDocument.UserRegions)
		//check regions inside class
		if (cls.BodyRegion.IsInside (fr.Region.Begin) && cls.BodyRegion.IsInside (fr.Region.End))
			regions.Add (fr);
	regions.Sort (delegate(FoldingRegion x, FoldingRegion y) { return x.Region.Begin.CompareTo (y.Region.Begin); });
	// The enumerator is walked in lockstep with the sorted member list; a null
	// enumerator means "no regions remain".
	IEnumerator<FoldingRegion> regionEnumerator = regions.GetEnumerator ();
	if (!regionEnumerator.MoveNext ())
		regionEnumerator = null;
	FoldingRegion currentRegion = null;
	TreeIter currentParent = parent;
	foreach (object item in items) {
		//no regions left; quick exit
		if (regionEnumerator != null) {
			DomRegion itemRegion = ClassOutlineNodeComparer.GetRegion (item);
			//advance to a region that could potentially contain this member
			while (regionEnumerator != null && !OuterEndsAfterInner (regionEnumerator.Current.Region, itemRegion))
				if (!regionEnumerator.MoveNext ())
					regionEnumerator = null;
			//if member is within region, make sure it's the current parent.
			//If not, move target iter back to class parent
			if (regionEnumerator != null && regionEnumerator.Current.Region.IsInside (itemRegion.Begin)) {
				if (currentRegion != regionEnumerator.Current) {
					// First member seen inside this region: add the region node lazily.
					currentParent = store.AppendValues (parent, regionEnumerator.Current);
					currentRegion = regionEnumerator.Current;
				}
			} else {
				currentParent = parent;
			}
		}
		TreeIter childIter = store.AppendValues (currentParent, item);
		// Recurse into nested type definitions.
		if (item is ITypeDefinition)
			AddTreeClassContents (store, childIter, parsedDocument, (ITypeDefinition)item, part);
	}
}
static void AddToCache (ParsedDocument info) { if (info == null) throw new ArgumentNullException ("info"); lock (parsings) { if (parsings.Count >= MAX_PARSING_CACHE_SIZE) { DateTime tim = DateTime.MaxValue; string toDelete = null; foreach (KeyValuePair<string, ParsingCacheEntry> pce in parsings) { DateTime ptim = pce.Value.AccessTime; if (ptim < tim) { tim = ptim; toDelete = pce.Key; } } parsings.Remove (toDelete); } ParsingCacheEntry en = new ParsingCacheEntry(); en.ParseInformation = info; en.AccessTime = DateTime.Now; parsings [info.FileName] = en; } }
/// <summary>
/// Background worker: refreshes error underlines, the preprocessor symbol set
/// and the fold segments of the current document from a new parse result.
/// </summary>
/// <param name="firstTime">True on document open; only then are default fold states applied.</param>
/// <param name="parsedDocument">The fresh parse result; ignored when null.</param>
/// <param name="token">Cancellation token checked between iterations.</param>
void HandleParseInformationUpdaterWorkerThreadDoWork (bool firstTime, ParsedDocument parsedDocument, CancellationToken token = default(CancellationToken))
{
	var doc = Document;
	if (doc == null || parsedDocument == null)
		return;
	UpdateErrorUndelines (parsedDocument);
	if (!options.ShowFoldMargin)
		return;
	// don't update parsed documents that contain errors - the foldings from there may be invalid.
	if (parsedDocument.HasErrors)
		return;
	try {
		List<FoldSegment> foldSegments = new List<FoldSegment> ();
		// Resync the cached preprocessor symbol set only when it differs from
		// the document's defines (count mismatch or any unknown define).
		bool updateSymbols = parsedDocument.Defines.Count != symbols.Count;
		if (!updateSymbols) {
			foreach (PreProcessorDefine define in parsedDocument.Defines) {
				if (token.IsCancellationRequested)
					return;
				if (!symbols.Contains (define.Define)) {
					updateSymbols = true;
					break;
				}
			}
		}
		if (updateSymbols) {
			symbols.Clear ();
			foreach (PreProcessorDefine define in parsedDocument.Defines) {
				symbols.Add (define.Define);
			}
		}
		foreach (FoldingRegion region in parsedDocument.Foldings) {
			if (token.IsCancellationRequested)
				return;
			FoldingType type = FoldingType.None;
			bool setFolded = false;
			bool folded = false;
			//decide whether the regions should be folded by default
			switch (region.Type) {
			case FoldType.Member:
				type = FoldingType.TypeMember;
				break;
			case FoldType.Type:
				type = FoldingType.TypeDefinition;
				break;
			case FoldType.UserRegion:
				type = FoldingType.Region;
				setFolded = options.DefaultRegionsFolding;
				folded = true;
				break;
			case FoldType.Comment:
				type = FoldingType.Comment;
				setFolded = options.DefaultCommentFolding;
				folded = true;
				break;
			case FoldType.CommentInsideMember:
				type = FoldingType.Comment;
				setFolded = options.DefaultCommentFolding;
				folded = false;
				break;
			case FoldType.Undefined:
				setFolded = true;
				folded = region.IsFoldedByDefault;
				break;
			}
			//add the region
			FoldSegment marker = AddMarker (foldSegments, region.Name, region.Region, type);
			//and, if necessary, set its fold state
			if (marker != null && setFolded && firstTime) {
				// only fold on document open, later added folds are NOT folded by default.
				marker.IsFolded = folded;
				continue;
			}
			// Never leave the fold containing the caret collapsed.
			if (marker != null && region.Region.IsInside (textEditorData.Caret.Line, textEditorData.Caret.Column))
				marker.IsFolded = false;
		}
		doc.UpdateFoldSegments (foldSegments, false, true, token);
		if (reloadSettings) {
			reloadSettings = false;
			// UI work must happen on the GUI thread.
			Application.Invoke (delegate {
				if (isDisposed)
					return;
				view.LoadSettings ();
				mainsw.QueueDraw ();
			});
		}
	} catch (Exception ex) {
		LoggingService.LogError ("Unhandled exception in ParseInformationUpdaterWorkerThread", ex);
	}
}
protected override void Initialize () { base.Initialize (); // Delay the execution of UpdateOwnerProjects since it may end calling Document.AttachToProject, // which shouldn't be called while the extension chain is being initialized. // TODO: Move handling of owner projects to Document Application.Invoke (delegate { UpdateOwnerProjects (); }); var parser = new XmlParser (CreateRootState (), false); tracker = new DocumentStateTracker<XmlParser> (parser, Editor); DocumentContext.DocumentParsed += UpdateParsedDocument; Editor.CaretPositionChanged += HandleCaretPositionChanged; if (DocumentContext.ParsedDocument != null) { lastCU = DocumentContext.ParsedDocument; OnParsedDocumentUpdated (); } if (IdeApp.Workspace != null) { IdeApp.Workspace.FileAddedToProject += HandleProjectChanged; IdeApp.Workspace.FileRemovedFromProject += HandleProjectChanged; } }
public void throws_if_null_arguments_are_passed(ParsedDocument parsedDocument, IContentAnalyzer contentAnalyzer) { Assert.Throws<ArgumentNullException>( () => { Target.AnalyzeParsedContent(parsedDocument, contentAnalyzer); }); }
/// <summary>
/// Computes every location inside <paramref name="type"/>'s body where new code
/// (a member, region, etc.) may be inserted: after the opening brace, after each
/// member and nested type, before the closing brace, and around user #regions.
/// Results are sorted by document location.
/// </summary>
/// <param name="data">Editor buffer the type lives in.</param>
/// <param name="parsedDocument">Parse result supplying the user regions.</param>
/// <param name="type">Unresolved type definition whose body is inspected.</param>
/// <returns>Sorted list of candidate insertion points (empty if the body is empty or not found).</returns>
public static List<InsertionPoint> GetInsertionPoints (TextEditorData data, ParsedDocument parsedDocument, IUnresolvedTypeDefinition type)
{
	if (data == null)
		throw new ArgumentNullException ("data");
	if (parsedDocument == null)
		throw new ArgumentNullException ("parsedDocument");
	if (type == null)
		throw new ArgumentNullException ("type");
	// update type from parsed document, since this is always newer.
	//type = parsedDocument.GetInnermostTypeDefinition (type.GetLocation ()) ?? type;
	List<InsertionPoint> result = new List<InsertionPoint> ();
	int offset = data.LocationToOffset (type.Region.Begin);
	if (offset < 0 || type.BodyRegion.IsEmpty)
		return result;
	// Scan forward from the type header to the opening brace of the body.
	while (offset < data.Length && data.GetCharAt (offset) != '{') {
		offset++;
	}
	var realStartLocation = data.OffsetToLocation (offset);
	result.Add (GetInsertionPosition (data.Document, realStartLocation.Line, realStartLocation.Column));
	result [0].LineBefore = NewLineInsertion.None;
	foreach (var member in type.Members) {
		TextLocation domLocation = member.BodyRegion.End;
		// Members without a body region (e.g. fields): insert at end of their declaration line.
		if (domLocation.Line <= 0) {
			DocumentLine lineSegment = data.GetLine (member.Region.BeginLine);
			if (lineSegment == null)
				continue;
			domLocation = new TextLocation (member.Region.BeginLine, lineSegment.Length + 1);
		}
		result.Add (GetInsertionPosition (data.Document, domLocation.Line, domLocation.Column));
	}
	foreach (var nestedType in type.NestedTypes) {
		TextLocation domLocation = nestedType.BodyRegion.End;
		// Same fallback as above for nested types without a body region.
		if (domLocation.Line <= 0) {
			DocumentLine lineSegment = data.GetLine (nestedType.Region.BeginLine);
			if (lineSegment == null)
				continue;
			domLocation = new TextLocation (nestedType.Region.BeginLine, lineSegment.Length + 1);
		}
		result.Add (GetInsertionPosition (data.Document, domLocation.Line, domLocation.Column));
	}
	result [result.Count - 1].LineAfter = NewLineInsertion.None;
	CheckStartPoint (data.Document, result [0], result.Count == 1);
	if (result.Count > 1) {
		// Replace the last point with one just before the closing brace,
		// trimming trailing whitespace to find the real insertion column.
		result.RemoveAt (result.Count - 1);
		NewLineInsertion insertLine;
		var lineBefore = data.GetLine (type.BodyRegion.EndLine - 1);
		// A fully-blank line before the closing brace means no extra newline is needed.
		if (lineBefore != null && lineBefore.Length == lineBefore.GetIndentation (data.Document).Length) {
			insertLine = NewLineInsertion.None;
		} else {
			insertLine = NewLineInsertion.Eol;
		}
		// search for line start
		int col = type.BodyRegion.EndColumn - 1;
		var line = data.GetLine (type.BodyRegion.EndLine);
		if (line != null) {
			while (col > 1 && char.IsWhiteSpace (data.GetCharAt (line.Offset + col - 2)))
				col--;
		}
		result.Add (new InsertionPoint (new DocumentLocation (type.BodyRegion.EndLine, col), insertLine, NewLineInsertion.Eol));
		CheckEndPoint (data.Document, result [result.Count - 1], result.Count == 1);
	}
	// Offer insertion just inside the start and around the end of every #region in the body.
	foreach (var region in parsedDocument.UserRegions.Where (r => type.BodyRegion.IsInside (r.Region.Begin))) {
		result.Add (new InsertionPoint (new DocumentLocation (region.Region.BeginLine + 1, 1), NewLineInsertion.Eol, NewLineInsertion.Eol));
		result.Add (new InsertionPoint (new DocumentLocation (region.Region.EndLine, 1), NewLineInsertion.Eol, NewLineInsertion.Eol));
		result.Add (new InsertionPoint (new DocumentLocation (region.Region.EndLine + 1, 1), NewLineInsertion.Eol, NewLineInsertion.Eol));
	}
	result.Sort ((left, right) => left.Location.CompareTo (right.Location));
	return result;
}
IEnumerable <MemberReference> SearchMember(INode member, ProjectDom dom, FilePath fileName, Mono.TextEditor.TextEditorData editor, Mono.TextEditor.Document buildDocument, List <LocalDocumentInfo.OffsetInfo> offsetInfos, ParsedDocument parsedDocument) { var resolver = new NRefactoryResolver(dom, parsedDocument.CompilationUnit, ICSharpCode.OldNRefactory.SupportedLanguage.CSharp, editor, fileName); FindMemberAstVisitor visitor = new FindMemberAstVisitor(buildDocument, member); visitor.IncludeXmlDocumentation = IncludeDocumentation; visitor.RunVisitor(resolver); foreach (var result in visitor.FoundReferences) { var offsetInfo = offsetInfos.FirstOrDefault(info => info.ToOffset <= result.Position && result.Position < info.ToOffset + info.Length); if (offsetInfo == null) { continue; } var offset = offsetInfo.FromOffset + result.Position - offsetInfo.ToOffset; var loc = editor.OffsetToLocation(offset); yield return(new MemberReference(null, fileName, offset, loc.Line, loc.Column, result.Name, null)); } }
/// <summary>
/// Re-parses the current document on a thread-pool thread after the analysis
/// document is open, updating <c>parsedDocument</c> and raising
/// <c>OnDocumentParsed</c> on the main thread for every project that contains
/// the file. Cancellation is driven by <c>parseTokenSource</c>.
/// NOTE(review): async void — exceptions thrown here are unobservable; kept
/// because the signature is part of the existing call pattern.
/// </summary>
async void StartReparseThreadDelayed(FilePath currentParseFile)
{
	var editor = Editor;
	if (editor == null || editor.IsDisposed) {
		return;
	}
	// Don't directly parse the document because doing it at every key press is
	// very inefficient. Do it after a small delay instead, so several changes can
	// be parsed at the same time.
	await EnsureAnalysisDocumentIsOpen();
	// Snapshot the text on the UI thread so the worker sees a stable buffer.
	var currentParseText = editor.CreateSnapshot();
	string mimeType = editor.MimeType;
	CancelOldParsing();
	var token = parseTokenSource.Token;
	var currentProject = Project;
	var projectsContainingFile = currentProject?.ParentSolution?.GetProjectsContainingFile(currentParseFile);
	// Fall back to just the current project (possibly null) when the file is in no project.
	if (projectsContainingFile == null || !projectsContainingFile.Any()) {
		projectsContainingFile = new Project [] { currentProject };
	}
	ThreadPool.QueueUserWorkItem(delegate {
		foreach (var project in projectsContainingFile) {
			var projectFile = project?.GetProjectFile(currentParseFile);
			var options = new ParseOptions {
				Project = project,
				Content = currentParseText,
				FileName = currentParseFile,
				OldParsedDocument = parsedDocument,
				RoslynDocument = AnalysisDocument,
				IsAdhocProject = false
			};
			if (projectFile != null) {
				options.BuildAction = projectFile.BuildAction;
			}
			if (project != null && typeSystemService.CanParseProjections(project, mimeType, currentParseFile)) {
				// Projection-capable parser: result carries projected editors too.
				typeSystemService.ParseProjection(options, mimeType, token).ContinueWith((task, state) => {
					if (token.IsCancellationRequested) {
						return;
					}
					// Only the continuation for the current project publishes results.
					if (currentProject != state) {
						return;
					}
					Runtime.RunInMainThread(() => {
						// this may be called after the document has closed, in that case the OnDocumentParsed event shouldn't be invoked.
						var taskResult = task.Result;
						if (IsDisposed || taskResult == null || token.IsCancellationRequested) {
							return;
						}
						this.parsedDocument = taskResult.ParsedDocument;
						var projections = taskResult.Projections;
						foreach (var p2 in projections) {
							p2.CreateProjectedEditor(this);
						}
						Editor.SetOrUpdateProjections(this, projections, taskResult.DisabledProjectionFeatures);
						OnDocumentParsed(EventArgs.Empty);
					});
				}, project, TaskContinuationOptions.OnlyOnRanToCompletion);
			} else if (project == null || currentProject == project) {
				// Plain file parse path (no projections).
				typeSystemService.ParseFile(options, mimeType, token).ContinueWith(task => {
					if (token.IsCancellationRequested) {
						return;
					}
					Runtime.RunInMainThread(() => {
						// this may be called after the document has closed, in that case the OnDocumentParsed event shouldn't be invoked.
						if (IsDisposed || task.Result == null || token.IsCancellationRequested) {
							return;
						}
						this.parsedDocument = task.Result;
						OnDocumentParsed(EventArgs.Empty);
					});
				}, TaskContinuationOptions.OnlyOnRanToCompletion);
			}
		}
	});
}

/// <summary>
/// Wires editor events: invalidates the parse result and schedules a reparse on
/// text changes (deferred to the end of an atomic undo group), and attaches the
/// extension chain when this context is not yet the editor's.
/// </summary>
void InitializeEditor()
{
	Editor.TextChanged += (o, a) => {
		if (parsedDocument != null) {
			parsedDocument.IsInvalid = true;
		}
		// Inside an atomic undo group, just remember that an edit happened;
		// the reparse is triggered once the group ends.
		if (Editor.IsInAtomicUndo) {
			wasEdited = true;
		} else {
			StartReparseThread();
		}
	};
	Editor.BeginAtomicUndoOperation += delegate {
		wasEdited = false;
	};
	Editor.EndAtomicUndoOperation += delegate {
		if (wasEdited) {
			StartReparseThread();
		}
	};
	if (Editor.DocumentContext != this) {
		Editor.InitializeExtensionChain(this);
	}
	UpdateTextBufferRegistration();
}
protected override void RefillOutlineStore (ParsedDocument doc, Gtk.TreeStore store) { var htmlRoot = razorDocument.PageInfo.HtmlRoot; var razorRoot = razorDocument.PageInfo.RazorRoot; var blocks = new List<Block> (); GetBlocks (razorRoot, blocks); BuildTreeChildren (store, Gtk.TreeIter.Zero, htmlRoot, blocks); }
public IType GetClass(bool getUserClass) { if (targetObject == null) { return(null); } IType cls = gproject.FindClass(className, getUserClass); if (cls != null) { return(cls); } // The class name may have changed. Try to guess the new name. ArrayList matches = new ArrayList(); ICompilationUnit unit = null; ProjectDom ctx = gproject.GetParserContext(); ParsedDocument doc = ProjectDomService.Parse(project, classFile); if (doc != null && doc.CompilationUnit != null) { unit = doc.CompilationUnit; foreach (IType fcls in unit.Types) { if (IsValidClass(ctx, fcls, targetObject)) { matches.Add(fcls); } } } // If found the class, just return it if (matches.Count == 1) { cls = (IType)matches [0]; className = cls.FullName; targetObject.Name = className; gproject.Save(true); return(cls); } // If not found, warn the user. if (unit != null && unit.Types.Count > 0) { using (SelectRenamedClassDialog dialog = new SelectRenamedClassDialog(unit.Types)) { if (dialog.Run()) { className = dialog.SelectedClass; if (className == null) { return(null); } else { targetObject.Name = className; gproject.Save(true); return(gproject.FindClass(className)); } } } } else { MessageService.ShowError(GettextCatalog.GetString("The class bound to the component '{0}' could not be found. This may be due to syntax errors in the source code file.", GetObjectName(targetObject))); } return(null); }
void UpdateParsedDocument (object sender, EventArgs args) { lastCU = DocumentContext.ParsedDocument; OnParsedDocumentUpdated (); }
/// <summary>
/// Reference-search test harness. The <paramref name="test"/> string is annotated
/// inline: '$' marks the member to search for and '@' marks each expected
/// reference location (markers are stripped from the parsed text). The method
/// parses the cleaned source, locates the member (or uses
/// <paramref name="localVariable"/> directly), runs the reference finder and
/// fails with a detailed diff when found and expected references disagree.
/// </summary>
/// <param name="test">Annotated C# source.</param>
/// <param name="localVariable">Optional local variable to search for instead of a member at '$'.</param>
void RunTest(string test, LocalVariable localVariable)
{
	StringBuilder testText = new StringBuilder();
	List <DomLocation> expectedReferences = new List <DomLocation> ();
	DomLocation memberLocation = DomLocation.Empty;
	int line = 1, col = 1;
	// Strip the marker characters while recording their positions.
	foreach (char ch in test) {
		switch (ch) {
		case '$':
			memberLocation = new DomLocation(line, col);
			break;
		case '@':
			expectedReferences.Add(new DomLocation(line, col));
			break;
		default:
			col++;
			if (ch == '\n') {
				col = 1;
				line++;
			}
			testText.Append(ch);
			break;
		}
	}
	// Minimal in-memory project + DOM so the resolver has a context.
	DotNetProject project = new DotNetAssemblyProject("C#");
	project.FileName = "/tmp/a.csproj";
	SimpleProjectDom dom = new SimpleProjectDom();
	dom.Project = project;
	ProjectDomService.RegisterDom(dom, "Project:" + project.FileName);
	ParsedDocument parsedDocument = parser.Parse(null, "a.cs", testText.ToString());
	dom.Add(parsedDocument.CompilationUnit);
	TestViewContent testViewContent = new TestViewContent();
	testViewContent.Name = "a.cs";
	testViewContent.Text = testText.ToString();
	// RefactorerContext ctx = new RefactorerContext (dom, new DumbTextFileProvider(testViewContent), null);
	NRefactoryResolver resolver = new NRefactoryResolver(dom, parsedDocument.CompilationUnit, testViewContent.Data, "a.cs");
	// Determine the member to search for: explicit local variable, the member
	// at the '$' marker, or a resolved identifier "a" as last resort.
	SearchMemberVisitor smv = new SearchMemberVisitor(memberLocation.Line);
	if (localVariable != null) {
		((LocalVariable)localVariable).DeclaringMember = parsedDocument.CompilationUnit.GetMemberAt(expectedReferences[0]);
		smv.FoundMember = localVariable;
	} else {
		smv.Visit(parsedDocument.CompilationUnit, null);
		if (smv.FoundMember == null) {
			ResolveResult resolveResult = resolver.ResolveIdentifier("a", memberLocation);
			if (resolveResult is LocalVariableResolveResult) {
				smv.FoundMember = ((LocalVariableResolveResult)resolveResult).LocalVariable;
			}
		}
	}
	Assert.IsNotNull(smv.FoundMember, "Member to search not found.");
	if (smv.FoundMember is IType) {
		smv.FoundMember = dom.GetType(((IType)smv.FoundMember).FullName, ((IType)smv.FoundMember).TypeParameters.Count, true);
	}
	FindMemberAstVisitor astVisitor = new FindMemberAstVisitor(testViewContent.GetTextEditorData().Document, resolver, smv.FoundMember);
	astVisitor.RunVisitor();
	// Merge-compare the two sorted location lists, reporting mismatches.
	int i = 0, j = 0;
	StringBuilder errorText = new StringBuilder();
	Document doc = new Document();
	doc.Text = testViewContent.Text;
	while (i < expectedReferences.Count && j < astVisitor.FoundReferences.Count) {
		if (expectedReferences[i].Line != astVisitor.FoundReferences[j].Line || expectedReferences[i].Column != astVisitor.FoundReferences[j].Column) {
			if (expectedReferences[i].Line < astVisitor.FoundReferences[j].Line) {
				errorText.Append("Reference at line " + expectedReferences[i].Line + " not found.");
				errorText.AppendLine();
				errorText.Append(doc.GetTextAt(doc.GetLine(expectedReferences[i].Line)).Replace('\t', ' '));
				errorText.AppendLine();
				errorText.Append(new string (' ', expectedReferences[i].Column));
				errorText.Append('^');
				errorText.AppendLine();
				i++;
				continue;
			}
			if (expectedReferences[i].Line > astVisitor.FoundReferences[j].Line) {
				errorText.Append("Found unexpected Reference at line " + astVisitor.FoundReferences[j].Line);
				errorText.AppendLine();
				errorText.Append(doc.GetTextAt(doc.GetLine(astVisitor.FoundReferences[j].Line)).Replace('\t', ' '));
				errorText.AppendLine();
				errorText.Append(new string (' ', astVisitor.FoundReferences[j].Column));
				errorText.Append('^');
				errorText.AppendLine();
				j++;
				continue;
			}
			errorText.Append("Column mismatch at line " + astVisitor.FoundReferences[j].Line + " was: " + astVisitor.FoundReferences[j].Column + " should be:" + expectedReferences[i].Column);
			errorText.AppendLine();
			errorText.Append(doc.GetTextAt(doc.GetLine(astVisitor.FoundReferences[j].Line)).Replace('\t', ' '));
			errorText.Append(new string (' ', expectedReferences[i].Column));
			errorText.Append('^');
			errorText.AppendLine();
			errorText.Append(new string (' ', astVisitor.FoundReferences[j].Column));
			errorText.Append('^');
			errorText.AppendLine();
		}
		i++;
		j++;
	}
	// Remaining expected references that were never found.
	while (i < expectedReferences.Count) {
		errorText.Append("Reference at line " + expectedReferences[i].Line + " not found.");
		errorText.AppendLine();
		errorText.Append(doc.GetTextAt(doc.GetLine(expectedReferences[i].Line)).Replace('\t', ' '));
		errorText.AppendLine();
		// FIX: was expectedReferences[j] — 'j' indexes the found list and may be
		// out of range here; the caret must be drawn at the expected column 'i'.
		errorText.Append(new string (' ', expectedReferences[i].Column));
		errorText.Append('^');
		errorText.AppendLine();
		i++;
	}
	// Remaining found references that were not expected.
	while (j < astVisitor.FoundReferences.Count) {
		errorText.Append("Found unexpected Reference at line " + astVisitor.FoundReferences[j].Line);
		errorText.AppendLine();
		errorText.Append(doc.GetTextAt(doc.GetLine(astVisitor.FoundReferences[j].Line)).Replace('\t', ' '));
		errorText.AppendLine();
		// FIX: was astVisitor.FoundReferences[i] — 'i' indexes the expected list
		// and may be out of range here; the caret belongs at found column 'j'.
		errorText.Append(new string (' ', astVisitor.FoundReferences[j].Column));
		errorText.Append('^');
		errorText.AppendLine();
		j++;
	}
	if (errorText.Length > 0) {
		Assert.Fail("Member to find:" + smv.FoundMember + Environment.NewLine + errorText.ToString() + Environment.NewLine + "found : " + astVisitor.FoundReferences.Count + " expected:" + expectedReferences.Count);
	}
}
protected virtual void RefillOutlineStore (ParsedDocument doc, TreeStore store) { XDocument xdoc = ((XmlParsedDocument)doc).XDocument; if (xdoc == null) return; BuildTreeChildren (store, TreeIter.Zero, xdoc); }
public void throws_if_null_arguments_are_passed(ParsedDocument parsedDocument, IContentAnalyzer contentAnalyzer) { Assert.That(() => Target.AnalyzeParsedContent(parsedDocument, contentAnalyzer), Throws.TypeOf <ArgumentNullException>()); }
void UpdateQuickTasks (ParsedDocument doc) { tasks.Clear (); foreach (var cmt in doc.TagComments) { var newTask = new QuickTask (cmt.Text, cmt.Region.Begin, Severity.Hint); tasks.Add (newTask); } foreach (var error in doc.Errors) { var newTask = new QuickTask (error.Message, error.Region.Begin, error.ErrorType == ErrorType.Error ? Severity.Error : Severity.Warning); tasks.Add (newTask); } OnTasksUpdated (EventArgs.Empty); }
/// <summary>
/// Re-parses the current document on a thread-pool thread for every project
/// containing the file, publishing the result (and any projections) back on
/// the GUI thread via <c>Application.Invoke</c>. Honors the ad-hoc project if
/// one exists and skips publishing once the view is closed or parsing is
/// cancelled.
/// NOTE(review): async void — exceptions thrown here are unobservable; kept
/// because the signature matches the existing call pattern.
/// </summary>
async void StartReparseThreadDelayed(FilePath currentParseFile)
{
	var editor = Editor;
	if (editor == null || editor.IsDisposed) {
		return;
	}
	// Don't directly parse the document because doing it at every key press is
	// very inefficient. Do it after a small delay instead, so several changes can
	// be parsed at the same time.
	await EnsureAnalysisDocumentIsOpen();
	// Snapshot the text on the UI thread so the worker sees a stable buffer.
	var currentParseText = editor.CreateSnapshot();
	string mimeType = editor.MimeType;
	CancelOldParsing();
	var token = parseTokenSource.Token;
	// Prefer the ad-hoc project (for files outside any real project).
	var currentProject = adhocProject ?? Project;
	var projectsContainingFile = currentProject?.ParentSolution?.GetProjectsContainingFile(currentParseFile);
	if (projectsContainingFile == null || !projectsContainingFile.Any()) {
		projectsContainingFile = new Project [] { currentProject };
	}
	ThreadPool.QueueUserWorkItem(delegate {
		foreach (var project in projectsContainingFile) {
			var projectFile = project?.GetProjectFile(currentParseFile);
			TypeSystemService.AddSkippedFile(currentParseFile);
			var options = new ParseOptions {
				Project = project,
				Content = currentParseText,
				FileName = currentParseFile,
				OldParsedDocument = parsedDocument,
				RoslynDocument = AnalysisDocument,
				IsAdhocProject = IsAdHocProject
			};
			if (projectFile != null) {
				options.BuildAction = projectFile.BuildAction;
			}
			if (project != null && TypeSystemService.CanParseProjections(project, mimeType, currentParseFile)) {
				// Projection-capable parse: result carries projected editors too.
				TypeSystemService.ParseProjection(options, mimeType, token).ContinueWith((task, state) => {
					if (token.IsCancellationRequested) {
						return;
					}
					// Only the continuation of the current project publishes results.
					if (currentProject != state) {
						return;
					}
					Application.Invoke((o, args) => {
						// this may be called after the document has closed, in that case the OnDocumentParsed event shouldn't be invoked.
						var taskResult = task.Result;
						if (isClosed || taskResult == null || token.IsCancellationRequested) {
							return;
						}
						this.parsedDocument = taskResult.ParsedDocument;
						var projections = taskResult.Projections;
						foreach (var p2 in projections) {
							p2.CreateProjectedEditor(this);
						}
						Editor.SetOrUpdateProjections(this, projections, taskResult.DisabledProjectionFeatures);
						OnDocumentParsed(EventArgs.Empty);
					});
				}, project, TaskContinuationOptions.OnlyOnRanToCompletion);
			} else if (project == null || currentProject == project) {
				// Plain file parse path (no projections).
				TypeSystemService.ParseFile(options, mimeType, token).ContinueWith(task => {
					if (token.IsCancellationRequested) {
						return;
					}
					Application.Invoke((o, args) => {
						// this may be called after the document has closed, in that case the OnDocumentParsed event shouldn't be invoked.
						if (isClosed || task.Result == null || token.IsCancellationRequested) {
							return;
						}
						this.parsedDocument = task.Result;
						OnDocumentParsed(EventArgs.Empty);
					});
				}, TaskContinuationOptions.OnlyOnRanToCompletion);
			}
		}
	});
}
internal void UpdateParsedDocument (ParsedDocument document) { if (this.isDisposed || document == null || this.view == null) return; SetParsedDocument (document, parsedDocument != null); }
/// <summary>
/// Inserts a <c>using nsName;</c> directive inside the namespace block that
/// contains <paramref name="caretLocation"/>. The directive is placed after the
/// last non-namespace using in that block (or right after the namespace's
/// opening line when there is none). Falls back to
/// <c>AddGlobalNamespaceImport</c> when the caret is not inside a namespace.
/// Keeps the caret position stable when editing through a text editor.
/// </summary>
public override void AddLocalNamespaceImport(RefactorerContext ctx, string fileName, string nsName, DomLocation caretLocation)
{
	IEditableTextFile file = ctx.GetFile(fileName);
	int pos = 0;
	ParsedDocument parsedDocument = parser.Parse(ctx.ParserContext, fileName, file.Text);
	StringBuilder text = new StringBuilder();
	string indent = "";
	if (parsedDocument.CompilationUnit != null) {
		// Find the namespace-scoped using region that contains the caret.
		IUsing containingUsing = null;
		foreach (IUsing u in parsedDocument.CompilationUnit.Usings) {
			if (u.IsFromNamespace && u.Region.Contains(caretLocation)) {
				containingUsing = u;
			}
		}
		if (containingUsing != null) {
			indent = GetLineIndent(file, containingUsing.Region.Start.Line);
			// Last plain using inside the containing namespace (stop at the
			// first nested namespace-scoped using).
			IUsing lastUsing = null;
			foreach (IUsing u in parsedDocument.CompilationUnit.Usings) {
				if (u == containingUsing) {
					continue;
				}
				if (containingUsing.Region.Contains(u.Region)) {
					if (u.IsFromNamespace) {
						break;
					}
					lastUsing = u;
				}
			}
			if (lastUsing != null) {
				pos = file.GetPositionFromLineColumn(lastUsing.Region.End.Line, lastUsing.Region.End.Column);
			} else {
				pos = file.GetPositionFromLineColumn(containingUsing.ValidRegion.Start.Line, containingUsing.ValidRegion.Start.Column);
				// search line end
				// NOTE(review): this scans for "\n\r" rather than the usual "\r\n"
				// order — looks intentional for this buffer's conventions, but verify.
				while (pos < file.Length) {
					char ch = file.GetCharAt(pos);
					if (ch == '\n') {
						if (file.GetCharAt(pos + 1) == '\r') {
							pos++;
						}
						break;
					} else if (ch == '\r') {
						break;
					}
					pos++;
				}
			}
		} else {
			// Caret is outside every namespace: defer to the global import path.
			AddGlobalNamespaceImport(ctx, fileName, nsName);
			return;
		}
	}
	// Build the directive text; pos == 0 means "insert at file start".
	if (pos != 0) {
		text.AppendLine();
	}
	text.Append(indent);
	text.Append("\t");
	text.Append("using ");
	text.Append(nsName);
	text.Append(";");
	if (file is Mono.TextEditor.ITextEditorDataProvider) {
		Mono.TextEditor.TextEditorData data = ((Mono.TextEditor.ITextEditorDataProvider)file).GetTextEditorData();
		if (pos == 0) {
			text.Append(data.EolMarker);
		}
		// Shift the caret by the inserted length when inserting before it.
		int caretOffset = data.Caret.Offset;
		int insertedChars = data.Insert(pos, text.ToString());
		if (pos < caretOffset) {
			data.Caret.Offset = caretOffset + insertedChars;
		}
	} else {
		if (pos == 0) {
			text.AppendLine();
		}
		file.InsertText(pos, text.ToString());
	}
}
void UpdateErrorUndelines (ParsedDocument parsedDocument) { if (!options.UnderlineErrors || parsedDocument == null) return; Application.Invoke (delegate { if (!quickTaskProvider.Contains (this)) AddQuickTaskProvider (this); RemoveErrorUndelinesResetTimerId (); const uint timeout = 500; resetTimerId = GLib.Timeout.Add (timeout, delegate { if (!this.isDisposed) { Document doc = this.TextEditor != null ? this.TextEditor.Document : null; if (doc != null) { RemoveErrorUnderlines (doc); // Else we underline the error if (parsedDocument.Errors != null) { foreach (var error in parsedDocument.Errors) UnderLineError (doc, error); } } } resetTimerId = 0; return false; }); UpdateQuickTasks (parsedDocument); }); }
/// <summary>
/// Recomputes the editor's fold segments from <paramref name="parsedDocument"/>.
/// On first load, default fold states are applied and the segments are set
/// synchronously; afterwards the update is marshalled to the GUI thread.
/// Skipped for null documents, hidden fold margins, documents flagged
/// SkipFoldings, or documents with errors (their foldings may be invalid).
/// </summary>
/// <param name="Editor">Editor receiving the fold segments.</param>
/// <param name="parsedDocument">Source of the folding regions.</param>
/// <param name="caretLocation">Folds containing the caret are kept expanded.</param>
/// <param name="firstTime">True on document open; only then are default fold states applied.</param>
/// <param name="token">Cancellation token checked between regions.</param>
internal static async Task UpdateFoldings(TextEditor Editor, ParsedDocument parsedDocument, DocumentLocation caretLocation, bool firstTime = false, CancellationToken token = default(CancellationToken))
{
	if (parsedDocument == null || !Editor.Options.ShowFoldMargin || parsedDocument.Flags.HasFlag(ParsedDocumentFlags.SkipFoldings)) {
		return;
	}
	// don't update parsed documents that contain errors - the foldings from there may be invalid.
	if (await parsedDocument.HasErrorsAsync(token)) {
		return;
	}
	try {
		var foldSegments = new List <IFoldSegment> ();
		foreach (FoldingRegion region in await parsedDocument.GetFoldingsAsync(token)) {
			if (token.IsCancellationRequested) {
				return;
			}
			var type = FoldingType.Unknown;
			bool setFolded = false;
			bool folded = false;
			//decide whether the regions should be folded by default
			switch (region.Type) {
			case FoldType.Member:
				type = FoldingType.TypeMember;
				break;
			case FoldType.Type:
				type = FoldingType.TypeDefinition;
				break;
			case FoldType.UserRegion:
				type = FoldingType.Region;
				setFolded = DefaultSourceEditorOptions.Instance.DefaultRegionsFolding;
				folded = true;
				break;
			case FoldType.Comment:
				type = FoldingType.Comment;
				setFolded = DefaultSourceEditorOptions.Instance.DefaultCommentFolding;
				folded = true;
				break;
			case FoldType.CommentInsideMember:
				type = FoldingType.Comment;
				setFolded = DefaultSourceEditorOptions.Instance.DefaultCommentFolding;
				folded = false;
				break;
			case FoldType.Undefined:
				setFolded = true;
				folded = region.IsFoldedByDefault;
				break;
			}
			// Build the fold segment spanning the region's character range.
			var start = Editor.LocationToOffset(region.Region.Begin);
			var end = Editor.LocationToOffset(region.Region.End);
			var marker = Editor.CreateFoldSegment(start, end - start);
			foldSegments.Add(marker);
			marker.CollapsedText = region.Name;
			marker.FoldingType = type;
			//and, if necessary, set its fold state
			if (marker != null && setFolded && firstTime) {
				// only fold on document open, later added folds are NOT folded by default.
				marker.IsCollapsed = folded;
				continue;
			}
			// Never leave the fold containing the caret collapsed.
			if (marker != null && region.Region.Contains(caretLocation.Line, caretLocation.Column)) {
				marker.IsCollapsed = false;
			}
		}
		if (firstTime) {
			Editor.SetFoldings(foldSegments);
		} else {
			// Later updates go through the GUI thread.
			Application.Invoke((o, args) => {
				if (!token.IsCancellationRequested) {
					Editor.SetFoldings(foldSegments);
				}
			});
		}
	} catch (OperationCanceledException) {
	} catch (Exception ex) {
		LoggingService.LogError("Unhandled exception in ParseInformationUpdaterWorkerThread", ex);
	}
}
static PathEntry GetRegionEntry (ParsedDocument unit, Mono.TextEditor.DocumentLocation loc) { PathEntry entry; if (!unit.UserRegions.Any ()) return null; var reg = unit.UserRegions.Where (r => r.Region.IsInside (loc)).LastOrDefault (); if (reg == null) { entry = new PathEntry (GettextCatalog.GetString ("No region")); } else { entry = new PathEntry (CompilationUnitDataProvider.Pixbuf, GLib.Markup.EscapeText (reg.Name)); } entry.Position = EntryPosition.Right; return entry; }
public override bool CanProvideBraceCompletion(char brace, int openingPosition, ParsedDocument document, CancellationToken cancellationToken) { // Only potentially valid for string literal completion if not in an interpolated string brace completion context. if (OpeningBrace == brace && InterpolatedStringBraceCompletionService.IsPositionInInterpolatedStringContext(document, openingPosition, cancellationToken)) { return(false); } return(base.CanProvideBraceCompletion(brace, openingPosition, document, cancellationToken)); }