public static PooledObject<List<T>> CreateList<T>() => SharedPools.Default<List<T>>().GetPooledObject();
public static PooledObject<Dictionary<Checksum, object>> CreateResultSet()
{
    return SharedPools.Default<Dictionary<Checksum, object>>().GetPooledObject();
}
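The two factories above hand back a PooledObject<T> wrapper, which is designed for a using statement: disposing the wrapper clears the underlying collection and returns it to the pool. A minimal sketch of that pattern, assuming only the SharedPools/PooledObject API visible in these snippets (the caller and its names are hypothetical):

public static int CountDistinct(IEnumerable<string> values)
{
    // Borrow a scratch list for the duration of the using block;
    // Dispose clears it and hands it back to the shared pool.
    using (var pooled = SharedPools.Default<List<string>>().GetPooledObject())
    {
        var list = pooled.Object;
        foreach (var value in values)
        {
            if (!list.Contains(value))
            {
                list.Add(value);
            }
        }

        // Copy the answer out before the list is recycled on dispose.
        return list.Count;
    }
}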
public async Task<IEnumerable<Diagnostic>> GetSemanticDiagnosticsAsync(DiagnosticAnalyzer analyzer)
{
    // Validate inputs before using them below.
    Contract.ThrowIfNull(this.document);
    Contract.ThrowIfFalse(analyzer.SupportsSemanticDiagnosticAnalysis(this));

    var model = await document.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);

    using (var pooledObject = SharedPools.Default<List<Diagnostic>>().GetPooledObject())
    {
        var diagnostics = pooledObject.Object;

        // Stateless semantic analyzers:
        //  1) ISemanticModelAnalyzer/IDocumentBasedDiagnosticAnalyzer
        //  2) ISymbolAnalyzer
        //  3) ISyntaxNodeAnalyzer
        cancellationToken.ThrowIfCancellationRequested();

        var documentAnalyzer = analyzer as DocumentDiagnosticAnalyzer;
        if (documentAnalyzer != null)
        {
            try
            {
                await documentAnalyzer.AnalyzeSemanticsAsync(this.document, diagnostics.Add, this.cancellationToken).ConfigureAwait(false);
            }
            catch (Exception e) when (CatchAnalyzerException(e, analyzer))
            {
                var exceptionDiagnostics = AnalyzerExceptionToDiagnostics(analyzer, e, cancellationToken);
                return model == null ? exceptionDiagnostics : GetFilteredDocumentDiagnostics(exceptionDiagnostics, model.Compilation);
            }
        }
        else
        {
            var analyzerActions = await GetAnalyzerActionsAsync(analyzer, diagnostics.Add).ConfigureAwait(false);
            if (analyzerActions != null)
            {
                // SemanticModel actions.
                if (analyzerActions.SemanticModelActionsCount > 0)
                {
                    AnalyzerDriverHelper.ExecuteSemanticModelActions(analyzerActions, model, this.analyzerOptions, diagnostics.Add, CatchAnalyzerException, cancellationToken);
                }

                var compilation = model.Compilation;

                // Symbol actions.
                if (analyzerActions.SymbolActionsCount > 0)
                {
                    var symbols = this.GetSymbolsToAnalyze(model);
                    AnalyzerDriverHelper.ExecuteSymbolActions(analyzerActions, symbols, compilation, this.analyzerOptions, diagnostics.Add, CatchAnalyzerException, this.cancellationToken);
                }

                if (this.SyntaxNodeAnalyzerService != null)
                {
                    // SyntaxNode actions.
                    if (analyzerActions.SyntaxNodeActionsCount > 0)
                    {
                        this.SyntaxNodeAnalyzerService.ExecuteSyntaxNodeActions(analyzerActions, GetSyntaxNodesToAnalyze(), model, this.analyzerOptions, diagnostics.Add, CatchAnalyzerException, cancellationToken);
                    }

                    // CodeBlockStart, CodeBlockEnd, and generated SyntaxNode actions.
                    if (analyzerActions.CodeBlockStartActionsCount > 0 || analyzerActions.CodeBlockEndActionsCount > 0)
                    {
                        this.SyntaxNodeAnalyzerService.ExecuteCodeBlockActions(analyzerActions, this.GetDeclarationInfos(model), model, this.analyzerOptions, diagnostics.Add, CatchAnalyzerException, cancellationToken);
                    }
                }
            }
        }

        var result = model == null ? diagnostics : GetFilteredDocumentDiagnostics(diagnostics, model.Compilation);
        return result.ToImmutableArray();
    }
}
public AbstractFormattingRule CreateRule(Document document, int position)
{
    if (!(document.Project.Solution.Workspace is VisualStudioWorkspaceImpl visualStudioWorkspace))
    {
        return NoOpFormattingRule.Instance;
    }

    var containedDocument = visualStudioWorkspace.TryGetContainedDocument(document.Id);
    if (containedDocument == null)
    {
        return NoOpFormattingRule.Instance;
    }

    var textContainer = document.GetTextSynchronously(CancellationToken.None).Container;
    if (!(textContainer.TryGetTextBuffer() is IProjectionBuffer))
    {
        return NoOpFormattingRule.Instance;
    }

    using var pooledObject = SharedPools.Default<List<TextSpan>>().GetPooledObject();

    var spans = pooledObject.Object;
    var root = document.GetSyntaxRootSynchronously(CancellationToken.None);
    var text = root.SyntaxTree.GetText(CancellationToken.None);

    spans.AddRange(containedDocument.GetEditorVisibleSpans());

    for (var i = 0; i < spans.Count; i++)
    {
        var visibleSpan = spans[i];
        if (visibleSpan.IntersectsWith(position) || visibleSpan.End == position)
        {
            return containedDocument.GetBaseIndentationRule(root, text, spans, i);
        }
    }

    // in razor (especially in @helper tag), it is possible for us to be asked for next line of visible span
    var line = text.Lines.GetLineFromPosition(position);
    if (line.LineNumber > 0)
    {
        line = text.Lines[line.LineNumber - 1];

        // find one that intersects with previous line
        for (var i = 0; i < spans.Count; i++)
        {
            var visibleSpan = spans[i];
            if (visibleSpan.IntersectsWith(line.Span))
            {
                return containedDocument.GetBaseIndentationRule(root, text, spans, i);
            }
        }
    }

    FatalError.ReportWithoutCrash(
        new InvalidOperationException($"Can't find an intersection. Visible spans count: {spans.Count}"));

    return NoOpFormattingRule.Instance;
}
private async Task<Project> UpdateDocumentsAsync(
    Project project,
    IEnumerable<TextDocumentState> existingTextDocumentStates,
    ChecksumCollection oldChecksums,
    ChecksumCollection newChecksums,
    Func<Solution, ImmutableArray<DocumentInfo>, Solution> addDocuments,
    Func<Solution, DocumentId, Solution> removeDocument)
{
    using var olds = SharedPools.Default<HashSet<Checksum>>().GetPooledObject();
    using var news = SharedPools.Default<HashSet<Checksum>>().GetPooledObject();

    olds.Object.UnionWith(oldChecksums);
    news.Object.UnionWith(newChecksums);

    // remove documents that exist on both sides
    olds.Object.ExceptWith(newChecksums);
    news.Object.ExceptWith(oldChecksums);

    var oldMap = await GetDocumentMapAsync(existingTextDocumentStates, olds.Object).ConfigureAwait(false);
    var newMap = await GetDocumentMapAsync(_assetProvider, news.Object).ConfigureAwait(false);

    // added documents
    ImmutableArray<DocumentInfo>.Builder? lazyDocumentsToAdd = null;
    foreach (var (documentId, newDocumentChecksums) in newMap)
    {
        if (!oldMap.ContainsKey(documentId))
        {
            lazyDocumentsToAdd ??= ImmutableArray.CreateBuilder<DocumentInfo>();

            // we have a new document added
            var documentInfo = await _assetProvider.CreateDocumentInfoAsync(newDocumentChecksums.Checksum, _cancellationToken).ConfigureAwait(false);
            lazyDocumentsToAdd.Add(documentInfo);
        }
    }

    if (lazyDocumentsToAdd != null)
    {
        project = addDocuments(project.Solution, lazyDocumentsToAdd.ToImmutable()).GetProject(project.Id)!;
    }

    // changed documents
    foreach (var (documentId, newDocumentChecksums) in newMap)
    {
        if (!oldMap.TryGetValue(documentId, out var oldDocumentChecksums))
        {
            continue;
        }

        Contract.ThrowIfTrue(oldDocumentChecksums.Checksum == newDocumentChecksums.Checksum);

        var document = project.GetDocument(documentId)
            ?? project.GetAdditionalDocument(documentId)
            ?? project.GetAnalyzerConfigDocument(documentId);

        Contract.ThrowIfNull(document);

        project = await UpdateDocumentAsync(document, oldDocumentChecksums, newDocumentChecksums).ConfigureAwait(false);
    }

    // removed documents
    foreach (var (documentId, _) in oldMap)
    {
        if (!newMap.ContainsKey(documentId))
        {
            // we have a document removed
            project = removeDocument(project.Solution, documentId).GetProject(project.Id)!;
        }
    }

    return project;
}
protected override int CreateNavInfo(SYMBOL_DESCRIPTION_NODE[] rgSymbolNodes, uint ulcNodes, out IVsNavInfo ppNavInfo)
{
    Debug.Assert(rgSymbolNodes != null && ulcNodes > 0, "Invalid input parameters into CreateNavInfo");

    ppNavInfo = null;

    var count = 0;
    string libraryName = null;
    string referenceOwnerName = null;

    if (rgSymbolNodes[0].dwType != (uint)_LIB_LISTTYPE.LLT_PACKAGE)
    {
        Debug.Fail("Symbol description should always contain LLT_PACKAGE node as first node");
        return VSConstants.E_INVALIDARG;
    }
    else
    {
        count++;

        // If the second node is also a package node, we infer that the node for which
        // NavInfo is generated is a 'referenced' node in CV:
        //   First package node  ---> project item under which the referenced node is displayed
        //   Second package node ---> actual lib item node, i.e. the referenced assembly
        if (ulcNodes > 1 && rgSymbolNodes[1].dwType == (uint)_LIB_LISTTYPE.LLT_PACKAGE)
        {
            count++;
            referenceOwnerName = rgSymbolNodes[0].pszName;
            libraryName = rgSymbolNodes[1].pszName;
        }
        else
        {
            libraryName = rgSymbolNodes[0].pszName;
        }
    }

    var namespaceName = SharedPools.Default<StringBuilder>().AllocateAndClear();
    var className = SharedPools.Default<StringBuilder>().AllocateAndClear();
    var memberName = string.Empty;

    // Populate namespace, class and member names.
    // Generate flattened names for nested namespaces and classes.
    for (; count < ulcNodes; count++)
    {
        switch (rgSymbolNodes[count].dwType)
        {
            case (uint)_LIB_LISTTYPE.LLT_NAMESPACES:
                if (namespaceName.Length > 0)
                {
                    namespaceName.Append(".");
                }

                namespaceName.Append(rgSymbolNodes[count].pszName);
                break;

            case (uint)_LIB_LISTTYPE.LLT_CLASSES:
                if (className.Length > 0)
                {
                    className.Append(".");
                }

                className.Append(rgSymbolNodes[count].pszName);
                break;

            case (uint)_LIB_LISTTYPE.LLT_MEMBERS:
                if (memberName.Length > 0)
                {
                    Debug.Fail("Symbol description cannot contain more than one LLT_MEMBERS node.");
                }

                memberName = rgSymbolNodes[count].pszName;
                break;
        }
    }

    // TODO: Make sure we pass the right value for Visual Basic.
    ppNavInfo = this.LibraryService.NavInfoFactory.Create(libraryName, referenceOwnerName, namespaceName.ToString(), className.ToString(), memberName);

    // Read the builders before this point: ClearAndFree empties them as it returns them to the pool.
    SharedPools.Default<StringBuilder>().ClearAndFree(namespaceName);
    SharedPools.Default<StringBuilder>().ClearAndFree(className);

    return VSConstants.S_OK;
}
public static void Free(StringBuilder builder) => SharedPools.Default<StringBuilder>().ClearAndFree(builder);
private bool WriteIdentifierLocations(int projectId, int documentId, Document document, VersionStamp version, SyntaxNode root, CancellationToken cancellationToken)
{
    // delete any existing data
    if (!DeleteIdentifierLocations(projectId, documentId, cancellationToken))
    {
        return false;
    }

    var identifierMap = SharedPools.StringIgnoreCaseDictionary<int>().AllocateAndClear();

    Dictionary<string, List<int>> map = null;
    try
    {
        map = CreateIdentifierLocations(document, root, cancellationToken);

        // okay, write the new data
        using (var accessor = _esentStorage.GetIdentifierLocationTableAccessor())
        {
            // make sure we have all identifiers ready before starting the big insertion
            int identifierId;
            foreach (var identifier in map.Keys)
            {
                if (!TryGetUniqueIdentifierId(identifier, out identifierId))
                {
                    return false;
                }

                identifierMap[identifier] = identifierId;
            }

            // save the whole map
            var uncommittedCount = 0;

            foreach (var kv in map)
            {
                cancellationToken.ThrowIfCancellationRequested();

                var identifier = kv.Key;
                var positions = kv.Value;

                if ((uncommittedCount + positions.Count) > FlushThreshold)
                {
                    accessor.Flush();
                    uncommittedCount = 0;
                }

                accessor.PrepareBatchOneInsert();

                identifierId = identifierMap[identifier];

                using (var stream = accessor.GetBatchInsertStream(projectId, documentId, identifierId))
                using (var writer = new ObjectWriter(stream, cancellationToken: cancellationToken))
                {
                    writer.WriteString(IdentifierSetSerializationVersion);
                    WriteList(writer, positions);
                }

                accessor.FinishBatchOneInsert();

                uncommittedCount += positions.Count;
            }

            // save the special identifier that indicates the version for this document
            if (!TrySaveIdentifierSetVersion(accessor, projectId, documentId, version))
            {
                return false;
            }

            return accessor.ApplyChanges();
        }
    }
    finally
    {
        SharedPools.StringIgnoreCaseDictionary<int>().ClearAndFree(identifierMap);
        Free(map);
    }
}
private SortedSet<IDottedRule> GetPredictedStates(DottedRuleSet frame)
{
    var pool = SharedPools.Default<Queue<IDottedRule>>();
    var queue = pool.AllocateAndClear();
    var closure = new SortedSet<IDottedRule>();

    for (int i = 0; i < frame.Data.Count; i++)
    {
        var state = frame.Data[i];
        if (!IsComplete(state))
        {
            queue.Enqueue(state);
        }
    }

    while (queue.Count > 0)
    {
        var state = queue.Dequeue();
        if (IsComplete(state))
        {
            continue;
        }

        var postDotSymbol = GetPostDotSymbol(state);
        if (postDotSymbol.SymbolType != SymbolType.NonTerminal)
        {
            continue;
        }

        var nonTerminalPostDotSymbol = postDotSymbol as INonTerminal;
        if (Grammar.IsTransativeNullable(nonTerminalPostDotSymbol))
        {
            var preComputedState = GetPreComputedState(state.Production, state.Position + 1);
            if (!frame.Contains(preComputedState))
            {
                if (closure.Add(preComputedState))
                {
                    if (!IsComplete(preComputedState))
                    {
                        queue.Enqueue(preComputedState);
                    }
                }
            }
        }

        var predictions = Grammar.RulesFor(nonTerminalPostDotSymbol);
        for (var p = 0; p < predictions.Count; p++)
        {
            var prediction = predictions[p];
            var preComputedState = GetPreComputedState(prediction, 0);

            if (frame.Contains(preComputedState))
            {
                continue;
            }

            if (!closure.Add(preComputedState))
            {
                continue;
            }

            if (!IsComplete(preComputedState))
            {
                queue.Enqueue(preComputedState);
            }
        }
    }

    pool.ClearAndFree(queue);
    return closure;
}
public static List<T> ReturnAndFree(List<T> list)
{
    SharedPools.Default<List<T>>().ForgetTrackedObject(list);
    return list;
}
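Note the asymmetry between ReturnAndFree above and Free below: ForgetTrackedObject hands the object back to the caller and, as the name suggests, appears to drop it from the pool's tracking rather than recycle it, which is why the list remains safe to use afterwards; ClearAndFree empties the object and returns it to the pool, after which the caller must not touch it.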
public TableDfa ToDfa()
{
    var queuePool = SharedPools.Default<ProcessOnceQueue<Closure>>();
    var queue = queuePool.Allocate();
    queue.Clear();

    var start = new Closure(Start, _nullTransitions, _finalStates);
    queue.Enqueue(start);

    var tableDfa = new TableDfa(start.GetHashCode());

    while (queue.Count > 0)
    {
        var transitions = SharedPools
            .Default<Dictionary<char, SortedSet<int>>>()
            .AllocateAndClear();

        var nfaClosure = queue.Dequeue();
        var nfaClosureId = nfaClosure.GetHashCode();
        tableDfa.SetFinal(nfaClosureId, nfaClosure.IsFinal);

        for (int i = 0; i < nfaClosure.States.Length; i++)
        {
            var state = nfaClosure.States[i];

            Dictionary<char, int> characterTransitions = null;
            if (!_table.TryGetValue(state, out characterTransitions))
            {
                continue;
            }

            foreach (var characterTransition in characterTransitions)
            {
                SortedSet<int> targets = null;
                if (!transitions.TryGetValue(characterTransition.Key, out targets))
                {
                    targets = SharedPools.Default<SortedSet<int>>().AllocateAndClear();
                    transitions.Add(characterTransition.Key, targets);
                }

                targets.Add(characterTransition.Value);
            }
        }

        foreach (var targetSet in transitions)
        {
            var closure = new Closure(targetSet.Value, _nullTransitions, _finalStates);
            closure = queue.EnqueueOrGetExisting(closure);

            var closureId = closure.GetHashCode();

            tableDfa.AddTransition(nfaClosureId, targetSet.Key, closureId);
            tableDfa.SetFinal(closureId, closure.IsFinal);

            SharedPools.Default<SortedSet<int>>().ClearAndFree(targetSet.Value);
        }

        SharedPools
            .Default<Dictionary<char, SortedSet<int>>>()
            .ClearAndFree(transitions);
    }

    queuePool.Free(queue);
    return tableDfa;
}
public static List<T> Allocate() => SharedPools.Default<List<T>>().AllocateAndClear();
public IDfaState Transform(INfa nfa)
{
    var processOnceQueue = new ProcessOnceQueue<NfaClosure>();

    var set = SharedPools.Default<SortedSet<INfaState>>().AllocateAndClear();
    foreach (var state in nfa.Start.Closure())
    {
        set.Add(state);
    }

    var start = new NfaClosure(set, nfa.Start.Equals(nfa.End));
    processOnceQueue.Enqueue(start);

    while (processOnceQueue.Count > 0)
    {
        var nfaClosure = processOnceQueue.Dequeue();

        var transitions = SharedPools
            .Default<Dictionary<ITerminal, SortedSet<INfaState>>>()
            .AllocateAndClear();

        for (int i = 0; i < nfaClosure.Closure.Length; i++)
        {
            var state = nfaClosure.Closure[i];
            for (var t = 0; t < state.Transitions.Count; t++)
            {
                var transition = state.Transitions[t];
                switch (transition.TransitionType)
                {
                    case NfaTransitionType.Edge:
                        var terminalTransition = transition as TerminalNfaTransition;
                        var terminal = terminalTransition.Terminal;

                        if (!transitions.ContainsKey(terminalTransition.Terminal))
                        {
                            transitions[terminal] = SharedPools.Default<SortedSet<INfaState>>().AllocateAndClear();
                        }

                        transitions[terminal].Add(transition.Target);
                        break;
                }
            }
        }

        foreach (var terminal in transitions.Keys)
        {
            var targetStates = transitions[terminal];
            var closure = Closure(targetStates, nfa.End);
            closure = processOnceQueue.EnqueueOrGetExisting(closure);

            nfaClosure.State.AddTransition(new DfaTransition(terminal, closure.State));

            SharedPools.Default<SortedSet<INfaState>>().ClearAndFree(targetStates);
        }

        SharedPools
            .Default<SortedSet<INfaState>>()
            .ClearAndFree(nfaClosure.Set);

        SharedPools
            .Default<Dictionary<ITerminal, SortedSet<INfaState>>>()
            .ClearAndFree(transitions);
    }

    return start.State;
}
private void MemoizeTransitions(int iLoc)
{
    var frameSet = Chart.Sets[iLoc];

    // Leo eligibility needs to be cached before creating the cached transition.
    // If the size of the list is != 1, do not enter the cached frame transition.
    var cachedTransitionsPool = SharedPools.Default<Dictionary<ISymbol, CachedDottedRuleSetTransition>>();
    var cachedTransitions = cachedTransitionsPool.AllocateAndClear();

    var cachedCountPool = SharedPools.Default<Dictionary<ISymbol, int>>();
    var cachedCount = cachedCountPool.AllocateAndClear();

    for (var i = 0; i < frameSet.States.Count; i++)
    {
        var stateFrame = frameSet.States[i];
        var frame = stateFrame.DottedRuleSet;
        var frameData = frame.Data;
        var stateFrameDataCount = frameData.Count;

        for (var j = 0; j < stateFrameDataCount; j++)
        {
            var preComputedState = frameData[j];
            if (preComputedState.IsComplete)
            {
                continue;
            }

            var postDotSymbol = preComputedState.PostDotSymbol;
            if (postDotSymbol.SymbolType != SymbolType.NonTerminal)
            {
                continue;
            }

            // Leo-eligible items are right recursive, directly or indirectly.
            if (!_preComputedGrammar.Grammar.IsRightRecursive(preComputedState.Production.LeftHandSide))
            {
                continue;
            }

            // To determine whether the item is Leo-unique, cache it here.
            var count = 0;
            if (!cachedCount.TryGetValue(postDotSymbol, out count))
            {
                cachedCount[postDotSymbol] = 1;
                cachedTransitions[postDotSymbol] = CreateTopCachedItem(stateFrame, postDotSymbol);
            }
            else
            {
                cachedCount[postDotSymbol] = count + 1;
            }
        }
    }

    // Add all memoized Leo items to the frameSet.
    foreach (var symbol in cachedCount.Keys)
    {
        var count = cachedCount[symbol];
        if (count != 1)
        {
            continue;
        }

        frameSet.AddCachedTransition(cachedTransitions[symbol]);
    }

    cachedTransitionsPool.ClearAndFree(cachedTransitions);
    cachedCountPool.ClearAndFree(cachedCount);
}
public static void Free(List<T> list)
{
    SharedPools.Default<List<T>>().ClearAndFree(list);
}
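A hedged round-trip sketch pairing AllocateAndClear with ClearAndFree directly on SharedPools (ComputeSquares is a hypothetical caller, not part of the snippets above): the try/finally guarantees the scratch list goes back to the pool even if the body throws, and the result is copied out before the list is recycled.

public static int[] ComputeSquares(int count)
{
    var scratch = SharedPools.Default<List<int>>().AllocateAndClear();
    try
    {
        for (var i = 0; i < count; i++)
        {
            scratch.Add(i * i);
        }

        // Copy out before the list is cleared and reused by another caller.
        return scratch.ToArray();
    }
    finally
    {
        SharedPools.Default<List<int>>().ClearAndFree(scratch);
    }
}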
public async Task<ImmutableArray<Diagnostic>> GetSemanticDiagnosticsAsync(DiagnosticAnalyzer analyzer)
{
    // Validate the input before using it below.
    Contract.ThrowIfNull(_document);

    var model = await _document.GetSemanticModelAsync(_cancellationToken).ConfigureAwait(false);
    var compilation = model?.Compilation;

    using (var pooledObject = SharedPools.Default<List<Diagnostic>>().GetPooledObject())
    {
        var diagnostics = pooledObject.Object;

        // Stateless semantic analyzers:
        //  1) ISemanticModelAnalyzer/IDocumentBasedDiagnosticAnalyzer
        //  2) ISymbolAnalyzer
        //  3) ISyntaxNodeAnalyzer
        _cancellationToken.ThrowIfCancellationRequested();

        var documentAnalyzer = analyzer as DocumentDiagnosticAnalyzer;
        if (documentAnalyzer != null)
        {
            try
            {
                await documentAnalyzer.AnalyzeSemanticsAsync(_document, diagnostics.Add, _cancellationToken).ConfigureAwait(false);
            }
            catch (Exception e) when (!AnalyzerExecutor.IsCanceled(e, _cancellationToken))
            {
                OnAnalyzerException(e, analyzer, compilation);
                return ImmutableArray<Diagnostic>.Empty;
            }
        }
        else
        {
            var analyzerExecutor = GetAnalyzerExecutor(analyzer, compilation, diagnostics.Add);
            var analyzerActions = await GetAnalyzerActionsAsync(analyzer, analyzerExecutor).ConfigureAwait(false);
            if (analyzerActions != null)
            {
                // SemanticModel actions.
                if (analyzerActions.SemanticModelActionsCount > 0)
                {
                    analyzerExecutor.ExecuteSemanticModelActions(analyzerActions, model);
                }

                // Symbol actions.
                if (analyzerActions.SymbolActionsCount > 0)
                {
                    var symbols = this.GetSymbolsToAnalyze(model);
                    analyzerExecutor.ExecuteSymbolActions(analyzerActions, symbols);
                }

                if (this.SyntaxNodeAnalyzerService != null)
                {
                    // SyntaxNode actions.
                    if (analyzerActions.SyntaxNodeActionsCount > 0)
                    {
                        this.SyntaxNodeAnalyzerService.ExecuteSyntaxNodeActions(analyzerActions, GetSyntaxNodesToAnalyze(), model, analyzerExecutor);
                    }

                    // CodeBlockStart, CodeBlock, CodeBlockEnd, and generated SyntaxNode actions.
                    if (analyzerActions.CodeBlockStartActionsCount > 0 || analyzerActions.CodeBlockActionsCount > 0 || analyzerActions.CodeBlockEndActionsCount > 0)
                    {
                        this.SyntaxNodeAnalyzerService.ExecuteCodeBlockActions(analyzerActions, this.GetDeclarationInfos(model), model, analyzerExecutor);
                    }
                }
            }
        }

        return GetFilteredDocumentDiagnostics(diagnostics, compilation).ToImmutableArray();
    }
}
public static StringBuilder Allocate()
{
    return SharedPools.Default<StringBuilder>().AllocateAndClear();
}
private async Task<DiagnosticAnalysisResultMap<DiagnosticAnalyzer, DiagnosticAnalysisResult>> AnalyzeOutOfProcAsync(
    DocumentAnalysisScope? documentAnalysisScope,
    Project project,
    CompilationWithAnalyzers compilationWithAnalyzers,
    RemoteHostClient client,
    bool forceExecuteAllAnalyzers,
    bool logPerformanceInfo,
    bool getTelemetryInfo,
    CancellationToken cancellationToken)
{
    var solution = project.Solution;

    using var pooledObject = SharedPools.Default<Dictionary<string, DiagnosticAnalyzer>>().GetPooledObject();
    var analyzerMap = pooledObject.Object;

    var analyzers = documentAnalysisScope?.Analyzers
        ?? compilationWithAnalyzers.Analyzers.Where(a => forceExecuteAllAnalyzers || !a.IsOpenFileOnly(solution.Options));
    analyzerMap.AppendAnalyzerMap(analyzers);

    if (analyzerMap.Count == 0)
    {
        return DiagnosticAnalysisResultMap<DiagnosticAnalyzer, DiagnosticAnalysisResult>.Empty;
    }

    // Use high priority if we are force executing all analyzers for a user action OR serving an active document request.
    var isHighPriority = forceExecuteAllAnalyzers ||
        documentAnalysisScope != null && _documentTrackingService?.TryGetActiveDocument() == documentAnalysisScope.TextDocument.Id;

    var argument = new DiagnosticArguments(
        isHighPriority,
        compilationWithAnalyzers.AnalysisOptions.ReportSuppressedDiagnostics,
        logPerformanceInfo,
        getTelemetryInfo,
        documentAnalysisScope?.TextDocument.Id,
        documentAnalysisScope?.Span,
        documentAnalysisScope?.Kind,
        project.Id,
        analyzerMap.Keys.ToArray());

    var result = await client.TryInvokeAsync<IRemoteDiagnosticAnalyzerService, SerializableDiagnosticAnalysisResults>(
        solution,
        invocation: (service, solutionInfo, cancellationToken) => service.CalculateDiagnosticsAsync(solutionInfo, argument, cancellationToken),
        callbackTarget: null,
        cancellationToken).ConfigureAwait(false);

    if (!result.HasValue)
    {
        // handles cancellation and exceptions
        return DiagnosticAnalysisResultMap<DiagnosticAnalyzer, DiagnosticAnalysisResult>.Empty;
    }

    var version = await DiagnosticIncrementalAnalyzer.GetDiagnosticVersionAsync(project, cancellationToken).ConfigureAwait(false);

    var documentIds = (documentAnalysisScope != null) ? ImmutableHashSet.Create(documentAnalysisScope.TextDocument.Id) : null;

    return new DiagnosticAnalysisResultMap<DiagnosticAnalyzer, DiagnosticAnalysisResult>(
        result.Value.Diagnostics.ToImmutableDictionary(
            entry => analyzerMap[entry.analyzerId],
            entry => DiagnosticAnalysisResult.Create(
                project,
                version,
                syntaxLocalMap: Hydrate(entry.diagnosticMap.Syntax, project),
                semanticLocalMap: Hydrate(entry.diagnosticMap.Semantic, project),
                nonLocalMap: Hydrate(entry.diagnosticMap.NonLocal, project),
                others: entry.diagnosticMap.Other,
                documentIds)),
        result.Value.Telemetry.ToImmutableDictionary(entry => analyzerMap[entry.analyzerId], entry => entry.telemetry));
}
public static StringBuilder Allocate() => SharedPools.Default<StringBuilder>().AllocateAndClear();
private T GetSmallestContainingIntervalWorker(int start, int length, Func<T, int, int, bool> predicate)
{
    var result = default(T);
    if (root == null || MaxEndValue(root) < start)
    {
        return result;
    }

    int end = start + length;

    // Our interval tree is a binary tree ordered by start position.
    //
    // This method works by:
    // 1. finding the subtree with the biggest "start" position that is smaller than the given "start", by going down the right side of the tree
    // 2. once it encounters a right subtree it can't go down anymore, moving down to the left subtree once and trying #1 again
    // 3. once it gets to the position where it can't find any smaller span (neither the left nor the
    //    right subtree contains the given span), checking whether the current node contains the given "span"
    // 4. moving up the spine until it finds one that contains the "span", which should be the smallest span containing the given "span"
    // 5. when coming up from the right side, making sure to check the left side of the tree first.
    using (var pooledObject = SharedPools.Default<Stack<Node>>().GetPooledObject())
    {
        var spineNodes = pooledObject.Object;

        spineNodes.Push(root);
        while (spineNodes.Count > 0)
        {
            var currentNode = spineNodes.Peek();

            // only go right if the right tree contains the given span
            if (Introspector.GetStart(currentNode.Value) <= start)
            {
                var right = currentNode.Right;
                if (right != null && end < MaxEndValue(right))
                {
                    spineNodes.Push(right);
                    continue;
                }
            }

            // the right subtree doesn't contain the given span; keep the current node
            // on the stack and move down to the left subtree
            var left = currentNode.Left;
            if (left != null && end <= MaxEndValue(left))
            {
                spineNodes.Push(left);
                continue;
            }

            // we reached the point where we can't go down anymore.
            // now, go back up to find the best answer
            while (spineNodes.Count > 0)
            {
                currentNode = spineNodes.Pop();

                // check whether the current node meets the condition
                if (predicate(currentNode.Value, start, length))
                {
                    // hold onto the best answer
                    if (EqualityComparer<T>.Default.Equals(result, default) ||
                        (Introspector.GetStart(result) <= Introspector.GetStart(currentNode.Value) &&
                         Introspector.GetLength(currentNode.Value) < Introspector.GetLength(result)))
                    {
                        result = currentNode.Value;
                    }
                }

                // there is no parent; the result we currently have is the best answer
                if (spineNodes.Count == 0)
                {
                    return result;
                }

                var parentNode = spineNodes.Peek();

                // if we are under the left side of the parent node
                if (parentNode.Left == currentNode)
                {
                    // go one level up again
                    continue;
                }

                // okay, we are under the right side of the parent node
                if (parentNode.Right == currentNode)
                {
                    // try the left side of the parent node if it can have a better answer
                    if (parentNode.Left != null && end <= MaxEndValue(parentNode.Left))
                    {
                        // if the right side tree doesn't have any answer, or it has an answer
                        // but the left side could have a better one, then try the left side
                        if (EqualityComparer<T>.Default.Equals(result, default) ||
                            Introspector.GetStart(parentNode.Value) == Introspector.GetStart(currentNode.Value))
                        {
                            // put left as the new root, and break out of the inner loop
                            spineNodes.Push(parentNode.Left);
                            break;
                        }
                    }

                    // no left side, go one more level up
                    continue;
                }
            }
        }

        return result;
    }
}
public static string ReturnAndFree(StringBuilder builder)
{
    SharedPools.Default<StringBuilder>().ForgetTrackedObject(builder);
    return builder.ToString();
}
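The StringBuilder wrappers compose the same way: Allocate borrows a cleared builder and ReturnAndFree materializes the final string while releasing the builder from pool tracking. A small sketch assuming only the Allocate/ReturnAndFree helpers shown above (JoinWithCommas is a hypothetical caller):

public static string JoinWithCommas(IReadOnlyList<string> parts)
{
    var builder = Allocate(); // pooled StringBuilder, already cleared

    for (var i = 0; i < parts.Count; i++)
    {
        if (i > 0)
        {
            builder.Append(", ");
        }

        builder.Append(parts[i]);
    }

    // Produces the string and releases the builder in one step.
    return ReturnAndFree(builder);
}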
private async Task<Project> UpdateDocumentsAsync(
    Project project,
    ProjectStateChecksums projectChecksums,
    IEnumerable<TextDocumentState> existingTextDocumentStates,
    ChecksumCollection oldChecksums,
    ChecksumCollection newChecksums,
    Func<Solution, ImmutableArray<DocumentInfo>, Solution> addDocuments,
    Func<Solution, ImmutableArray<DocumentId>, Solution> removeDocuments,
    CancellationToken cancellationToken)
{
    using var olds = SharedPools.Default<HashSet<Checksum>>().GetPooledObject();
    using var news = SharedPools.Default<HashSet<Checksum>>().GetPooledObject();

    olds.Object.UnionWith(oldChecksums);
    news.Object.UnionWith(newChecksums);

    // remove documents that exist on both sides
    olds.Object.ExceptWith(newChecksums);
    news.Object.ExceptWith(oldChecksums);

    var oldMap = await GetDocumentMapAsync(existingTextDocumentStates, olds.Object, cancellationToken).ConfigureAwait(false);
    var newMap = await GetDocumentMapAsync(_assetProvider, news.Object, cancellationToken).ConfigureAwait(false);

    // If more than two documents changed during a single update, perform a bulk synchronization on the
    // project to avoid large numbers of small synchronization calls during document updates.
    // 🔗 https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1365014
    if (newMap.Count > 2)
    {
        await _assetProvider.SynchronizeProjectAssetsAsync(new[] { projectChecksums.Checksum }, cancellationToken).ConfigureAwait(false);
    }

    // added documents
    ImmutableArray<DocumentInfo>.Builder? lazyDocumentsToAdd = null;
    foreach (var (documentId, newDocumentChecksums) in newMap)
    {
        if (!oldMap.ContainsKey(documentId))
        {
            lazyDocumentsToAdd ??= ImmutableArray.CreateBuilder<DocumentInfo>();

            // we have a new document added
            var documentInfo = await _assetProvider.CreateDocumentInfoAsync(newDocumentChecksums.Checksum, cancellationToken).ConfigureAwait(false);
            lazyDocumentsToAdd.Add(documentInfo);
        }
    }

    if (lazyDocumentsToAdd != null)
    {
        project = addDocuments(project.Solution, lazyDocumentsToAdd.ToImmutable()).GetProject(project.Id)!;
    }

    // changed documents
    foreach (var (documentId, newDocumentChecksums) in newMap)
    {
        if (!oldMap.TryGetValue(documentId, out var oldDocumentChecksums))
        {
            continue;
        }

        Contract.ThrowIfTrue(oldDocumentChecksums.Checksum == newDocumentChecksums.Checksum);

        var document = project.GetDocument(documentId)
            ?? project.GetAdditionalDocument(documentId)
            ?? project.GetAnalyzerConfigDocument(documentId);

        Contract.ThrowIfNull(document);

        project = await UpdateDocumentAsync(document, oldDocumentChecksums, newDocumentChecksums, cancellationToken).ConfigureAwait(false);
    }

    // removed documents
    ImmutableArray<DocumentId>.Builder? lazyDocumentsToRemove = null;
    foreach (var (documentId, _) in oldMap)
    {
        if (!newMap.ContainsKey(documentId))
        {
            // we have a document removed
            lazyDocumentsToRemove ??= ImmutableArray.CreateBuilder<DocumentId>();
            lazyDocumentsToRemove.Add(documentId);
        }
    }

    if (lazyDocumentsToRemove is not null)
    {
        project = removeDocuments(project.Solution, lazyDocumentsToRemove.ToImmutable()).GetProject(project.Id)!;
    }

    return project;
}
private static DiffResult Difference<T>(IEnumerable<ITagSpan<T>> latestSpans, IEnumerable<ITagSpan<T>> previousSpans, IEqualityComparer<T> comparer)
    where T : ITag
{
    using var addedPool = SharedPools.Default<List<SnapshotSpan>>().GetPooledObject();
    using var removedPool = SharedPools.Default<List<SnapshotSpan>>().GetPooledObject();
    using var latestEnumerator = latestSpans.GetEnumerator();
    using var previousEnumerator = previousSpans.GetEnumerator();

    var added = addedPool.Object;
    var removed = removedPool.Object;

    var latest = NextOrDefault(latestEnumerator);
    var previous = NextOrDefault(previousEnumerator);

    while (latest != null && previous != null)
    {
        var latestSpan = latest.Span;
        var previousSpan = previous.Span;

        if (latestSpan.Start < previousSpan.Start)
        {
            added.Add(latestSpan);
            latest = NextOrDefault(latestEnumerator);
        }
        else if (previousSpan.Start < latestSpan.Start)
        {
            removed.Add(previousSpan);
            previous = NextOrDefault(previousEnumerator);
        }
        else
        {
            // If the starts are the same, but the ends are different, report the larger
            // region to be conservative.
            if (previousSpan.End > latestSpan.End)
            {
                removed.Add(previousSpan);
                latest = NextOrDefault(latestEnumerator);
            }
            else if (latestSpan.End > previousSpan.End)
            {
                added.Add(latestSpan);
                previous = NextOrDefault(previousEnumerator);
            }
            else
            {
                if (!comparer.Equals(latest.Tag, previous.Tag))
                {
                    added.Add(latestSpan);
                }

                latest = NextOrDefault(latestEnumerator);
                previous = NextOrDefault(previousEnumerator);
            }
        }
    }

    while (latest != null)
    {
        added.Add(latest.Span);
        latest = NextOrDefault(latestEnumerator);
    }

    while (previous != null)
    {
        removed.Add(previous.Span);
        previous = NextOrDefault(previousEnumerator);
    }

    return new DiffResult(added, removed);
}
private IEnumerable<TextChange> FilterTextChanges(SourceText originalText, List<TextSpan> editorVisibleSpansInOriginal, IReadOnlyList<TextChange> changes)
{
    // no visible spans or changes
    if (editorVisibleSpansInOriginal.Count == 0 || changes.Count == 0)
    {
        // return empty sequence
        yield break;
    }

    using (var pooledObject = SharedPools.Default<List<TextChange>>().GetPooledObject())
    {
        var changeQueue = pooledObject.Object;
        changeQueue.AddRange(changes);

        var spanIndex = 0;
        var changeIndex = 0;
        for (; spanIndex < editorVisibleSpansInOriginal.Count; spanIndex++)
        {
            var visibleSpan = editorVisibleSpansInOriginal[spanIndex];
            var visibleTextSpan = GetVisibleTextSpan(originalText, visibleSpan, uptoFirstAndLastLine: true);

            for (; changeIndex < changeQueue.Count; changeIndex++)
            {
                var change = changeQueue[changeIndex];

                // easy case first
                if (change.Span.End < visibleSpan.Start)
                {
                    // move to next change
                    continue;
                }

                if (visibleSpan.End < change.Span.Start)
                {
                    // move to next visible span
                    break;
                }

                // make sure we are not replacing whitespace around the start and at the end of the visible span
                if (WhitespaceOnEdges(originalText, visibleTextSpan, change))
                {
                    continue;
                }

                if (visibleSpan.Contains(change.Span))
                {
                    yield return change;
                    continue;
                }

                // now it is the complex case where things are intersecting each other
                var subChanges = GetSubTextChanges(originalText, change, visibleSpan).ToList();
                if (subChanges.Count > 0)
                {
                    if (subChanges.Count == 1 && subChanges[0] == change)
                    {
                        // we can't break it; not much we can do here. just don't touch it and ignore this change
                        continue;
                    }

                    changeQueue.InsertRange(changeIndex + 1, subChanges);
                    continue;
                }
            }
        }
    }
}
public static PooledObject<List<T>> CreateList<T>()
{
    return SharedPools.Default<List<T>>().GetPooledObject();
}
private static void LogKeyboardInput(NativeMethods.KEYBDINPUT input)
{
    var isExtendedKey = (input.dwFlags & NativeMethods.KEYEVENTF_EXTENDEDKEY) != 0;
    var isKeyUp = (input.dwFlags & NativeMethods.KEYEVENTF_KEYUP) != 0;
    var isUnicode = (input.dwFlags & NativeMethods.KEYEVENTF_UNICODE) != 0;
    var isScanCode = (input.dwFlags & NativeMethods.KEYEVENTF_SCANCODE) != 0;

    if (isUnicode && input.wVk != 0)
    {
        Debug.WriteLine("UNEXPECTED: if KEYEVENTF_UNICODE flag is specified then wVk must be 0.");
        return;
    }

    var builder = SharedPools.Default<StringBuilder>().AllocateAndClear();
    builder.Append("Send Key: ");

    char ch;
    if (isUnicode || isScanCode)
    {
        builder.Append(input.wScan.ToString("x4"));
        ch = (char)input.wScan;
    }
    else
    {
        builder.Append(input.wVk.ToString("x4"));
        ch = (char)(NativeMethods.MapVirtualKey(input.wVk, NativeMethods.MAPVK_VK_TO_CHAR) & 0x0000ffff);
    }

    // Append code and printable character
    builder.Append(' ');
    AppendPrintableChar(ch, builder);

    if (!isUnicode && !isScanCode && input.wVk <= byte.MaxValue)
    {
        AppendVirtualKey((byte)input.wVk, builder);
    }

    // Append flags
    if (input.dwFlags == 0)
    {
        builder.Append("[none]");
    }
    else
    {
        builder.Append('[');

        if (isExtendedKey)
        {
            AppendFlag("extended", builder);
        }

        if (isKeyUp)
        {
            AppendFlag("key up", builder);
        }

        if (isUnicode)
        {
            AppendFlag("unicode", builder);
        }

        if (isScanCode)
        {
            AppendFlag("scan code", builder);
        }

        builder.Append(']');
    }

    Debug.WriteLine(builder.ToString());

    SharedPools.Default<StringBuilder>().ClearAndFree(builder);
}
protected override async Task ExecuteAsync()
{
    // wait for global operations such as build
    await GlobalOperationTask.ConfigureAwait(false);

    using (var pooledObject = SharedPools.Default<List<ExpensiveAnalyzerInfo>>().GetPooledObject())
    using (RoslynLogger.LogBlock(FunctionId.Diagnostics_GeneratePerformaceReport, CancellationToken))
    {
        _diagnosticAnalyzerPerformanceTracker.GenerateReport(pooledObject.Object);

        foreach (var analyzerInfo in pooledObject.Object)
        {
            var newAnalyzer = _reported.Add(analyzerInfo.AnalyzerId);
            var isInternalUser = _telemetrySession.IsUserMicrosoftInternal;

            // we only report the same analyzer once, unless it is an internal user
            if (isInternalUser || newAnalyzer)
            {
                // this will report telemetry under VS, which will let us see how accurate our performance tracking is
                RoslynLogger.Log(
                    FunctionId.Diagnostics_BadAnalyzer,
                    KeyValueLogMessage.Create(m =>
                    {
                        // since it is telemetry, we hash the analyzer name if it is not a built-in analyzer
                        m[nameof(analyzerInfo.AnalyzerId)] = isInternalUser ? analyzerInfo.AnalyzerId : analyzerInfo.PIISafeAnalyzerId;
                        m[nameof(analyzerInfo.LocalOutlierFactor)] = analyzerInfo.LocalOutlierFactor;
                        m[nameof(analyzerInfo.Average)] = analyzerInfo.Average;
                        m[nameof(analyzerInfo.AdjustedStandardDeviation)] = analyzerInfo.AdjustedStandardDeviation;
                    }));
            }

            // for logging, we only log once. we log here so that we can ask users to provide this log to us
            // when we want to find out whether a VS performance issue could be caused by an analyzer
            if (newAnalyzer)
            {
                _logger.TraceEvent(
                    TraceEventType.Warning,
                    0,
                    $"Analyzer perf indicators exceeded threshold for '{analyzerInfo.AnalyzerId}' ({analyzerInfo.AnalyzerIdHash}): " +
                    $"LOF: {analyzerInfo.LocalOutlierFactor}, Avg: {analyzerInfo.Average}, Stddev: {analyzerInfo.AdjustedStandardDeviation}");
            }
        }
    }
}
public static List<T> Allocate()
{
    return SharedPools.Default<List<T>>().AllocateAndClear();
}
private void DeallocateStringBuilderAndAssignCapture()
{
    _capture = _stringBuilder.ToString();
    SharedPools.Default<StringBuilder>().ClearAndFree(_stringBuilder);
    _stringBuilder = null;
}
public IFormattingRule CreateRule(Document document, int position)
{
    var visualStudioWorkspace = document.Project.Solution.Workspace as VisualStudioWorkspaceImpl;
    if (visualStudioWorkspace == null)
    {
        return _noopRule;
    }

    var containedDocument = visualStudioWorkspace.GetHostDocument(document.Id) as ContainedDocument;
    if (containedDocument == null)
    {
        return _noopRule;
    }

    var textContainer = document.GetTextAsync(CancellationToken.None).WaitAndGetResult(CancellationToken.None).Container;

    var buffer = textContainer.TryGetTextBuffer() as IProjectionBuffer;
    if (buffer == null)
    {
        return _noopRule;
    }

    using (var pooledObject = SharedPools.Default<List<TextSpan>>().GetPooledObject())
    {
        var spans = pooledObject.Object;

        var root = document.GetSyntaxRootSynchronously(CancellationToken.None);
        var text = root.SyntaxTree.GetText(CancellationToken.None);

        spans.AddRange(containedDocument.GetEditorVisibleSpans());

        for (var i = 0; i < spans.Count; i++)
        {
            var visibleSpan = spans[i];
            if (visibleSpan.IntersectsWith(position) || visibleSpan.End == position)
            {
                return containedDocument.GetBaseIndentationRule(root, text, spans, i);
            }
        }

        // in razor (especially in @helper tag), it is possible for us to be asked for next line of visible span
        var line = text.Lines.GetLineFromPosition(position);
        if (line.LineNumber > 0)
        {
            line = text.Lines[line.LineNumber - 1];

            // find one that intersects with previous line
            for (var i = 0; i < spans.Count; i++)
            {
                var visibleSpan = spans[i];
                if (visibleSpan.IntersectsWith(line.Span))
                {
                    return containedDocument.GetBaseIndentationRule(root, text, spans, i);
                }
            }
        }

        throw new InvalidOperationException();
    }
}