/// <summary>
/// Analyzes an invocation of a non-lambda/delegate/local-function method for tainted data flow:
/// processes tainted data entering the call, applies configured tainted-data source methods,
/// source-transfer methods, and sanitizer methods, and computes the invocation's abstract value.
/// </summary>
/// <param name="method">Invoked method symbol.</param>
/// <param name="visitedInstance">Receiver instance operation, or null for static calls.</param>
/// <param name="visitedArguments">Visited argument operations of the invocation.</param>
/// <param name="invokedAsDelegate">True if the method was invoked through a delegate.</param>
/// <param name="originalOperation">The original invocation operation.</param>
/// <param name="defaultValue">Default abstract value to seed the base visit.</param>
/// <returns>The tainted-data abstract value for the invocation's result.</returns>
public override TaintedDataAbstractValue VisitInvocation_NonLambdaOrDelegateOrLocalFunction(
    IMethodSymbol method,
    IOperation? visitedInstance,
    ImmutableArray<IArgumentOperation> visitedArguments,
    bool invokedAsDelegate,
    IOperation originalOperation,
    TaintedDataAbstractValue defaultValue)
{
    // Always invoke base visit.
    TaintedDataAbstractValue result = base.VisitInvocation_NonLambdaOrDelegateOrLocalFunction(
        method,
        visitedInstance,
        visitedArguments,
        invokedAsDelegate,
        originalOperation,
        defaultValue);

    // Report/process any tainted data flowing into the invocation's arguments.
    IEnumerable<IArgumentOperation> taintedArguments = GetTaintedArguments(visitedArguments);
    if (taintedArguments.Any())
    {
        ProcessTaintedDataEnteringInvocationOrCreation(method, taintedArguments, originalOperation);
    }

    // Pooled sets below are allocated lazily by the Is* lookups; all are released in the finally block.
    PooledHashSet<string>? taintedTargets = null;
    PooledHashSet<(string, string)>? taintedParameterPairs = null;
    PooledHashSet<(string, string)>? sanitizedParameterPairs = null;
    PooledHashSet<string>? taintedParameterNamesCached = null;
    try
    {
        // Computes the names of parameters whose cached abstract value is Tainted;
        // TaintedTargetValue.This is included when the receiver instance itself is tainted.
        IEnumerable<string> GetTaintedParameterNames()
        {
            IEnumerable<string> taintedParameterNames = visitedArguments
                .Where(s => this.GetCachedAbstractValue(s).Kind == TaintedDataAbstractValueKind.Tainted)
                .Select(s => s.Parameter.Name);

            if (visitedInstance != null && this.GetCachedAbstractValue(visitedInstance).Kind == TaintedDataAbstractValueKind.Tainted)
            {
                taintedParameterNames = taintedParameterNames.Concat(TaintedTargetValue.This);
            }

            return (taintedParameterNames);
        }

        taintedParameterNamesCached = PooledHashSet<string>.GetInstance();
        taintedParameterNamesCached.UnionWith(GetTaintedParameterNames());

        // 1) Tainted-data source methods: the call may taint its return value and/or specific arguments.
        if (this.DataFlowAnalysisContext.SourceInfos.IsSourceMethod(
                method,
                visitedArguments,
                new Lazy<PointsToAnalysisResult?>(() => DataFlowAnalysisContext.PointsToAnalysisResult),
                new Lazy<(PointsToAnalysisResult?, ValueContentAnalysisResult?)>(() => (DataFlowAnalysisContext.PointsToAnalysisResult, DataFlowAnalysisContext.ValueContentAnalysisResult)),
                out taintedTargets))
        {
            bool rebuildTaintedParameterNames = false;
            foreach (string taintedTarget in taintedTargets)
            {
                if (taintedTarget != TaintedTargetValue.Return)
                {
                    // Taint the named argument (e.g. an out/ref parameter configured as a source).
                    IArgumentOperation argumentOperation = visitedArguments.FirstOrDefault(o => o.Parameter.Name == taintedTarget);
                    if (argumentOperation != null)
                    {
                        rebuildTaintedParameterNames = true;
                        this.CacheAbstractValue(argumentOperation, TaintedDataAbstractValue.CreateTainted(argumentOperation.Parameter, argumentOperation.Syntax, method));
                    }
                    else
                    {
                        Debug.Fail("Are the tainted data sources misconfigured?");
                    }
                }
                else
                {
                    // Taint the return value of the invocation.
                    result = TaintedDataAbstractValue.CreateTainted(method, originalOperation.Syntax, this.OwningSymbol);
                }
            }

            // Argument taint changed above, so the cached name set is stale; recompute it.
            if (rebuildTaintedParameterNames)
            {
                taintedParameterNamesCached.Clear();
                taintedParameterNamesCached.UnionWith(GetTaintedParameterNames());
            }
        }

        // 2) Source-transfer methods: taint propagates from one parameter (or 'this') to another.
        if (this.DataFlowAnalysisContext.SourceInfos.IsSourceTransferMethod(
                method,
                visitedArguments,
                taintedParameterNamesCached,
                out taintedParameterPairs))
        {
            foreach ((string ifTaintedParameter, string thenTaintedTarget) in taintedParameterPairs)
            {
                IOperation thenTaintedTargetOperation = visitedInstance != null && thenTaintedTarget == TaintedTargetValue.This
                    ? visitedInstance
                    : visitedArguments.FirstOrDefault(o => o.Parameter.Name == thenTaintedTarget);
                if (thenTaintedTargetOperation != null)
                {
                    // Copy the source parameter's taint value onto the target entity.
                    SetTaintedForEntity(
                        thenTaintedTargetOperation,
                        this.GetCachedAbstractValue(
                            visitedInstance != null && ifTaintedParameter == TaintedTargetValue.This
                                ? visitedInstance
                                : visitedArguments.FirstOrDefault(o => o.Parameter.Name == ifTaintedParameter)));
                }
                else
                {
                    Debug.Fail("Are the tainted data sources misconfigured?");
                }
            }
        }

        // 3) Sanitizers: an instance sanitizer method clears taint on the receiver itself.
        if (visitedInstance != null && this.IsSanitizingInstanceMethod(method))
        {
            SetTaintedForEntity(visitedInstance, TaintedDataAbstractValue.NotTainted);
        }

        // Sanitizer methods may clear taint on the return value and/or specific arguments.
        if (this.IsSanitizingMethod(
                method,
                visitedArguments,
                taintedParameterNamesCached,
                out sanitizedParameterPairs))
        {
            if (sanitizedParameterPairs.Count == 0)
            {
                // it was either sanitizing constructor or
                // the short form or registering sanitizer method by just the name
                result = TaintedDataAbstractValue.NotTainted;
            }
            else
            {
                foreach ((string ifTaintedParameter, string thenSanitizedTarget) in sanitizedParameterPairs)
                {
                    if (thenSanitizedTarget == TaintedTargetValue.Return)
                    {
                        result = TaintedDataAbstractValue.NotTainted;
                        continue;
                    }

                    IArgumentOperation thenSanitizedTargetOperation = visitedArguments.FirstOrDefault(o => o.Parameter.Name == thenSanitizedTarget);
                    if (thenSanitizedTargetOperation != null)
                    {
                        SetTaintedForEntity(thenSanitizedTargetOperation, TaintedDataAbstractValue.NotTainted);
                    }
                    else
                    {
                        Debug.Fail("Are the tainted data sanitizers misconfigured?");
                    }
                }
            }
        }
    }
    finally
    {
        // Return all pooled sets, whether or not the lookups above populated them.
        taintedTargets?.Dispose();
        taintedParameterPairs?.Dispose();
        sanitizedParameterPairs?.Dispose();
        taintedParameterNamesCached?.Dispose();
    }

    return (result);
}
/// <summary>
/// Returns a new manager in which <paramref name="oldTree"/> is replaced by <paramref name="newTree"/>,
/// updating the cached state (syntax tree list, ordinal map, #load directive maps, declaration table)
/// incrementally when possible. If the #load directives differ between the two trees, the tree list and
/// ordinal map are rebuilt; otherwise the old tree's slot is reused in place.
/// </summary>
public SyntaxAndDeclarationManager ReplaceSyntaxTree(SyntaxTree oldTree, SyntaxTree newTree)
{
    var state = _lazyState;
    var newExternalSyntaxTrees = this.ExternalSyntaxTrees.Replace(oldTree, newTree);
    if (state == null)
    {
        // No cached state to update incrementally; rebuild lazily from the new external trees.
        return (this.WithExternalSyntaxTrees(newExternalSyntaxTrees));
    }

    var newLoadDirectivesSyntax = newTree.GetCompilationUnitRoot().GetLoadDirectives();
    var loadDirectivesHaveChanged = !oldTree.GetCompilationUnitRoot().GetLoadDirectives().SequenceEqual(newLoadDirectivesSyntax);
    var syntaxTrees = state.SyntaxTrees;
    var ordinalMap = state.OrdinalMap;
    var loadDirectiveMap = state.LoadDirectiveMap;
    var loadedSyntaxTreeMap = state.LoadedSyntaxTreeMap;

    // Compute the set of trees to drop: the old tree plus (when #load directives changed)
    // the trees it transitively #load'ed.
    var removeSet = PooledHashSet<SyntaxTree>.GetInstance();
    int totalReferencedTreeCount;
    ImmutableArray<LoadDirective> oldLoadDirectives;
    GetRemoveSet(
        oldTree,
        loadDirectivesHaveChanged,
        syntaxTrees,
        ordinalMap,
        loadDirectiveMap,
        loadedSyntaxTreeMap,
        removeSet,
        out totalReferencedTreeCount,
        out oldLoadDirectives);

    var loadDirectiveMapBuilder = loadDirectiveMap.ToBuilder();
    var loadedSyntaxTreeMapBuilder = loadedSyntaxTreeMap.ToBuilder();
    var declMapBuilder = state.RootNamespaces.ToBuilder();
    var declTable = state.DeclarationTable;
    foreach (var tree in removeSet)
    {
        loadDirectiveMapBuilder.Remove(tree);
        loadedSyntaxTreeMapBuilder.Remove(tree.FilePath);
        RemoveSyntaxTreeFromDeclarationMapAndTable(tree, declMapBuilder, ref declTable);
    }
    removeSet.Free();

    var oldOrdinal = ordinalMap[oldTree];
    ImmutableArray<SyntaxTree> newTrees;
    if (loadDirectivesHaveChanged)
    {
        // Should have been removed above...
        Debug.Assert(!loadDirectiveMapBuilder.ContainsKey(oldTree));
        Debug.Assert(!loadDirectiveMapBuilder.ContainsKey(newTree));

        // If we're inserting new #load'ed trees, we'll rebuild
        // the whole syntaxTree array and the ordinalMap.
        var treesBuilder = ArrayBuilder<SyntaxTree>.GetInstance();
        var ordinalMapBuilder = PooledDictionary<SyntaxTree, int>.GetInstance();

        // Copy the prefix of trees that precede the old tree and its #load'ed trees.
        for (var i = 0; i <= (oldOrdinal - totalReferencedTreeCount); i++)
        {
            var tree = syntaxTrees[i];
            treesBuilder.Add(tree);
            ordinalMapBuilder.Add(tree, i);
        }

        // Append the new tree along with everything it #load's.
        AppendAllSyntaxTrees(
            treesBuilder,
            newTree,
            this.ScriptClassName,
            this.Resolver,
            this.MessageProvider,
            this.IsSubmission,
            ordinalMapBuilder,
            loadDirectiveMapBuilder,
            loadedSyntaxTreeMapBuilder,
            declMapBuilder,
            ref declTable);

        // Re-append the remaining trees, skipping ones that were only present because they were #load'ed.
        for (var i = oldOrdinal + 1; i < syntaxTrees.Length; i++)
        {
            var tree = syntaxTrees[i];
            if (!IsLoadedSyntaxTree(tree, loadedSyntaxTreeMap))
            {
                UpdateSyntaxTreesAndOrdinalMapOnly(
                    treesBuilder,
                    tree,
                    ordinalMapBuilder,
                    loadDirectiveMap,
                    loadedSyntaxTreeMap);
            }
        }

        newTrees = treesBuilder.ToImmutableAndFree();
        ordinalMap = ordinalMapBuilder.ToImmutableDictionaryAndFree();
        Debug.Assert(newTrees.Length == ordinalMap.Count);
    }
    else
    {
        // #load directives are unchanged: swap the new tree into the old tree's slot.
        AddSyntaxTreeToDeclarationMapAndTable(newTree, this.ScriptClassName, this.IsSubmission, declMapBuilder, ref declTable);

        if (newLoadDirectivesSyntax.Any())
        {
            // If load directives have not changed and there are new directives,
            // then there should have been (matching) old directives as well.
            Debug.Assert(!oldLoadDirectives.IsDefault);
            Debug.Assert(!oldLoadDirectives.IsEmpty);
            Debug.Assert(oldLoadDirectives.Length == newLoadDirectivesSyntax.Count);
            loadDirectiveMapBuilder[newTree] = oldLoadDirectives;
        }

        Debug.Assert(ordinalMap.ContainsKey(oldTree)); // Checked by RemoveSyntaxTreeFromDeclarationMapAndTable

        newTrees = syntaxTrees.SetItem(oldOrdinal, newTree);
        ordinalMap = ordinalMap.Remove(oldTree);
        ordinalMap = ordinalMap.SetItem(newTree, oldOrdinal);
    }

    state = new State(
        newTrees,
        ordinalMap,
        loadDirectiveMapBuilder.ToImmutable(),
        loadedSyntaxTreeMapBuilder.ToImmutable(),
        declMapBuilder.ToImmutable(),
        declTable);

    return (new SyntaxAndDeclarationManager(
        newExternalSyntaxTrees,
        this.ScriptClassName,
        this.Resolver,
        this.MessageProvider,
        this.IsSubmission,
        state));
}
/// <summary>
/// If the extension method is applicable based on the "this" argument type, return
/// the method constructed with the inferred type arguments. If the method is not an
/// unconstructed generic method, type inference is skipped. If the method is not
/// applicable, or if constraints when inferring type parameters from the "this" type
/// are not satisfied, the return value is null.
/// </summary>
/// <param name="compilation">Compilation used to check constraints. The latest language version is assumed if this is null.</param>
private static MethodSymbol InferExtensionMethodTypeArguments(MethodSymbol method, TypeSymbol thisType, CSharpCompilation compilation, ref HashSet<DiagnosticInfo> useSiteDiagnostics)
{
    Debug.Assert(method.IsExtensionMethod);
    Debug.Assert((object)thisType != null);

    // Non-generic or already-constructed methods need no inference.
    if (!method.IsGenericMethod || method != method.ConstructedFrom)
    {
        return (method);
    }

    // We never resolve extension methods on a dynamic receiver.
    if (thisType.IsDynamic())
    {
        return (null);
    }

    var containingAssembly = method.ContainingAssembly;
    var errorNamespace = containingAssembly.GlobalNamespace;
    var conversions = new TypeConversions(containingAssembly.CorLibrary);

    // There is absolutely no plausible syntax/tree that we could use for these
    // synthesized literals. We could be speculatively binding a call to a PE method.
    var syntaxTree = CSharpSyntaxTree.Dummy;
    var syntax = (CSharpSyntaxNode)syntaxTree.GetRoot();

    // Create an argument value for the "this" argument of specific type,
    // and pass the same bad argument value for all other arguments.
    var thisArgumentValue = new BoundLiteral(syntax, ConstantValue.Bad, thisType) { WasCompilerGenerated = true };
    var otherArgumentType = new ExtendedErrorTypeSymbol(errorNamespace, name: string.Empty, arity: 0, errorInfo: null, unreported: false);
    var otherArgumentValue = new BoundLiteral(syntax, ConstantValue.Bad, otherArgumentType) { WasCompilerGenerated = true };

    var paramCount = method.ParameterCount;
    var arguments = new BoundExpression[paramCount];
    for (int i = 0; i < paramCount; i++)
    {
        var argument = (i == 0) ? thisArgumentValue : otherArgumentValue;
        arguments[i] = argument;
    }

    // Infer type arguments from the "this" (first) argument only.
    var typeArgs = MethodTypeInferrer.InferTypeArgumentsFromFirstArgument(
        conversions,
        method,
        arguments.AsImmutable(),
        useSiteDiagnostics: ref useSiteDiagnostics);

    if (typeArgs.IsDefault)
    {
        return (null);
    }

    // For the purpose of constraint checks we use error type symbol in place of type arguments that we couldn't infer from the first argument.
    // This prevents constraint checking from failing for corresponding type parameters.
    int firstNullInTypeArgs = -1;
    var notInferredTypeParameters = PooledHashSet<TypeParameterSymbol>.GetInstance();
    var typeParams = method.TypeParameters;
    var typeArgsForConstraintsCheck = typeArgs;
    for (int i = 0; i < typeArgsForConstraintsCheck.Length; i++)
    {
        if (!typeArgsForConstraintsCheck[i].HasType)
        {
            // Rebuild the array from the first missing slot, substituting an error type
            // for each type argument that inference failed to produce.
            firstNullInTypeArgs = i;
            var builder = ArrayBuilder<TypeWithAnnotations>.GetInstance();
            builder.AddRange(typeArgsForConstraintsCheck, firstNullInTypeArgs);

            for (; i < typeArgsForConstraintsCheck.Length; i++)
            {
                var typeArg = typeArgsForConstraintsCheck[i];
                if (!typeArg.HasType)
                {
                    notInferredTypeParameters.Add(typeParams[i]);
                    builder.Add(TypeWithAnnotations.Create(ErrorTypeSymbol.UnknownResultType));
                }
                else
                {
                    builder.Add(typeArg);
                }
            }

            typeArgsForConstraintsCheck = builder.ToImmutableAndFree();
            break;
        }
    }

    // Check constraints.
    var diagnosticsBuilder = ArrayBuilder<TypeParameterDiagnosticInfo>.GetInstance();
    var substitution = new TypeMap(typeParams, typeArgsForConstraintsCheck);
    ArrayBuilder<TypeParameterDiagnosticInfo> useSiteDiagnosticsBuilder = null;
    var success = method.CheckConstraints(conversions, substitution, typeParams, typeArgsForConstraintsCheck, compilation, diagnosticsBuilder, nullabilityDiagnosticsBuilderOpt: null, ref useSiteDiagnosticsBuilder, ignoreTypeConstraintsDependentOnTypeParametersOpt: notInferredTypeParameters.Count > 0 ? notInferredTypeParameters : null);
    diagnosticsBuilder.Free();
    notInferredTypeParameters.Free();

    // Propagate any use-site diagnostics produced by the constraint check to the caller's set.
    if (useSiteDiagnosticsBuilder != null && useSiteDiagnosticsBuilder.Count > 0)
    {
        if (useSiteDiagnostics == null)
        {
            useSiteDiagnostics = new HashSet<DiagnosticInfo>();
        }

        foreach (var diag in useSiteDiagnosticsBuilder)
        {
            useSiteDiagnostics.Add(diag.DiagnosticInfo);
        }
    }

    if (!success)
    {
        return (null);
    }

    // For the purpose of construction we use original type parameters in place of type arguments that we couldn't infer from the first argument.
    ImmutableArray<TypeWithAnnotations> typeArgsForConstruct = typeArgs;
    if (typeArgs.Any(t => !t.HasType))
    {
        typeArgsForConstruct = typeArgs.ZipAsArray(
            method.TypeParameters,
            (t, tp) => t.HasType ? t : TypeWithAnnotations.Create(tp));
    }

    return (method.Construct(typeArgsForConstruct));
}
/// <summary>
/// Create the optimized plan for the location of lambda methods and whether scopes need access to parent scopes
/// </summary>
internal void ComputeLambdaScopesAndFrameCaptures()
{
    RemoveUnneededReferences();

    LambdaScopes = new Dictionary<MethodSymbol, BoundNode>(ReferenceEqualityComparer.Instance);
    NeedsParentFrame = new HashSet<BoundNode>();

    // For every closure that captures at least one variable, find the range of scopes
    // it touches and record where its display class should live.
    VisitClosures(ScopeTree, (scope, closure) =>
    {
        if (closure.CapturedVariables.Count > 0)
        {
            (Scope innermost, Scope outermost) = FindLambdaScopeRange(closure, scope);
            RecordClosureScope(innermost, outermost, closure);
        }
    });

    // Walks from the closure's scope outward, returning the innermost and outermost scopes
    // that declare anything this closure captures. Either may be null: innermost is null if
    // nothing local is captured; outermost is reset to null if captures remain above method scope.
    (Scope innermost, Scope outermost) FindLambdaScopeRange(Closure closure, Scope closureScope)
    {
        Scope innermost = null;
        Scope outermost = null;

        var capturedVars = PooledHashSet<Symbol>.GetInstance();
        capturedVars.AddAll(closure.CapturedVariables);

        // If any of the captured variables are local functions we'll need
        // to add the captured variables of that local function to the current
        // set. This has the effect of ensuring that if the local function
        // captures anything "above" the current scope then parent frame
        // is itself captured (so that the current lambda can call that
        // local function).
        foreach (var captured in closure.CapturedVariables)
        {
            if (captured is LocalFunctionSymbol localFunc)
            {
                var (found, _) = GetVisibleClosure(closureScope, localFunc);
                capturedVars.AddAll(found.CapturedVariables);
            }
        }

        // Walk outward until all captured variables are accounted for (or we run out of scopes).
        for (var curScope = closureScope; curScope != null && capturedVars.Count > 0; curScope = curScope.Parent)
        {
            // Skip scopes that declare nothing this closure captures.
            if (!(capturedVars.Overlaps(curScope.DeclaredVariables) || capturedVars.Overlaps(curScope.Closures.Select(c => c.OriginalMethodSymbol))))
            {
                continue;
            }

            outermost = curScope;
            if (innermost == null)
            {
                innermost = curScope;
            }

            // Remove the variables satisfied by this scope before continuing outward.
            capturedVars.RemoveAll(curScope.DeclaredVariables);
            capturedVars.RemoveAll(curScope.Closures.Select(c => c.OriginalMethodSymbol));
        }

        // If any captured variables are left, they're captured above method scope
        if (capturedVars.Count > 0)
        {
            outermost = null;
        }

        capturedVars.Free();

        return (innermost, outermost);
    }

    void RecordClosureScope(Scope innermost, Scope outermost, Closure closure)
    {
        // 1) if there is innermost scope, lambda goes there as we cannot go any higher.
        // 2) scopes in [innermostScope, outermostScope) chain need to have access to the parent scope.
        //
        // Example:
        //   if a lambda captures a method's parameter and `this`,
        //   its innermost scope depth is 0 (method locals and parameters)
        //   and outermost scope is -1
        //   Such lambda will be placed in a closure frame that corresponds to the method's outer block
        //   and this frame will also lift original `this` as a field when created by its parent.
        //   Note that it is completely irrelevant how deeply the lexical scope of the lambda was originally nested.
        if (innermost != null)
        {
            LambdaScopes.Add(closure.OriginalMethodSymbol, innermost.BoundNode);

            // Disable struct closures on methods converted to delegates, as well as on async and iterator methods.
            var markAsNoStruct = !CanTakeRefParameters(closure.OriginalMethodSymbol);
            if (markAsNoStruct)
            {
                ScopesThatCantBeStructs.Add(innermost.BoundNode);
            }

            // Every scope strictly between innermost and outermost needs a link to its parent frame.
            while (innermost != outermost)
            {
                NeedsParentFrame.Add(innermost.BoundNode);
                innermost = innermost.Parent;
                if (markAsNoStruct && innermost != null)
                {
                    ScopesThatCantBeStructs.Add(innermost.BoundNode);
                }
            }
        }
    }
}
/// <summary>
/// Stops tracking analysis data for the given entity, applying the change to the
/// current analysis data. Delegates to the overload taking explicit analysis data.
/// </summary>
private void StopTrackingDataForEntity(AnalysisEntity analysisEntity, PooledHashSet<AnalysisEntity> allEntities)
{
    StopTrackingDataForEntity(analysisEntity, CurrentAnalysisData, allEntities);
}
/// <summary>
/// Stops tracking every indexed entity that shares the given params-array parameter's
/// instance location, i.e. the element entities created for its indices.
/// </summary>
/// <param name="analysisEntity">Entity for a <c>params</c> array parameter.</param>
/// <param name="analysisData">Analysis data to update.</param>
/// <param name="allEntities">All entities currently known to the analysis.</param>
private void StopTrackingDataForParamArrayParameterIndices(AnalysisEntity analysisEntity, TAnalysisData analysisData, PooledHashSet<AnalysisEntity> allEntities)
{
    Debug.Assert(analysisEntity.Symbol is IParameterSymbol parameter && parameter.IsParams);

    foreach (var candidate in allEntities)
    {
        // Only indexed entities are of interest here.
        if (candidate.Indices.IsEmpty)
        {
            continue;
        }

        // Matching instance location means the entity indexes into this parameter's array.
        if (candidate.InstanceLocation.Equals(analysisEntity.InstanceLocation))
        {
            StopTrackingEntity(candidate, analysisData);
        }
    }
}
/// <summary>
/// Applies the code fix for all "unused value assignment" diagnostics in a containing member:
/// either removes side-effect-free assignments/initializers, or renames the assigned variable
/// to a discard/new unused local when the right-hand side must be preserved, inserting
/// replacement local declaration statements where needed.
/// </summary>
private async Task FixAllValueAssignedIsUnusedDiagnosticsAsync(
    IOrderedEnumerable<Diagnostic> diagnostics,
    Document document,
    SemanticModel semanticModel,
    SyntaxNode root,
    SyntaxNode containingMemberDeclaration,
    UnusedValuePreference preference,
    bool removeAssignments,
    UniqueVariableNameGenerator nameGenerator,
    SyntaxEditor editor,
    ISyntaxFactsService syntaxFacts,
    CancellationToken cancellationToken)
{
    // This method applies the code fix for diagnostics reported for unused value assignments to local/parameter.
    // The actual code fix depends on whether or not the right hand side of the assignment has side effects.
    // For example, if the right hand side is a constant or a reference to a local/parameter, then it has no side effects.
    // The lack of side effects is indicated by the "removeAssignments" parameter for this function.

    // If the right hand side has no side effects, then we can replace the assignments with variable declarations that have no initializer
    // or completely remove the statement.
    // If the right hand side does have side effects, we replace the identifier token for unused value assignment with
    // a new identifier token (either discard '_' or new unused local variable name).

    // For both the above cases, if the original diagnostic was reported on a local declaration, i.e. redundant initialization
    // at declaration, then we also add a new variable declaration statement without initializer for this local.

    var nodeReplacementMap = PooledDictionary<SyntaxNode, SyntaxNode>.GetInstance();
    var nodesToRemove = PooledHashSet<SyntaxNode>.GetInstance();
    var nodesToAdd = PooledHashSet<(TLocalDeclarationStatementSyntax declarationStatement, SyntaxNode node)>.GetInstance();
    // Indicates if the node's trivia was processed.
    var processedNodes = PooledHashSet<SyntaxNode>.GetInstance();
    var candidateDeclarationStatementsForRemoval = PooledHashSet<TLocalDeclarationStatementSyntax>.GetInstance();
    var hasAnyUnusedLocalAssignment = false;
    try
    {
        foreach (var (node, isUnusedLocalAssignment) in GetNodesToFix())
        {
            hasAnyUnusedLocalAssignment |= isUnusedLocalAssignment;

            var declaredLocal = semanticModel.GetDeclaredSymbol(node, cancellationToken) as ILocalSymbol;
            if (declaredLocal == null && node.Parent is TCatchStatementSyntax)
            {
                // The node itself is a catch variable; the declared symbol hangs off its parent.
                declaredLocal = semanticModel.GetDeclaredSymbol(node.Parent, cancellationToken) as ILocalSymbol;
            }

            string newLocalNameOpt = null;
            if (removeAssignments)
            {
                // Removable assignment or initialization, such that right hand side has no side effects.
                if (declaredLocal != null)
                {
                    // Redundant initialization.
                    // For example, "int a = 0;"
                    var variableDeclarator = node.FirstAncestorOrSelf<TVariableDeclaratorSyntax>();
                    Debug.Assert(variableDeclarator != null);
                    nodesToRemove.Add(variableDeclarator);

                    // Local declaration statement containing the declarator might be a candidate for removal if all its variables get marked for removal.
                    candidateDeclarationStatementsForRemoval.Add(variableDeclarator.GetAncestor<TLocalDeclarationStatementSyntax>());
                }
                else
                {
                    // Redundant assignment or increment/decrement.
                    if (syntaxFacts.IsOperandOfIncrementOrDecrementExpression(node))
                    {
                        // For example, C# increment operation "a++;"
                        Debug.Assert(node.Parent.Parent is TExpressionStatementSyntax);
                        nodesToRemove.Add(node.Parent.Parent);
                    }
                    else
                    {
                        Debug.Assert(syntaxFacts.IsLeftSideOfAnyAssignment(node));

                        if (node.Parent is TStatementSyntax)
                        {
                            // For example, VB simple assignment statement "a = 0"
                            nodesToRemove.Add(node.Parent);
                        }
                        else if (node.Parent is TExpressionSyntax && node.Parent.Parent is TExpressionStatementSyntax)
                        {
                            // For example, C# simple assignment statement "a = 0;"
                            nodesToRemove.Add(node.Parent.Parent);
                        }
                        else
                        {
                            // For example, C# nested assignment statement "a = b = 0;", where assignment to 'b' is redundant.
                            // We replace the node with "a = 0;"
                            nodeReplacementMap.Add(node.Parent, syntaxFacts.GetRightHandSideOfAssignment(node.Parent));
                        }
                    }
                }
            }
            else
            {
                // Value initialization/assignment where the right hand side may have side effects,
                // and hence needs to be preserved in fixed code.
                // For example, "x = MethodCall();" is replaced with "_ = MethodCall();" or "var unused = MethodCall();"

                // Replace the flagged variable's identifier token with a new name, based on user's preference.
                var newNameToken = preference == UnusedValuePreference.DiscardVariable
                    ? editor.Generator.Identifier(AbstractRemoveUnusedParametersAndValuesDiagnosticAnalyzer.DiscardVariableName)
                    : nameGenerator.GenerateUniqueNameAtSpanStart(node);
                newLocalNameOpt = newNameToken.ValueText;
                var newNameNode = TryUpdateNameForFlaggedNode(node, newNameToken);
                if (newNameNode == null)
                {
                    continue;
                }

                // Is this a compound assignment?
                if (syntaxFacts.IsLeftSideOfAnyAssignment(node) && !syntaxFacts.IsLeftSideOfAssignment(node))
                {
                    // Compound assignment is changed to simple assignment.
                    // For example, "x += MethodCall();", where assignment to 'x' is redundant
                    // is replaced with "_ = MethodCall();" or "var unused = MethodCall();"
                    nodeReplacementMap.Add(node.Parent, GetReplacementNodeForCompoundAssignment(node.Parent, newNameNode, editor, syntaxFacts));
                }
                else
                {
                    nodeReplacementMap.Add(node, newNameNode);
                }
            }

            if (declaredLocal != null)
            {
                // We have a dead initialization for a local declaration.
                // Introduce a new local declaration statement without an initializer for this local.
                var declarationStatement = CreateLocalDeclarationStatement(declaredLocal.Type, declaredLocal.Name);
                if (isUnusedLocalAssignment)
                {
                    declarationStatement = declarationStatement.WithAdditionalAnnotations(s_unusedLocalDeclarationAnnotation);
                }

                nodesToAdd.Add((declarationStatement, node));
            }
            else
            {
                // We have a dead assignment to a local/parameter, which is not at the declaration site.
                // Create a new local declaration for the unused local if both following conditions are met:
                //  1. User prefers unused local variables for unused value assignment AND
                //  2. Assignment value has side effects and hence cannot be removed.
                if (preference == UnusedValuePreference.UnusedLocalVariable && !removeAssignments)
                {
                    var type = semanticModel.GetTypeInfo(node, cancellationToken).Type;
                    Debug.Assert(type != null);
                    Debug.Assert(newLocalNameOpt != null);
                    var declarationStatement = CreateLocalDeclarationStatement(type, newLocalNameOpt);
                    nodesToAdd.Add((declarationStatement, node));
                }
            }
        }

        // Process candidate declaration statements for removal.
        foreach (var localDeclarationStatement in candidateDeclarationStatementsForRemoval)
        {
            // If all the variable declarators for the local declaration statement are being removed,
            // we can remove the entire local declaration statement.
            if (ShouldRemoveStatement(localDeclarationStatement, out var variables))
            {
                nodesToRemove.Add(localDeclarationStatement);
                nodesToRemove.RemoveRange(variables);
            }
        }

        foreach (var (declarationStatement, node) in nodesToAdd)
        {
            InsertLocalDeclarationStatement(declarationStatement, node);
        }

        if (hasAnyUnusedLocalAssignment)
        {
            // Local declaration statements with no initializer, but non-zero references are candidates for removal
            // if the code fix removes all these references.
            // We annotate such declaration statements with no initializer and non-zero references here
            // and remove them in post process document pass later, if the code fix did remove all these references.
            foreach (var localDeclarationStatement in containingMemberDeclaration.DescendantNodes().OfType<TLocalDeclarationStatementSyntax>())
            {
                var variables = syntaxFacts.GetVariablesOfLocalDeclarationStatement(localDeclarationStatement);
                if (variables.Count == 1 &&
                    syntaxFacts.GetInitializerOfVariableDeclarator(variables[0]) == null &&
                    !(await IsLocalDeclarationWithNoReferencesAsync(localDeclarationStatement, document, cancellationToken).ConfigureAwait(false)))
                {
                    nodeReplacementMap.Add(localDeclarationStatement, localDeclarationStatement.WithAdditionalAnnotations(s_existingLocalDeclarationWithoutInitializerAnnotation));
                }
            }
        }

        foreach (var node in nodesToRemove)
        {
            var removeOptions = SyntaxGenerator.DefaultRemoveOptions;

            // If the leading trivia was not added to a new node, process it now.
            if (!processedNodes.Contains(node))
            {
                // Don't keep trivia if the node is part of a multiple declaration statement.
                // e.g. int x = 0, y = 0, z = 0; any white space left behind can cause problems if the declaration gets split apart.
                var containingDeclaration = node.GetAncestor<TLocalDeclarationStatementSyntax>();
                if (containingDeclaration != null && candidateDeclarationStatementsForRemoval.Contains(containingDeclaration))
                {
                    removeOptions = SyntaxRemoveOptions.KeepNoTrivia;
                }
                else
                {
                    removeOptions |= SyntaxRemoveOptions.KeepLeadingTrivia;
                }
            }

            editor.RemoveNode(node, removeOptions);
        }

        foreach (var kvp in nodeReplacementMap)
        {
            editor.ReplaceNode(kvp.Key, kvp.Value.WithAdditionalAnnotations(Formatter.Annotation));
        }
    }
    finally
    {
        // Return all pooled collections.
        nodeReplacementMap.Free();
        nodesToRemove.Free();
        nodesToAdd.Free();
        processedNodes.Free();
    }

    return;

    // Local functions.

    // Maps each diagnostic back to its flagged syntax node, along with whether it
    // represents an unused local assignment.
    IEnumerable<(SyntaxNode node, bool isUnusedLocalAssignment)> GetNodesToFix()
    {
        foreach (var diagnostic in diagnostics)
        {
            var node = root.FindNode(diagnostic.Location.SourceSpan, getInnermostNodeForTie: true);
            var isUnusedLocalAssignment = AbstractRemoveUnusedParametersAndValuesDiagnosticAnalyzer.GetIsUnusedLocalDiagnostic(diagnostic);
            yield return (node, isUnusedLocalAssignment);
        }
    }

    // Mark generated local declaration statement with:
    //  1. "s_newLocalDeclarationAnnotation" for post processing in "MoveNewLocalDeclarationsNearReference" below.
    //  2. Simplifier annotation so that 'var'/explicit type is correctly added based on user options.
    TLocalDeclarationStatementSyntax CreateLocalDeclarationStatement(ITypeSymbol type, string name)
        => (TLocalDeclarationStatementSyntax)editor.Generator.LocalDeclarationStatement(type, name)
            .WithLeadingTrivia(editor.Generator.ElasticCarriageReturnLineFeed)
            .WithAdditionalAnnotations(s_newLocalDeclarationStatementAnnotation, Simplifier.Annotation);

    void InsertLocalDeclarationStatement(TLocalDeclarationStatementSyntax declarationStatement, SyntaxNode node)
    {
        // Find the correct place to insert the given declaration statement based on the node's ancestors.
        var insertionNode = node.FirstAncestorOrSelf<SyntaxNode>(n => n.Parent is TSwitchCaseBlockSyntax || syntaxFacts.IsExecutableBlock(n.Parent) && !(n is TCatchStatementSyntax) && !(n is TCatchBlockSyntax));
        if (insertionNode is TSwitchCaseLabelOrClauseSyntax)
        {
            InsertAtStartOfSwitchCaseBlockForDeclarationInCaseLabelOrClause(insertionNode.GetAncestor<TSwitchCaseBlockSyntax>(), editor, declarationStatement);
        }
        else if (insertionNode is TStatementSyntax)
        {
            // If the insertion node is being removed, keep the leading trivia with the new declaration.
            if (nodesToRemove.Contains(insertionNode) && !processedNodes.Contains(insertionNode))
            {
                declarationStatement = declarationStatement.WithLeadingTrivia(insertionNode.GetLeadingTrivia());

                // Mark the node as processed so that the trivia only gets added once.
                processedNodes.Add(insertionNode);
            }

            editor.InsertBefore(insertionNode, declarationStatement);
        }
    }

    bool ShouldRemoveStatement(TLocalDeclarationStatementSyntax localDeclarationStatement, out SeparatedSyntaxList<SyntaxNode> variables)
    {
        Debug.Assert(removeAssignments);

        // We should remove the entire local declaration statement if all its variables are marked for removal.
        variables = syntaxFacts.GetVariablesOfLocalDeclarationStatement(localDeclarationStatement);
        foreach (var variable in variables)
        {
            if (!nodesToRemove.Contains(variable))
            {
                return (false);
            }
        }

        return (true);
    }
}
/// <summary>
/// Used when iterating through base types in contexts in which the caller needs to avoid cycles and can't use BaseType
/// (perhaps because BaseType is in the process of being computed)
/// </summary>
/// <param name="type">The type whose next base type is requested.</param>
/// <param name="basesBeingResolved">Bases currently being resolved, to break cycles.</param>
/// <param name="compilation">The compilation in use.</param>
/// <param name="visited">Tracks named types already visited during the walk.</param>
/// <returns>The next base type in the chain, computed without use-site diagnostics.</returns>
internal static TypeSymbol GetNextBaseTypeNoUseSiteDiagnostics(this TypeSymbol type, ConsList<Symbol> basesBeingResolved, CSharpCompilation compilation, ref PooledHashSet<NamedTypeSymbol> visited)
{
    var kind = type.TypeKind;

    // A type parameter's "base" is its effective base class.
    if (kind == TypeKind.TypeParameter)
    {
        return ((TypeParameterSymbol)type).EffectiveBaseClassNoUseSiteDiagnostics;
    }

    // Named types can participate in base-type cycles, so go through the
    // cycle-aware declared-base walk.
    if (kind == TypeKind.Class ||
        kind == TypeKind.Struct ||
        kind == TypeKind.Error ||
        kind == TypeKind.Interface)
    {
        return GetNextDeclaredBase((NamedTypeSymbol)type, basesBeingResolved, compilation, ref visited);
    }

    // Enums and delegates know their own base types
    // intrinsically (and do not include interface lists)
    // so there is not the possibility of a cycle.
    return type.BaseTypeNoUseSiteDiagnostics;
}
/// <summary>
/// Merges two analysis data maps keyed by <see cref="AnalysisEntity"/>. Entries with identical keys
/// are merged value-wise; field/property entities that are equal ignoring instance location are merged
/// into a key with a merged instance location; entries present in only one map get a per-entity merged
/// default. Newly synthesized merged keys whose value equals the default are dropped again at the end.
/// </summary>
/// <remarks>
/// Fix: the original text of this method was missing its closing brace after the final
/// <c>return resultMap;</c>, leaving the method (and enclosing type) unbalanced.
/// </remarks>
public override DictionaryAnalysisData<AnalysisEntity, TValue> Merge(DictionaryAnalysisData<AnalysisEntity, TValue> map1, DictionaryAnalysisData<AnalysisEntity, TValue> map2)
{
    AssertValidAnalysisData(map1);
    AssertValidAnalysisData(map2);

    var resultMap = new DictionaryAnalysisData<AnalysisEntity, TValue>();
    using var newKeys = PooledHashSet<AnalysisEntity>.GetInstance();
    using var valuesToMergeBuilder = ArrayBuilder<TValue>.GetInstance(5);

    // Lookup of map2's field/property entities keyed by their location-insensitive id,
    // so map1 entities can find "same member, different instance" counterparts.
    var map2LookupIgnoringInstanceLocation = map2.Keys.Where(IsAnalysisEntityForFieldOrProperty)
        .ToLookup(entity => entity.EqualsIgnoringInstanceLocationId);

    foreach (var entry1 in map1)
    {
        AnalysisEntity key1 = entry1.Key;
        TValue value1 = entry1.Value;
        if (map2LookupIgnoringInstanceLocation.Count > 0 && IsAnalysisEntityForFieldOrProperty(key1))
        {
            var equivalentKeys2 = map2LookupIgnoringInstanceLocation[key1.EqualsIgnoringInstanceLocationId];
            if (!equivalentKeys2.Any())
            {
                // No counterpart in map2 at all: merge with the entity's "absent" value.
                TValue mergedValue = GetMergedValueForEntityPresentInOneMap(key1, value1);
                Debug.Assert(!map2.ContainsKey(key1));
                Debug.Assert(ValueDomain.Compare(value1, mergedValue) <= 0);
                AddNewEntryToResultMap(key1, mergedValue);
                continue;
            }

            foreach (AnalysisEntity key2 in equivalentKeys2)
            {
                // Confirm that key2 and key1 are indeed EqualsIgnoringInstanceLocation
                // This ensures that we handle hash code clashes of EqualsIgnoringInstanceLocationId.
                if (!key1.EqualsIgnoringInstanceLocation(key2))
                {
                    continue;
                }

                TValue value2 = map2[key2];
                valuesToMergeBuilder.Clear();
                valuesToMergeBuilder.Add(value1);
                valuesToMergeBuilder.Add(value2);

                if (key1.InstanceLocation.Equals(key2.InstanceLocation))
                {
                    // Same instance location: plain value merge under the original key.
                    var mergedValue = GetMergedValue(valuesToMergeBuilder);
                    AddNewEntryToResultMap(key1, mergedValue);
                }
                else
                {
                    if (key1.SymbolOpt == null || !Equals(key1.SymbolOpt, key2.SymbolOpt))
                    {
                        // PERF: Do not add a new key-value pair to the resultMap for unrelated entities or non-symbol based entities.
                        continue;
                    }

                    // Same symbol but different instance locations: synthesize a merged key.
                    AnalysisEntity mergedKey = key1.WithMergedInstanceLocation(key2);
                    var isExistingKeyInInput = false;
                    var isExistingKeyInResult = false;

                    // Fold in any value already recorded for the merged key in the result or inputs.
                    if (resultMap.TryGetValue(mergedKey, out var existingValue))
                    {
                        valuesToMergeBuilder.Add(existingValue);
                        isExistingKeyInResult = true;
                    }

                    if (map1.TryGetValue(mergedKey, out existingValue))
                    {
                        valuesToMergeBuilder.Add(existingValue);
                        isExistingKeyInInput = true;
                    }

                    if (map2.TryGetValue(mergedKey, out existingValue))
                    {
                        valuesToMergeBuilder.Add(existingValue);
                        isExistingKeyInInput = true;
                    }

                    var isCandidateToBeSkipped = !isExistingKeyInInput && !isExistingKeyInResult;
                    if (isCandidateToBeSkipped && CanSkipNewEntity(mergedKey))
                    {
                        // PERF: Do not add a new key-value pair to the resultMap if the key is not reachable from tracked entities and PointsTo values.
                        continue;
                    }

                    var mergedValue = GetMergedValue(valuesToMergeBuilder);
                    Debug.Assert(ValueDomain.Compare(value1, mergedValue) <= 0);
                    Debug.Assert(ValueDomain.Compare(value2, mergedValue) <= 0);
                    if (isCandidateToBeSkipped && CanSkipNewEntry(mergedKey, mergedValue))
                    {
                        // PERF: Do not add a new key-value pair to the resultMap if the value can be skipped.
                        continue;
                    }

                    // Track brand-new merged keys so defaults can be pruned at the end.
                    if (!isExistingKeyInInput)
                    {
                        newKeys.Add(mergedKey);
                    }

                    AddNewEntryToResultMap(mergedKey, mergedValue, isNewKey: !isExistingKeyInInput);
                }
            }
        }
        else if (map2.TryGetValue(key1, out var value2))
        {
            // Key present in both maps: standard value-domain merge.
            TValue mergedValue = ValueDomain.Merge(value1, value2);
            Debug.Assert(ValueDomain.Compare(value1, mergedValue) <= 0);
            Debug.Assert(ValueDomain.Compare(value2, mergedValue) <= 0);
            AddNewEntryToResultMap(key1, mergedValue);
            continue;
        }

        // Key present only in map1 (and not already merged above).
        if (!resultMap.ContainsKey(key1))
        {
            TValue mergedValue = GetMergedValueForEntityPresentInOneMap(key1, value1);
            Debug.Assert(ValueDomain.Compare(value1, mergedValue) <= 0);
            AddNewEntryToResultMap(key1, mergedValue);
        }
    }

    // Keys present only in map2.
    foreach (var kvp in map2)
    {
        var key2 = kvp.Key;
        var value2 = kvp.Value;
        if (!resultMap.ContainsKey(key2))
        {
            TValue mergedValue = GetMergedValueForEntityPresentInOneMap(key2, value2);
            Debug.Assert(ValueDomain.Compare(value2, mergedValue) <= 0);
            AddNewEntryToResultMap(key2, mergedValue);
        }
    }

    // Prune synthesized merged keys whose value collapsed to the default; notify for the rest.
    foreach (var newKey in newKeys)
    {
        Debug.Assert(!map1.ContainsKey(newKey));
        Debug.Assert(!map2.ContainsKey(newKey));
        var value = resultMap[newKey];
        if (ReferenceEquals(value, GetDefaultValue(newKey)))
        {
            resultMap.Remove(newKey);
        }
        else
        {
            OnNewMergedValue(value);
        }
    }

    Debug.Assert(Compare(map1, resultMap) <= 0);
    Debug.Assert(Compare(map2, resultMap) <= 0);
    AssertValidAnalysisData(resultMap);
    return resultMap;
}
/// <summary>
/// Symbol-end callback for a named type: reports IDE0051 (remove unused member) or
/// IDE0052 (remove unread member) for each candidate member that was tracked but never read.
/// </summary>
/// <param name="symbolEndContext">Context whose <c>Symbol</c> is the named type being finalized.</param>
/// <param name="hasUnsupportedOperation">True when an operation we cannot analyze was seen; we bail out entirely in that case.</param>
private void OnSymbolEnd(SymbolAnalysisContext symbolEndContext, bool hasUnsupportedOperation)
{
    // An unsupported operation means our read/write tracking is incomplete; reporting would risk false positives.
    if (hasUnsupportedOperation)
    {
        return;
    }

    if (symbolEndContext.Symbol.GetAttributes().Any(a => a.AttributeClass == _structLayoutAttributeType))
    {
        // Bail out for types with 'StructLayoutAttribute' as the ordering of the members is critical,
        // and removal of unused members might break semantics.
        return;
    }

    // Report diagnostics for unused candidate members.
    var first = true;
    // Lazily-computed state, shared across all members of the type; freed in the finally below.
    PooledHashSet<ISymbol> symbolsReferencedInDocComments = null;
    ArrayBuilder<string> debuggerDisplayAttributeArguments = null;
    try
    {
        var namedType = (INamedTypeSymbol)symbolEndContext.Symbol;
        foreach (var member in namedType.GetMembers())
        {
            // Check if the underlying member is neither read nor a readable reference to the member is taken.
            // If so, we flag the member as either unused (never written) or unread (written but not read).
            if (TryRemove(member, out var valueUsageInfo) && !valueUsageInfo.IsReadFrom())
            {
                Debug.Assert(IsCandidateSymbol(member));
                Debug.Assert(!member.IsImplicitlyDeclared);
                if (first)
                {
                    // Bail out if there are syntax errors in any of the declarations of the containing type.
                    // Note that we check this only for the first time that we report an unused or unread member for the containing type.
                    if (HasSyntaxErrors(namedType, symbolEndContext.CancellationToken))
                    {
                        return;
                    }

                    // Compute the set of candidate symbols referenced in all the documentation comments within the named type declarations.
                    // This set is computed once and used for all the iterations of the loop.
                    symbolsReferencedInDocComments = GetCandidateSymbolsReferencedInDocComments(namedType, symbolEndContext.Compilation, symbolEndContext.CancellationToken);

                    // Compute the set of string arguments to DebuggerDisplay attributes applied to any symbol within the named type declaration.
                    // These strings may have an embedded reference to the symbol.
                    // This set is computed once and used for all the iterations of the loop.
                    debuggerDisplayAttributeArguments = GetDebuggerDisplayAttributeArguments(namedType);

                    first = false;
                }

                // Simple heuristic for members referenced in DebuggerDisplayAttribute's string argument:
                // bail out if any of the DebuggerDisplay string arguments contains the member name.
                // In future, we can consider improving this heuristic to parse the embedded expression
                // and resolve symbol references.
                if (debuggerDisplayAttributeArguments.Any(arg => arg.Contains(member.Name)))
                {
                    continue;
                }

                // Report IDE0051 or IDE0052 based on whether the underlying member has any Write/WritableRef/NonReadWriteRef references or not.
                var rule = !valueUsageInfo.IsWrittenTo() && !valueUsageInfo.IsNameOnly() && !symbolsReferencedInDocComments.Contains(member)
                    ? s_removeUnusedMembersRule
                    : s_removeUnreadMembersRule;

                // Do not flag write-only properties that are not read.
                // Write-only properties are assumed to have side effects
                // visible through other means than a property getter.
                if (rule == s_removeUnreadMembersRule && member is IPropertySymbol property && property.IsWriteOnly)
                {
                    continue;
                }

                // Most of the members should have a single location, except for partial methods.
                // We report the diagnostic on the first location of the member.
                var diagnostic = DiagnosticHelper.CreateWithMessage(
                    rule,
                    member.Locations[0],
                    rule.GetEffectiveSeverity(symbolEndContext.Compilation.Options),
                    additionalLocations: null,
                    properties: null,
                    GetMessage(rule, member));
                symbolEndContext.ReportDiagnostic(diagnostic);
            }
        }
    }
    finally
    {
        // Return pooled/builder state even if reporting threw or we returned early after allocation.
        symbolsReferencedInDocComments?.Free();
        debuggerDisplayAttributeArguments?.Free();
    }

    return;
}
/// <summary>
/// Records the inferred type of a 'var' declaration variable (local or global expression
/// variable) and returns a bound node referencing that variable.
/// </summary>
/// <param name="type">The inferred type; may lack a type when inference failed, in which case an error type named "var" is substituted.</param>
/// <param name="binderOpt">Binder used to create the error type on inference failure; must be non-null when <paramref name="type"/> has no type.</param>
/// <param name="diagnosticsOpt">Optional diagnostic bag; inference-failure and restricted-type diagnostics are reported here for locals.</param>
/// <returns>A <c>BoundLocal</c> for locals or a <c>BoundFieldAccess</c> for global expression variables (script/interactive fields).</returns>
internal BoundExpression SetInferredTypeWithAnnotations(
    TypeWithAnnotations type,
    Binder? binderOpt,
    BindingDiagnosticBag? diagnosticsOpt
)
{
    Debug.Assert(binderOpt != null || type.HasType);
    // This node must be a single-variable designation, either directly or wrapped in a declaration expression.
    Debug.Assert(
        this.Syntax.Kind() == SyntaxKind.SingleVariableDesignation
            || (
                this.Syntax.Kind() == SyntaxKind.DeclarationExpression
                && ((DeclarationExpressionSyntax)this.Syntax).Designation.Kind()
                    == SyntaxKind.SingleVariableDesignation
            )
    );
    bool inferenceFailed = !type.HasType;
    if (inferenceFailed)
    {
        // Fall back to an error type so downstream consumers always see a concrete type.
        type = TypeWithAnnotations.Create(binderOpt!.CreateErrorType("var"));
    }
    switch (this.VariableSymbol.Kind)
    {
        case SymbolKind.Local:
            var localSymbol = (SourceLocalSymbol)this.VariableSymbol;
            if (diagnosticsOpt?.DiagnosticBag != null)
            {
                if (inferenceFailed)
                {
                    ReportInferenceFailure(diagnosticsOpt);
                }
                else
                {
                    // For declaration expressions report on the type syntax, otherwise on the designation itself.
                    SyntaxNode typeOrDesignationSyntax =
                        this.Syntax.Kind() == SyntaxKind.DeclarationExpression
                            ? ((DeclarationExpressionSyntax)this.Syntax).Type
                            : this.Syntax;
                    Binder.CheckRestrictedTypeInAsyncMethod(
                        localSymbol.ContainingSymbol,
                        type.Type,
                        diagnosticsOpt,
                        typeOrDesignationSyntax
                    );
                }
            }
            localSymbol.SetTypeWithAnnotations(type);
            return (new BoundLocal(
                this.Syntax,
                localSymbol,
                BoundLocalDeclarationKind.WithInferredType,
                constantValueOpt: null,
                isNullableUnknown: false,
                type: type.Type,
                hasErrors: this.HasErrors || inferenceFailed
            ).WithWasConverted());
        case SymbolKind.Field:
            var fieldSymbol = (GlobalExpressionVariable)this.VariableSymbol;
            // Inference-failure diagnostics for fields are intentionally discarded below
            // (a separate bag is used and then freed); only the field's type is updated here.
            var inferenceDiagnostics = new BindingDiagnosticBag(
                DiagnosticBag.GetInstance()
#if DEBUG
                , PooledHashSet<AssemblySymbol>.GetInstance()
#endif
            );
            if (inferenceFailed)
            {
                ReportInferenceFailure(inferenceDiagnostics);
            }
            type = fieldSymbol.SetTypeWithAnnotations(type, inferenceDiagnostics);
#if DEBUG
            // Setting the field type is not expected to record any assembly dependencies.
            Debug.Assert(inferenceDiagnostics.DependenciesBag is object);
            Debug.Assert(inferenceDiagnostics.DependenciesBag.Count == 0);
#endif
            inferenceDiagnostics.Free();
            return (new BoundFieldAccess(
                this.Syntax,
                this.ReceiverOpt,
                fieldSymbol,
                null,
                LookupResultKind.Viable,
                isDeclaration: true,
                type: type.Type,
                hasErrors: this.HasErrors || inferenceFailed
            ));
        default:
            throw ExceptionUtilities.UnexpectedValue(this.VariableSymbol.Kind);
    }
}
/// <summary>
/// Core worklist-driven dataflow pass over the basic blocks in the ordinal range
/// [<paramref name="firstBlockOrdinal"/>, <paramref name="lastBlockOrdinal"/>].
/// Blocks are visited in ascending ordinal order from <paramref name="unreachableBlocksToVisit"/>
/// once the reachable worklist is drained. Recurses into finally regions (see StepThroughSingleFinally)
/// and simulates exception dispatch into catch/filter handlers.
/// </summary>
/// <param name="blocks">All blocks of the control flow graph, indexed by ordinal.</param>
/// <param name="analyzer">Client analyzer that produces/merges per-block analysis data.</param>
/// <param name="firstBlockOrdinal">First ordinal (inclusive) analyzed by this invocation.</param>
/// <param name="lastBlockOrdinal">Last ordinal (inclusive); its fall-through data becomes the result.</param>
/// <param name="initialAnalysisData">Input data seeded into the first block.</param>
/// <param name="unreachableBlocksToVisit">Unreachable blocks pending a visit (only consulted when the worklist is empty).</param>
/// <param name="outOfRangeBlocksToVisit">Receives ordinals of branch targets outside this invocation's range, for the caller to process.</param>
/// <param name="continueDispatchAfterFinally">Caches, per finally region, whether exception dispatch continues past it.</param>
/// <param name="dispatchedExceptionsFromRegions">Regions whose exception dispatch has already been performed (dedup cache).</param>
/// <param name="cancellationToken">Checked once per iteration of the main loop.</param>
/// <returns>The fall-through analysis data of the last block in range.</returns>
private static TBlockAnalysisData RunCore(
    ImmutableArray<BasicBlock> blocks,
    DataFlowAnalyzer<TBlockAnalysisData> analyzer,
    int firstBlockOrdinal,
    int lastBlockOrdinal,
    TBlockAnalysisData initialAnalysisData,
    ArrayBuilder<BasicBlock> unreachableBlocksToVisit,
    SortedSet<int> outOfRangeBlocksToVisit,
    PooledDictionary<ControlFlowRegion, bool> continueDispatchAfterFinally,
    PooledHashSet<ControlFlowRegion> dispatchedExceptionsFromRegions,
    CancellationToken cancellationToken)
{
    var toVisit = new SortedSet<int>();

    var firstBlock = blocks[firstBlockOrdinal];
    analyzer.SetCurrentAnalysisData(firstBlock, initialAnalysisData);
    toVisit.Add(firstBlock.Ordinal);

    var processedBlocks = PooledHashSet<BasicBlock>.GetInstance();
    TBlockAnalysisData resultAnalysisData = default;

    do
    {
        cancellationToken.ThrowIfCancellationRequested();

        BasicBlock current;
        if (toVisit.Count > 0)
        {
            // Always pick the lowest pending ordinal: forward progress with deterministic order.
            var min = toVisit.Min;
            toVisit.Remove(min);
            current = blocks[min];
        }
        else
        {
            // Worklist drained: pull the next in-range unreachable block, if any.
            int index;
            current = null;
            for (index = 0; index < unreachableBlocksToVisit.Count; index++)
            {
                var unreachableBlock = unreachableBlocksToVisit[index];
                if (unreachableBlock.Ordinal >= firstBlockOrdinal && unreachableBlock.Ordinal <= lastBlockOrdinal)
                {
                    current = unreachableBlock;
                    break;
                }
            }

            if (current == null)
            {
                // No in-range unreachable block; the loop condition decides whether we are done.
                continue;
            }

            unreachableBlocksToVisit.RemoveAt(index);
            if (processedBlocks.Contains(current))
            {
                // Already processed from a branch from another unreachable block.
                continue;
            }

            // Unreachable blocks start from empty analysis data.
            analyzer.SetCurrentAnalysisData(current, analyzer.GetEmptyAnalysisData());
        }

        if (current.Ordinal < firstBlockOrdinal || current.Ordinal > lastBlockOrdinal)
        {
            // Not ours to analyze; hand it back to the caller (e.g. the outer RunCore invocation).
            outOfRangeBlocksToVisit.Add(current.Ordinal);
            continue;
        }

        if (current.Ordinal == current.EnclosingRegion.FirstBlockOrdinal)
        {
            // We are revisiting first block of a region, so we need to again dispatch exceptions from region.
            dispatchedExceptionsFromRegions.Remove(current.EnclosingRegion);
        }

        TBlockAnalysisData fallThroughAnalysisData = analyzer.AnalyzeBlock(current, cancellationToken);
        bool fallThroughSuccessorIsReachable = true;

        if (current.ConditionKind != ControlFlowConditionKind.None)
        {
            TBlockAnalysisData conditionalSuccessorAnalysisData;
            (fallThroughAnalysisData, conditionalSuccessorAnalysisData) = analyzer.AnalyzeConditionalBranch(current, fallThroughAnalysisData, cancellationToken);

            bool conditionalSuccesorIsReachable = true;
            if (current.BranchValue.ConstantValue.HasValue && current.BranchValue.ConstantValue.Value is bool constant)
            {
                // A constant branch condition makes exactly one successor unreachable.
                if (constant == (current.ConditionKind == ControlFlowConditionKind.WhenTrue))
                {
                    fallThroughSuccessorIsReachable = false;
                }
                else
                {
                    conditionalSuccesorIsReachable = false;
                }
            }

            if (conditionalSuccesorIsReachable || analyzer.AnalyzeUnreachableBlocks)
            {
                FollowBranch(current, current.ConditionalSuccessor, conditionalSuccessorAnalysisData);
            }
        }
        else
        {
            fallThroughAnalysisData = analyzer.AnalyzeNonConditionalBranch(current, fallThroughAnalysisData, cancellationToken);
        }

        if (fallThroughSuccessorIsReachable || analyzer.AnalyzeUnreachableBlocks)
        {
            ControlFlowBranch branch = current.FallThroughSuccessor;
            FollowBranch(current, branch, fallThroughAnalysisData);

            if (current.EnclosingRegion.Kind == ControlFlowRegionKind.Finally && current.Ordinal == lastBlockOrdinal)
            {
                // Record whether exception dispatch should resume past this finally:
                // only when the finally completes via structured exception handling (not throw/rethrow).
                continueDispatchAfterFinally[current.EnclosingRegion] =
                    branch.Semantics != ControlFlowBranchSemantics.Throw &&
                    branch.Semantics != ControlFlowBranchSemantics.Rethrow &&
                    current.FallThroughSuccessor.Semantics == ControlFlowBranchSemantics.StructuredExceptionHandling;
            }
        }

        if (current.Ordinal == lastBlockOrdinal)
        {
            resultAnalysisData = fallThroughAnalysisData;
        }

        // We are using very simple approach:
        // If try block is reachable, we should dispatch an exception from it, even if it is empty.
        // To simplify implementation, we dispatch exception from every reachable basic block and rely
        // on dispatchedExceptionsFromRegions cache to avoid doing duplicate work.
        DispatchException(current.EnclosingRegion);

        processedBlocks.Add(current);
    }
    while (toVisit.Count != 0 || unreachableBlocksToVisit.Count != 0);

    // Return the pooled set to the pool; it was never freed before, which defeated the pooling.
    processedBlocks.Free();
    return resultAnalysisData;

    // Local functions.

    // Merges currentAnalysisData into the branch destination (stepping through finallies)
    // and schedules the destination for (re)analysis when its input changed or it is unprocessed.
    void FollowBranch(BasicBlock current, ControlFlowBranch branch, TBlockAnalysisData currentAnalysisData)
    {
        if (branch == null)
        {
            return;
        }

        switch (branch.Semantics)
        {
            case ControlFlowBranchSemantics.None:
            case ControlFlowBranchSemantics.ProgramTermination:
            case ControlFlowBranchSemantics.StructuredExceptionHandling:
            case ControlFlowBranchSemantics.Throw:
            case ControlFlowBranchSemantics.Rethrow:
            case ControlFlowBranchSemantics.Error:
                Debug.Assert(branch.Destination == null);
                return;

            case ControlFlowBranchSemantics.Regular:
            case ControlFlowBranchSemantics.Return:
                Debug.Assert(branch.Destination != null);

                if (StepThroughFinally(current.EnclosingRegion, branch.Destination, ref currentAnalysisData))
                {
                    var destination = branch.Destination;
                    var currentDestinationData = analyzer.GetCurrentAnalysisData(destination);
                    var mergedAnalysisData = analyzer.Merge(currentDestinationData, currentAnalysisData, cancellationToken);
                    // We need to analyze the destination block if both the following conditions are met:
                    // 1. Either the current block is reachable, or both destination and current are non-reachable.
                    // 2. Either the new analysis data for destination has changed or destination block hasn't
                    //    been processed.
                    if ((current.IsReachable || !destination.IsReachable) &&
                        (!analyzer.IsEqual(currentDestinationData, mergedAnalysisData) || !processedBlocks.Contains(destination)))
                    {
                        analyzer.SetCurrentAnalysisData(destination, mergedAnalysisData);
                        toVisit.Add(branch.Destination.Ordinal);
                    }
                }

                return;

            default:
                throw ExceptionUtilities.UnexpectedValue(branch.Semantics);
        }
    }

    // Returns whether we should proceed to the destination after finallies were taken care of.
    bool StepThroughFinally(ControlFlowRegion region, BasicBlock destination, ref TBlockAnalysisData currentAnalysisData)
    {
        int destinationOrdinal = destination.Ordinal;
        while (!region.ContainsBlock(destinationOrdinal))
        {
            Debug.Assert(region.Kind != ControlFlowRegionKind.Root);
            ControlFlowRegion enclosing = region.EnclosingRegion;
            if (region.Kind == ControlFlowRegionKind.Try && enclosing.Kind == ControlFlowRegionKind.TryAndFinally)
            {
                Debug.Assert(enclosing.NestedRegions[0] == region);
                Debug.Assert(enclosing.NestedRegions[1].Kind == ControlFlowRegionKind.Finally);
                if (!StepThroughSingleFinally(enclosing.NestedRegions[1], ref currentAnalysisData))
                {
                    // The point that continues dispatch is not reachable. Cancel the dispatch.
                    return false;
                }
            }

            region = enclosing;
        }

        return true;
    }

    // Returns whether we should proceed with dispatch after finally was taken care of.
    bool StepThroughSingleFinally(ControlFlowRegion @finally, ref TBlockAnalysisData currentAnalysisData)
    {
        Debug.Assert(@finally.Kind == ControlFlowRegionKind.Finally);
        var previousAnalysisData = analyzer.GetCurrentAnalysisData(blocks[@finally.FirstBlockOrdinal]);
        var mergedAnalysisData = analyzer.Merge(previousAnalysisData, currentAnalysisData, cancellationToken);
        if (!analyzer.IsEqual(previousAnalysisData, mergedAnalysisData))
        {
            // For simplicity, we do a complete walk of the finally/filter region in isolation
            // to make sure that the resume dispatch point is reachable from its beginning.
            // It could also be reachable through invalid branches into the finally and we don't want to consider
            // these cases for regular finally handling.
            currentAnalysisData = RunCore(
                blocks,
                analyzer,
                @finally.FirstBlockOrdinal,
                @finally.LastBlockOrdinal,
                mergedAnalysisData,
                unreachableBlocksToVisit,
                outOfRangeBlocksToVisit: toVisit,
                continueDispatchAfterFinally,
                dispatchedExceptionsFromRegions,
                cancellationToken);
        }

        if (!continueDispatchAfterFinally.TryGetValue(@finally, out bool dispatch))
        {
            // Unknown finally: conservatively record that dispatch does not continue.
            dispatch = false;
            continueDispatchAfterFinally.Add(@finally, false);
        }

        return dispatch;
    }

    // Walks outward from fromRegion, scheduling catch/filter handlers that could
    // observe an exception thrown inside it; deduplicated via dispatchedExceptionsFromRegions.
    void DispatchException(ControlFlowRegion fromRegion)
    {
        do
        {
            if (!dispatchedExceptionsFromRegions.Add(fromRegion))
            {
                return;
            }

            ControlFlowRegion enclosing = fromRegion.Kind == ControlFlowRegionKind.Root ? null : fromRegion.EnclosingRegion;
            if (fromRegion.Kind == ControlFlowRegionKind.Try)
            {
                switch (enclosing.Kind)
                {
                    case ControlFlowRegionKind.TryAndFinally:
                        Debug.Assert(enclosing.NestedRegions[0] == fromRegion);
                        Debug.Assert(enclosing.NestedRegions[1].Kind == ControlFlowRegionKind.Finally);
                        var currentAnalysisData = analyzer.GetCurrentAnalysisData(blocks[fromRegion.FirstBlockOrdinal]);
                        if (!StepThroughSingleFinally(enclosing.NestedRegions[1], ref currentAnalysisData))
                        {
                            // The point that continues dispatch is not reachable. Cancel the dispatch.
                            return;
                        }
                        break;

                    case ControlFlowRegionKind.TryAndCatch:
                        Debug.Assert(enclosing.NestedRegions[0] == fromRegion);
                        DispatchExceptionThroughCatches(enclosing, startAt: 1);
                        break;

                    default:
                        throw ExceptionUtilities.UnexpectedValue(enclosing.Kind);
                }
            }
            else if (fromRegion.Kind == ControlFlowRegionKind.Filter)
            {
                // If filter throws, dispatch is resumed at the next catch with an original exception
                Debug.Assert(enclosing.Kind == ControlFlowRegionKind.FilterAndHandler);

                ControlFlowRegion tryAndCatch = enclosing.EnclosingRegion;
                Debug.Assert(tryAndCatch.Kind == ControlFlowRegionKind.TryAndCatch);

                int index = tryAndCatch.NestedRegions.IndexOf(enclosing, startIndex: 1);

                if (index > 0)
                {
                    DispatchExceptionThroughCatches(tryAndCatch, startAt: index + 1);
                    fromRegion = tryAndCatch;
                    continue;
                }

                throw ExceptionUtilities.Unreachable;
            }

            fromRegion = enclosing;
        }
        while (fromRegion != null);
    }

    void DispatchExceptionThroughCatches(ControlFlowRegion tryAndCatch, int startAt)
    {
        // For simplicity, we do not try to figure out whether a catch clause definitely
        // handles all exceptions.
        Debug.Assert(tryAndCatch.Kind == ControlFlowRegionKind.TryAndCatch);
        Debug.Assert(startAt > 0);
        Debug.Assert(startAt <= tryAndCatch.NestedRegions.Length);

        for (int i = startAt; i < tryAndCatch.NestedRegions.Length; i++)
        {
            ControlFlowRegion @catch = tryAndCatch.NestedRegions[i];

            switch (@catch.Kind)
            {
                case ControlFlowRegionKind.Catch:
                    toVisit.Add(@catch.FirstBlockOrdinal);
                    break;

                case ControlFlowRegionKind.FilterAndHandler:
                    // Enter via the filter; the handler is only reached if the filter matches.
                    BasicBlock entryBlock = blocks[@catch.FirstBlockOrdinal];
                    Debug.Assert(@catch.NestedRegions[0].Kind == ControlFlowRegionKind.Filter);
                    Debug.Assert(entryBlock.Ordinal == @catch.NestedRegions[0].FirstBlockOrdinal);
                    toVisit.Add(entryBlock.Ordinal);
                    break;

                default:
                    throw ExceptionUtilities.UnexpectedValue(@catch.Kind);
            }
        }
    }
}
/// <summary>
/// Drives the dataflow analysis over the control flow graph of <paramref name="analysisContext"/>:
/// one pass over normal execution paths, plus an optional second pass over exception paths
/// when <c>analysisContext.ExceptionPathsAnalysis</c> is set. Packages the per-block results
/// into the analysis result type via <c>ToResult</c>.
/// </summary>
/// <param name="analysisContext">Context carrying the CFG, interprocedural seed data, and analysis options.</param>
/// <returns>The completed analysis result.</returns>
private TAnalysisResult Run(TAnalysisContext analysisContext)
{
    var cfg = analysisContext.ControlFlowGraph;
    var resultBuilder = new DataFlowAnalysisResultBuilder<TAnalysisData>();

    // Pooled scratch collections shared by both RunCore passes; all freed in the finally below.
    // (A previously-allocated 'unreachableBlocks' set was never used by either pass and has been removed.)
    var uniqueSuccessors = PooledHashSet<BasicBlock>.GetInstance();
    var finallyBlockSuccessorsMap = PooledDictionary<int, List<BranchWithInfo>>.GetInstance();
    var catchBlockInputDataMap = PooledDictionary<ControlFlowRegion, TAnalysisData>.GetInstance();
    var inputDataFromInfeasibleBranchesMap = PooledDictionary<int, TAnalysisData>.GetInstance();
    var worklist = new SortedSet<int>();
    var pendingBlocksNeedingAtLeastOnePass = new SortedSet<int>(cfg.Blocks.Select(b => b.Ordinal));

    // Map from Ordinal -> (Ordinal, ControlFlowConditionKind)? with following semantics:
    //  1. Key is a valid basic block ordinal.
    //  2. Value tuple indicates the following:
    //     a. Non-null tuple value: Indicates a unique branch entering the block, with following tuple values:
    //        i. Ordinal of the single unique block from which analysis data has been transferred into the Key,
    //           which is normally a predecessor but can be a non-predecessor block for finally/catch.
    //        ii. ControlFlowConditionKind indicating the nature of branch, i.e. conditional or fall through.
    //            This is required as CFG can have both conditional and fall through branches
    //            with the same source and destination blocks.
    //     b. Null tuple value: Block had analysis data flowing into it from multiple different branches.
    //
    // This map allows us to optimize the number of merge operations. We can avoid merge and directly
    // overwrite analysis data into a successor if successor block has no entry or entry with non-null tuple value
    // with the matching input branch.
    var blockToUniqueInputFlowMap = PooledDictionary<int, (int Ordinal, ControlFlowConditionKind BranchKind)?>.GetInstance();

    // Map from basic block ordinals that are destination of back edge(s) to the minimum block ordinal that dominates it,
    // i.e. for every '{key, value}' pair in the dictionary, 'key' is the destination of at least one back edge
    // and 'value' is the minimum ordinal such that there is no back edge to 'key' from any basic block with ordinal > 'value'.
    var loopRangeMap = PooledDictionary<int, int>.GetInstance();
    ComputeLoopRangeMap(cfg, loopRangeMap);

    TAnalysisData normalPathsExitBlockData = null, exceptionPathsExitBlockDataOpt = null;

    try
    {
        // Add each basic block to the result.
        foreach (var block in cfg.Blocks)
        {
            resultBuilder.Add(block);
        }

        var entry = cfg.GetEntry();

        // Initialize the input of the entry block.
        // For context sensitive inter-procedural analysis, use the provided initial analysis data.
        // Otherwise, initialize with the default bottom value of the analysis domain.
        var initialAnalysisDataOpt = analysisContext.InterproceduralAnalysisDataOpt?.InitialAnalysisData;
        UpdateInput(resultBuilder, entry, GetClonedAnalysisDataOrEmptyData(initialAnalysisDataOpt));

        // Add the block to the worklist.
        worklist.Add(entry.Ordinal);

        // Pass 1: normal execution paths.
        RunCore(cfg, worklist, pendingBlocksNeedingAtLeastOnePass, initialAnalysisDataOpt, resultBuilder,
            uniqueSuccessors, finallyBlockSuccessorsMap, catchBlockInputDataMap, inputDataFromInfeasibleBranchesMap,
            blockToUniqueInputFlowMap, loopRangeMap, exceptionPathsAnalysisPostPass: false);
        normalPathsExitBlockData = resultBuilder.ExitBlockOutputData;

        if (analysisContext.ExceptionPathsAnalysis)
        {
            // Clone and save exit block data
            normalPathsExitBlockData = AnalysisDomain.Clone(normalPathsExitBlockData);

            OperationVisitor.ExecutingExceptionPathsAnalysisPostPass = true;

            // Re-seed the worklists so every non-entry block is analyzed again on exception paths.
            foreach (var block in cfg.Blocks)
            {
                blockToUniqueInputFlowMap[block.Ordinal] = null;

                // Skip entry block analysis.
                if (block.Kind == BasicBlockKind.Entry)
                {
                    continue;
                }

                if (block.IsReachable)
                {
                    worklist.Add(block.Ordinal);
                }
                else
                {
                    pendingBlocksNeedingAtLeastOnePass.Add(block.Ordinal);
                }
            }

            // Pass 2: exception paths.
            RunCore(cfg, worklist, pendingBlocksNeedingAtLeastOnePass, initialAnalysisDataOpt, resultBuilder,
                uniqueSuccessors, finallyBlockSuccessorsMap, catchBlockInputDataMap, inputDataFromInfeasibleBranchesMap,
                blockToUniqueInputFlowMap, loopRangeMap, exceptionPathsAnalysisPostPass: true);
            exceptionPathsExitBlockDataOpt = resultBuilder.ExitBlockOutputData;

            OperationVisitor.ExecutingExceptionPathsAnalysisPostPass = false;
        }

        var mergedDataForUnhandledThrowOperationsOpt = OperationVisitor.GetMergedDataForUnhandledThrowOperations();
        var dataflowAnalysisResult = resultBuilder.ToResult(ToBlockResult, OperationVisitor.GetStateMap(),
            OperationVisitor.GetPredicateValueKindMap(), OperationVisitor.GetReturnValueAndPredicateKind(),
            OperationVisitor.InterproceduralResultsMap, resultBuilder.EntryBlockOutputData, normalPathsExitBlockData,
            exceptionPathsExitBlockDataOpt, mergedDataForUnhandledThrowOperationsOpt,
            OperationVisitor.AnalysisDataForUnhandledThrowOperations, cfg, OperationVisitor.ValueDomain.UnknownOrMayBeValue);
        return ToResult(analysisContext, dataflowAnalysisResult);
    }
    finally
    {
        // Return all pooled collections; the catch/infeasible-branch maps own disposable values.
        resultBuilder.Dispose();
        uniqueSuccessors.Free();
        finallyBlockSuccessorsMap.Free();
        catchBlockInputDataMap.Values.Dispose();
        catchBlockInputDataMap.Free();
        inputDataFromInfeasibleBranchesMap.Values.Dispose();
        inputDataFromInfeasibleBranchesMap.Free();
        blockToUniqueInputFlowMap.Free();
        loopRangeMap.Free();
    }
}
private static TypeSymbol GetNextDeclaredBase(NamedTypeSymbol type, ConsList<Symbol> basesBeingResolved, CSharpCompilation compilation, ref PooledHashSet<NamedTypeSymbol> visited) { // We shouldn't have visited this type earlier. Debug.Assert(visited == null || !visited.Contains(type.OriginalDefinition)); if (basesBeingResolved != null && basesBeingResolved.ContainsReference(type.OriginalDefinition)) { return null; } if (type.SpecialType == SpecialType.System_Object) { type.SetKnownToHaveNoDeclaredBaseCycles(); return null; } var nextType = type.GetDeclaredBaseType(basesBeingResolved); // types with no declared bases inherit object's members if ((object)nextType == null) { SetKnownToHaveNoDeclaredBaseCycles(ref visited); return GetDefaultBaseOrNull(type, compilation); } var origType = type.OriginalDefinition; if (nextType.KnownToHaveNoDeclaredBaseCycles) { origType.SetKnownToHaveNoDeclaredBaseCycles(); SetKnownToHaveNoDeclaredBaseCycles(ref visited); } else { // start cycle tracking visited = visited ?? PooledHashSet<NamedTypeSymbol>.GetInstance(); visited.Add(origType); if (visited.Contains(nextType.OriginalDefinition)) { return GetDefaultBaseOrNull(type, compilation); } } return nextType; }
/// <summary>
/// Visits a direct (non-lambda, non-delegate, non-local-function) method invocation and applies
/// tainted-data rules: sanitizers clear taint, configured source methods introduce taint on the
/// return value and/or arguments, and source-transfer methods propagate taint between arguments.
/// </summary>
/// <param name="method">The invoked method.</param>
/// <param name="visitedInstance">The receiver instance, or null for static invocations.</param>
/// <param name="visitedArguments">The already-visited arguments of the invocation.</param>
/// <param name="invokedAsDelegate">Whether the invocation occurred through a delegate.</param>
/// <param name="originalOperation">The original invocation operation (used for diagnostic syntax).</param>
/// <param name="defaultValue">The default abstract value for the invocation result.</param>
/// <returns>The abstract taint value of the invocation result.</returns>
public override TaintedDataAbstractValue VisitInvocation_NonLambdaOrDelegateOrLocalFunction(
    IMethodSymbol method,
    IOperation? visitedInstance,
    ImmutableArray<IArgumentOperation> visitedArguments,
    bool invokedAsDelegate,
    IOperation originalOperation,
    TaintedDataAbstractValue defaultValue)
{
    // Always invoke base visit.
    TaintedDataAbstractValue result = base.VisitInvocation_NonLambdaOrDelegateOrLocalFunction(
        method,
        visitedInstance,
        visitedArguments,
        invokedAsDelegate,
        originalOperation,
        defaultValue);

    // Tainted data flowing into a method call may reach a sink; report before applying this call's own rules.
    IEnumerable<IArgumentOperation> taintedArguments = GetTaintedArguments(visitedArguments);
    if (taintedArguments.Any())
    {
        ProcessTaintedDataEnteringInvocationOrCreation(method, taintedArguments, originalOperation);
    }

    // Pooled outputs of the IsSource* lookups below; freed in the finally.
    PooledHashSet<string>? taintedTargets = null;
    PooledHashSet<(string, string)>? taintedParameterPairs = null;
    try
    {
        if (this.IsSanitizingMethod(method))
        {
            // Sanitizer: the return value is clean regardless of inputs.
            result = TaintedDataAbstractValue.NotTainted;
        }
        else if (visitedInstance != null && this.IsSanitizingInstanceMethod(method))
        {
            // Instance sanitizer: both the return value and the receiver become clean.
            result = TaintedDataAbstractValue.NotTainted;
            SetTaintedForEntity(visitedInstance, result);
        }
        else if (this.DataFlowAnalysisContext.SourceInfos.IsSourceMethod(
                     method,
                     visitedArguments,
                     new Lazy<PointsToAnalysisResult?>(() => DataFlowAnalysisContext.PointsToAnalysisResultOpt),
                     new Lazy<(PointsToAnalysisResult?, ValueContentAnalysisResult?)>(() => (DataFlowAnalysisContext.PointsToAnalysisResultOpt, DataFlowAnalysisContext.ValueContentAnalysisResultOpt)),
                     out taintedTargets))
        {
            // A source method taints its return value and/or specific named parameters.
            foreach (string taintedTarget in taintedTargets)
            {
                if (taintedTarget != TaintedTargetValue.Return)
                {
                    IArgumentOperation argumentOperation = visitedArguments.FirstOrDefault(o => o.Parameter.Name == taintedTarget);
                    if (argumentOperation != null)
                    {
                        this.CacheAbstractValue(argumentOperation, TaintedDataAbstractValue.CreateTainted(argumentOperation.Parameter, argumentOperation.Syntax, method));
                    }
                    else
                    {
                        // Configuration names a parameter that does not exist on this method.
                        Debug.Fail("Are the tainted data sources misconfigured?");
                    }
                }
                else
                {
                    result = TaintedDataAbstractValue.CreateTainted(method, originalOperation.Syntax, this.OwningSymbol);
                }
            }
        }

        // Source-transfer methods copy taint from one argument to another
        // (e.g. a method writing tainted input into an output buffer).
        // NOTE(review): only argument taint is considered here; taint on the receiver
        // ('this') is not included in the names passed to IsSourceTransferMethod.
        if (this.DataFlowAnalysisContext.SourceInfos.IsSourceTransferMethod(
                method,
                visitedArguments,
                visitedArguments
                    .Where(s => this.GetCachedAbstractValue(s).Kind == TaintedDataAbstractValueKind.Tainted)
                    .Select(s => s.Parameter.Name)
                    .ToImmutableArray(),
                out taintedParameterPairs))
        {
            foreach ((string ifTaintedParameter, string thenTaintedTarget) in taintedParameterPairs)
            {
                IArgumentOperation thenTaintedTargetOperation = visitedArguments.FirstOrDefault(o => o.Parameter.Name == thenTaintedTarget);
                if (thenTaintedTargetOperation != null)
                {
                    // Transfer the source argument's cached taint value onto the target argument's entity.
                    SetTaintedForEntity(
                        thenTaintedTargetOperation,
                        this.GetCachedAbstractValue(
                            visitedArguments.FirstOrDefault(o => o.Parameter.Name == ifTaintedParameter)));
                }
                else
                {
                    Debug.Fail("Are the tainted data sources misconfigured?");
                }
            }
        }
    }
    finally
    {
        // Return pooled sets produced by the IsSource* out-parameters.
        taintedTargets?.Free();
        taintedParameterPairs?.Free();
    }

    return (result);
}
/// <summary>
/// Builds the <c>Imports</c> (extern aliases, using directives and using aliases) declared by a
/// compilation unit or namespace declaration. Returns <c>Empty</c> for other syntax kinds or when
/// there are no directives at all.
/// </summary>
/// <param name="declarationSyntax">A <c>CompilationUnitSyntax</c> or <c>NamespaceDeclarationSyntax</c>.</param>
/// <param name="binder">Binder for the container whose imports are being computed.</param>
/// <param name="basesBeingResolved">Base types currently being resolved, to avoid binding cycles.</param>
/// <param name="inUsing">True when binding happens inside a using directive; using directives are then out of scope.</param>
public static Imports FromSyntax(
    CSharpSyntaxNode declarationSyntax,
    InContainerBinder binder,
    ConsList<Symbol> basesBeingResolved,
    bool inUsing)
{
    SyntaxList<UsingDirectiveSyntax> usingDirectives;
    SyntaxList<ExternAliasDirectiveSyntax> externAliasDirectives;
    if (declarationSyntax.Kind() == SyntaxKind.CompilationUnit)
    {
        var compilation = (CompilationUnitSyntax)declarationSyntax;
        // using directives are not in scope within using directives
        usingDirectives = inUsing ? default(SyntaxList<UsingDirectiveSyntax>) : compilation.Usings;
        externAliasDirectives = compilation.Externs;
    }
    else if (declarationSyntax.Kind() == SyntaxKind.NamespaceDeclaration)
    {
        var namespaceDecl = (NamespaceDeclarationSyntax)declarationSyntax;
        // using directives are not in scope within using directives
        usingDirectives = inUsing ? default(SyntaxList<UsingDirectiveSyntax>) : namespaceDecl.Usings;
        externAliasDirectives = namespaceDecl.Externs;
    }
    else
    {
        // Only compilation units and namespace declarations carry usings/externs.
        return (Empty);
    }

    if (usingDirectives.Count == 0 && externAliasDirectives.Count == 0)
    {
        return (Empty);
    }

    // define all of the extern aliases first. They may used by the target of a using

    // using Bar=Foo::Bar;
    // using Foo::Baz;
    // extern alias Foo;

    var diagnostics = new DiagnosticBag();

    var externAliases = BuildExternAliases(externAliasDirectives, binder, diagnostics);
    var usings = ArrayBuilder<NamespaceOrTypeAndUsingDirective>.GetInstance();
    // Lazily created: remains null when the directives declare no aliases.
    ImmutableDictionary<string, AliasAndUsingDirective>.Builder usingAliases = null;
    if (usingDirectives.Count > 0)
    {
        // A binder that contains the extern aliases but not the usings. The resolution of the target of a using directive or alias
        // should not make use of other peer usings.
        var usingsBinder = binder.IsSubmissionClass ?
            // Top-level usings in interactive code are resolved in the context of global namespace, w/o extern aliases:
            new InContainerBinder(binder.Compilation.GlobalNamespace, new BuckStopsHereBinder(binder.Compilation)) :
            new InContainerBinder(binder.Container, binder.Next,
                new Imports(binder.Compilation, ImmutableDictionary<string, AliasAndUsingDirective>.Empty, ImmutableArray<NamespaceOrTypeAndUsingDirective>.Empty, externAliases, null));

        // Tracks targets already imported, to report WRN_DuplicateUsing once per duplicate.
        var uniqueUsings = PooledHashSet<NamespaceOrTypeSymbol>.GetInstance();

        foreach (var usingDirective in usingDirectives)
        {
            binder.Compilation.RecordImport(usingDirective);

            if (usingDirective.Alias != null)
            {
                // 'using static' cannot be combined with an alias.
                if (usingDirective.StaticKeyword != default(SyntaxToken))
                {
                    diagnostics.Add(ErrorCode.ERR_NoAliasHere, usingDirective.Alias.Name.Location);
                }

                string identifierValueText = usingDirective.Alias.Name.Identifier.ValueText;
                if (usingAliases != null && usingAliases.ContainsKey(identifierValueText))
                {
                    // Suppress diagnostics if we're already broken.
                    if (!usingDirective.Name.IsMissing)
                    {
                        // The using alias '{0}' appeared previously in this namespace
                        diagnostics.Add(ErrorCode.ERR_DuplicateAlias, usingDirective.Alias.Name.Location, identifierValueText);
                    }
                }
                else
                {
                    // an O(m*n) algorithm here but n (number of extern aliases) will likely be very small.
                    foreach (var externAlias in externAliases)
                    {
                        if (externAlias.Alias.Name == identifierValueText)
                        {
                            // The using alias '{0}' appeared previously in this namespace
                            diagnostics.Add(ErrorCode.ERR_DuplicateAlias, usingDirective.Location, identifierValueText);
                            break;
                        }
                    }

                    if (usingAliases == null)
                    {
                        usingAliases = ImmutableDictionary.CreateBuilder<string, AliasAndUsingDirective>();
                    }

                    // construct the alias sym with the binder for which we are building imports. That
                    // way the alias target can make use of extern alias definitions.
                    usingAliases.Add(identifierValueText, new AliasAndUsingDirective(new AliasSymbol(usingsBinder, usingDirective), usingDirective));
                }
            }
            else
            {
                if (usingDirective.Name.IsMissing)
                {
                    //don't try to lookup namespaces inserted by parser error recovery
                    continue;
                }

                var declarationBinder = usingsBinder.WithAdditionalFlags(BinderFlags.SuppressConstraintChecks);
                var imported = declarationBinder.BindNamespaceOrTypeSymbol(usingDirective.Name, diagnostics, basesBeingResolved);
                if (imported.Kind == SymbolKind.Namespace)
                {
                    // 'using static' requires a type, not a namespace.
                    if (usingDirective.StaticKeyword != default(SyntaxToken))
                    {
                        diagnostics.Add(ErrorCode.ERR_BadUsingType, usingDirective.Name.Location, imported);
                    }
                    else if (uniqueUsings.Contains(imported))
                    {
                        diagnostics.Add(ErrorCode.WRN_DuplicateUsing, usingDirective.Name.Location, imported);
                    }
                    else
                    {
                        uniqueUsings.Add(imported);
                        usings.Add(new NamespaceOrTypeAndUsingDirective(imported, usingDirective));
                    }
                }
                else if (imported.Kind == SymbolKind.NamedType)
                {
                    // A plain 'using' of a type (without 'static') is an error.
                    if (usingDirective.StaticKeyword == default(SyntaxToken))
                    {
                        diagnostics.Add(ErrorCode.ERR_BadUsingNamespace, usingDirective.Name.Location, imported);
                    }
                    else
                    {
                        var importedType = (NamedTypeSymbol)imported;
                        if (uniqueUsings.Contains(importedType))
                        {
                            diagnostics.Add(ErrorCode.WRN_DuplicateUsing, usingDirective.Name.Location, importedType);
                        }
                        else
                        {
                            uniqueUsings.Add(importedType);
                            usings.Add(new NamespaceOrTypeAndUsingDirective(importedType, usingDirective));
                        }
                    }
                }
                else if (imported.Kind != SymbolKind.ErrorType)
                {
                    // Do not report additional error if the symbol itself is erroneous.

                    // error: '<symbol>' is a '<symbol kind>' but is used as 'type or namespace'
                    diagnostics.Add(ErrorCode.ERR_BadSKknown, usingDirective.Name.Location,
                        usingDirective.Name,
                        imported.GetKindText(),
                        MessageID.IDS_SK_TYPE_OR_NAMESPACE.Localize());
                }
            }
        }

        uniqueUsings.Free();
    }

    if (diagnostics.IsEmptyWithoutResolution)
    {
        // Normalize "no diagnostics" to null so Imports can treat it uniformly.
        diagnostics = null;
    }

    return (new Imports(binder.Compilation, usingAliases.ToImmutableDictionaryOrEmpty(), usings.ToImmutableAndFree(), externAliases, diagnostics));
}
protected override void AddTrackedEntities(TaintedDataAnalysisData analysisData, PooledHashSet <AnalysisEntity> builder, bool forInterproceduralAnalysis) => analysisData.AddTrackedEntities(builder);
private static TypeSymbol GetNextDeclaredBase(NamedTypeSymbol type, ConsList <Symbol> basesBeingResolved, CSharpCompilation compilation, ref PooledHashSet <NamedTypeSymbol> visited) { // We shouldn't have visited this type earlier. Debug.Assert(visited == null || !visited.Contains(type.OriginalDefinition)); if (basesBeingResolved != null && basesBeingResolved.ContainsReference(type.OriginalDefinition)) { return(null); } if (type.SpecialType == SpecialType.System_Object) { type.SetKnownToHaveNoDeclaredBaseCycles(); return(null); } var nextType = type.GetDeclaredBaseType(basesBeingResolved); // types with no declared bases inherit object's members if ((object)nextType == null) { SetKnownToHaveNoDeclaredBaseCycles(ref visited); return(GetDefaultBaseOrNull(type, compilation)); } var origType = type.OriginalDefinition; if (nextType.KnownToHaveNoDeclaredBaseCycles) { origType.SetKnownToHaveNoDeclaredBaseCycles(); SetKnownToHaveNoDeclaredBaseCycles(ref visited); } else { // start cycle tracking visited = visited ?? PooledHashSet <NamedTypeSymbol> .GetInstance(); visited.Add(origType); if (visited.Contains(nextType.OriginalDefinition)) { return(GetDefaultBaseOrNull(type, compilation)); } } return(nextType); }
/// <summary>
/// Collects the applicable user-defined unary operator candidates for <paramref name="operand"/>
/// into <paramref name="results"/>, walking the base-class chain first and then base/effective
/// interfaces. Returns true when at least one applicable candidate was found.
/// </summary>
/// <remarks>
/// Fix: the interface-kind guard read "[email protected]" in the checked-in text — a
/// mangled form of <c>!@interface.IsInterface</c> — which does not compile. Restored the guard.
/// </remarks>
private bool GetUserDefinedOperators(UnaryOperatorKind kind, bool isChecked, BoundExpression operand, ArrayBuilder<UnaryOperatorAnalysisResult> results, ref CompoundUseSiteInfo<AssemblySymbol> useSiteInfo)
{
    Debug.Assert(operand != null);
    if ((object)operand.Type == null)
    {
        // If the operand has no type -- because it is a null reference or a lambda or a method group --
        // there is no way we can determine what type to search for user-defined operators.
        return (false);
    }

    // Spec 7.3.5 Candidate user-defined operators
    // SPEC: Given a type T and an operation op(A) ... the set of candidate user-defined
    // SPEC: operators provided by T for op(A) is determined as follows:
    // SPEC: If T is a nullable type then T0 is its underlying type; otherwise T0 is T.
    // SPEC: For all operator declarations in T0 and all lifted forms of such operators, if
    // SPEC: at least one operator is applicable with respect to A then the set of candidate
    // SPEC: operators consists of all such applicable operators. Otherwise, if T0 is object
    // SPEC: then the set of candidate operators is empty. Otherwise, the set of candidate
    // SPEC: operators is the set provided by the direct base class of T0, or the effective
    // SPEC: base class of T0 if T0 is a type parameter.
    // https://github.com/dotnet/roslyn/issues/34451: The spec quote should be adjusted to cover operators from interfaces as well.
    // From https://github.com/dotnet/csharplang/blob/main/meetings/2017/LDM-2017-06-27.md:
    // - We only even look for operator implementations in interfaces if one of the operands has a type that is an interface or
    //   a type parameter with a non-empty effective base interface list.
    // - The applicable operators from classes / structs shadow those in interfaces. This matters for constrained type parameters:
    //   the effective base class can shadow operators from effective base interfaces.
    // - If we find an applicable candidate in an interface, that candidate shadows all applicable operators in base interfaces:
    //   we stop looking.

    TypeSymbol type0 = operand.Type.StrippedType();
    TypeSymbol constrainedToTypeOpt = type0 as TypeParameterSymbol;

    // Searching for user-defined operators is expensive; let's take an early out if we can.
    if (OperatorFacts.DefinitelyHasNoUserDefinedOperators(type0))
    {
        return (false);
    }

    var operators = ArrayBuilder<UnaryOperatorSignature>.GetInstance();
    bool hadApplicableCandidates = false;

    // Start at type0 itself when it is a named type; otherwise at its (effective) base class.
    NamedTypeSymbol current = type0 as NamedTypeSymbol;
    if ((object)current == null)
    {
        current = type0.BaseTypeWithDefinitionUseSiteDiagnostics(ref useSiteInfo);
    }

    if ((object)current == null && type0.IsTypeParameter())
    {
        current = ((TypeParameterSymbol)type0).EffectiveBaseClass(ref useSiteInfo);
    }

    // Walk the base-class chain; the first type that yields applicable candidates wins (shadowing).
    for (; (object)current != null; current = current.BaseTypeWithDefinitionUseSiteDiagnostics(ref useSiteInfo))
    {
        operators.Clear();
        GetUserDefinedUnaryOperatorsFromType(constrainedToTypeOpt, current, kind, isChecked, operators);
        results.Clear();
        if (CandidateOperators(isChecked, operators, operand, results, ref useSiteInfo))
        {
            hadApplicableCandidates = true;
            break;
        }
    }

    // Look in base interfaces, or effective interfaces for type parameters
    if (!hadApplicableCandidates)
    {
        ImmutableArray<NamedTypeSymbol> interfaces = default;
        if (type0.IsInterfaceType())
        {
            interfaces = type0.AllInterfacesWithDefinitionUseSiteDiagnostics(ref useSiteInfo);
        }
        else if (type0.IsTypeParameter())
        {
            interfaces = ((TypeParameterSymbol)type0).AllEffectiveInterfacesWithDefinitionUseSiteDiagnostics(ref useSiteInfo);
        }

        if (!interfaces.IsDefaultOrEmpty)
        {
            var shadowedInterfaces = PooledHashSet<NamedTypeSymbol>.GetInstance();
            var resultsFromInterface = ArrayBuilder<UnaryOperatorAnalysisResult>.GetInstance();
            results.Clear();

            foreach (NamedTypeSymbol @interface in interfaces)
            {
                if (!@interface.IsInterface)
                {
                    // this code could be reachable in error situations
                    continue;
                }

                if (shadowedInterfaces.Contains(@interface))
                {
                    // this interface is "shadowed" by a derived interface
                    continue;
                }

                operators.Clear();
                resultsFromInterface.Clear();
                GetUserDefinedUnaryOperatorsFromType(constrainedToTypeOpt, @interface, kind, isChecked, operators);
                if (CandidateOperators(isChecked, operators, operand, resultsFromInterface, ref useSiteInfo))
                {
                    hadApplicableCandidates = true;
                    results.AddRange(resultsFromInterface);

                    // this interface "shadows" all its base interfaces
                    shadowedInterfaces.AddAll(@interface.AllInterfacesWithDefinitionUseSiteDiagnostics(ref useSiteInfo));
                }
            }

            shadowedInterfaces.Free();
            resultsFromInterface.Free();
        }
    }

    operators.Free();

    return (hadApplicableCandidates);
}
/// <summary>
/// Stops tracking <paramref name="analysisEntity"/> once it is out of scope, and also stops
/// tracking its child entities when the entity's type has value-copy semantics.
/// </summary>
private void StopTrackingDataForEntity(AnalysisEntity analysisEntity, TAnalysisData analysisData, PooledHashSet<AnalysisEntity> allEntities)
{
    // Only entities currently being tracked need any work.
    if (allEntities.Contains(analysisEntity))
    {
        // Stop tracking entity that is now out of scope.
        StopTrackingEntity(analysisEntity, analysisData);

        // Value-copy semantics mean child entities were tracked individually; drop them as well.
        if (analysisEntity.Type.HasValueCopySemantics())
        {
            foreach (var child in GetChildAnalysisEntities(analysisEntity, allEntities))
            {
                StopTrackingEntity(child, analysisData);
            }
        }
    }
}
/// <summary>
/// Determine if "type" inherits from or implements "baseType", ignoring constructed types, and dealing
/// only with original types.
/// </summary>
private static bool InheritsFromOrImplementsIgnoringConstruction(
    this TypeSymbol type,
    NamedTypeSymbol baseType,
    CSharpCompilation compilation,
    ref HashSet<DiagnosticInfo> useSiteDiagnostics,
    ConsList<TypeSymbol> basesBeingResolved = null)
{
    Debug.Assert(type.IsDefinition);
    Debug.Assert(baseType.IsDefinition);

    // Interface targets need extra bookkeeping: a worklist of interfaces still to examine
    // (baseInterfaces) and the set of interface definitions already enqueued (interfacesLookedAt).
    PooledHashSet<NamedTypeSymbol> interfacesLookedAt = null;
    ArrayBuilder<NamedTypeSymbol> baseInterfaces = null;
    bool baseTypeIsInterface = baseType.IsInterface;
    if (baseTypeIsInterface)
    {
        interfacesLookedAt = PooledHashSet<NamedTypeSymbol>.GetInstance();
        baseInterfaces = ArrayBuilder<NamedTypeSymbol>.GetInstance();
    }

    PooledHashSet<NamedTypeSymbol> visited = null;
    var current = type;
    bool result = false;

    // Phase 1: walk the base-class chain, collecting declared interfaces along the way
    // when the target is an interface.
    while ((object)current != null)
    {
        // A match requires the interface-ness of "current" to agree with the target's.
        if (baseTypeIsInterface == current.IsInterfaceType() && current.Equals(baseType))
        {
            result = true;
            break;
        }

        if (baseTypeIsInterface)
        {
            getBaseInterfaces(current, baseInterfaces, interfacesLookedAt, basesBeingResolved);
        }

        // NOTE(cyrusn): The base type of an 'original' type may not be 'original'. i.e.
        // "class Goo : IBar<int>". We must map it back to the 'original' when as we walk up
        // the base type hierarchy.
        var next = current.GetNextBaseTypeNoUseSiteDiagnostics(basesBeingResolved, compilation, ref visited);
        if ((object)next == null)
        {
            current = null;
        }
        else
        {
            current = (TypeSymbol)next.OriginalDefinition;
            current.AddUseSiteDiagnostics(ref useSiteDiagnostics);
        }
    }

    visited?.Free();

    // Phase 2: if not found in the class chain and the target is an interface, drain the
    // interface worklist breadth-first, expanding each interface's own base interfaces.
    if (!result && baseTypeIsInterface)
    {
        Debug.Assert(!result);
        while (baseInterfaces.Count != 0)
        {
            NamedTypeSymbol currentBase = baseInterfaces.Pop();

            if (!currentBase.IsInterface)
            {
                continue;
            }

            if (currentBase.Equals(baseType))
            {
                result = true;
                break;
            }

            getBaseInterfaces(currentBase, baseInterfaces, interfacesLookedAt, basesBeingResolved);
        }

        // On failure, surface use-site diagnostics for every interface examined.
        if (!result)
        {
            foreach (var candidate in interfacesLookedAt)
            {
                candidate.AddUseSiteDiagnostics(ref useSiteDiagnostics);
            }
        }
    }

    interfacesLookedAt?.Free();
    baseInterfaces?.Free();
    return (result);
/// <summary>
/// Registers the tainted-data analysis pipeline: on compilation start, loads source/sink maps for
/// this analyzer's SinkKind; per operation block, records root operations that contain taint
/// sources (properties, parameters, methods, constant arrays); at block end, runs the tainted-data
/// dataflow analysis over the recorded roots and reports a diagnostic per source-to-sink flow.
/// </summary>
public override void Initialize(AnalysisContext context)
{
    context.EnableConcurrentExecution();
    context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.Analyze | GeneratedCodeAnalysisFlags.ReportDiagnostics);
    context.RegisterCompilationStartAction(
        (CompilationStartAnalysisContext compilationContext) =>
        {
            Compilation compilation = compilationContext.Compilation;
            TaintedDataConfig taintedDataConfig = TaintedDataConfig.GetOrCreate(compilation);
            TaintedDataSymbolMap<SourceInfo> sourceInfoSymbolMap = taintedDataConfig.GetSourceSymbolMap(this.SinkKind);
            // No sources or no sinks for this SinkKind means no possible flows: bail out early.
            if (sourceInfoSymbolMap.IsEmpty)
            {
                return;
            }

            TaintedDataSymbolMap<SinkInfo> sinkInfoSymbolMap = taintedDataConfig.GetSinkSymbolMap(this.SinkKind);
            if (sinkInfoSymbolMap.IsEmpty)
            {
                return;
            }

            compilationContext.RegisterOperationBlockStartAction(
                operationBlockStartContext =>
                {
                    ISymbol owningSymbol = operationBlockStartContext.OwningSymbol;
                    AnalyzerOptions options = operationBlockStartContext.Options;
                    CancellationToken cancellationToken = operationBlockStartContext.CancellationToken;
                    if (options.IsConfiguredToSkipAnalysis(TaintedDataEnteringSinkDescriptor, owningSymbol, compilation, cancellationToken))
                    {
                        return;
                    }

                    WellKnownTypeProvider wellKnownTypeProvider = WellKnownTypeProvider.GetOrCreate(compilation);

                    // CFG and the supporting analyses are built lazily: only computed if a
                    // source-method check (or the block-end analysis) actually needs them.
                    Lazy<ControlFlowGraph?> controlFlowGraphFactory = new Lazy<ControlFlowGraph?>(
                        () => operationBlockStartContext.OperationBlocks.GetControlFlowGraph());
                    Lazy<PointsToAnalysisResult?> pointsToFactory = new Lazy<PointsToAnalysisResult?>(
                        () =>
                        {
                            if (controlFlowGraphFactory.Value == null)
                            {
                                return (null);
                            }

                            InterproceduralAnalysisConfiguration interproceduralAnalysisConfiguration = InterproceduralAnalysisConfiguration.Create(
                                options,
                                SupportedDiagnostics,
                                controlFlowGraphFactory.Value,
                                operationBlockStartContext.Compilation,
                                defaultInterproceduralAnalysisKind: InterproceduralAnalysisKind.ContextSensitive,
                                cancellationToken: cancellationToken);
                            return (PointsToAnalysis.TryGetOrComputeResult(
                                controlFlowGraphFactory.Value,
                                owningSymbol,
                                options,
                                wellKnownTypeProvider,
                                PointsToAnalysisKind.Complete,
                                interproceduralAnalysisConfiguration,
                                interproceduralAnalysisPredicate: null));
                        });
                    Lazy<(PointsToAnalysisResult?, ValueContentAnalysisResult?)> valueContentFactory = new Lazy<(PointsToAnalysisResult?, ValueContentAnalysisResult?)>(
                        () =>
                        {
                            if (controlFlowGraphFactory.Value == null)
                            {
                                return (null, null);
                            }

                            InterproceduralAnalysisConfiguration interproceduralAnalysisConfiguration = InterproceduralAnalysisConfiguration.Create(
                                options,
                                SupportedDiagnostics,
                                controlFlowGraphFactory.Value,
                                operationBlockStartContext.Compilation,
                                defaultInterproceduralAnalysisKind: InterproceduralAnalysisKind.ContextSensitive,
                                cancellationToken: cancellationToken);
                            ValueContentAnalysisResult? valuecontentAnalysisResult = ValueContentAnalysis.TryGetOrComputeResult(
                                controlFlowGraphFactory.Value,
                                owningSymbol,
                                options,
                                wellKnownTypeProvider,
                                PointsToAnalysisKind.Complete,
                                interproceduralAnalysisConfiguration,
                                out _,
                                out PointsToAnalysisResult? p);
                            return (p, valuecontentAnalysisResult);
                        });

                    // Root operations containing taint sources; guarded by lock because operation
                    // actions may run concurrently.
                    PooledHashSet<IOperation> rootOperationsNeedingAnalysis = PooledHashSet<IOperation>.GetInstance();

                    operationBlockStartContext.RegisterOperationAction(
                        operationAnalysisContext =>
                        {
                            IPropertyReferenceOperation propertyReferenceOperation = (IPropertyReferenceOperation)operationAnalysisContext.Operation;
                            if (sourceInfoSymbolMap.IsSourceProperty(propertyReferenceOperation.Property))
                            {
                                lock (rootOperationsNeedingAnalysis)
                                {
                                    rootOperationsNeedingAnalysis.Add(propertyReferenceOperation.GetRoot());
                                }
                            }
                        },
                        OperationKind.PropertyReference);

                    if (sourceInfoSymbolMap.RequiresParameterReferenceAnalysis)
                    {
                        operationBlockStartContext.RegisterOperationAction(
                            operationAnalysisContext =>
                            {
                                IParameterReferenceOperation parameterReferenceOperation = (IParameterReferenceOperation)operationAnalysisContext.Operation;
                                if (sourceInfoSymbolMap.IsSourceParameter(parameterReferenceOperation.Parameter, wellKnownTypeProvider))
                                {
                                    lock (rootOperationsNeedingAnalysis)
                                    {
                                        rootOperationsNeedingAnalysis.Add(parameterReferenceOperation.GetRoot());
                                    }
                                }
                            },
                            OperationKind.ParameterReference);
                    }

                    operationBlockStartContext.RegisterOperationAction(
                        operationAnalysisContext =>
                        {
                            IInvocationOperation invocationOperation = (IInvocationOperation)operationAnalysisContext.Operation;
                            if (sourceInfoSymbolMap.IsSourceMethod(
                                    invocationOperation.TargetMethod,
                                    invocationOperation.Arguments,
                                    pointsToFactory,
                                    valueContentFactory,
                                    out _))
                            {
                                lock (rootOperationsNeedingAnalysis)
                                {
                                    rootOperationsNeedingAnalysis.Add(invocationOperation.GetRoot());
                                }
                            }
                        },
                        OperationKind.Invocation);

                    if (TaintedDataConfig.HasTaintArraySource(SinkKind))
                    {
                        operationBlockStartContext.RegisterOperationAction(
                            operationAnalysisContext =>
                            {
                                IArrayInitializerOperation arrayInitializerOperation = (IArrayInitializerOperation)operationAnalysisContext.Operation;
                                if (arrayInitializerOperation.GetAncestor<IArrayCreationOperation>(OperationKind.ArrayCreation)?.Type is IArrayTypeSymbol arrayTypeSymbol &&
                                    sourceInfoSymbolMap.IsSourceConstantArrayOfType(arrayTypeSymbol, arrayInitializerOperation))
                                {
                                    lock (rootOperationsNeedingAnalysis)
                                    {
                                        rootOperationsNeedingAnalysis.Add(operationAnalysisContext.Operation.GetRoot());
                                    }
                                }
                            },
                            OperationKind.ArrayInitializer);
                    }

                    operationBlockStartContext.RegisterOperationBlockEndAction(
                        operationBlockAnalysisContext =>
                        {
                            try
                            {
                                lock (rootOperationsNeedingAnalysis)
                                {
                                    if (!rootOperationsNeedingAnalysis.Any())
                                    {
                                        return;
                                    }

                                    if (controlFlowGraphFactory.Value == null)
                                    {
                                        return;
                                    }

                                    foreach (IOperation rootOperation in rootOperationsNeedingAnalysis)
                                    {
                                        TaintedDataAnalysisResult? taintedDataAnalysisResult = TaintedDataAnalysis.TryGetOrComputeResult(
                                            controlFlowGraphFactory.Value,
                                            operationBlockAnalysisContext.Compilation,
                                            operationBlockAnalysisContext.OwningSymbol,
                                            operationBlockAnalysisContext.Options,
                                            TaintedDataEnteringSinkDescriptor,
                                            sourceInfoSymbolMap,
                                            taintedDataConfig.GetSanitizerSymbolMap(this.SinkKind),
                                            sinkInfoSymbolMap,
                                            operationBlockAnalysisContext.CancellationToken);
                                        if (taintedDataAnalysisResult == null)
                                        {
                                            return;
                                        }

                                        foreach (TaintedDataSourceSink sourceSink in taintedDataAnalysisResult.TaintedDataSourceSinks)
                                        {
                                            if (!sourceSink.SinkKinds.Contains(this.SinkKind))
                                            {
                                                continue;
                                            }

                                            foreach (SymbolAccess sourceOrigin in sourceSink.SourceOrigins)
                                            {
                                                // Something like:
                                                // CA3001: Potential SQL injection vulnerability was found where '{0}' in method '{1}' may be tainted by user-controlled data from '{2}' in method '{3}'.
                                                Diagnostic diagnostic = Diagnostic.Create(
                                                    this.TaintedDataEnteringSinkDescriptor,
                                                    sourceSink.Sink.Location,
                                                    additionalLocations: new Location[] { sourceOrigin.Location },
                                                    messageArgs: new object[] {
                                                        sourceSink.Sink.Symbol.ToDisplayString(SymbolDisplayFormat.MinimallyQualifiedFormat),
                                                        sourceSink.Sink.AccessingMethod.ToDisplayString(SymbolDisplayFormat.MinimallyQualifiedFormat),
                                                        sourceOrigin.Symbol.ToDisplayString(SymbolDisplayFormat.MinimallyQualifiedFormat),
                                                        sourceOrigin.AccessingMethod.ToDisplayString(SymbolDisplayFormat.MinimallyQualifiedFormat) });
                                                operationBlockAnalysisContext.ReportDiagnostic(diagnostic);
                                            }
                                        }
                                    }
                                }
                            }
                            finally
                            {
                                // Return the pooled set even when analysis throws or is cancelled.
                                rootOperationsNeedingAnalysis.Free(compilationContext.CancellationToken);
                            }
                        });
                });
        });
}
// Appends the declared interfaces of "derived" (original definitions only) to the worklist,
// skipping any interface definition that was already enqueued and any type whose bases are
// currently being resolved (cycle guard).
static void getBaseInterfaces(TypeSymbol derived, ArrayBuilder<NamedTypeSymbol> baseInterfaces, PooledHashSet<NamedTypeSymbol> interfacesLookedAt, ConsList<TypeSymbol> basesBeingResolved)
{
    if (basesBeingResolved != null && basesBeingResolved.ContainsReference(derived))
    {
        return;
    }

    // Choose the interface set appropriate to the kind of symbol we were given.
    ImmutableArray<NamedTypeSymbol> declaredInterfaces = derived switch
    {
        TypeParameterSymbol typeParameter => typeParameter.AllEffectiveInterfacesNoUseSiteDiagnostics,
        NamedTypeSymbol namedType => namedType.GetDeclaredInterfaces(basesBeingResolved),
        _ => derived.InterfacesNoUseSiteDiagnostics(basesBeingResolved),
    };

    foreach (var declaredInterface in declaredInterfaces)
    {
        NamedTypeSymbol definition = declaredInterface.OriginalDefinition;
        // Add() returns false for duplicates, so each definition is enqueued at most once.
        if (interfacesLookedAt.Add(definition))
        {
            baseInterfaces.Add(definition);
        }
    }
}
/// <summary>
/// Reports conflicts between a member's parameter names and its type parameter names (CS0412),
/// duplicate parameter names, and — when shadowing is disallowed — conflicts with names declared
/// in enclosing scopes.
/// </summary>
internal void ValidateParameterNameConflicts(
    ImmutableArray<TypeParameterSymbol> typeParameters,
    ImmutableArray<ParameterSymbol> parameters,
    bool allowShadowingNames,
    DiagnosticBag diagnostics)
{
    PooledHashSet<string> typeParameterNames = null;
    if (!typeParameters.IsDefaultOrEmpty)
    {
        typeParameterNames = PooledHashSet<string>.GetInstance();
        foreach (var typeParameter in typeParameters)
        {
            var typeParameterName = typeParameter.Name;
            if (string.IsNullOrEmpty(typeParameterName))
            {
                continue;
            }

            if (typeParameterNames.Add(typeParameterName))
            {
                // First occurrence of this name; check enclosing scopes when shadowing is disallowed.
                if (!allowShadowingNames)
                {
                    ValidateDeclarationNameConflictsInScope(typeParameter, diagnostics);
                }
            }
            // else: duplicate type parameter names are diagnosed elsewhere.
        }
    }

    PooledHashSet<string> parameterNames = null;
    if (!parameters.IsDefaultOrEmpty)
    {
        parameterNames = PooledHashSet<string>.GetInstance();
        foreach (var parameter in parameters)
        {
            var parameterName = parameter.Name;
            if (string.IsNullOrEmpty(parameterName))
            {
                continue;
            }

            if (typeParameterNames != null && typeParameterNames.Contains(parameterName))
            {
                // CS0412: 'X': a parameter or local variable cannot have the same name as a method type parameter
                diagnostics.Add(ErrorCode.ERR_LocalSameNameAsTypeParam, GetLocation(parameter), parameterName);
            }

            if (!parameterNames.Add(parameterName))
            {
                // The parameter name '{0}' is a duplicate
                diagnostics.Add(ErrorCode.ERR_DuplicateParamName, GetLocation(parameter), parameterName);
            }
            else if (!allowShadowingNames)
            {
                ValidateDeclarationNameConflictsInScope(parameter, diagnostics);
            }
        }
    }

    typeParameterNames?.Free();
    parameterNames?.Free();
}
// Lowers the decision DAG to a linear instruction stream in _loweredDecisionDag:
// when-clauses are lowered into their switch sections first, then every non-leaf,
// non-when node is lowered in topological order, labeling nodes that are jump targets.
private void LowerDecisionDagCore(BoundDecisionDag decisionDag)
{
    ImmutableArray<BoundDecisionDagNode> sortedNodes = decisionDag.TopologicallySortedNodes;
    var firstNode = sortedNodes[0];
    switch (firstNode)
    {
        case BoundWhenDecisionDagNode _:
        case BoundLeafDecisionDagNode _:
            // If the first node is a leaf or when clause rather than the code for the
            // lowered decision dag, jump there to start.
            _loweredDecisionDag.Add(_factory.Goto(GetDagNodeLabel(firstNode)));
            break;
    }

    // Code for each when clause goes in the separate code section for its switch section.
    foreach (BoundDecisionDagNode node in sortedNodes)
    {
        if (node is BoundWhenDecisionDagNode w)
        {
            LowerWhenClause(w);
        }
    }

    ImmutableArray<BoundDecisionDagNode> nodesToLower = sortedNodes.WhereAsArray(n => n.Kind != BoundKind.WhenDecisionDagNode && n.Kind != BoundKind.LeafDecisionDagNode);
    var loweredNodes = PooledHashSet<BoundDecisionDagNode>.GetInstance();
    for (int i = 0, length = nodesToLower.Length; i < length; i++)
    {
        BoundDecisionDagNode node = nodesToLower[i];
        // A node already emitted (e.g. folded into a switch dispatch) must not have a pending label.
        if (loweredNodes.Contains(node))
        {
            Debug.Assert(!_dagNodeLabels.TryGetValue(node, out _));
            continue;
        }

        // Nodes that are jump targets get their label emitted before their code.
        if (this._dagNodeLabels.TryGetValue(node, out LabelSymbol label))
        {
            _loweredDecisionDag.Add(_factory.Label(label));
        }

        // If we can generate an IL switch instruction, do so
        if (GenerateSwitchDispatch(node, loweredNodes))
        {
            continue;
        }

        // If we can generate a type test and cast more efficiently as an `is` followed by a null check, do so
        if (GenerateTypeTestAndCast(node, loweredNodes, nodesToLower, i))
        {
            continue;
        }

        // We pass the node that will follow so we can permit a test to fall through if appropriate
        BoundDecisionDagNode nextNode = ((i + 1) < length) ? nodesToLower[i + 1] : null;
        if (nextNode != null && loweredNodes.Contains(nextNode))
        {
            nextNode = null;
        }

        LowerDecisionDagNode(node, nextNode);
    }

    loweredNodes.Free();
}
// Binds an anonymous object creation expression ("new { A = x, y.B }"): binds each member
// initializer, checks member-name validity/uniqueness, constructs the anonymous type symbol,
// and produces property-declaration nodes for explicitly named members.
private BoundExpression BindAnonymousObjectCreation(AnonymousObjectCreationExpressionSyntax node, BindingDiagnosticBag diagnostics)
{
    // prepare
    var initializers = node.Initializers;
    int fieldCount = initializers.Count;
    bool hasError = false;

    // bind field initializers
    BoundExpression[] boundExpressions = new BoundExpression[fieldCount];
    AnonymousTypeField[] fields = new AnonymousTypeField[fieldCount];
    CSharpSyntaxNode[] fieldSyntaxNodes = new CSharpSyntaxNode[fieldCount];

    // WARNING: Note that SemanticModel.GetDeclaredSymbol for field initializer node relies on
    //          the fact that the order of properties in anonymous type template corresponds
    //          1-to-1 to the appropriate filed initializer syntax nodes; This means such
    //          correspondence must be preserved all the time including erroneous scenarios

    // set of names already used
    var uniqueFieldNames = PooledHashSet<string>.GetInstance();

    for (int i = 0; i < fieldCount; i++)
    {
        AnonymousObjectMemberDeclaratorSyntax fieldInitializer = initializers[i];
        NameEqualsSyntax? nameEquals = fieldInitializer.NameEquals;
        ExpressionSyntax expression = fieldInitializer.Expression;

        SyntaxToken nameToken = default(SyntaxToken);
        if (nameEquals != null)
        {
            // Explicit "Name = expr" form.
            nameToken = nameEquals.Name.Identifier;
        }
        else
        {
            // Projection form ("x.Y"): the member name is inferred from the expression.
            if (!IsAnonymousTypeMemberExpression(expression))
            {
                hasError = true;
                diagnostics.Add(ErrorCode.ERR_InvalidAnonymousTypeMemberDeclarator, expression.GetLocation());
            }

            nameToken = expression.ExtractAnonymousTypeMemberName();
        }

        hasError |= expression.HasErrors;
        boundExpressions[i] = BindRValueWithoutTargetType(expression, diagnostics);

        // check the name to be unique
        string? fieldName = null;
        if (nameToken.Kind() == SyntaxKind.IdentifierToken)
        {
            fieldName = nameToken.ValueText;
            if (!uniqueFieldNames.Add(fieldName!))
            {
                // name duplication
                Error(diagnostics, ErrorCode.ERR_AnonymousTypeDuplicatePropertyName, fieldInitializer);
                hasError = true;
                fieldName = null;
            }
        }
        else
        {
            // there is something wrong with field's name
            hasError = true;
        }

        // calculate the expression's type and report errors if needed
        TypeSymbol fieldType = GetAnonymousTypeFieldType(boundExpressions[i], fieldInitializer, diagnostics, ref hasError);

        // build anonymous type field descriptor
        fieldSyntaxNodes[i] = (nameToken.Kind() == SyntaxKind.IdentifierToken) ? (CSharpSyntaxNode)nameToken.Parent! : fieldInitializer;
        // A bad/duplicate name gets a synthesized "$<index>" placeholder to keep the 1-to-1 order.
        fields[i] = new AnonymousTypeField(
            fieldName == null ? "$" + i.ToString() : fieldName,
            fieldSyntaxNodes[i].Location,
            TypeWithAnnotations.Create(fieldType),
            RefKind.None,
            DeclarationScope.Unscoped);

        //  NOTE: ERR_InvalidAnonymousTypeMemberDeclarator (CS0746) would be generated by parser if needed
    }

    uniqueFieldNames.Free();

    // Create anonymous type
    AnonymousTypeManager manager = this.Compilation.AnonymousTypeManager;
    AnonymousTypeDescriptor descriptor = new AnonymousTypeDescriptor(fields.AsImmutableOrNull(), node.NewKeyword.GetLocation());
    NamedTypeSymbol anonymousType = manager.ConstructAnonymousTypeSymbol(descriptor);

    // declarators - bound nodes created for providing semantic info
    // on anonymous type fields having explicitly specified name
    ArrayBuilder<BoundAnonymousPropertyDeclaration> declarators = ArrayBuilder<BoundAnonymousPropertyDeclaration>.GetInstance();
    for (int i = 0; i < fieldCount; i++)
    {
        NameEqualsSyntax? explicitName = initializers[i].NameEquals;
        if (explicitName != null)
        {
            AnonymousTypeField field = fields[i];
            if (field.Name != null)
            {
                // get property symbol and create a bound property declaration node
                foreach (var symbol in anonymousType.GetMembers(field.Name))
                {
                    if (symbol.Kind == SymbolKind.Property)
                    {
                        declarators.Add(new BoundAnonymousPropertyDeclaration(fieldSyntaxNodes[i], (PropertySymbol)symbol, field.Type));
                        break;
                    }
                }
            }
        }
    }

    // check if anonymous object creation is allowed in this context
    if (!this.IsAnonymousTypesAllowed())
    {
        Error(diagnostics, ErrorCode.ERR_AnonymousTypeNotAvailable, node.NewKeyword);
        hasError = true;
    }

    // Finally create a bound node
    return (new BoundAnonymousObjectCreationExpression(
                node,
                anonymousType.InstanceConstructors[0],
                boundExpressions.AsImmutableOrNull(),
                declarators.ToImmutableAndFree(),
                anonymousType,
                hasError));
}
/// <summary>
/// Builds the fixed solution: for each project with public-API diagnostics, computes the
/// symbol names to add and to remove, rewrites the public-surface-area additional document's
/// text, and returns a solution with all updated documents applied.
/// </summary>
protected override async Task<Solution> GetChangedSolutionAsync(CancellationToken cancellationToken)
{
    var updatedPublicSurfaceAreaText = new List<KeyValuePair<DocumentId, SourceText>>();

    foreach (KeyValuePair<Project, ImmutableArray<Diagnostic>> pair in _diagnosticsToFix)
    {
        Project project = pair.Key;
        ImmutableArray<Diagnostic> diagnostics = pair.Value;

        TextDocument publicSurfaceAreaAdditionalDocument = GetPublicSurfaceAreaDocument(project);
        if (publicSurfaceAreaAdditionalDocument == null)
        {
            continue;
        }

        SourceText sourceText = await publicSurfaceAreaAdditionalDocument.GetTextAsync(cancellationToken).ConfigureAwait(false);

        IEnumerable<IGrouping<SyntaxTree, Diagnostic>> groupedDiagnostics =
            diagnostics
                .Where(d => d.Location.IsInSource)
                .GroupBy(d => d.Location.SourceTree);

        var newSymbolNames = new List<string>();
        var symbolNamesToRemoveBuilder = PooledHashSet<string>.GetInstance();

        foreach (IGrouping<SyntaxTree, Diagnostic> grouping in groupedDiagnostics)
        {
            Document document = project.GetDocument(grouping.Key);
            if (document == null)
            {
                continue;
            }

            SyntaxNode root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
            SemanticModel semanticModel = await document.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);

            foreach (Diagnostic diagnostic in grouping)
            {
                // The analyzer stashes the API name (and any sibling names to drop) in the
                // diagnostic's property bag.
                string publicSurfaceAreaSymbolName = diagnostic.Properties[DeclarePublicApiAnalyzer.PublicApiNamePropertyBagKey];
                newSymbolNames.Add(publicSurfaceAreaSymbolName);

                string siblingNamesToRemove = diagnostic.Properties[DeclarePublicApiAnalyzer.PublicApiNamesOfSiblingsToRemovePropertyBagKey];
                if (siblingNamesToRemove.Length > 0)
                {
                    var namesToRemove = siblingNamesToRemove.Split(DeclarePublicApiAnalyzer.PublicApiNamesOfSiblingsToRemovePropertyBagValueSeparator.ToCharArray());
                    foreach (var nameToRemove in namesToRemove)
                    {
                        symbolNamesToRemoveBuilder.Add(nameToRemove);
                    }
                }
            }
        }

        var symbolNamesToRemove = symbolNamesToRemoveBuilder.ToImmutableAndFree();

        // We shouldn't be attempting to remove any symbol name, while also adding it.
        Debug.Assert(newSymbolNames.All(newSymbolName => !symbolNamesToRemove.Contains(newSymbolName)));

        SourceText newSourceText = AddSymbolNamesToSourceText(sourceText, newSymbolNames);
        newSourceText = RemoveSymbolNamesFromSourceText(newSourceText, symbolNamesToRemove);

        updatedPublicSurfaceAreaText.Add(new KeyValuePair<DocumentId, SourceText>(publicSurfaceAreaAdditionalDocument.Id, newSourceText));
    }

    Solution newSolution = _solution;

    foreach (KeyValuePair<DocumentId, SourceText> pair in updatedPublicSurfaceAreaText)
    {
        newSolution = newSolution.WithAdditionalDocumentText(pair.Key, pair.Value);
    }

    return (newSolution);
}
/// <summary>
/// Returns a new manager with the given trees (and any trees they #load, transitively)
/// removed from both the external tree list and, when state has been computed, from the
/// syntax-tree/declaration state.
/// </summary>
public SyntaxAndDeclarationManager RemoveSyntaxTrees(HashSet<SyntaxTree> trees)
{
    var state = _lazyState;
    var newExternalSyntaxTrees = this.ExternalSyntaxTrees.RemoveAll(t => trees.Contains(t));
    // No computed state yet: only the external tree list needs updating.
    if (state == null)
    {
        return (this.WithExternalSyntaxTrees(newExternalSyntaxTrees));
    }

    var syntaxTrees = state.SyntaxTrees;
    var loadDirectiveMap = state.LoadDirectiveMap;
    var loadedSyntaxTreeMap = state.LoadedSyntaxTreeMap;
    // Expand the requested trees to the full set to remove, including #load-ed trees.
    var removeSet = PooledHashSet<SyntaxTree>.GetInstance();
    foreach (var tree in trees)
    {
        int unused1;
        ImmutableArray<LoadDirective> unused2;
        GetRemoveSet(
            tree,
            includeLoadedTrees: true,
            syntaxTrees: syntaxTrees,
            syntaxTreeOrdinalMap: state.OrdinalMap,
            loadDirectiveMap: loadDirectiveMap,
            loadedSyntaxTreeMap: loadedSyntaxTreeMap,
            removeSet: removeSet,
            totalReferencedTreeCount: out unused1,
            oldLoadDirectives: out unused2);
    }

    var treesBuilder = ArrayBuilder<SyntaxTree>.GetInstance();
    var ordinalMapBuilder = PooledDictionary<SyntaxTree, int>.GetInstance();
    var declMapBuilder = state.RootNamespaces.ToBuilder();
    var declTable = state.DeclarationTable;
    // Rebuild the surviving tree list/ordinal map, dropping removed trees from all maps.
    foreach (var tree in syntaxTrees)
    {
        if (removeSet.Contains(tree))
        {
            loadDirectiveMap = loadDirectiveMap.Remove(tree);
            loadedSyntaxTreeMap = loadedSyntaxTreeMap.Remove(tree.FilePath);
            RemoveSyntaxTreeFromDeclarationMapAndTable(tree, declMapBuilder, ref declTable);
        }
        else if (!IsLoadedSyntaxTree(tree, loadedSyntaxTreeMap))
        {
            UpdateSyntaxTreesAndOrdinalMapOnly(
                treesBuilder,
                tree,
                ordinalMapBuilder,
                loadDirectiveMap,
                loadedSyntaxTreeMap);
        }
    }

    removeSet.Free();

    state = new State(
        treesBuilder.ToImmutableAndFree(),
        ordinalMapBuilder.ToImmutableDictionaryAndFree(),
        loadDirectiveMap,
        loadedSyntaxTreeMap,
        declMapBuilder.ToImmutableDictionary(),
        declTable);

    return (new SyntaxAndDeclarationManager(
                newExternalSyntaxTrees,
                this.ScriptClassName,
                this.Resolver,
                this.MessageProvider,
                this.IsSubmission,
                state));
}
// Intersects two analysis value sets. Trivial cases (null/Unset yield the other operand;
// Unknown/Empty dominate; identical references short-circuit) are handled up front; otherwise
// both sets are Known and the shared ancestor set is found by walking Parents upward, keeping
// the lowest-height common candidates.
public static GlobalFlowStateAnalysisValueSet Intersect(GlobalFlowStateAnalysisValueSet value1, GlobalFlowStateAnalysisValueSet value2)
{
    if (value1 == null)
    {
        return (value2);
    }
    else if (value2 == null)
    {
        return (value1);
    }
    else if (value1.Kind == GlobalFlowStateAnalysisValueSetKind.Unset)
    {
        return (value2);
    }
    else if (value2.Kind == GlobalFlowStateAnalysisValueSetKind.Unset)
    {
        return (value1);
    }
    else if (value1.Kind == GlobalFlowStateAnalysisValueSetKind.Unknown || value2.Kind == GlobalFlowStateAnalysisValueSetKind.Unknown)
    {
        return (GlobalFlowStateAnalysisValueSet.Unknown);
    }
    else if (value1.Kind == GlobalFlowStateAnalysisValueSetKind.Empty || value2.Kind == GlobalFlowStateAnalysisValueSetKind.Empty)
    {
        return (GlobalFlowStateAnalysisValueSet.Empty);
    }
    else if (value1 == value2)
    {
        return (value1);
    }

    Debug.Assert(value1.Kind == GlobalFlowStateAnalysisValueSetKind.Known);
    Debug.Assert(value2.Kind == GlobalFlowStateAnalysisValueSetKind.Known);

    if (value1.Height == 0 && value2.Height == 0)
    {
        // NOTE(review): this call passes the same arguments as the enclosing method; unless it
        // binds to a different Intersect overload declared elsewhere in this type, it would
        // recurse indefinitely for two distinct height-0 Known sets — confirm against the
        // original source.
        return (Intersect(value1, value2));
    }

    // Walk upward from the taller set toward the ancestors of the shorter set; candidateNodes
    // holds the lowest-height common-ancestor candidates found so far.
    var currentNodes = new Queue<GlobalFlowStateAnalysisValueSet>();
    using var candidateNodes = PooledHashSet<GlobalFlowStateAnalysisValueSet>.GetInstance();
    int candidateHeight = 0;
    if (value1.Height <= value2.Height)
    {
        candidateNodes.Add(value1);
        currentNodes.Enqueue(value2);
        candidateHeight = value1.Height;
    }

    if (value2.Height <= value1.Height)
    {
        candidateNodes.Add(value2);
        currentNodes.Enqueue(value1);
        candidateHeight = value2.Height;
    }

    while (currentNodes.Count > 0)
    {
        var node = currentNodes.Dequeue();
        foreach (var parent in node.Parents)
        {
            if (candidateNodes.Contains(parent))
            {
                continue;
            }

            if (parent.Height > candidateHeight)
            {
                // Still above the candidate level; keep walking this branch upward.
                currentNodes.Enqueue(parent);
                continue;
            }

            // Found a node at or below the candidate level: existing candidates go back on the
            // worklist, and a strictly lower parent replaces (not joins) the candidate set.
            foreach (var candidate in candidateNodes)
            {
                currentNodes.Enqueue(candidate);
            }

            if (parent.Height < candidateHeight)
            {
                candidateNodes.Clear();
            }

            candidateNodes.Add(parent);
            candidateHeight = parent.Height;
        }
    }

    if (candidateNodes.Count == 1)
    {
        return (candidateNodes.Single());
    }

    // Multiple candidates at the same height: fold them pairwise; any failed pairwise
    // intersection collapses the result to Empty.
    GlobalFlowStateAnalysisValueSet? result = null;
    foreach (var candidate in candidateNodes)
    {
        if (result == null)
        {
            result = candidate;
        }
        else if (!TryIntersect(candidate, result, out result))
        {
            return (GlobalFlowStateAnalysisValueSet.Empty);
        }
    }

    return (result!);
/// <summary>
/// Used when iterating through base types in contexts in which the caller needs to avoid cycles
/// and can't use BaseType (perhaps because BaseType is in the process of being computed).
/// Returns the next base type for <paramref name="type"/>, updating <paramref name="visited"/>
/// when cycle tracking is required.
/// </summary>
internal static TypeSymbol GetNextBaseTypeNoUseSiteDiagnostics(this TypeSymbol type, ConsList<Symbol> basesBeingResolved, CSharpCompilation compilation, ref PooledHashSet<NamedTypeSymbol> visited)
{
    TypeKind typeKind = type.TypeKind;

    if (typeKind == TypeKind.TypeParameter)
    {
        return ((TypeParameterSymbol)type).EffectiveBaseClassNoUseSiteDiagnostics;
    }

    if (typeKind == TypeKind.Class ||
        typeKind == TypeKind.Struct ||
        typeKind == TypeKind.Error ||
        typeKind == TypeKind.Interface)
    {
        // These kinds can participate in declared-base cycles, so go through the
        // cycle-aware walk.
        return GetNextDeclaredBase((NamedTypeSymbol)type, basesBeingResolved, compilation, ref visited);
    }

    // Enums and delegates know their own base types
    // intrinsically (and do not include interface lists)
    // so there is no possibility of a cycle.
    return type.BaseTypeNoUseSiteDiagnostics;
}
/// <summary>
/// Entry point: validates analyzer assemblies, reads their shipped release-tracking data,
/// and generates per-release editorconfig files plus a targets file for the package.
/// Returns 0 on success; non-zero codes identify the failing validation step.
/// </summary>
public static int Main(string[] args) {
    // Positional argument contract; bail out with code 1 on any mismatch.
    const int expectedArguments = 9;
    if (args.Length != expectedArguments) {
        Console.Error.WriteLine($"Expected {expectedArguments} arguments, found {args.Length}: {string.Join(';', args)}");
        return(1);
    }
    var outputDir = args[0];
    var packageName = args[1];
    string targetsFileDir = args[2];
    string targetsFileName = args[3];
    // args[4] is a ';'-separated list of analyzer assembly names.
    var assemblyList = args[4].Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries).ToList();
    // NOTE(review): binDirectory/configuration/tfm are captured but not used in this
    // visible portion — presumably consumed by GetAssemblyPath; confirm before removing.
    var binDirectory = args[5];
    var configuration = args[6];
    var tfm = args[7];
    var releaseTrackingOptOutString = args[8];
    // Unparseable opt-out flag silently defaults to false (opted in).
    if (!bool.TryParse(releaseTrackingOptOutString, out bool releaseTrackingOptOut)) {
        releaseTrackingOptOut = false;
    }
    // Pooled accumulators; 'using' returns them to their pools on exit.
    using var shippedFilesDataBuilder = ArrayBuilder <ReleaseTrackingData> .GetInstance();
    using var versionsBuilder = PooledHashSet <Version> .GetInstance();
    // Validate all assemblies exist on disk and can be loaded.
    foreach (string assembly in assemblyList) {
        var assemblyPath = GetAssemblyPath(assembly);
        if (!File.Exists(assemblyPath)) {
            Console.Error.WriteLine($"'{assemblyPath}' does not exist");
            return(2);
        }
        try {
            // Load purely to surface load failures early; result is discarded.
            _ = Assembly.LoadFrom(assemblyPath);
        }
#pragma warning disable CA1031 // Do not catch general exception types
        catch (Exception ex)
#pragma warning restore CA1031 // Do not catch general exception types
        {
            Console.Error.WriteLine(ex.Message);
            return(3);
        }
    }
    // Compute descriptors by rule ID and shipped analyzer release versions and shipped data.
    var allRulesById = new SortedList <string, DiagnosticDescriptor>();
    var sawShippedFile = false;
    foreach (string assembly in assemblyList) {
        var assemblyPath = GetAssemblyPath(assembly);
        var analyzerFileReference = new AnalyzerFileReference(assemblyPath, AnalyzerAssemblyLoader.Instance);
        analyzerFileReference.AnalyzerLoadFailed += AnalyzerFileReference_AnalyzerLoadFailed;
        var analyzers = analyzerFileReference.GetAnalyzersForAllLanguages();
        // Index every supported diagnostic by ID; later assemblies overwrite duplicates.
        foreach (var analyzer in analyzers) {
            foreach (var rule in analyzer.SupportedDiagnostics) {
                allRulesById[rule.Id] = rule;
            }
        }
        var assemblyDir = Path.GetDirectoryName(assemblyPath);
        if (assemblyDir is null) {
            continue;
        }
        var assemblyName = Path.GetFileNameWithoutExtension(assembly);
        // Shipped release data lives next to the assembly under AnalyzerReleases/<name>/.
        var shippedFile = Path.Combine(assemblyDir, "AnalyzerReleases", assemblyName, ReleaseTrackingHelper.ShippedFileName);
        if (File.Exists(shippedFile)) {
            sawShippedFile = true;
            // Opt-out packages must NOT ship release-tracking files.
            if (releaseTrackingOptOut) {
                Console.Error.WriteLine($"'{shippedFile}' exists but was not expected");
                return(4);
            }
            try {
                using var fileStream = File.OpenRead(shippedFile);
                var sourceText = SourceText.From(fileStream);
                // Malformed entries are turned into exceptions and reported via the catch below.
                var releaseTrackingData = ReleaseTrackingHelper.ReadReleaseTrackingData(shippedFile, sourceText,
                    onDuplicateEntryInRelease: (_1, _2, _3, _4, line) => throw new Exception($"Duplicate entry in {shippedFile} at {line.LineNumber}: '{line}'"),
                    onInvalidEntry: (line, _2, _3, _4) => throw new Exception($"Invalid entry in {shippedFile} at {line.LineNumber}: '{line}'"),
                    isShippedFile: true);
                shippedFilesDataBuilder.Add(releaseTrackingData);
                versionsBuilder.AddRange(releaseTrackingData.Versions);
            }
#pragma warning disable CA1031 // Do not catch general exception types
            catch (Exception ex)
#pragma warning restore CA1031 // Do not catch general exception types
            {
                Console.Error.WriteLine(ex.Message);
                return(5);
            }
        }
    }
    // Unless explicitly opted out, at least one shipped file must exist.
    if (!releaseTrackingOptOut && !sawShippedFile) {
        Console.Error.WriteLine($"Could not find any 'AnalyzerReleases.Shipped.md' file");
        return(6);
    }
    if (versionsBuilder.Count > 0) {
        var shippedFilesData = shippedFilesDataBuilder.ToImmutable();
        // Generate global analyzer config files for each shipped version, if required.
        foreach (var version in versionsBuilder) {
            var analysisLevelVersionString = GetNormalizedVersionStringForEditorconfigFileNameSuffix(version);
            // One editorconfig per (version, analysis mode) combination.
            foreach (var analysisMode in Enum.GetValues(typeof(AnalysisMode))) {
                CreateEditorconfig(
                    outputDir,
                    $"AnalysisLevel_{analysisLevelVersionString}_{analysisMode}.editorconfig",
                    $"Rules from '{version}' release with '{analysisMode}' analysis mode",
                    $"Rules with enabled-by-default state from '{version}' release with '{analysisMode}' analysis mode. Rules that are first released in a version later than '{version}' are disabled.",
                    (AnalysisMode)analysisMode !,
                    allRulesById,
                    (shippedFilesData, version));
            }
        }
    }
    CreateTargetsFile(targetsFileDir, targetsFileName, packageName);
    return(0);
/// <summary>
/// Marks every type collected in <paramref name="visited"/> as known to have no declared
/// base cycles, then frees the pooled set and clears the caller's reference.
/// </summary>
/// <param name="visited">Pooled set of visited types; freed and nulled out on return.</param>
private static void SetKnownToHaveNoDeclaredBaseCycles(ref PooledHashSet<NamedTypeSymbol> visited)
{
    // Nothing was collected — nothing to mark or free.
    if (visited is null)
    {
        return;
    }

    foreach (NamedTypeSymbol candidate in visited)
    {
        candidate.SetKnownToHaveNoDeclaredBaseCycles();
    }

    // Return the set to the pool and drop the caller's reference so it cannot be reused.
    visited.Free();
    visited = null;
}
/// <summary>
/// Rents a pooled <see cref="PooledHashSet{T}"/> and wraps it in a disposer that
/// returns it to the pool when disposed.
/// </summary>
/// <param name="instance">Receives the rented set.</param>
/// <returns>A disposer that frees <paramref name="instance"/> on dispose.</returns>
public static PooledDisposer<PooledHashSet<T>> GetInstance(out PooledHashSet<T> instance)
{
    var pooledSet = GetInstance();
    instance = pooledSet;
    return new PooledDisposer<PooledHashSet<T>>(pooledSet);
}