/// <summary>
/// Scans a method body for runs of tokens that hash identically to a run seen
/// earlier, and reports the first such duplicate as a diagnostic.
/// </summary>
/// <param name="obj">Analysis context whose <c>Node</c> is the method declaration to scan.</param>
public void AnalyzeMethod(SyntaxNodeAnalysisContext obj)
{
    var method = (MethodDeclarationSyntax)obj.Node;
    SyntaxNode body = method.Body;
    if (body == null)
    {
        return;
    }

    // Methods on the allow-list are exempt from duplicate detection.
    if (_exceptions.Contains(method.Identifier.ValueText))
    {
        return;
    }

    var window = new RollingTokenSet(_duplicateTokenThreshold);
    foreach (SyntaxToken token in body.DescendantTokens())
    {
        // Slide the window forward: every run of _duplicateTokenThreshold
        // contiguous tokens yields a hash plus evidence of where it came from.
        (int hash, Evidence evidence) = window.Add(new TokenInfo(token));
        if (!window.IsFull())
        {
            continue;
        }

        Evidence existingEvidence = _library.TryAdd(hash, evidence);
        if (existingEvidence == null)
        {
            continue;
        }

        Location location = evidence.LocationEnvelope.Contents();
        Location existingLocation = existingEvidence.LocationEnvelope.Contents();

        // A hit that overlaps its own span is the method partially matching
        // itself — not a real duplicate, so don't report it.
        if (!location.SourceSpan.IntersectsWith(existingLocation.SourceSpan))
        {
            string reference = ToPrettyReference(existingLocation.GetLineSpan());
            _diagnostics.Add(Diagnostic.Create(
                Rule,
                location,
                new List<Location>() { existingLocation },
                reference));

            if (_generateExceptionsFile)
            {
                File.AppendAllText(
                    @"DuplicateCode.Allowed.GENERATED.txt",
                    method.Identifier.ValueText + Environment.NewLine);
            }
        }

        // Don't pile on. Move on to the next method.
        return;
    }
}
/// <summary>
/// Scans a method body for runs of tokens that hash identically to a run seen
/// earlier and reports the first such duplicate (with shape details) as a
/// diagnostic. Any unexpected analyzer failure is surfaced as its own
/// diagnostic instead of crashing the analysis pass.
/// </summary>
/// <param name="obj">Analysis context whose <c>Node</c> is the method declaration to scan.</param>
public void AnalyzeMethod(SyntaxNodeAnalysisContext obj)
{
    try
    {
        var methodDeclarationSyntax = (MethodDeclarationSyntax)obj.Node;
        SyntaxNode body = methodDeclarationSyntax.Body;
        if (body == null)
        {
            return;
        }

        // Methods on the allow-list are exempt from duplicate detection.
        if (_exceptions.Contains(methodDeclarationSyntax.Identifier.ValueText))
        {
            return;
        }

        RollingTokenSet rollingTokenSet = new RollingTokenSet(_duplicateTokenThreshold);
        foreach (SyntaxToken token in body.DescendantTokens())
        {
            // FIX: removed a per-token GetShapeDetails(token) call whose result
            // was discarded; shape details are computed below only when a
            // duplicate is actually reported.
            // For every set of token_count contiguous tokens, create a hash and
            // add it to a dictionary with some evidence.
            (int hash, Evidence evidence) = rollingTokenSet.Add(new TokenInfo(token));
            if (rollingTokenSet.IsFull())
            {
                Evidence existingEvidence = _library.TryAdd(hash, evidence);
                if (existingEvidence != null)
                {
                    Location location = evidence.LocationEnvelope.Contents();
                    Location existingEvidenceLocation = existingEvidence.LocationEnvelope.Contents();

                    // We found a duplicate, but if it's partially duplicated
                    // with itself, ignore it.
                    if (!location.SourceSpan.IntersectsWith(existingEvidenceLocation.SourceSpan))
                    {
                        string shapeDetails = GetShapeDetails(token);
                        string reference = ToPrettyReference(existingEvidenceLocation.GetLineSpan());
                        _diagnostics.Add(Diagnostic.Create(
                            Rule,
                            location,
                            new List<Location>() { existingEvidenceLocation },
                            reference,
                            shapeDetails));

                        if (_generateExceptionsFile)
                        {
                            File.AppendAllText(
                                @"DuplicateCode.Allowed.GENERATED.txt",
                                methodDeclarationSyntax.Identifier.ValueText + Environment.NewLine);
                        }
                    }

                    // Don't pile on. Move on to the next method.
                    return;
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Compress the stack trace to the "line NNN" fragments so the
        // diagnostic message stays short; fall back to the full trace on one
        // line if no such fragments are found.
        // FIX: Exception.StackTrace may be null — guard before dereferencing.
        string stackTrace = ex.StackTrace ?? string.Empty;
        string result = string.Empty;
        string[] lines = stackTrace.Split(new char[] { ':' }, StringSplitOptions.RemoveEmptyEntries);
        foreach (string line in lines)
        {
            // FIX: ordinal comparison — "line" is a compiler-emitted token,
            // not linguistic text.
            if (line.StartsWith("line", StringComparison.Ordinal) && line.Length >= 8)
            {
                result += line.Substring(0, 8);
                result += " ";
            }
        }

        if (string.IsNullOrWhiteSpace(result))
        {
            result = stackTrace.Replace(Environment.NewLine, " ## ");
        }

        _diagnostics.Add(Diagnostic.Create(
            UnhandledExceptionRule,
            obj.Node.GetLocation(),
            result,
            ex.Message));
    }
}