/// <summary> /// Analyzes the document. /// </summary> /// <param name="document">The document to analyze.</param> public override void AnalyzeDocument(CodeDocument document) { if (document == null) throw new ArgumentNullException("document"); try { this.Bootstrapper(); this.analyzerRegistry.RuleAnalyzers.ForEach(analyzer => analyzer.Initialize(this)); this.analyzerRegistry.RuleAnalyzers.ForEach(analyzer => analyzer.VisitBeforeAnalysis(document)); var csharpDocument = (CsDocument)document; this.VisiteDocument(csharpDocument); csharpDocument.WalkDocument( this.VisitElement, this.VisitStatement, this.VisitExpression, this.VisitQueryClause); csharpDocument.Tokens.ForEach(token => this.analyzerRegistry.RuleAnalyzers.ForEach(analyzer => analyzer.VisitToken(document, token))); this.analyzerRegistry.RuleAnalyzers.ForEach(analyzer => analyzer.VisitAfterAnalysis(document)); } catch (Exception ex) { throw new Exception("Analysis of the document failed.", ex); } }
/// <summary> /// Runs the analyzers against the given document. /// </summary> /// <param name="document"> /// The document to analyze. /// </param> /// <param name="parser"> /// The parser that created the document. /// </param> /// <param name="analyzers"> /// The analyzers to run against the document. /// </param> /// <param name="passNumber"> /// The current pass number. /// </param> /// <returns> /// Returns true if analysis was run, or false if analysis was delayed until the next pass. /// </returns> private bool TestAndRunAnalyzers(CodeDocument document, SourceParser parser, IEnumerable <SourceAnalyzer> analyzers, int passNumber) { Param.AssertNotNull(document, "document"); Param.AssertNotNull(parser, "parser"); Param.Ignore(analyzers); Param.Ignore(passNumber); StyleCopTrace.In(document, parser, analyzers, passNumber); if (analyzers == null) { return(StyleCopTrace.Out(true)); } // Determine whether any of the analyzers wish to delay analysis until the next pass. bool delay = false; foreach (SourceAnalyzer analyzer in analyzers) { if (analyzer.DelayAnalysis(document, passNumber)) { delay = true; break; } } if (!delay) { this.RunAnalyzers(document, parser, analyzers); } return(StyleCopTrace.Out(!delay)); }
public override void AnalyzeDocument(CodeDocument document) { _projectDir = document.SourceCode.Project.Location; var folderList = this.GetSetting(document.Settings, "FolderList") as CollectionProperty; if (folderList == null) { return; } var missingFolders = new List<string>(); foreach (var folder in folderList) { var path = Path.Combine(_projectDir, folder); if (!Directory.Exists(path)) { missingFolders.Add(folder); } } if (missingFolders.Count > 0) { AddViolation(document.DocumentContents, "FoldersMustExist", string.Join(", ", missingFolders.ToArray())); } }
internal void Validate(CodeDocument document) { this.ValidateAtLeastOneUsingDirectiveMustBePresent(document); this.ValidateFirstUsingDirectiveMustBeSystem(); IEnumerable<UsingDirectiveGroup> groups = this.usingSettings.GetUsingDirectiveByGroup(this.usingDirectives).ToList(); this.ValidateUsingGroupOrderMustBeRespected(groups); this.ValidateUsingGroupMustBeSeparatedByABlankLine(groups); foreach (UsingDirectiveGroup group in groups) { if (!this.usingSettings.AliasShouldBeLast) { this.ValidateUsingDirectiveMustBeSortedAlphabeticallyInsideAGroup(group); } else { this.ValidateAliasUsingDirectiveMustBeLastInsideAGroup(group); this.ValidateUsingDirectiveMustBeSortedAlphabeticallyInsideAGroup(UsingValidator.GetGroupWithoutAlias(group)); this.ValidateAliasMustBeSortedAlphabeticallyInsideAGroup(UsingValidator.GetGroupWithOnlyAlias(group)); } } }
/// <summary> /// Analyzes source document. /// </summary> public void AnalyzeDocument(CodeDocument document) { CurrentNamingSettings settings = new CurrentNamingSettings(); settings.Initialize(m_parent, document); CsDocument doc = (CsDocument)document; AnalyzeElements(doc.RootElement.ChildElements, settings); }
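// AnalyzeElements is not shown in this listing. A minimal sketch, assuming it simply
// recurses through the element tree; the AnalyzeElement helper is hypothetical and
// CurrentNamingSettings is assumed to carry the configured checks
// (requires System.Collections.Generic and StyleCop.CSharp).
private void AnalyzeElements(IEnumerable<CsElement> elements, CurrentNamingSettings settings)
{
    foreach (CsElement element in elements)
    {
        // Skip generated elements, mirroring the generated-code checks used elsewhere in these analyzers.
        if (element.Generated)
        {
            continue;
        }

        this.AnalyzeElement(element, settings);

        // Recurse into nested types, members, and so on.
        this.AnalyzeElements(element.ChildElements, settings);
    }
}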
public override void AnalyzeDocument(CodeDocument document) { CsDocument csdocument = (CsDocument)document; if (csdocument.RootElement != null && !csdocument.RootElement.Generated) { csdocument.WalkDocument(null, null, new CodeWalkerExpressionVisitor<object>(this.VistExpression), null); } }
public override void AnalyzeDocument(CodeDocument document) { CsDocument csdocument = (CsDocument)document; if (csdocument.RootElement != null && !csdocument.RootElement.Generated) { csdocument.WalkDocument(elementCallback); } }
/// <summary> /// Initializes settings from specified document. /// </summary> public void Initialize(SourceAnalyzer analyzer, CodeDocument document) { InitializeCommon(analyzer, document); InitializeDerivings(analyzer, document); InitializeBlockAt(analyzer, document); InitializeEnglishOnly(analyzer, document); InitializeCheckLength(analyzer, document); }
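// The Initialize* helpers are not shown in this listing. A sketch of how one of them
// might read a boolean analyzer setting, following the GetSetting pattern used elsewhere
// in these snippets; the "EnglishOnly" property name and the m_englishOnly field are assumptions.
private void InitializeEnglishOnly(SourceAnalyzer analyzer, CodeDocument document)
{
    // GetSetting returns null when the property is not configured, so fall back to the field's default.
    BooleanProperty property = analyzer.GetSetting(document.Settings, "EnglishOnly") as BooleanProperty;
    if (property != null)
    {
        m_englishOnly = property.Value;
    }
}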
public override void AnalyzeDocument(CodeDocument document) { CsDocument csdocument = (CsDocument)document; if (csdocument.RootElement != null && !csdocument.RootElement.Generated) { this.CheckSpacing(csdocument.Tokens, false, null); } }
public override void AnalyzeDocument(CodeDocument document) { CsDocument csdocument = (CsDocument)document; if (csdocument.RootElement != null && !csdocument.RootElement.Generated) { csdocument.WalkDocument(delegate { return true; }, delegate { return true; }, this.ExpressionCallback); } }
/// <summary> /// Runs the list of analyzers against the given document. /// </summary> /// <param name="document"> /// The document to analyze. /// </param> /// <param name="parser"> /// The parser that created the document. /// </param> /// <param name="analyzers"> /// The list of analyzers to run against the document. /// </param> private void RunAnalyzers(CodeDocument document, SourceParser parser, IEnumerable <SourceAnalyzer> analyzers) { Param.AssertNotNull(document, "document"); Param.AssertNotNull(parser, "parser"); Param.Ignore(analyzers, "analyzers"); StyleCopTrace.In(document, parser, analyzers); if (analyzers != null) { if (parser.SkipAnalysisForDocument(document.SourceCode)) { string format = string.Format(CultureInfo.CurrentCulture, "Skipping: {0} - {1}", document.SourceCode.Project.Location.SubstringAfterLast('\\'), GetRelativeFileName(document.SourceCode)); this.data.Core.SignalOutput(MessageImportance.Normal, format); } else { // Loop through each of the parser's analyzers. // Only call analyzers that are also in the enabled list. foreach (SourceAnalyzer analyzer in parser.Analyzers) { SourceAnalyzer localAnalyzer = analyzer; if (analyzers.Any(enabledAnalyzers => enabledAnalyzers.Id == localAnalyzer.Id)) { // Make sure the user hasn't cancelled us. if (this.data.Core.Cancel) { break; } SourceParser.ClearAnalyzerTags(document); try { if (analyzer.DoAnalysis(document)) { analyzer.AnalyzeDocument(document); } } catch (Exception ex) { StringBuilder details = new StringBuilder(); details.AppendLine(string.Format(CultureInfo.CurrentCulture, "Exception thrown by analyzer '{0}' while processing '{1}'.", analyzer.Name, document.SourceCode.Path)); // Add exception message for help on bugfix. if (!string.IsNullOrEmpty(ex.Message)) { details.AppendLine(string.Format(CultureInfo.CurrentCulture, "Exception message : {0}", ex.Message)); } this.data.Core.SignalOutput(MessageImportance.High, details.ToString()); throw; } } } } } StyleCopTrace.Out(); }
/// <summary> /// Clears the analyzer tags for the given document and all of its children. /// </summary> /// <param name="document"> /// The document to clear. /// </param> /// <remarks> /// <para> /// During each analysis run, analyzers can store data within each analyzed document for /// later use. Analyzers store and retrieve this data using the <see cref="SourceAnalyzer.GetDocumentData"/> /// and <see cref="SourceAnalyzer.SetDocumentData"/> methods. /// </para> /// <para> /// After all analysis has been completed, this analyzer data should be cleared so that /// it will not conflict with the next analysis. This method can be called to clear all /// analyzer data which was stored during the previous analysis. /// </para> /// </remarks> internal static void ClearAnalyzerTags(CodeDocument document) { Param.AssertNotNull(document, "document"); if (document != null && document.DocumentContents != null) { document.DocumentContents.ClearAnalyzerTags(); } }
/// <summary> /// Analyzes source document. /// </summary> public void AnalyzeDocument(CodeDocument document) { CustomRulesSettings settings = new CustomRulesSettings(); settings.Initialize(m_parent, document); CsDocument doc = (CsDocument)document; AnalyzePlainText(doc, settings); AnalyzeElements(doc.RootElement.ChildElements, settings); }
public override void AnalyzeDocument(CodeDocument currentCodeDocument) { var codeDocument = (CsDocument)currentCodeDocument; if (codeDocument.RootElement != null && !codeDocument.RootElement.Generated) { _filename = codeDocument.SourceCode.Name; codeDocument.WalkDocument(new CodeWalkerElementVisitor <object>(this.InspectCurrentElement), null, null); } }
/// <summary> /// Gets the data saved by this analyzer within the given document. /// </summary> /// <param name="document"> /// The document containing the data. /// </param> /// <returns> /// Returns the data if it exists. /// </returns> protected object GetDocumentData(CodeDocument document) { Param.RequireNotNull(document, "document"); object data = null; document.AnalyzerData.TryGetValue(this.Id, out data); return(data); }
public override void AnalyzeDocument(CodeDocument document) { Param.RequireNotNull(document, "document"); var csdocument = (CsDocument)document; if (csdocument.RootElement != null && !csdocument.RootElement.Generated) { CheckSpacing(csdocument.Tokens); } }
public override void AnalyzeDocument(CodeDocument document) { CsDocument csdocument = (CsDocument)document; if (csdocument.RootElement != null && !csdocument.RootElement.Generated) { csdocument.WalkDocument(new CodeWalkerElementVisitor<object>(this.AnalyzeSourceCodeForNamingConvention), null, null); csdocument.WalkDocument(new CodeWalkerElementVisitor<object>(this.AnalyzeSourceCodeForComments), null, null); } }
/// <summary> /// Analyzes source document. /// </summary> public void AnalyzeDocument(CodeDocument document) { CheckOriginalRule(document, Constants.LayoutRulesAnalyzerId, Rules.ElementMustNotBeOnSingleLine); CheckOriginalRule(document, Constants.LayoutRulesAnalyzerId, Rules.ClosingCurlyBracketMustBeFollowedByBlankLine); CheckOriginalRule(document, Constants.LayoutRulesAnalyzerId, Rules.ElementsMustBeSeparatedByBlankLine); m_customNamingAnalyzer.AnalyzeDocument(document); m_customLayoutAnalyzer.AnalyzeDocument(document); m_customDocumentationAnalyzer.AnalyzeDocument(document); }
public override void AnalyzeDocument(CodeDocument currentCodeDocument) { var codeDocument = (CsDocument)currentCodeDocument; if (codeDocument.RootElement != null && !codeDocument.RootElement.Generated) { codeDocument.WalkDocument(new CodeWalkerElementVisitor<object>(this.InspectCurrentElement), null, null); } }
public override void AnalyzeDocument(CodeDocument document) { var csharpDocument = (CsDocument)document; if (csharpDocument.RootElement != null && !csharpDocument.RootElement.Generated) { CheckLineSpace( csharpDocument, IsRuleEnabled(csharpDocument, TrailingWhiteSpacesRuleName), IsRuleEnabled(csharpDocument, TabIndentationRuleName)); } }
/// <summary> /// Extremely simple analyzer for demo purposes. /// </summary> public override void AnalyzeDocument(CodeDocument document) { CsDocument doc = (CsDocument)document; // skipping wrong or auto-generated documents if (doc.RootElement == null || doc.RootElement.Generated) return; // check all class entries doc.WalkDocument(CheckClasses); }
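// CheckClasses is referenced above but not shown. A minimal sketch of the element callback
// it could be, matching the CodeWalkerElementVisitor<object> delegate shape used throughout
// these analyzers; the "ClassNameIsTooShort" rule name is a hypothetical demo rule.
private bool CheckClasses(CsElement element, CsElement parentElement, object context)
{
    if (element.ElementType == ElementType.Class && element.Declaration.Name.Length < 3)
    {
        // Report a violation against the class element.
        this.AddViolation(element, "ClassNameIsTooShort", element.Declaration.Name);
    }

    // Returning true tells the walker to keep visiting the rest of the document.
    return true;
}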
public override void AnalyzeDocument(CodeDocument document) { var csharpDocument = (CsDocument)document; if (csharpDocument.RootElement != null && !csharpDocument.RootElement.Generated) { if (IsRuleEnabled(csharpDocument, RuleName)) { CheckDocumentHeader(csharpDocument); } } }
/// <summary> /// Gets a value indicating whether the given rule is enabled for the given document. /// </summary> /// <param name="document"> /// The document. /// </param> /// <param name="ruleName"> /// The rule to check. /// </param> /// <returns> /// Returns true if the rule is enabled; otherwise false. /// </returns> public override bool IsRuleEnabled(CodeDocument document, string ruleName) { Param.RequireNotNull(document, "document"); Param.RequireValidString(ruleName, "ruleName"); if (document.SourceCode == null || document.SourceCode.Settings == null) { return(true); } return(document.SourceCode.Settings.IsRuleEnabled(this, ruleName)); }
/// <summary> /// Checks whether specified rule is enabled. /// </summary> public override bool IsRuleEnabled(CodeDocument document, string ruleName) { if (SpecialRunningParameters != null) { if (!String.IsNullOrEmpty(SpecialRunningParameters.OnlyEnabledRule)) { return ruleName == SpecialRunningParameters.OnlyEnabledRule; } } return base.IsRuleEnabled(document, ruleName); }
/// <summary> /// Analyzes source document. /// </summary> public override void AnalyzeDocument(CodeDocument document) { CsDocument doc = (CsDocument)document; if (doc.RootElement == null || doc.RootElement.Generated) return; if (IsRuleEnabled(document, Rules.AdvancedNamingRules.ToString())) m_advancedNamingRules.AnalyzeDocument(document); m_extendedOriginalRules.AnalyzeDocument(document); m_moreCustomRules.AnalyzeDocument(document); }
/// <summary> /// Extremely simple analyzer for demo purposes. /// </summary> /// <param name="document"> /// The complete code document being passed in for inspection. /// </param> public override void AnalyzeDocument(CodeDocument document) { Param.RequireNotNull(document, "document"); var doc = (CsDocument)document; // skipping wrong or auto-generated documents if (doc.RootElement == null || doc.RootElement.Generated) { return; } // check all class entries doc.WalkDocument(this.CheckClasses); }
/// <summary> /// Stores the given data object within the given document. /// </summary> /// <param name="document"> /// The document to store the data within. /// </param> /// <param name="data"> /// The data to store. /// </param> protected void SetDocumentData(CodeDocument document, object data) { Param.RequireNotNull(document, "document"); Param.Ignore(data); if (document.AnalyzerData.ContainsKey(this.Id)) { document.AnalyzerData[this.Id] = data; } else { document.AnalyzerData.Add(this.Id, data); } }
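// A minimal sketch of how a derived analyzer might combine SetDocumentData and GetDocumentData
// to carry a value through an analysis of a single document; the element-counting logic is
// hypothetical and exists only to illustrate the store/retrieve pattern.
public override void AnalyzeDocument(CodeDocument document)
{
    Param.RequireNotNull(document, "document");

    // Retrieve any data this analyzer stored for the document earlier, or start fresh.
    int analyzedCount = this.GetDocumentData(document) as int? ?? 0;

    CsDocument csdocument = (CsDocument)document;
    if (csdocument.RootElement != null && !csdocument.RootElement.Generated)
    {
        analyzedCount += csdocument.RootElement.ChildElements.Count;
    }

    // Persist the value; it is cleared between runs by SourceParser.ClearAnalyzerTags.
    this.SetDocumentData(document, analyzedCount);
}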
/// <summary> /// Analyzes the document and invokes each documentation and region check to determine whether its rule has been violated. /// </summary> /// <param name="document">The document to analyze.</param> public override void AnalyzeDocument(CodeDocument document) { var csdocument = (CsDocument)document; if (csdocument.RootElement != null && !csdocument.RootElement.Generated) { csdocument.WalkDocument(this.PublicConstructorsShouldBeDocumented, null, null); csdocument.WalkDocument(this.PublicMethodsShouldBeDocumented, null, null); csdocument.WalkDocument(this.PublicPropertiesShouldBeDocumented, null, null); csdocument.WalkDocument(this.InternalConstructorsShouldBeDocumented, null, null); csdocument.WalkDocument(this.InternalMethodsShouldBeDocumented, null, null); csdocument.WalkDocument(this.InternalPropertiesShouldBeDocumented, null, null); csdocument.WalkDocument(this.RegionMustBeSeparatedByBlankLine, null, null); csdocument.WalkDocument(this.RegionNameMustHaveFirstLetterCapitalized, null, null); } }
public override bool DoAnalysis(CodeDocument document) { var doc = (CsDocument)document; // skipping wrong or auto-generated documents if (doc.RootElement == null || doc.RootElement.Generated) { return true; } if (IsRuleEnabled(document, RuleName)) { doc.WalkDocument(null, null, VisitExpression); } return true; }
/// <summary> /// Exports the violations found within this document into the given xml node. /// </summary> /// <param name="document"> /// The document containing the violations. /// </param> /// <param name="violationsDocument"> /// The xml document in which to store the violation information. /// </param> /// <param name="parentNode"> /// The parent node within this xml document under which to store the violation information. /// </param> internal static void ExportViolations(CodeDocument document, XmlDocument violationsDocument, XmlNode parentNode) { Param.AssertNotNull(document, "document"); Param.AssertNotNull(violationsDocument, "violationsDocument"); Param.AssertNotNull(parentNode, "parentNode"); if (document.DocumentContents != null) { SourceParser.ExportElementViolations(document.DocumentContents, violationsDocument, parentNode); } if (document.SourceCode != null) { // Add the violations from the source code. foreach (Violation violation in document.SourceCode.Violations) { SourceParser.ExportViolation(violation, violationsDocument, parentNode); } } }
/// <summary> /// Visits a single token, checking for trailing whitespace and for lines indented with spaces instead of tabs. /// </summary> /// <param name="document">The document.</param> /// <param name="token">The token.</param> public override void VisitToken(CodeDocument document, CsToken token) { if (token.CsTokenType == CsTokenType.EndOfLine && this.lastTokenWasWhitespace) { this.SourceAnalyzer.AddViolation(document.DocumentContents, token.LineNumber, ContribRule.NoTrailingWhiteSpace); } if (this.isBeginningOfLine) { if ((token.CsTokenType == CsTokenType.WhiteSpace) && (!SpacingAnalyzer.IsValidIndentationWhitespace(token.Text))) { this.SourceAnalyzer.AddViolation(document.DocumentContents, token.LineNumber, ContribRule.IndentUsingTabs); } this.isBeginningOfLine = false; } this.lastTokenWasWhitespace = token.CsTokenType == CsTokenType.WhiteSpace; if (token.CsTokenType == CsTokenType.EndOfLine) this.isBeginningOfLine = true; }
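// SpacingAnalyzer.IsValidIndentationWhitespace is referenced above but not shown.
// A sketch of what it might check, assuming the IndentUsingTabs rule permits only
// tab characters in the leading whitespace run (the exact policy is an assumption).
private static bool IsValidIndentationWhitespace(string text)
{
    foreach (char c in text)
    {
        if (c != '\t')
        {
            // Any space (or other whitespace character) in the leading run means
            // the line is not indented purely with tabs.
            return false;
        }
    }

    return true;
}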
/// <summary> /// Runs the analyzers against the given document. /// </summary> /// <param name="document"> /// The document to analyze. /// </param> /// <param name="parser"> /// The parser that created the document. /// </param> /// <param name="analyzers"> /// The analyzers to run against the document. /// </param> /// <param name="passNumber"> /// The current pass number. /// </param> /// <returns> /// Returns true if analysis was run, or false if analysis was delayed until the next pass. /// </returns> private bool TestAndRunAnalyzers(CodeDocument document, SourceParser parser, IEnumerable<SourceAnalyzer> analyzers, int passNumber) { Param.AssertNotNull(document, "document"); Param.AssertNotNull(parser, "parser"); Param.Ignore(analyzers); Param.Ignore(passNumber); StyleCopTrace.In(document, parser, analyzers, passNumber); if (analyzers == null) { return StyleCopTrace.Out(true); } // Determine whether any of the analyzers wish to delay analysis until the next pass. bool delay = false; foreach (SourceAnalyzer analyzer in analyzers) { if (analyzer.DelayAnalysis(document, passNumber)) { delay = true; break; } } if (!delay) { this.RunAnalyzers(document, parser, analyzers); } return StyleCopTrace.Out(!delay); }
/// <summary> /// Analyzes a code document. /// </summary> /// <param name="document"> /// The document to analyze. /// </param> public virtual void AnalyzeDocument(CodeDocument document) { Param.Ignore(document); }
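// A minimal sketch of a custom analyzer hooking into this extension point, in the usual
// StyleCop SDK shape: derive from SourceAnalyzer, attribute the class with the parser it
// extends, and override AnalyzeDocument. The ExampleAnalyzer name and the
// "EnumNamesMustNotEndWithEnum" rule are hypothetical; real analyzers also declare their
// rules in an embedded XML resource.
using System;
using StyleCop;
using StyleCop.CSharp;

[SourceAnalyzer(typeof(CsParser))]
public class ExampleAnalyzer : SourceAnalyzer
{
    public override void AnalyzeDocument(CodeDocument document)
    {
        Param.RequireNotNull(document, "document");

        CsDocument csdocument = (CsDocument)document;
        if (csdocument.RootElement == null || csdocument.RootElement.Generated)
        {
            return;
        }

        // Walk only the elements; statement and expression callbacks are not needed here.
        csdocument.WalkDocument(this.VisitElement, null, null);
    }

    private bool VisitElement(CsElement element, CsElement parentElement, object context)
    {
        if (element.ElementType == ElementType.Enum && element.Declaration.Name.EndsWith("Enum", StringComparison.Ordinal))
        {
            this.AddViolation(element, "EnumNamesMustNotEndWithEnum", element.Declaration.Name);
        }

        return true;
    }
}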
/// <inheritdoc /> public override bool DoAnalysis(CodeDocument document) { Param.RequireNotNull(document, "document"); CsDocument csdocument = (CsDocument)document; return csdocument.FileHeader == null || !csdocument.FileHeader.UnStyled; }
/// <summary> /// Checks the top-level elements and access modifiers within the given document. /// </summary> /// <param name="document">The document to check.</param> public override void AnalyzeDocument(CodeDocument document) { Param.RequireNotNull(document, "document"); CsDocument csdocument = (CsDocument)document; if (csdocument.RootElement != null && !csdocument.RootElement.Generated) { // Check the access modifier rules. TopLevelElements topLevelElements = new TopLevelElements(); csdocument.WalkDocument<TopLevelElements>( new CodeWalkerElementVisitor<TopLevelElements>(this.ProcessElement), new CodeWalkerStatementVisitor<TopLevelElements>(this.ProcessStatement), new CodeWalkerExpressionVisitor<TopLevelElements>(this.ProcessExpression), topLevelElements); // If there is more than one top-level class in the file, make sure they are all // partial classes and are all of the same type. if (topLevelElements.Classes.Count > 1) { string name = string.Empty; foreach (Class classElement in topLevelElements.Classes) { if (!classElement.Declaration.ContainsModifier(CsTokenType.Partial) || (!string.IsNullOrEmpty(name) && string.Compare(name, classElement.FullNamespaceName, StringComparison.Ordinal) != 0)) { // Set the violation line number to the second class in the file. int count = 0; foreach (Class c in topLevelElements.Classes) { if (count == 1) { this.AddViolation(c, c.LineNumber, Rules.FileMayOnlyContainASingleClass); break; } ++count; } break; } name = classElement.FullNamespaceName; } } // If there is more than one namespace in the file, this is a violation. if (topLevelElements.Namespaces.Count > 1) { // Set the violation line number to the second namespace in the file. int count = 0; foreach (Namespace n in topLevelElements.Namespaces) { if (count == 1) { this.AddViolation(n, n.LineNumber, Rules.FileMayOnlyContainASingleNamespace); break; } ++count; } } } }
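// TopLevelElements is used above as the walker context but is not shown in this listing.
// A minimal shape consistent with that usage (the real type may carry additional state
// for the access-modifier checks); requires System.Collections.Generic and StyleCop.CSharp.
private class TopLevelElements
{
    private readonly List<Class> classes = new List<Class>();
    private readonly List<Namespace> namespaces = new List<Namespace>();

    // Top-level classes discovered by ProcessElement.
    public ICollection<Class> Classes
    {
        get { return this.classes; }
    }

    // Namespace elements discovered by ProcessElement.
    public ICollection<Namespace> Namespaces
    {
        get { return this.namespaces; }
    }
}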
public abstract bool ParseFile(SourceCode sourceCode, int passNumber, ref CodeDocument document);
/// <summary> /// Parses a source code document. /// </summary> /// <param name="sourceCode"> /// The source code to parse. /// </param> /// <param name="passNumber"> /// The current pass number. /// </param> /// <param name="document"> /// The parsed representation of the file. /// </param> /// <returns> /// Returns false if no further parsing should be done on this file. /// </returns> public override bool ParseFile(SourceCode sourceCode, int passNumber, ref CodeDocument document) { Param.Ignore(sourceCode, passNumber, document); throw new NotImplementedException(); }
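// A minimal sketch of how a concrete parser might honor the multi-pass contract: it builds
// the document on the first pass and returns false to signal that no further passes are
// required. BuildDocument is a hypothetical helper; real parsers construct a concrete
// CodeDocument subclass here.
public override bool ParseFile(SourceCode sourceCode, int passNumber, ref CodeDocument document)
{
    Param.RequireNotNull(sourceCode, "sourceCode");
    Param.Ignore(passNumber, document);

    if (passNumber == 0)
    {
        // First pass: parse the source and hand the result back through the ref parameter.
        document = this.BuildDocument(sourceCode);
    }

    // Returning false indicates that parsing of this file is complete.
    return false;
}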
/// <summary> /// Runs the list of analyzers against the given document. /// </summary> /// <param name="document"> /// The document to analyze. /// </param> /// <param name="parser"> /// The parser that created the document. /// </param> /// <param name="analyzers"> /// The list of analyzers to run against the document. /// </param> private void RunAnalyzers(CodeDocument document, SourceParser parser, IEnumerable<SourceAnalyzer> analyzers) { Param.AssertNotNull(document, "document"); Param.AssertNotNull(parser, "parser"); Param.Ignore(analyzers, "analyzers"); StyleCopTrace.In(document, parser, analyzers); if (analyzers != null) { if (parser.SkipAnalysisForDocument(document.SourceCode)) { string format = string.Format(CultureInfo.CurrentCulture, "Skipping: {0} - {1}", document.SourceCode.Project.Location.SubstringAfterLast('\\'), GetRelativeFileName(document.SourceCode)); this.data.Core.SignalOutput(MessageImportance.Normal, format); } else { // Loop through each of the parser's analyzers. // Only call analyzers that are also in the enabled list. foreach (SourceAnalyzer analyzer in parser.Analyzers) { SourceAnalyzer localAnalyzer = analyzer; if (analyzers.Any(enabledAnalyzers => enabledAnalyzers.Id == localAnalyzer.Id)) { // Make sure the user hasn't cancelled us. if (this.data.Core.Cancel) { break; } SourceParser.ClearAnalyzerTags(document); try { if (analyzer.DoAnalysis(document)) { analyzer.AnalyzeDocument(document); } } catch (Exception ex) { StringBuilder details = new StringBuilder(); details.AppendLine(string.Format(CultureInfo.CurrentCulture, "Exception thrown by analyzer '{0}' while processing '{1}'.", analyzer.Name, document.SourceCode.Path)); // Add exception message for help on bugfix. if (!string.IsNullOrEmpty(ex.Message)) { details.AppendLine(string.Format(CultureInfo.CurrentCulture, "Exception message : {0}", ex.Message)); } this.data.Core.SignalOutput(MessageImportance.High, details.ToString()); throw; } } } } } StyleCopTrace.Out(); }
/// <summary> /// Checks whether specified rule is enabled. /// </summary> public override bool IsRuleEnabled(CodeDocument document, string ruleName) { return true; }
/// <summary> /// Saves the given code document results into a cache document. /// </summary> /// <param name="document"> /// The document to save. /// </param> /// <param name="parser"> /// The parser that created the document. /// </param> /// <param name="settingsTimeStamp"> /// The time when the settings were last updated. /// </param> /// <returns> /// Returns true if the document was saved. /// </returns> public bool SaveDocumentResults(CodeDocument document, SourceParser parser, DateTime settingsTimeStamp) { Param.AssertNotNull(document, "document"); Param.AssertNotNull(parser, "parser"); Param.Ignore(settingsTimeStamp); bool success = false; lock (this) { try { XmlDocument xml; if (!this.documentHash.ContainsKey(document.SourceCode.Project.Location)) { XmlNode temp; xml = this.OpenResultsCache(document.SourceCode, parser, out temp); if (xml != null) { this.documentHash.Add(document.SourceCode.Project.Location, xml); } } else { xml = this.documentHash[document.SourceCode.Project.Location]; } if (xml != null) { XmlNode remove = xml.DocumentElement.SelectSingleNode( string.Format(CultureInfo.InvariantCulture, "sourcecode[@name=\"{0}\"][@parser=\"{1}\"]", document.SourceCode.Name, parser.Id)); if (remove != null) { xml.DocumentElement.RemoveChild(remove); } } else { xml = new XmlDocument(); // Create the document node. xml.AppendChild(xml.CreateElement("stylecopresultscache")); // Add the version. XmlNode versionNode = xml.CreateElement("version"); xml.DocumentElement.AppendChild(versionNode); versionNode.InnerText = ResultsCache.Version; if (this.documentHash.ContainsKey(document.SourceCode.Project.Location)) { this.documentHash.Remove(document.SourceCode.Project.Location); } this.documentHash.Add(document.SourceCode.Project.Location, xml); } XmlNode root = xml.CreateElement("sourcecode"); XmlAttribute name = xml.CreateAttribute("name"); name.Value = document.SourceCode.Name; root.Attributes.Append(name); xml.DocumentElement.AppendChild(root); // Create the timestamps node. // We need to store the timestamp of all files that were used to create the violation. // Parser, Rules, settings, source file, spell checker, and dictionaries. XmlElement node = xml.CreateElement("timestamps"); root.AppendChild(node); this.AddTimestampToXml(xml, node, "styleCop", this.core.TimeStamp); this.AddTimestampToXml(xml, node, "settingsFile", settingsTimeStamp); // Stores the last write time of the source code. this.AddTimestampToXml(xml, node, "sourceFile", document.SourceCode.TimeStamp); // Store all the rules and parser timestamps this.AddTimestampToXml(xml, node, "parser", document.SourceCode.Parser.TimeStamp); foreach (SourceAnalyzer analyzer in document.SourceCode.Parser.Analyzers) { this.AddTimestampToXml(xml, node, analyzer.Id, analyzer.TimeStamp); this.AddHashCodeToXml(xml, node, analyzer.Id + ".FilesHashCode", analyzer.GetDependantFilesHashCode(document.SourceCode.Project.Culture)); } // Add the parser ID attribute. if (document.SourceCode.Parser != null) { XmlAttribute attribute = xml.CreateAttribute("parser"); root.Attributes.Append(attribute); attribute.Value = document.SourceCode.Parser.Id; } // Create the violations node. node = xml.CreateElement("violations"); root.AppendChild(node); // Add the violations. SourceParser.ExportViolations(document, xml, node); success = true; } catch (XmlException) { } } return success; }
/// <summary> /// Determines whether the analyzer wishes to delay its analysis until a later pass. /// </summary> /// <param name="document"> /// The document to analyze. /// </param> /// <param name="passNumber"> /// The current pass number. /// </param> /// <returns> /// Returns true if the analysis should be delayed until the next pass, or /// false if the analysis should be performed in the current pass. /// </returns> public virtual bool DelayAnalysis(CodeDocument document, int passNumber) { Param.Ignore(document, passNumber); return(false); }
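// A minimal sketch of how a derived analyzer might use this hook to postpone its work
// until the second pass, for example because it depends on data gathered by other
// analyzers during pass zero; the pass threshold here is an assumption.
public override bool DelayAnalysis(CodeDocument document, int passNumber)
{
    Param.Ignore(document);

    // Ask the framework to call this analyzer again on a later pass until pass 1 is reached.
    return passNumber < 1;
}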
/// <summary> /// Gets a value indicating whether the given rule is enabled for the given document. /// </summary> /// <param name="document"> /// The document. /// </param> /// <param name="ruleName"> /// The rule to check. /// </param> /// <returns> /// Returns true if the rule is enabled; otherwise false. /// </returns> public virtual bool IsRuleEnabled(CodeDocument document, string ruleName) { Param.Ignore(document, ruleName); return(true); }
/// <summary> /// Gives the analyzer a final chance to stop the analysis of this document. /// </summary> /// <param name="document"> /// The document to check before continuing analysis. /// </param> /// <returns> /// Returns true if the document should be analyzed; otherwise false. /// </returns> public virtual bool DoAnalysis(CodeDocument document) { Param.Ignore(document); return(true); }
private void ValidateAtLeastOneUsingDirectiveMustBePresent(CodeDocument document) { if (this.usingDirectives.Count() == 0) { DocumentRoot rootElement = ((CsDocument)document).RootElement; this.sourceAnalyzer.AddViolation(rootElement, rootElement.LineNumber, ContribRule.FirstUsingDirectiveMustBeSystem); } }
/// <summary> /// Saves the given code document results into a cache document. /// </summary> /// <param name="document"> /// The document to save. /// </param> /// <param name="parser"> /// The parser that created the document. /// </param> /// <param name="settingsTimeStamp"> /// The time when the settings were last updated. /// </param> /// <returns> /// Returns true if the document was saved. /// </returns> public bool SaveDocumentResults(CodeDocument document, SourceParser parser, DateTime settingsTimeStamp) { Param.AssertNotNull(document, "document"); Param.AssertNotNull(parser, "parser"); Param.Ignore(settingsTimeStamp); bool success = false; lock (this) { try { XmlDocument xml; if (!this.documentHash.ContainsKey(document.SourceCode.Project.Location)) { XmlNode temp; xml = this.OpenResultsCache(document.SourceCode, parser, out temp); if (xml != null) { this.documentHash.Add(document.SourceCode.Project.Location, xml); } } else { xml = this.documentHash[document.SourceCode.Project.Location]; } if (xml != null) { XmlNode remove = xml.DocumentElement.SelectSingleNode( string.Format(CultureInfo.InvariantCulture, "sourcecode[@name=\"{0}\"][@parser=\"{1}\"]", document.SourceCode.Name, parser.Id)); if (remove != null) { xml.DocumentElement.RemoveChild(remove); } } else { xml = new XmlDocument(); // Create the document node. xml.AppendChild(xml.CreateElement("stylecopresultscache")); // Add the version. XmlNode versionNode = xml.CreateElement("version"); xml.DocumentElement.AppendChild(versionNode); versionNode.InnerText = ResultsCache.Version; if (this.documentHash.ContainsKey(document.SourceCode.Project.Location)) { this.documentHash.Remove(document.SourceCode.Project.Location); } this.documentHash.Add(document.SourceCode.Project.Location, xml); } XmlNode root = xml.CreateElement("sourcecode"); XmlAttribute name = xml.CreateAttribute("name"); name.Value = document.SourceCode.Name; root.Attributes.Append(name); xml.DocumentElement.AppendChild(root); // Create the timestamps node. // We need to store the timestamp of all files that were used to create the violation. // Parser, Rules, settings, source file, spell checker, and dictionaries. XmlElement node = xml.CreateElement("timestamps"); root.AppendChild(node); this.AddTimestampToXml(xml, node, "styleCop", this.core.TimeStamp); this.AddTimestampToXml(xml, node, "settingsFile", settingsTimeStamp); // Stores the last write time of the source code. this.AddTimestampToXml(xml, node, "sourceFile", document.SourceCode.TimeStamp); // Store all the rules and parser timestamps this.AddTimestampToXml(xml, node, "parser", document.SourceCode.Parser.TimeStamp); foreach (SourceAnalyzer analyzer in document.SourceCode.Parser.Analyzers) { this.AddTimestampToXml(xml, node, analyzer.Id, analyzer.TimeStamp); this.AddHashCodeToXml(xml, node, analyzer.Id + ".FilesHashCode", analyzer.GetDependantFilesHashCode(document.SourceCode.Project.Culture)); } // Add the parser ID attribute. if (document.SourceCode.Parser != null) { XmlAttribute attribute = xml.CreateAttribute("parser"); root.Attributes.Append(attribute); attribute.Value = document.SourceCode.Parser.Id; } // Create the violations node. node = xml.CreateElement("violations"); root.AppendChild(node); // Add the violations. SourceParser.ExportViolations(document, xml, node); success = true; } catch (XmlException) { } } return(success); }
/// <summary> /// Parses and analyzes the given document. /// </summary> /// <param name="sourceCode"> /// The document to parse and analyze. /// </param> /// <param name="documentStatus"> /// The current status of the documents. /// </param> private void ParseAndAnalyzeDocument(SourceCode sourceCode, DocumentAnalysisStatus documentStatus) { Param.AssertNotNull(sourceCode, "sourceCode"); Param.AssertNotNull(documentStatus, "documentStatus"); StyleCopTrace.In(sourceCode, documentStatus); // Signal the output for this document. this.data.Core.SignalOutput( MessageImportance.Low, string.Format(CultureInfo.CurrentCulture, "Pass {0}: {1}", this.data.PassNumber + 1, GetSignalOutputGetText(sourceCode))); // Extract the document to parse. CodeDocument parsedDocument = documentStatus.Document; // Get or load the analyzer list. IEnumerable <SourceAnalyzer> analyzers = sourceCode.Settings.EnabledAnalyzers; // Parse the document. bool parsingCompleted; try { parsingCompleted = !sourceCode.Parser.ParseFile(sourceCode, this.data.PassNumber, ref parsedDocument); } catch (Exception) { string details = string.Format( CultureInfo.CurrentCulture, "Exception thrown by parser '{0}' while processing '{1}'.", sourceCode.Parser.Name, sourceCode.Path); this.data.Core.SignalOutput(MessageImportance.High, details); throw; } if (parsingCompleted) { if (parsedDocument == null) { string format = string.Format(CultureInfo.CurrentCulture, "Skipping: {0} - {1}", sourceCode.Project.Location.SubstringAfterLast('\\'), GetRelativeFileName(sourceCode)); this.data.Core.SignalOutput(MessageImportance.Normal, format); documentStatus.Complete = true; } else if (this.TestAndRunAnalyzers(parsedDocument, sourceCode.Parser, analyzers, this.data.PassNumber)) { // Analysis of this document is completed. documentStatus.Complete = true; // Save the cache for this document and dispose it. if (this.data.ResultsCache != null && sourceCode.Project.WriteCache) { this.data.ResultsCache.SaveDocumentResults(parsedDocument, sourceCode.Parser, sourceCode.Settings.WriteTime); } parsedDocument.Dispose(); parsedDocument = null; } } if (!documentStatus.Complete) { // Analysis of this document is not complete, so we will need to // perform another round of analysis after this one is finished. this.complete = false; // Cache the document if there is one. if (parsedDocument != null) { documentStatus.Document = parsedDocument; } } StyleCopTrace.Out(); }