/// <summary>
/// Finds separator tokens (commas and semicolons) that are not followed by a
/// single whitespace and yields a diagnostic with a correction for each.
/// </summary>
/// <param name="tokenOperations">Token stream wrapper for the script being analyzed.</param>
/// <returns>Diagnostic records, one per separator whitespace violation.</returns>
private IEnumerable<DiagnosticRecord> FindSeparatorViolations(TokenOperations tokenOperations)
{
    // Violation: the separator has a following token that is neither a newline
    // nor end-of-input and is NOT separated from it by whitespace.
    Func<LinkedListNode<Token>, bool> predicate = node =>
    {
        return (node.Next != null
            && node.Next.Value.Kind != TokenKind.NewLine
            && node.Next.Value.Kind != TokenKind.EndOfInput // semicolon can be followed by end of input
            && !IsPreviousTokenApartByWhitespace(node.Next));
    };

    foreach (var tokenNode in tokenOperations.GetTokenNodes(IsSeparator).Where(predicate))
    {
        // Pick the error kind matching the separator character.
        var errorKind = tokenNode.Value.Kind == TokenKind.Comma
            ? ErrorKind.SeparatorComma
            : ErrorKind.SeparatorSemi;
        // NOTE(review): tokenNode.Previous is dereferenced without a null check;
        // presumably a separator can never be the first token — confirm.
        yield return (getDiagnosticRecord(
            tokenNode.Value,
            errorKind,
            GetCorrections(
                tokenNode.Previous.Value,
                tokenNode.Value,
                tokenNode.Next.Value,
                true,
                false)));
    }
}
/// <summary>
/// For each key/value pair of the hashtable, pairs the key's extent with the
/// extent of the first '=' token found after the key's start offset.
/// </summary>
/// <param name="tokenOps">Token stream wrapper for the script.</param>
/// <param name="hashtableAst">Hashtable whose assignment operators are located.</param>
/// <returns>
/// (key extent, assignment-operator extent) tuples; pairs for which no matching
/// token is found are silently skipped.
/// </returns>
private static List<Tuple<IScriptExtent, IScriptExtent>> GetExtents(
    TokenOperations tokenOps,
    HashtableAst hashtableAst)
{
    var nodeTuples = new List<Tuple<IScriptExtent, IScriptExtent>>();
    foreach (var kvp in hashtableAst.KeyValuePairs)
    {
        var keyStartOffset = kvp.Item1.Extent.StartOffset;
        bool keyStartOffSetReached = false;
        // Stateful predicate: returns false while scanning tokens up to the
        // key's start offset, then matches the first '=' encountered afterwards.
        // This relies on GetTokenNodes evaluating the predicate in token order —
        // do not reorder or parallelize this scan.
        var keyTokenNode = tokenOps.GetTokenNodes(
            token =>
        {
            if (keyStartOffSetReached)
            {
                return (token.Kind == TokenKind.Equals);
            }
            if (token.Extent.StartOffset == keyStartOffset)
            {
                keyStartOffSetReached = true;
            }
            return (false);
        }).FirstOrDefault();
        if (keyTokenNode == null || keyTokenNode.Value == null)
        {
            // No assignment token found for this key; skip the pair.
            continue;
        }
        var assignmentToken = keyTokenNode.Value.Extent;
        nodeTuples.Add(new Tuple<IScriptExtent, IScriptExtent>(
            kvp.Item1.Extent, assignmentToken));
    }
    return (nodeTuples);
}
/// <summary>
/// Finds operator tokens that are not surrounded by a single whitespace on
/// both sides, yielding a diagnostic with corrections for each violation.
/// </summary>
/// <param name="tokenOperations">Token stream wrapper for the script being analyzed.</param>
/// <returns>Diagnostic records for operator whitespace violations.</returns>
private IEnumerable<DiagnosticRecord> FindOperatorViolations(TokenOperations tokenOperations)
{
    // True when the node has a previous token on the same line separated by
    // a single whitespace.
    Func<LinkedListNode<Token>, bool> predicate = tokenNode =>
    {
        return (tokenNode.Previous != null
            && IsPreviousTokenOnSameLine(tokenNode)
            && IsPreviousTokenApartByWhitespace(tokenNode));
    };

    foreach (var tokenNode in tokenOperations.GetTokenNodes(IsOperator))
    {
        // Guard against operators at the edges of the token stream. The
        // original code called predicate(tokenNode.Next) and later dereferenced
        // tokenNode.Previous.Value / tokenNode.Next.Value without null checks,
        // which throws NullReferenceException for a first/last-token operator.
        // This matches the guarded pattern used by the sibling finder.
        if (tokenNode.Previous == null || tokenNode.Next == null)
        {
            continue;
        }

        var hasWhitespaceBefore = predicate(tokenNode);
        var hasWhitespaceAfter = predicate(tokenNode.Next);
        if (!hasWhitespaceAfter || !hasWhitespaceBefore)
        {
            yield return new DiagnosticRecord(
                GetError(ErrorKind.Operator),
                tokenNode.Value.Extent,
                GetName(),
                GetDiagnosticSeverity(),
                tokenOperations.Ast.Extent.File,
                null,
                GetCorrections(
                    tokenNode.Previous.Value,
                    tokenNode.Value,
                    tokenNode.Next.Value,
                    hasWhitespaceBefore,
                    hasWhitespaceAfter));
        }
    }
}
/// <summary>
/// Finds open-brace tokens that sit on the same line as the preceding token
/// but are not separated from it by a single whitespace.
/// </summary>
/// <param name="tokenOperations">Token stream wrapper for the script being analyzed.</param>
/// <returns>Diagnostic records for open-brace whitespace violations.</returns>
private IEnumerable<DiagnosticRecord> FindOpenBraceViolations(TokenOperations tokenOperations)
{
    foreach (var lcurly in tokenOperations.GetTokenNodes(TokenKind.LCurly))
    {
        // Skip braces that start the stream, start a line, immediately follow
        // another '{', or belong to a member name. Also skip a brace with no
        // following token: GetCorrections dereferences lcurly.Next.Value, and
        // the original code had no null guard for it.
        if (lcurly.Previous == null
            || lcurly.Next == null
            || !IsPreviousTokenOnSameLine(lcurly)
            || lcurly.Previous.Value.Kind == TokenKind.LCurly
            || ((lcurly.Previous.Value.TokenFlags & TokenFlags.MemberName) == TokenFlags.MemberName))
        {
            continue;
        }

        if (!IsPreviousTokenApartByWhitespace(lcurly))
        {
            yield return new DiagnosticRecord(
                GetError(ErrorKind.Brace),
                lcurly.Value.Extent,
                GetName(),
                GetDiagnosticSeverity(),
                tokenOperations.Ast.Extent.File,
                null,
                GetCorrections(lcurly.Previous.Value, lcurly.Value, lcurly.Next.Value, false, true).ToList());
        }
    }
}
/// <summary>
/// Reports operators that lack a single surrounding whitespace on either side.
/// The range operator ('..') and operators at the stream edges are ignored.
/// </summary>
/// <param name="tokenOperations">Token stream wrapper for the script being analyzed.</param>
/// <returns>Diagnostic records for operator whitespace violations.</returns>
private IEnumerable<DiagnosticRecord> FindOperatorViolations(TokenOperations tokenOperations)
{
    foreach (var operatorNode in tokenOperations.GetTokenNodes(IsOperator))
    {
        bool atStreamEdge = operatorNode.Previous == null || operatorNode.Next == null;
        if (atStreamEdge || operatorNode.Value.Kind == TokenKind.DotDot)
        {
            continue;
        }

        bool spacedBefore = IsPreviousTokenOnSameLineAndApartByWhitespace(operatorNode);
        bool spacedAfter = IsPreviousTokenOnSameLineAndApartByWhitespace(operatorNode.Next);
        if (spacedBefore && spacedAfter)
        {
            continue;
        }

        yield return new DiagnosticRecord(
            GetError(ErrorKind.Operator),
            operatorNode.Value.Extent,
            GetName(),
            GetDiagnosticSeverity(),
            tokenOperations.Ast.Extent.File,
            null,
            GetCorrections(
                operatorNode.Previous.Value,
                operatorNode.Value,
                operatorNode.Next.Value,
                spacedBefore,
                spacedAfter));
    }
}
/// <summary>
/// Pairs each hashtable key's extent with the extent of the '=' token that
/// immediately follows it. Returns null as soon as any key is not directly
/// followed by an assignment operator.
/// </summary>
/// <param name="tokenOps">Token stream wrapper for the script.</param>
/// <param name="hashtableAst">Hashtable whose assignments are being located.</param>
/// <returns>(key extent, '=' extent) tuples, or null when any key has no '='.</returns>
private static List<Tuple<IScriptExtent, IScriptExtent>> GetExtents(
    TokenOperations tokenOps,
    HashtableAst hashtableAst)
{
    var tuples = new List<Tuple<IScriptExtent, IScriptExtent>>();
    foreach (var pair in hashtableAst.KeyValuePairs)
    {
        var startOffset = pair.Item1.Extent.StartOffset;
        var keyNode = tokenOps
            .GetTokenNodes(token => token.Extent.StartOffset == startOffset)
            .FirstOrDefault();

        bool followedByEquals = keyNode != null
            && keyNode.Next != null
            && keyNode.Next.Value.Kind == TokenKind.Equals;
        if (!followedByEquals)
        {
            return null;
        }

        tuples.Add(new Tuple<IScriptExtent, IScriptExtent>(
            pair.Item1.Extent,
            keyNode.Next.Value.Extent));
    }
    return tuples;
}
/// <summary>
/// Finds whitespace violations around pipe ('|') tokens: pass 1 checks the
/// token after each pipe, pass 2 checks the token before it.
/// </summary>
/// <param name="tokenOperations">Token stream wrapper for the script being analyzed.</param>
/// <returns>Diagnostic records for pipe whitespace violations.</returns>
private IEnumerable<DiagnosticRecord> FindPipeViolations(TokenOperations tokenOperations)
{
    // Pass 1: spacing AFTER each pipe.
    foreach (var pipe in tokenOperations.GetTokenNodes(TokenKind.Pipe))
    {
        // Skip end-of-stream pipes and pipes followed by another pipe, a
        // newline, or a line continuation.
        // NOTE(review): this pass tests IsPreviousTokenOnSameLine even though
        // it inspects the NEXT token — confirm this is intentional.
        if (pipe.Next == null
            || !IsPreviousTokenOnSameLine(pipe)
            || pipe.Next.Value.Kind == TokenKind.Pipe
            || pipe.Next.Value.Kind == TokenKind.NewLine
            || pipe.Next.Value.Kind == TokenKind.LineContinuation
            )
        {
            continue;
        }
        if (!IsNextTokenApartByWhitespace(pipe, out bool hasRedundantWhitespace))
        {
            // Only report the violation kind the user opted into: redundant
            // whitespace vs. missing whitespace.
            if (CheckPipeForRedundantWhitespace && hasRedundantWhitespace || CheckPipe && !hasRedundantWhitespace)
            {
                // NOTE(review): pipe.Previous is dereferenced without a null
                // check; presumably '|' is never the first token — confirm.
                yield return (new DiagnosticRecord(
                    GetError(ErrorKind.AfterPipe),
                    pipe.Value.Extent,
                    GetName(),
                    GetDiagnosticSeverity(),
                    tokenOperations.Ast.Extent.File,
                    null,
                    GetCorrections(pipe.Previous.Value, pipe.Value, pipe.Next.Value, true, false).ToList()));
            }
        }
    }

    // Pass 2: spacing BEFORE each pipe.
    foreach (var pipe in tokenOperations.GetTokenNodes(TokenKind.Pipe))
    {
        if (pipe.Previous == null
            || !IsPreviousTokenOnSameLine(pipe)
            || pipe.Previous.Value.Kind == TokenKind.Pipe
            || pipe.Previous.Value.Kind == TokenKind.NewLine
            || pipe.Previous.Value.Kind == TokenKind.LineContinuation
            )
        {
            continue;
        }
        if (!IsPreviousTokenApartByWhitespace(pipe, out bool hasRedundantWhitespace))
        {
            if (CheckPipeForRedundantWhitespace && hasRedundantWhitespace || CheckPipe && !hasRedundantWhitespace)
            {
                // NOTE(review): pipe.Next is dereferenced without a null check
                // in this pass — confirm '|' is never the last token.
                yield return (new DiagnosticRecord(
                    GetError(ErrorKind.BeforePipe),
                    pipe.Value.Extent,
                    GetName(),
                    GetDiagnosticSeverity(),
                    tokenOperations.Ast.Extent.File,
                    null,
                    GetCorrections(pipe.Previous.Value, pipe.Value, pipe.Next.Value, false, true).ToList()));
            }
        }
    }
}
/// <summary>
/// Finds missing whitespace just inside braces: after an opening '{' and
/// before a closing '}'.
/// </summary>
/// <param name="tokenOperations">Token stream wrapper for the script being analyzed.</param>
/// <returns>Diagnostic records for inner-brace whitespace violations.</returns>
private IEnumerable<DiagnosticRecord> FindInnerBraceViolations(TokenOperations tokenOperations)
{
    // Pass 1: whitespace AFTER '{'. The lCurly.Previous == null guard is new:
    // GetCorrections dereferences lCurly.Previous.Value, which the original
    // code did without a null check.
    foreach (var lCurly in tokenOperations.GetTokenNodes(TokenKind.LCurly))
    {
        if (lCurly.Previous == null
            || lCurly.Next == null
            || !IsPreviousTokenOnSameLine(lCurly)
            || lCurly.Next.Value.Kind == TokenKind.NewLine
            || lCurly.Next.Value.Kind == TokenKind.LineContinuation
            || lCurly.Next.Value.Kind == TokenKind.RCurly
            )
        {
            continue;
        }
        if (!IsNextTokenApartByWhitespace(lCurly))
        {
            yield return new DiagnosticRecord(
                GetError(ErrorKind.AfterOpeningBrace),
                lCurly.Value.Extent,
                GetName(),
                GetDiagnosticSeverity(),
                tokenOperations.Ast.Extent.File,
                null,
                GetCorrections(lCurly.Previous.Value, lCurly.Value, lCurly.Next.Value, true, false).ToList());
        }
    }

    // Pass 2: whitespace BEFORE '}'. The rCurly.Next == null guard is new for
    // the same reason: GetCorrections dereferences rCurly.Next.Value.
    foreach (var rCurly in tokenOperations.GetTokenNodes(TokenKind.RCurly))
    {
        if (rCurly.Previous == null
            || rCurly.Next == null
            || !IsPreviousTokenOnSameLine(rCurly)
            || rCurly.Previous.Value.Kind == TokenKind.LCurly
            || rCurly.Previous.Value.Kind == TokenKind.NewLine
            || rCurly.Previous.Value.Kind == TokenKind.LineContinuation
            || rCurly.Previous.Value.Kind == TokenKind.AtCurly
            )
        {
            continue;
        }
        if (!IsPreviousTokenApartByWhitespace(rCurly))
        {
            yield return new DiagnosticRecord(
                GetError(ErrorKind.BeforeClosingBrace),
                rCurly.Value.Extent,
                GetName(),
                GetDiagnosticSeverity(),
                tokenOperations.Ast.Extent.File,
                null,
                GetCorrections(rCurly.Previous.Value, rCurly.Value, rCurly.Next.Value, false, true).ToList());
        }
    }
}
/// <summary>
/// Builds an in-memory ScriptFile from the given text and returns its foldable
/// regions, sorted so results are deterministic for test comparisons.
/// </summary>
/// <param name="text">Script text to tokenize and fold.</param>
/// <returns>Sorted array of foldable regions.</returns>
private static FoldingReference[] GetRegions(string text)
{
    // Any absolute path works; the file never needs to exist on disk.
    string stubPath = Path.Combine(Path.GetTempPath(), "TestFile.ps1");
    ScriptFile scriptFile = new(
        DocumentUri.FromFileSystemPath(stubPath),
        text,
        Version.Parse("5.0"));

    FoldingReference[] regions =
        TokenOperations.FoldableReferences(scriptFile.ScriptTokens).ToArray();

    // Sort to make the region order deterministic for testing.
    Array.Sort(regions);
    return regions;
}
/// <summary>
/// Finds operator tokens that are not surrounded by a single whitespace,
/// excluding the range operator, parenthesized unary operators, and (when
/// configured) assignments inside multi-line hashtables.
/// </summary>
/// <param name="tokenOperations">Token stream wrapper for the script being analyzed.</param>
/// <returns>Diagnostic records for operator whitespace violations.</returns>
private IEnumerable<DiagnosticRecord> FindOperatorViolations(TokenOperations tokenOperations)
{
    foreach (var tokenNode in tokenOperations.GetTokenNodes(IsOperator))
    {
        // Skip operators at the stream edges and the '..' range operator.
        if (tokenNode.Previous == null
            || tokenNode.Next == null
            || tokenNode.Value.Kind == TokenKind.DotDot)
        {
            continue;
        }

        // exclude unary operator for cases like $foo.bar(-$Var)
        if (TokenTraits.HasTrait(tokenNode.Value.Kind, TokenFlags.UnaryOperator)
            && tokenNode.Previous.Value.Kind == TokenKind.LParen
            && tokenNode.Next.Value.Kind == TokenKind.Variable)
        {
            continue;
        }

        // exclude assignment operator inside of multi-line hash tables if requested
        if (IgnoreAssignmentOperatorInsideHashTable && tokenNode.Value.Kind == TokenKind.Equals)
        {
            Ast containingAst = tokenOperations.GetAstPosition(tokenNode.Value);
            if (containingAst is HashtableAst && containingAst.Extent.EndLineNumber != containingAst.Extent.StartLineNumber)
            {
                continue;
            }
        }

        var hasWhitespaceBefore = IsPreviousTokenOnSameLineAndApartByWhitespace(tokenNode);
        // A newline immediately after the operator counts as acceptable
        // "whitespace after".
        var hasWhitespaceAfter = tokenNode.Next.Value.Kind == TokenKind.NewLine
            || IsPreviousTokenOnSameLineAndApartByWhitespace(tokenNode.Next);

        if (!hasWhitespaceAfter || !hasWhitespaceBefore)
        {
            yield return (new DiagnosticRecord(
                GetError(ErrorKind.Operator),
                tokenNode.Value.Extent,
                GetName(),
                GetDiagnosticSeverity(),
                tokenOperations.Ast.Extent.File,
                null,
                GetCorrections(
                    tokenNode.Previous.Value,
                    tokenNode.Value,
                    tokenNode.Next.Value,
                    hasWhitespaceBefore,
                    hasWhitespaceAfter)));
        }
    }
}
/// <summary>
/// Smart contract entry point; dispatches on the current trigger type.
/// </summary>
/// <param name="operation">Operation name for application calls. In the
/// verification path with a 33-byte owner it is converted to bytes and used as
/// a signature — presumably the caller passes the signature here; TODO confirm.</param>
/// <param name="args">Arguments for the requested operation.</param>
/// <returns>The operation's result, or false when no handler completes.</returns>
public static object Main(string operation, params object[] args)
{
    if (Runtime.Trigger == TriggerType.Verification)
    {
        if (ContractOwner.Length == 20)
        {
            // if param ContractOwner is script hash
            //return Runtime.CheckWitness(ContractOwner);
            return (false);
        }
        else if (ContractOwner.Length == 33)
        {
            // if param ContractOwner is public key
            byte[] signature = operation.AsByteArray();
            return (VerifySignature(signature, ContractOwner));
        }
    }
    else if (Runtime.Trigger == TriggerType.VerificationR)
    {
        return (true);
    }
    else if (Runtime.Trigger == TriggerType.Application)
    {
        // Try each operation family in turn; the first handler that reports
        // IsComplete supplies the result.
        var result = AdminOperations.HandleAdminOperation(operation, args);
        if (result.IsComplete)
        {
            return (result.Value);
        }
        result = NepOperations.HandleNepOperation(operation, args);
        if (result.IsComplete)
        {
            return (result.Value);
        }
        result = TokenOperations.HandleTokenOperation(operation, args);
        if (result.IsComplete)
        {
            return (result.Value);
        }
        result = GameOperations.HandleGameOperation(operation, args);
        if (result.IsComplete)
        {
            return (result.Value);
        }
    }
    // Unknown trigger or unhandled operation.
    return (false);
}
/// <summary>
/// OAuth redirect endpoint: exchanges the "code" query parameter for a token
/// and redirects to a success or failure page.
/// </summary>
/// <returns>A redirect response to "success" or "failed".</returns>
public HttpResponseMessage Get()
{
    var query = Request.GetQueryNameValuePairs().ToDictionary(k => k.Key);

    // TryGetValue avoids the ContainsKey-then-indexer double lookup.
    if (!query.TryGetValue("code", out var codePair))
    {
        return RedirectTo("failed");
    }

    TokenOperations.FromAuthorizationCode(codePair.Value);
    return RedirectTo("success");
}
/// <summary>
/// Analyzes the given ast to find the [violation]
/// </summary>
/// <param name="ast">AST to be analyzed. This should be non-null</param>
/// <param name="fileName">Name of file that corresponds to the input AST.</param>
/// <returns>An enumerable type containing the violations</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="ast"/> is null.</exception>
public override IEnumerable<DiagnosticRecord> AnalyzeScript(Ast ast, string fileName)
{
    if (ast == null)
    {
        // nameof keeps the parameter name refactor-safe.
        throw new ArgumentNullException(nameof(ast));
    }

    var tokenOperations = new TokenOperations(Helper.Instance.Tokens, ast);
    // Chain every registered finder's output into one lazy sequence.
    var diagnosticRecords = Enumerable.Empty<DiagnosticRecord>();
    foreach (var violationFinder in violationFinders)
    {
        diagnosticRecords = diagnosticRecords.Concat(violationFinder(tokenOperations));
    }

    return diagnosticRecords.ToArray(); // force evaluation here
}
/// <summary>
/// Builds a correction that removes a function's parameter declaration — the
/// span from the end of the token before '(' through ')' — by replacing it
/// with an empty string.
/// </summary>
/// <param name="funcDefnAst">Function definition whose parameter list is removed.</param>
/// <param name="ast">Root AST, used to locate the function's tokens and file.</param>
/// <param name="tokens">Full token array for the script.</param>
/// <returns>A correction extent covering the parameter declaration.</returns>
private static CorrectionExtent GetCorrectionToRemoveFuncParamDecl(
    FunctionDefinitionAst funcDefnAst,
    Ast ast,
    Token[] tokens)
{
    var funcDefnTokens = TokenOperations.GetTokens(ast, funcDefnAst, tokens).ToArray();
    // NOTE(review): assumes the function definition contains both '(' and ')'.
    // FindIndex returns -1 otherwise, and the indexing below would throw —
    // confirm callers only invoke this for functions declared with a param list.
    var lParenTokenIdx = Array.FindIndex(funcDefnTokens, tok => tok.Kind == TokenKind.LParen);
    var rParenTokenIdx = Array.FindIndex(funcDefnTokens, tok => tok.Kind == TokenKind.RParen);
    return (new CorrectionExtent(
        funcDefnTokens[lParenTokenIdx - 1].Extent.EndLineNumber,
        funcDefnTokens[rParenTokenIdx].Extent.EndLineNumber,
        funcDefnTokens[lParenTokenIdx - 1].Extent.EndColumnNumber,
        funcDefnTokens[rParenTokenIdx].Extent.EndColumnNumber,
        "",
        ast.Extent.File));
}
/// <summary>
/// Analyzes the given ast to find violations.
/// </summary>
/// <param name="ast">AST to be analyzed. This should be non-null</param>
/// <param name="fileName">Name of file that corresponds to the input AST.</param>
/// <returns>An enumerable type containing the violations</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="ast"/> is null.</exception>
public override IEnumerable<DiagnosticRecord> AnalyzeScript(Ast ast, string fileName)
{
    if (ast == null)
    {
        throw new ArgumentNullException("ast");
    }

    // TODO Should have the following option
    // * no-empty-lines-after
    var diagnosticRecords = new List<DiagnosticRecord>();

    // Rule disabled: report nothing.
    if (!Enable)
    {
        return (diagnosticRecords);
    }

    var tokens = Helper.Instance.Tokens;

    // Ignore open braces that are part of arguments to a command
    // * E.g. get-process | % { "blah }
    // In the above case even if OnSameLine == false, we should not
    // flag the open brace as it would move the brace to the next line
    // and will invalidate the command
    var tokenOps = new TokenOperations(tokens, ast);
    // NOTE: tokensToIgnore is instance state consumed by the violation finders.
    tokensToIgnore = new HashSet<Token>(tokenOps.GetOpenBracesInCommandElements());

    // Ignore open braces that are part of a one line if-else statement
    // E.g. $x = if ($true) { "blah" } else { "blah blah" }
    if (IgnoreOneLineBlock)
    {
        foreach (var pair in tokenOps.GetBracePairsOnSameLine())
        {
            tokensToIgnore.Add(pair.Item1);
        }
    }

    foreach (var violationFinder in violationFinders)
    {
        diagnosticRecords.AddRange(violationFinder(tokens, ast, fileName));
    }

    return (diagnosticRecords);
}
/// <summary>
/// Analyzes the given ast to find if consecutive assignment statements are aligned.
/// </summary>
/// <param name="ast">AST to be analyzed. This should be non-null</param>
/// <param name="fileName">Name of file that corresponds to the input AST.</param>
/// <returns>An enumerable type containing the violations</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="ast"/> is null.</exception>
public override IEnumerable<DiagnosticRecord> AnalyzeScript(Ast ast, string fileName)
{
    if (ast == null)
    {
        throw new ArgumentNullException("ast");
    }

    // only handles one line assignments
    // if the rule encounters assignment statements that are multi-line, the rule will ignore that block
    var tokenOps = new TokenOperations(Helper.Instance.Tokens, ast);

    // Flatten the output of every registered finder into one lazy stream.
    foreach (var record in violationFinders.SelectMany(finder => finder(tokenOps)))
    {
        yield return record;
    }
}
/// <summary>
/// Handles an LSP folding-range request for the given document, returning the
/// foldable regions of the script.
/// </summary>
/// <param name="request">Folding-range request identifying the document.</param>
/// <param name="cancellationToken">Cancellation token for the request.</param>
/// <returns>A completed task with the folding ranges (possibly null or empty).</returns>
public Task<Container<FoldingRange>> Handle(FoldingRangeRequestParam request, CancellationToken cancellationToken)
{
    if (cancellationToken.IsCancellationRequested)
    {
        _logger.LogDebug("FoldingRange request canceled for file: {0}", request.TextDocument.Uri);
        return Task.FromResult(new Container<FoldingRange>());
    }

    // TODO Should be using dynamic registrations
    if (!_configurationService.CurrentSettings.CodeFolding.Enable)
    {
        // The original `return null;` returned a null Task, which throws
        // NullReferenceException at the caller's await site. Wrap the null
        // payload in a completed Task instead.
        return Task.FromResult<Container<FoldingRange>>(null);
    }

    // Avoid crash when using untitled: scheme or any other scheme where the document doesn't
    // have a backing file. https://github.com/PowerShell/vscode-powershell/issues/1676
    // Perhaps a better option would be to parse the contents of the document as a string
    // as opposed to reading a file but the scenario of "no backing file" probably doesn't
    // warrant the extra effort.
    if (!_workspaceService.TryGetFile(request.TextDocument.Uri, out ScriptFile scriptFile))
    {
        // Same fix as above: never return a null Task.
        return Task.FromResult<Container<FoldingRange>>(null);
    }

    var result = new List<FoldingRange>();

    // If we're showing the last line, decrement the Endline of all regions by one.
    int endLineOffset = _configurationService.CurrentSettings.CodeFolding.ShowLastLine ? -1 : 0;

    foreach (FoldingReference fold in TokenOperations.FoldableReferences(scriptFile.ScriptTokens).References)
    {
        result.Add(new FoldingRange
        {
            EndCharacter = fold.EndCharacter,
            EndLine = fold.EndLine + endLineOffset,
            Kind = fold.Kind,
            StartCharacter = fold.StartCharacter,
            StartLine = fold.StartLine
        });
    }

    return Task.FromResult(new Container<FoldingRange>(result));
}
/// <summary>
/// Reports operators not surrounded by a single whitespace. Skips the range
/// operator ('..'), stream-edge operators, and parenthesized unary operators
/// such as the '-' in $foo.bar(-$Var).
/// </summary>
/// <param name="tokenOperations">Token stream wrapper for the script being analyzed.</param>
/// <returns>Diagnostic records for operator whitespace violations.</returns>
private IEnumerable<DiagnosticRecord> FindOperatorViolations(TokenOperations tokenOperations)
{
    foreach (var opNode in tokenOperations.GetTokenNodes(IsOperator))
    {
        bool atStreamEdge = opNode.Previous == null || opNode.Next == null;
        if (atStreamEdge || opNode.Value.Kind == TokenKind.DotDot)
        {
            continue;
        }

        // exclude unary operator for cases like $foo.bar(-$Var)
        bool parenthesizedUnary =
            TokenTraits.HasTrait(opNode.Value.Kind, TokenFlags.UnaryOperator)
            && opNode.Previous.Value.Kind == TokenKind.LParen
            && opNode.Next.Value.Kind == TokenKind.Variable;
        if (parenthesizedUnary)
        {
            continue;
        }

        bool spacedBefore = IsPreviousTokenOnSameLineAndApartByWhitespace(opNode);
        // A newline directly after the operator counts as acceptable spacing.
        bool spacedAfter = opNode.Next.Value.Kind == TokenKind.NewLine
            || IsPreviousTokenOnSameLineAndApartByWhitespace(opNode.Next);
        if (spacedBefore && spacedAfter)
        {
            continue;
        }

        yield return new DiagnosticRecord(
            GetError(ErrorKind.Operator),
            opNode.Value.Extent,
            GetName(),
            GetDiagnosticSeverity(),
            tokenOperations.Ast.Extent.File,
            null,
            GetCorrections(
                opNode.Previous.Value,
                opNode.Value,
                opNode.Next.Value,
                spacedBefore,
                spacedAfter));
    }
}
/// <summary>
/// Reports '(' tokens that directly follow a keyword on the same line with no
/// separating whitespace (e.g. "if(" instead of "if (").
/// </summary>
/// <param name="tokenOperations">Token stream wrapper for the script being analyzed.</param>
/// <returns>Diagnostic records for open-paren whitespace violations.</returns>
private IEnumerable<DiagnosticRecord> FindOpenParenViolations(TokenOperations tokenOperations)
{
    foreach (var lparen in tokenOperations.GetTokenNodes(TokenKind.LParen))
    {
        // Guard clauses (De Morgan of the original single && chain).
        if (lparen.Previous == null || !IsPreviousTokenOnSameLine(lparen))
        {
            continue;
        }
        if (!TokenTraits.HasTrait(lparen.Previous.Value.Kind, TokenFlags.Keyword)
            || !IsKeyword(lparen.Previous.Value))
        {
            continue;
        }
        if (IsPreviousTokenApartByWhitespace(lparen))
        {
            continue;
        }

        yield return new DiagnosticRecord(
            GetError(ErrorKind.Paren),
            lparen.Value.Extent,
            GetName(),
            GetDiagnosticSeverity(),
            tokenOperations.Ast.Extent.File,
            null,
            GetCorrections(lparen.Previous.Value, lparen.Value, lparen.Next.Value, false, true).ToList());
    }
}
/// <summary>
/// Finds hashtables whose assignment operators are not vertically aligned to
/// the column just past the widest key, yielding one diagnostic per misaligned '='.
/// </summary>
/// <param name="tokenOps">Token stream wrapper for the script being analyzed.</param>
/// <returns>Diagnostic records for misaligned hashtable assignments.</returns>
private IEnumerable<DiagnosticRecord> FindHashtableViolations(TokenOperations tokenOps)
{
    var hashtableAsts = tokenOps.Ast.FindAll(ast => ast is HashtableAst, true);
    if (hashtableAsts == null)
    {
        yield break;
    }

    // it is probably much easier have a hashtable writer that formats the hashtable and writes it
    // but it makes handling comments hard. So we need to use this approach.
    // This is how the algorithm actually works:
    // if each key value pair are on a separate line
    //   find all the assignment operators
    //   if all the assignment operators are aligned (check the column number of each assignment operator)
    //     skip
    //   else
    //     find the distance between the assignment operators and their corresponding LHS
    //     find the longest left expression
    //     make sure all the assignment operators are in the same column as that of the longest left hand.
    foreach (var astItem in hashtableAsts)
    {
        var hashtableAst = (HashtableAst)astItem;
        // Only multi-line hashtables (keys on separate lines) are checked.
        if (!HasKeysOnSeparateLines(hashtableAst))
        {
            continue;
        }

        var extentTuples = GetExtents(tokenOps, hashtableAst);
        // Skip when extents are missing or a key and its '=' span different lines.
        if (extentTuples == null
            || extentTuples.Count == 0
            || !extentTuples.All(t => t.Item1.StartLineNumber == t.Item2.EndLineNumber))
        {
            continue;
        }

        // Widest key determines the target column for every '='.
        var widestKeyExtent = extentTuples
            .Select(t => t.Item1)
            .Aggregate((t1, tAggregate) =>
        {
            return (TokenOperations.GetExtentWidth(tAggregate) > TokenOperations.GetExtentWidth(t1)
                ? tAggregate
                : t1);
        });
        var expectedStartColumnNumber = widestKeyExtent.EndColumnNumber + 1;
        foreach (var extentTuple in extentTuples)
        {
            if (extentTuple.Item2.StartColumnNumber != expectedStartColumnNumber)
            {
                yield return (new DiagnosticRecord(
                    GetError(),
                    extentTuple.Item2,
                    GetName(),
                    GetDiagnosticSeverity(),
                    extentTuple.Item1.File,
                    null,
                    GetHashtableCorrections(extentTuple, expectedStartColumnNumber).ToList()));
            }
        }
    }
}
/// <summary>
/// Analyzes the given ast to find violations.
/// </summary>
/// <param name="ast">AST to be analyzed. This should be non-null</param>
/// <param name="fileName">Name of file that corresponds to the input AST.</param>
/// <returns>An enumerable type containing the violations</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="ast"/> is null.</exception>
public override IEnumerable<DiagnosticRecord> AnalyzeScript(Ast ast, string fileName)
{
    if (ast == null)
    {
        throw new ArgumentNullException("ast");
    }

    if (!Enable)
    {
        return (Enumerable.Empty<DiagnosticRecord>());
    }

    // TODO Should have the following options
    // * no-empty-lines-before
    var tokens = Helper.Instance.Tokens;
    var diagnosticRecords = new List<DiagnosticRecord>();
    // Stack of (open-brace token, its index in the token array) used to pair
    // each '}' with its matching '{' / '@{'.
    var curlyStack = new Stack<Tuple<Token, int>>();

    // TODO move part common with PlaceOpenBrace to one place
    var tokenOps = new TokenOperations(tokens, ast);
    // NOTE: tokensToIgnore is instance state consumed by the violation finders.
    tokensToIgnore = new HashSet<Token>(tokenOps.GetCloseBracesInCommandElements());

    // Ignore close braces that are part of a one line if-else statement
    // E.g. $x = if ($true) { "blah" } else { "blah blah" }
    if (IgnoreOneLineBlock)
    {
        foreach (var pair in tokenOps.GetBracePairsOnSameLine())
        {
            tokensToIgnore.Add(pair.Item2);
        }
    }

    for (int k = 0; k < tokens.Length; k++)
    {
        var token = tokens[k];

        if (token.Kind == TokenKind.LCurly || token.Kind == TokenKind.AtCurly)
        {
            curlyStack.Push(new Tuple<Token, int>(token, k));
            continue;
        }

        if (token.Kind == TokenKind.RCurly)
        {
            if (curlyStack.Count > 0)
            {
                var openBraceToken = curlyStack.Peek().Item1;
                var openBracePos = curlyStack.Pop().Item2;

                // Ignore if a one line hashtable
                if (openBraceToken.Kind == TokenKind.AtCurly
                    && openBraceToken.Extent.StartLineNumber == token.Extent.StartLineNumber)
                {
                    continue;
                }

                foreach (var violationFinder in violationFinders)
                {
                    AddToDiagnosticRecords(
                        violationFinder(tokens, k, openBracePos, fileName),
                        ref diagnosticRecords);
                }
            }
            else
            {
                // Unbalanced '}' with no matching open brace: stop scanning.
                break;
            }
        }
    }

    return (diagnosticRecords);
}
/// <summary>
/// Finds hashtables (and, on non-PSv3 builds, DSC configuration property/value
/// pairs) whose assignment operators are not aligned to the column just past
/// the widest key, yielding one diagnostic per misaligned '='.
/// </summary>
/// <param name="tokenOps">Token stream wrapper for the script being analyzed.</param>
/// <returns>Diagnostic records for misaligned assignments.</returns>
private IEnumerable<DiagnosticRecord> FindHashtableViolations(TokenOperations tokenOps)
{
    var hashtableAsts = tokenOps.Ast.FindAll(ast => ast is HashtableAst, true);
    // Each group is a list of (LHS extent, '=' extent) tuples that should align
    // with each other.
    var groups = new List<List<Tuple<IScriptExtent, IScriptExtent>>>();
    if (hashtableAsts != null)
    {
        foreach (var astItem in hashtableAsts)
        {
            groups.Add(GetExtents(tokenOps, (HashtableAst)astItem));
        }
    }

#if !PSV3
    var configAsts = tokenOps.Ast.FindAll(ast => ast is ConfigurationDefinitionAst, true);
    if (configAsts != null)
    {
        // There are probably parse errors caused by an "Undefined DSC resource"
        // which prevents the parser from detecting the property value pairs as
        // hashtable. Hence, this is a workaround to format configurations which
        // have "Undefined DSC resource" parse errors.

        // find all commandAsts of the form "prop" "=" "val" that have the same parent
        // and format those pairs.
        foreach (var configAst in configAsts)
        {
            groups.AddRange(GetCommandElementExtentGroups(configAst));
        }
    }
#endif

    // it is probably much easier have a hashtable writer that formats the hashtable and writes it
    // but it makes handling comments hard. So we need to use this approach.
    // This is how the algorithm actually works:
    // if each key value pair are on a separate line
    //   find all the assignment operators
    //   if all the assignment operators are aligned (check the column number of each assignment operator)
    //     skip
    //   else
    //     find the distance between the assignment operators and their corresponding LHS
    //     find the longest left expression
    //     make sure all the assignment operators are in the same column as that of the longest left hand.
    foreach (var extentTuples in groups)
    {
        // Only multi-line groups (properties on separate lines) are checked.
        if (!HasPropertiesOnSeparateLines(extentTuples))
        {
            continue;
        }

        // Skip missing/empty groups and groups where a key and its '=' are on
        // different lines.
        if (extentTuples == null
            || extentTuples.Count == 0
            || !extentTuples.All(t => t.Item1.StartLineNumber == t.Item2.EndLineNumber))
        {
            continue;
        }

        // Target column for every '=': one past the widest LHS.
        var expectedStartColumnNumber = extentTuples.Max(x => x.Item1.EndColumnNumber) + 1;
        foreach (var extentTuple in extentTuples)
        {
            if (extentTuple.Item2.StartColumnNumber != expectedStartColumnNumber)
            {
                yield return (new DiagnosticRecord(
                    GetError(),
                    extentTuple.Item2,
                    GetName(),
                    GetDiagnosticSeverity(),
                    extentTuple.Item1.File,
                    null,
                    GetHashtableCorrections(extentTuple, expectedStartColumnNumber).ToList()));
            }
        }
    }
}