//
// Determine whether a lexical scope covers the requested position and,
// if it does, collect every variable defined in that scope into the
// output list, then recurse into any child scopes.
//
private void AddScopeTree(LexicalScope scope, List<Variable> ret, int line, int column)
{
    // Position falls before the scope opens...
    bool positionPrecedesScope = (scope.StartLine > line)
        || ((scope.StartLine == line) && (scope.StartColumn > column));

    // ...or after the scope closes; either way the scope is not a match.
    bool positionFollowsScope = (scope.EndLine < line)
        || ((scope.EndLine == line) && (scope.EndColumn < column));

    if (positionPrecedesScope || positionFollowsScope)
        return;

    ret.AddRange(scope.Variables);

    if (scope.ChildScopes == null)
        return;

    foreach (var childScope in scope.ChildScopes)
        AddScopeTree(childScope, ret, line, column);
}
//
// Restore the project's contents to a pristine, just-constructed state.
//
// Returns the Project object to a blank configuration as if no code had
// been parsed yet. Call this before any large-scale parsing pass so the
// extracted data cannot contain duplicated or stale information.
//
private void ResetContents()
{
    Files = new Dictionary<string, SourceFile>();
    Scopes = new Dictionary<string, List<LexicalScope>>();
    GlobalScope = new LexicalScope();
    FunctionSignatures = new Dictionary<string, FunctionSignature>();
    StructureDefinitions = new Dictionary<string, Structure>();
    SumTypes = new Dictionary<string, SumType>();
    StrongAliases = new Dictionary<string, StrongAlias>();
    WeakAliases = new Dictionary<string, WeakAlias>();

    // Reset the VS error list as well so diagnostics from the previous
    // parse do not linger alongside the fresh results.
    var helper = new ParseSession.ErrorListHelper();
    var errorProvider = new ErrorListProvider(helper)
    {
        ProviderName = "Epoch Language",
        ProviderGuid = new Guid(VsPackage.PackageGuid)
    };
    errorProvider.Tasks.Clear();            // TODO - this is probably too brute force
}
//
// Parse a flow-control entity ("if"/"elseif"/"else" chain, or a "while"
// loop) from the token stream, including each attached code block.
//
// On success, returns true and sets consumedtokens to the index just past
// the entity. On failure, returns false with consumedtokens left at
// starttoken so the caller can try other productions.
//
private static bool ParseEntity(ParseSession parser, LexicalScope parentscope, int starttoken, out int consumedtokens)
{
    consumedtokens = starttoken;
    int totaltokens = starttoken;

    if (parser.CheckToken(totaltokens, "if"))
    {
        if (!parser.CheckToken(totaltokens + 1, "("))
            return false;

        totaltokens += 2;
        var expr = Expression.Parse(parser, totaltokens, out totaltokens);
        if (expr == null)
            return false;

        // Skip the closing paren(s) of the condition.
        while (parser.CheckToken(totaltokens, ")"))
            ++totaltokens;

        if (!parser.CheckToken(totaltokens, "{"))
            return false;

        ++totaltokens;
        ParseCodeBlock(parser, parentscope, totaltokens, out totaltokens);

        while (parser.CheckToken(totaltokens, "elseif"))
        {
            // BUGFIX: verify the opening paren follows "elseif" instead of
            // blindly advancing two tokens (matches the "if" branch above).
            if (!parser.CheckToken(totaltokens + 1, "("))
                return false;

            totaltokens += 2;
            var condexpr = Expression.Parse(parser, totaltokens, out totaltokens);
            if (condexpr == null)
                return false;

            while (parser.CheckToken(totaltokens, ")"))
                ++totaltokens;

            if (!parser.CheckToken(totaltokens, "{"))
                return false;

            ++totaltokens;
            ParseCodeBlock(parser, parentscope, totaltokens, out totaltokens);
        }

        if (parser.CheckToken(totaltokens, "else"))
        {
            ++totaltokens;
            if (!parser.CheckToken(totaltokens, "{"))
                return false;

            ++totaltokens;
            ParseCodeBlock(parser, parentscope, totaltokens, out totaltokens);
        }

        consumedtokens = totaltokens;
        return true;
    }
    else if (parser.CheckToken(totaltokens, "while"))
    {
        if (!parser.CheckToken(totaltokens + 1, "("))
            return false;

        totaltokens += 2;

        // NOTE(review): unlike the "if" branch, the original skipped any ")"
        // tokens *before* parsing the condition and then advanced one token
        // afterward; that ordering is preserved here — confirm it is intended.
        while (parser.CheckToken(totaltokens, ")"))
            ++totaltokens;

        var expr = Expression.Parse(parser, totaltokens, out totaltokens);

        // BUGFIX: the "if" branch rejects a malformed condition expression;
        // do the same here rather than silently continuing.
        if (expr == null)
            return false;

        ++totaltokens;      // step past the condition's closing paren

        if (!parser.CheckToken(totaltokens, "{"))
            return false;

        ++totaltokens;
        ParseCodeBlock(parser, parentscope, totaltokens, out totaltokens);

        consumedtokens = totaltokens;
        return true;
    }

    return false;
}
//
// Helper routine to parse a lexical scope in its entirety given a token stream.
//
// starttoken indexes the first token after the opening "{". On success the
// fully-populated scope is returned and consumedtokens points past the
// closing "}"; on a parse failure null is returned. If the stream ends
// before a closing brace is found, a SyntaxError is thrown.
//
internal static LexicalScope Parse(ParseSession parser, LexicalScope parentscope, int starttoken, out int consumedtokens)
{
    consumedtokens = starttoken;
    int totaltokens = starttoken;

    // Peek at the first token inside the scope; null means the stream
    // ended with no closing "}" anywhere ahead.
    Token afterStartBrace = parser.PeekToken(totaltokens);
    if (afterStartBrace == null)
    {
        parser.ConsumeTokens(starttoken);
        throw new SyntaxError("Missing closing }", parser.ReversePeekToken());
    }

    // Record where the scope begins and wire it into the parent's child list.
    var ret = new LexicalScope();
    ret.File = afterStartBrace.File;
    ret.StartLine = afterStartBrace.Line;
    ret.StartColumn = afterStartBrace.Column;
    ret.ParentScope = parentscope;

    if (parentscope != null)
    {
        if (parentscope.ChildScopes == null)
            parentscope.ChildScopes = new List <LexicalScope>();

        parentscope.ChildScopes.Add(ret);
    }

    // Consume scope contents until the closing brace. Each successfully
    // parsed construct consumes its tokens from the ParseSession and resets
    // the local index to 0, since subsequent peeks are relative to the new
    // stream position.
    while (!parser.CheckToken(totaltokens, "}"))
    {
        // Flow control entities and pre/post-op or general statements.
        if (CodeHelpers.ParseEntity(parser, ret, totaltokens, out totaltokens) ||
            parser.ParsePreopStatement(totaltokens, out totaltokens) ||
            parser.ParsePostopStatement(totaltokens, out totaltokens) ||
            parser.ParseStatement(totaltokens, out totaltokens))
        {
            parser.ConsumeTokens(totaltokens);
            totaltokens = 0;
            continue;
        }

        // Local variable definitions are tracked on the scope itself.
        var variable = Variable.Parse(parser, totaltokens, out totaltokens, Variable.Origins.Local);
        if (variable != null)
        {
            ret.Variables.Add(variable);
            parser.ConsumeTokens(totaltokens);
            totaltokens = 0;
            continue;
        }

        // Assignments are the last construct tried.
        if (CodeHelpers.ParseAssignment(parser, totaltokens, out totaltokens))
        {
            parser.ConsumeTokens(totaltokens);
            totaltokens = 0;
            continue;
        }

        // Nothing matched: bail out and report how far we got.
        consumedtokens = totaltokens;
        return null;
    }

    // Record where the scope ends, and consume the closing "}".
    Token endBrace = parser.PeekToken(totaltokens);
    ++totaltokens;
    consumedtokens = totaltokens;

    ret.EndLine = endBrace.Line;
    ret.EndColumn = endBrace.Column;

    return ret;
}
//
// Helper routine for parsing a function definition from a token stream.
// The function's code body is extracted as part of the same pass.
//
// Consumes tokens on success. Returns null when a function definition is
// not matched. A successful parse yields a signature containing exactly
// one overload; the caller is responsible for merging it into any other
// applicable overload list.
//
public static FunctionSignature Parse(ParseSession parser)
{
    var nameToken = parser.PeekToken(0);
    if (nameToken == null || string.IsNullOrEmpty(nameToken.Text))
        return null;

    int tokenIndex = 1;

    // An optional template parameter list may follow the function name.
    if (parser.CheckToken(tokenIndex, "<"))
    {
        ++tokenIndex;
        if (!parser.ParseTemplateParameters(tokenIndex, nameToken, out tokenIndex))
            return null;
    }

    // The definition proper is introduced by a colon.
    if (!parser.CheckToken(tokenIndex, ":"))
        return null;

    ++tokenIndex;

    var singleOverload = new FunctionOverload();

    // A "[" directly after the colon means there is no parameter list or
    // return expression — only function tags.
    if (!parser.CheckToken(tokenIndex, "["))
    {
        singleOverload.Parameters = ParseFunctionParams(parser, tokenIndex, out tokenIndex);
        singleOverload.Return = FunctionReturn.Parse(parser, tokenIndex, out tokenIndex);
    }

    singleOverload.Tags = ParseFunctionTags(parser, tokenIndex, out tokenIndex);

    // Parse the code body, if one is present.
    if (parser.CheckToken(tokenIndex, "{"))
    {
        ++tokenIndex;
        singleOverload.Scope = LexicalScope.Parse(parser, null, tokenIndex, out tokenIndex);

        // Surface each parameter as a variable inside the body's scope.
        if (singleOverload.Scope != null && singleOverload.Parameters != null)
        {
            foreach (var param in singleOverload.Parameters)
            {
                singleOverload.Scope.Variables.Add(new Variable
                {
                    Name = param.Name,
                    Type = param.Type,
                    Origin = Variable.Origins.Parameter
                });
            }
        }
    }

    parser.ConsumeTokens(tokenIndex);

    var signature = new FunctionSignature
    {
        Name = nameToken,
        Overloads = new List<FunctionOverload> { singleOverload }
    };

    return signature;
}