/// <summary>
/// Parses <paramref name="code"/> as a JavaScript program and returns the root AST node.
/// Resets all scanner/parser state, applies <paramref name="options"/>, then runs the
/// top-level program production.
/// </summary>
/// <param name="code">Source text to parse.</param>
/// <param name="options">Parse options (range/loc tracking, comment attachment,
/// token/comment/error collection, source type); may be null for defaults.</param>
/// <returns>The parsed program node.</returns>
public Node parse(string code, Options options)
{
    // Reset scanner position. lineNumber is 0 for empty input and 1 otherwise,
    // mirroring the original Esprima behavior.
    source = code;
    index = 0;
    lineNumber = (source.Length > 0) ? 1 : 0;
    lineStart = 0;
    startIndex = index;
    startLineNumber = lineNumber;
    startLineStart = lineStart;
    length = source.Length;
    lookahead = null;

    state = new State()
    {
        allowIn = true,
        allowYield = true,
        labelSet = new List<string>(),
        inFunctionBody = false,
        inIteration = false,
        inSwitch = false,
        lastCommentStart = -1,
        curlyStack = new Stack<string>(),
        sourceType = "script"
    };

    strict = false;
    extra = new Extra();

    if (options != null)
    {
        extra.range = options.range;
        extra.loc = options.loc;
        extra.attachComment = options.attachComment;

        if (options.tokens)
        {
            extra.tokens = new List<Token>();
        }
        if (options.comment)
        {
            extra.comments = new List<Comment>();
        }
        if (options.tolerant)
        {
            extra.errors = new List<Error>();
        }

        // Comment attachment requires range information plus its own
        // bookkeeping lists, so it force-enables ranges.
        if (extra.attachComment)
        {
            extra.range = true;
            extra.comments = new List<Comment>();
            extra.bottomRightStack = new List<Token>();
            extra.trailingComments = new List<Comment>();
            extra.leadingComments = new List<Comment>();
        }

        // Very restrictive condition for now: only the exact string "module"
        // switches to module parsing, which is always strict.
        if (options.sourceType == "module")
        {
            state.sourceType = options.sourceType;
            strict = true;
        }
    }

    var program = parseProgram();
    return program;
}
/// <summary>
/// Parses <paramref name="code"/> into a <see cref="Program"/> using the supplied
/// code-generation strategy. Resets the parser's per-run state before parsing.
/// </summary>
/// <param name="codeGeneration">Code-generation hook stored for use during parsing.</param>
/// <param name="code">Source text to parse; must not be null.</param>
/// <returns>The parsed program.</returns>
/// <exception cref="ArgumentNullException"><paramref name="code"/> is null.</exception>
public Program Parse(ICodeGeneration codeGeneration, string code)
{
    // Fail fast with a descriptive exception instead of a NullReferenceException
    // at the code.Length dereference below.
    if (code == null)
    {
        throw new ArgumentNullException("code");
    }

    _codeGeneration = codeGeneration;
    // Line numbering starts at 1 for non-empty input, 0 for empty input.
    _lineNumber = (code.Length > 0) ? 1 : 0;
    _lineStart = 0;
    _length = code.Length;
    _buffer = null;

    _state = new State
    {
        AllowIn = true,
        LabelSet = new Dictionary<string, object>(),
        LastParenthesized = null,
        InFunctionBody = false,
        InIteration = false,
        InSwitch = false
    };

    _extra = new Extra();

    if (_length > 0)
    {
        // NOTE(review): _source is only assigned for non-empty input, so an
        // empty parse leaves any characters from a previous run in place —
        // confirm that ParseProgram never reads _source when _length == 0.
        _source = StringToArray(code).ToList();
    }

    return ParseProgram();
}
/// <summary>
/// Tokenizes <paramref name="code"/> and returns the collected tokens.
/// Resets all scanner state, forces token collection on, and — in tolerant
/// mode — records the first lexer error and stops instead of throwing.
/// </summary>
/// <param name="code">Source text to tokenize.</param>
/// <param name="options">Tokenizer options (range/loc, comments, tolerant mode);
/// may be null for defaults. NOTE: a non-null instance is mutated here
/// (<c>tokens</c> is forced to true), matching the original Esprima behavior.</param>
/// <returns>The list of tokens scanned up to end-of-file or the first error.</returns>
public List<Token> tokenize(string code, Options options)
{
    // Reset scanner position; lineNumber is 0 for empty input, 1 otherwise.
    source = code;
    index = 0;
    lineNumber = (source.Length > 0) ? 1 : 0;
    lineStart = 0;
    startIndex = index;
    startLineNumber = lineNumber;
    startLineStart = lineStart;
    length = source.Length;
    lookahead = null;

    state = new State()
    {
        allowIn = true,
        allowYield = true,
        labelSet = new List<string>(),
        inFunctionBody = false,
        inIteration = false,
        inSwitch = false,
        lastCommentStart = -1,
        curlyStack = new Stack<string>()
    };

    extra = new Extra();

    // Options matching. Of course we collect tokens here.
    options = options ?? new Options();
    options.tokens = true;

    extra.tokens = new List<Token>();
    extra.tokenize = true;
    // The following two fields are necessary to compute the Regex tokens.
    extra.openParenToken = -1;
    extra.openCurlyToken = -1;
    extra.range = options.range;
    extra.loc = options.loc;

    if (options.comment)
    {
        extra.comments = new List<Comment>();
    }
    if (options.tolerant)
    {
        extra.errors = new List<Error>();
    }

    peek();
    if (lookahead.type == TokenType.EOF)
    {
        return extra.tokens;
    }

    lex();
    while (lookahead.type != TokenType.EOF)
    {
        try
        {
            lex();
        }
        catch (Error lexError)
        {
            // Tolerant mode: record the error and stop. We have to break on
            // the first error to avoid infinite loops.
            if (extra.errors != null)
            {
                recordError(lexError);
                break;
            }

            // Rethrow with `throw;` so the original stack trace is preserved
            // (the previous `throw lexError;` reset it).
            throw;
        }
    }

    filterTokenLocation();
    return extra.tokens;
}