protected virtual FunctionDefinition MaybeParseFunctionDefinition(
    TokenStream tokens,
    Node owner,
    FileScope fileScope,
    AnnotationCollection annotations,
    ModifierCollection modifiers)
{
    return null;
}
private FunctionDefinition ParseFunction(
    TokenStream tokens,
    TopLevelConstruct nullableOwner,
    FileScope fileScope,
    AnnotationCollection annotations)
{
    bool isStatic =
        nullableOwner != null &&
        nullableOwner is ClassDefinition &&
        tokens.PopIfPresent(this.parser.Keywords.STATIC);
    Token functionToken = tokens.PopExpected(this.parser.Keywords.FUNCTION);
    Token functionNameToken = tokens.Pop();
    this.parser.VerifyIdentifier(functionNameToken);

    FunctionDefinition fd = new FunctionDefinition(
        functionToken, parser.CurrentLibrary, nullableOwner, isStatic,
        functionNameToken, annotations, fileScope);

    tokens.PopExpected("(");
    List<Token> argNames = new List<Token>();
    List<Expression> defaultValues = new List<Expression>();
    bool optionalArgFound = false;
    while (!tokens.PopIfPresent(")"))
    {
        if (argNames.Count > 0) { tokens.PopExpected(","); }

        Token argName = tokens.Pop();
        Expression defaultValue = null;
        this.parser.VerifyIdentifier(argName);
        if (tokens.PopIfPresent("="))
        {
            optionalArgFound = true;
            defaultValue = this.parser.ExpressionParser.Parse(tokens, fd);
        }
        else if (optionalArgFound)
        {
            throw new ParserException(argName, "All optional arguments must come at the end of the argument list.");
        }

        argNames.Add(argName);
        defaultValues.Add(defaultValue);
    }

    IList<Executable> code = ParserContext.ParseBlock(parser, tokens, true, fd);

    fd.ArgNames = argNames.ToArray();
    fd.DefaultValues = defaultValues.ToArray();
    fd.Code = code.ToArray();

    return fd;
}
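// --- Illustrative sketch (not part of the original source) ---------------------------------
// ParseFunction above enforces that once an argument with a default value appears, every
// later argument must also have one. The standalone helper below demonstrates that same rule
// in isolation; the type and method names (ParamInfo, CheckOptionalArgOrdering) are
// hypothetical and chosen only for this example.
using System;
using System.Collections.Generic;

internal static class OptionalArgOrderingSketch
{
    internal sealed class ParamInfo
    {
        public string Name;
        public bool HasDefaultValue;
    }

    // Throws as soon as a required parameter follows an optional one, mirroring the
    // optionalArgFound check in ParseFunction.
    internal static void CheckOptionalArgOrdering(IEnumerable<ParamInfo> args)
    {
        bool optionalArgFound = false;
        foreach (ParamInfo arg in args)
        {
            if (arg.HasDefaultValue)
            {
                optionalArgFound = true;
            }
            else if (optionalArgFound)
            {
                throw new InvalidOperationException(
                    "All optional arguments must come at the end of the argument list: " + arg.Name);
            }
        }
    }
}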
protected virtual Namespace ParseNamespace(
    TokenStream tokens,
    Node owner,
    FileScope fileScope,
    AnnotationCollection annotations)
{
    Token namespaceToken = tokens.PopExpected(this.parser.Keywords.NAMESPACE);
    Token first = tokens.Pop();
    this.parser.VerifyIdentifier(first);
    List<Token> namespacePieces = new List<Token>() { first };
    string namespaceBuilder = first.Value;
    parser.RegisterNamespace(namespaceBuilder);
    while (tokens.PopIfPresent("."))
    {
        Token nsToken = tokens.Pop();
        this.parser.VerifyIdentifier(nsToken);
        namespacePieces.Add(nsToken);
        namespaceBuilder += "." + nsToken.Value;
        parser.RegisterNamespace(namespaceBuilder);
    }

    string name = string.Join(".", namespacePieces.Select<Token, string>(t => t.Value));

    Namespace namespaceInstance = new Namespace(namespaceToken, name, owner, fileScope, ModifierCollection.EMPTY, annotations);

    tokens.PopExpected("{");
    List<TopLevelEntity> namespaceMembers = new List<TopLevelEntity>();
    while (!tokens.PopIfPresent("}"))
    {
        TopLevelEntity executable = this.Parse(tokens, namespaceInstance, fileScope);
        if (executable is FunctionDefinition ||
            executable is ClassDefinition ||
            executable is EnumDefinition ||
            executable is ConstDefinition ||
            executable is Namespace)
        {
            namespaceMembers.Add(executable);
        }
        else
        {
            throw new ParserException(executable, "Only function, class, enum, const, and nested namespace declarations may exist as direct members of a namespace.");
        }
    }

    namespaceInstance.Code = namespaceMembers.ToArray();

    return namespaceInstance;
}
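// --- Illustrative sketch (not part of the original source) ---------------------------------
// ParseNamespace above registers every prefix of a dotted namespace name, so "A.B.C" also
// registers "A" and "A.B". The standalone helper below shows that expansion by itself; the
// name EnumerateNamespacePrefixes is hypothetical.
using System.Collections.Generic;

internal static class NamespacePrefixSketch
{
    // "A.B.C" -> "A", "A.B", "A.B.C"
    internal static IEnumerable<string> EnumerateNamespacePrefixes(string dottedName)
    {
        string[] parts = dottedName.Split('.');
        string prefix = parts[0];
        yield return prefix;
        for (int i = 1; i < parts.Length; ++i)
        {
            prefix += "." + parts[i];
            yield return prefix;
        }
    }
}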
public void ParseInterpretedCode(string filename, string code)
{
    FileScope fileScope = new FileScope(filename, this.CurrentScope);
    int fileId = this.GetNextFileId();
    this.RegisterFileUsed(filename, code, fileId);
    Token[] tokenList = Tokenizer.Tokenize(filename, code, fileId, true);
    TokenStream tokens = new TokenStream(tokenList, filename);

    List<string> namespaceImportsBuilder = new List<string>();

    // Implicitly import the Core library for the current locale.
    LocalizedLibraryView implicitCoreImport = this.LibraryManager.GetCoreLibrary(this);
    namespaceImportsBuilder.Add(implicitCoreImport.Name);
    fileScope.Imports.Add(new ImportStatement(null, implicitCoreImport.Name, this.CurrentLibrary, fileScope));

    while (tokens.HasMore && tokens.IsNext(this.Keywords.IMPORT))
    {
        ImportStatement importStatement = this.ExecutableParser.ParseTopLevel(tokens, null, fileScope) as ImportStatement;

        // The next token is the import keyword, so ParseTopLevel should always produce an ImportStatement here.
        if (importStatement == null) throw new Exception();

        namespaceImportsBuilder.Add(importStatement.ImportPath);
        LocalizedLibraryView localizedLibraryView = this.LibraryManager.GetOrImportLibrary(this, importStatement.FirstToken, importStatement.ImportPath);
        if (localizedLibraryView == null)
        {
            this.unresolvedImports.Add(importStatement);
        }
    }
    string[] namespaceImports = namespaceImportsBuilder.ToArray();

    while (tokens.HasMore)
    {
        TopLevelConstruct executable = this.ExecutableParser.ParseTopLevel(tokens, null, fileScope);

        if (executable is ImportStatement)
        {
            throw this.GenerateParseError(
                ErrorMessages.ALL_IMPORTS_MUST_OCCUR_AT_BEGINNING_OF_FILE,
                executable.FirstToken);
        }

        this.CurrentScope.AddExecutable(executable);
    }
}
protected virtual EnumDefinition ParseEnumDefinition(
    TokenStream tokens,
    Node owner,
    FileScope fileScope,
    ModifierCollection modifiers,
    AnnotationCollection annotations)
{
    Token enumToken = tokens.PopExpected(this.parser.Keywords.ENUM);
    Token nameToken = tokens.Pop();
    this.parser.VerifyIdentifier(nameToken);
    string name = nameToken.Value;

    EnumDefinition ed = new EnumDefinition(enumToken, nameToken, owner, fileScope, modifiers, annotations);

    tokens.PopExpected("{");
    bool nextForbidden = false;
    List<Token> items = new List<Token>();
    List<Expression> values = new List<Expression>();
    while (!tokens.PopIfPresent("}"))
    {
        if (nextForbidden)
        {
            // The previous item was not followed by a comma, so the only legal token here is "}".
            // PopExpected is used purely to generate the error (crash).
            tokens.PopExpected("}");
        }

        Token enumItem = tokens.Pop();
        this.parser.VerifyIdentifier(enumItem);
        if (tokens.PopIfPresent("="))
        {
            values.Add(this.parser.ExpressionParser.Parse(tokens, ed));
        }
        else
        {
            values.Add(null);
        }
        nextForbidden = !tokens.PopIfPresent(",");
        items.Add(enumItem);
    }

    ed.SetItems(items, values);

    return ed;
}
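// --- Illustrative sketch (not part of the original source) ---------------------------------
// The nextForbidden flag in ParseEnumDefinition above implements a comma-separated list that
// tolerates a single trailing comma before "}" but rejects two items with no comma between
// them. The standalone parser below shows the same pattern over a plain token queue; all
// names here are hypothetical.
using System;
using System.Collections.Generic;

internal static class TrailingCommaSketch
{
    // Accepts: A , B }   and   A , B , }
    // Rejects: A B }     (missing comma between items)
    internal static List<string> ParseItems(Queue<string> tokens)
    {
        List<string> items = new List<string>();
        bool nextForbidden = false;
        while (tokens.Peek() != "}")
        {
            if (nextForbidden)
            {
                throw new InvalidOperationException("Expected '}': missing comma after the previous item.");
            }
            items.Add(tokens.Dequeue());
            if (tokens.Peek() == ",")
            {
                tokens.Dequeue();
                nextForbidden = false;
            }
            else
            {
                nextForbidden = true;
            }
        }
        tokens.Dequeue(); // consume the closing "}"
        return items;
    }
}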
public void ParseFile(string filename, string code)
{
    FileScope fileScope = new FileScope(filename, code, this.CurrentScope, this.GetNextFileId());
    this.RegisterFileUsed(fileScope, code);
    TokenStream tokens = new TokenStream(fileScope);

    List<string> namespaceImportsBuilder = new List<string>();

    // Implicitly import the Core library for the current locale.
    LocalizedAssemblyView implicitCoreImport = this.ScopeManager.GetCoreLibrary(this);
    namespaceImportsBuilder.Add(implicitCoreImport.Name);
    fileScope.Imports.Add(new ImportStatement(null, implicitCoreImport.Name, fileScope));

    while (tokens.HasMore &&
        (tokens.IsNext(this.Keywords.IMPORT) ||
         (this.IsCSharpCompat && tokens.IsNext("using"))))
    {
        ImportStatement importStatement = this.TopLevelParser.ParseImport(tokens, fileScope);

        // The next token is an import/using keyword, so ParseImport should never return null here.
        if (importStatement == null) throw new Exception();

        namespaceImportsBuilder.Add(importStatement.ImportPath);
        LocalizedAssemblyView localizedAssemblyView = this.ScopeManager.GetOrImportAssembly(this, importStatement.FirstToken, importStatement.ImportPath);
        if (localizedAssemblyView == null)
        {
            this.unresolvedImports.Add(importStatement);
        }
    }
    string[] namespaceImports = namespaceImportsBuilder.ToArray();

    while (tokens.HasMore)
    {
        this.CurrentScope.AddExecutable(this.TopLevelParser.Parse(tokens, null, fileScope));
    }
}
internal virtual ImportStatement ParseImport(TokenStream tokens, FileScope fileScope)
{
    Token importToken = this.parser.IsCSharpCompat
        ? tokens.PopExpected(parser.Keywords.IMPORT, "using")
        : tokens.PopExpected(parser.Keywords.IMPORT);

    List<string> importPathBuilder = new List<string>();
    while (!tokens.PopIfPresent(";"))
    {
        if (importPathBuilder.Count > 0)
        {
            tokens.PopExpected(".");
        }

        Token pathToken = tokens.Pop();
        parser.VerifyIdentifier(pathToken);
        importPathBuilder.Add(pathToken.Value);
    }
    string importPath = string.Join(".", importPathBuilder);

    return new ImportStatement(importToken, importPath, fileScope);
}
protected abstract FunctionDefinition ParseFunction(
    TokenStream tokens,
    TopLevelEntity nullableOwner,
    FileScope fileScope,
    ModifierCollection modifiers,
    AnnotationCollection annotations);
public ParserException(FileScope file, string message)
    : base(file.Name + ": " + message)
{
    this.file = file;
    this.OriginalMessage = message;
}
protected virtual ClassDefinition ParseClassDefinition(
    TokenStream tokens,
    Node owner,
    FileScope fileScope,
    ModifierCollection modifiers,
    AnnotationCollection classAnnotations)
{
    Token classToken = tokens.PopExpected(this.parser.Keywords.CLASS);
    Token classNameToken = tokens.Pop();
    if (classNameToken.Type != TokenType.WORD)
    {
        throw new ParserException(classNameToken, "This is not a valid class name.");
    }

    List<Token> baseClassTokens = new List<Token>();
    List<string> baseClassStrings = new List<string>();
    if (tokens.PopIfPresent(":"))
    {
        if (baseClassTokens.Count > 0)
        {
            tokens.PopExpected(",");
        }

        Token baseClassToken = tokens.Pop();
        string baseClassName = baseClassToken.Value;
        this.parser.VerifyIdentifier(baseClassToken);
        while (tokens.PopIfPresent("."))
        {
            Token baseClassTokenNext = tokens.Pop();
            this.parser.VerifyIdentifier(baseClassTokenNext);
            baseClassName += "." + baseClassTokenNext.Value;
        }

        baseClassTokens.Add(baseClassToken);
        baseClassStrings.Add(baseClassName);
    }

    ClassDefinition cd = new ClassDefinition(
        classToken,
        classNameToken,
        baseClassTokens,
        baseClassStrings,
        owner,
        fileScope,
        modifiers,
        classAnnotations);

    tokens.PopExpected("{");
    List<FunctionDefinition> methods = new List<FunctionDefinition>();
    List<FieldDefinition> fields = new List<FieldDefinition>();
    List<PropertyDefinition> properties = new List<PropertyDefinition>();

    while (!tokens.PopIfPresent("}"))
    {
        this.ParseClassMember(tokens, fileScope, cd, methods, fields, properties);
    }

    cd.Methods = methods.ToArray();
    cd.Fields = fields.ToArray();

    if (cd.Constructor == null)
    {
        // This should be empty if there is no base class, or just pass along the base class' args if there is.
        cd.Constructor = new ConstructorDefinition(
            cd,
            ModifierCollection.EMPTY,
            new AnnotationCollection(parser));
        if (cd.BaseClassTokens.Length > 0)
        {
            cd.Constructor.BaseToken = cd.FirstToken;
            cd.Constructor.SetBaseArgs(new Expression[0]);
        }
    }

    return cd;
}
protected abstract ConstDefinition ParseConst(
    TokenStream tokens,
    Node owner,
    FileScope fileScope,
    ModifierCollection modifiers,
    AnnotationCollection annotations);
public TopLevelConstruct ParseTopLevel(
    TokenStream tokens,
    TopLevelConstruct owner,
    FileScope fileScope)
{
    AnnotationCollection annotations = this.parser.AnnotationParser.ParseAnnotations(tokens);

    string value = tokens.PeekValue();

    // The returns are inline, so you'll have to refactor or put the check inside each parse call.
    // Or maybe a try/finally.
    TODO.CheckForUnusedAnnotations();

    Token staticToken = null;
    Token finalToken = null;
    while (value == this.parser.Keywords.STATIC || value == this.parser.Keywords.FINAL)
    {
        if (value == this.parser.Keywords.STATIC && staticToken == null)
        {
            staticToken = tokens.Pop();
            value = tokens.PeekValue();
        }

        if (value == this.parser.Keywords.FINAL && finalToken == null)
        {
            finalToken = tokens.Pop();
            value = tokens.PeekValue();
        }
    }

    if (staticToken != null || finalToken != null)
    {
        if (value != this.parser.Keywords.CLASS)
        {
            if (staticToken != null)
            {
                throw new ParserException(staticToken, "Only classes, methods, and fields may be marked as static.");
            }
            else
            {
                throw new ParserException(finalToken, "Only classes may be marked as final.");
            }
        }

        if (staticToken != null && finalToken != null)
        {
            throw new ParserException(staticToken, "Classes cannot be both static and final.");
        }
    }

    if (value == parser.Keywords.IMPORT)
    {
        Token importToken = tokens.PopExpected(parser.Keywords.IMPORT);
        List<string> importPathBuilder = new List<string>();
        while (!tokens.PopIfPresent(";"))
        {
            if (importPathBuilder.Count > 0)
            {
                tokens.PopExpected(".");
            }

            Token pathToken = tokens.Pop();
            parser.VerifyIdentifier(pathToken);
            importPathBuilder.Add(pathToken.Value);
        }
        string importPath = string.Join(".", importPathBuilder);

        return new ImportStatement(importToken, importPath, parser.CurrentLibrary, fileScope);
    }

    if (value == this.parser.Keywords.NAMESPACE) { return this.ParseNamespace(tokens, owner, fileScope, annotations); }
    if (value == this.parser.Keywords.CONST) { return this.ParseConst(tokens, owner, fileScope, annotations); }
    if (value == this.parser.Keywords.FUNCTION) { return this.ParseFunction(tokens, owner, fileScope, annotations); }
    if (value == this.parser.Keywords.CLASS) { return this.ParseClassDefinition(tokens, owner, staticToken, finalToken, fileScope, annotations); }
    if (value == this.parser.Keywords.ENUM) { return this.ParseEnumDefinition(tokens, owner, fileScope, annotations); }
    if (value == this.parser.Keywords.CONSTRUCTOR) { return this.ParseConstructor(tokens, owner, annotations); }

    throw new ParserException(tokens.Peek(), "Unrecognized token.");
}
internal virtual TopLevelEntity Parse(
    TokenStream tokens,
    TopLevelEntity owner,
    FileScope fileScope)
{
    AnnotationCollection annotations = this.parser.AnnotationParser.ParseAnnotations(tokens);
    ModifierCollection modifiers = ModifierCollection.Parse(tokens);

    string value = tokens.PeekValue();
    if (value == this.parser.Keywords.IMPORT)
    {
        throw this.parser.GenerateParseError(
            ErrorMessages.ALL_IMPORTS_MUST_OCCUR_AT_BEGINNING_OF_FILE,
            tokens.Pop());
    }

    // The returns are inline, so you'll have to refactor or put the check inside each parse call.
    // Or maybe a try/finally.
    TODO.CheckForUnusedAnnotations();

    if (value == this.parser.Keywords.NAMESPACE) { return this.ParseNamespace(tokens, owner, fileScope, annotations); }
    if (value == this.parser.Keywords.CONST) { return this.ParseConst(tokens, owner, fileScope, modifiers, annotations); }
    if (value == this.parser.Keywords.FUNCTION) { return this.ParseFunction(tokens, owner, fileScope, modifiers, annotations); }
    if (value == this.parser.Keywords.CLASS) { return this.ParseClassDefinition(tokens, owner, fileScope, modifiers, annotations); }
    if (value == this.parser.Keywords.ENUM) { return this.ParseEnumDefinition(tokens, owner, fileScope, modifiers, annotations); }
    if (value == this.parser.Keywords.CONSTRUCTOR && owner is ClassDefinition)
    {
        return this.ParseConstructor(tokens, (ClassDefinition)owner, modifiers, annotations);
    }

    FunctionDefinition nullableFunctionDef = this.MaybeParseFunctionDefinition(tokens, owner, fileScope, annotations, modifiers);
    if (nullableFunctionDef != null)
    {
        return nullableFunctionDef;
    }

    Token token = tokens.Peek();
    throw ParserException.ThrowException(
        this.parser.CurrentLocale,
        ErrorMessages.UNEXPECTED_TOKEN_NO_SPECIFIC_EXPECTATIONS,
        token,
        token.Value);
}
private ClassDefinition ParseClassDefinition(
    TokenStream tokens,
    TopLevelConstruct owner,
    Token staticToken,
    Token finalToken,
    FileScope fileScope,
    AnnotationCollection classAnnotations)
{
    Token classToken = tokens.PopExpected(this.parser.Keywords.CLASS);
    Token classNameToken = tokens.Pop();
    this.parser.VerifyIdentifier(classNameToken);
    List<Token> baseClassTokens = new List<Token>();
    List<string> baseClassStrings = new List<string>();
    if (tokens.PopIfPresent(":"))
    {
        if (baseClassTokens.Count > 0)
        {
            tokens.PopExpected(",");
        }

        Token baseClassToken = tokens.Pop();
        string baseClassName = baseClassToken.Value;
        this.parser.VerifyIdentifier(baseClassToken);
        while (tokens.PopIfPresent("."))
        {
            Token baseClassTokenNext = tokens.Pop();
            this.parser.VerifyIdentifier(baseClassTokenNext);
            baseClassName += "." + baseClassTokenNext.Value;
        }

        baseClassTokens.Add(baseClassToken);
        baseClassStrings.Add(baseClassName);
    }

    ClassDefinition cd = new ClassDefinition(
        classToken,
        classNameToken,
        baseClassTokens,
        baseClassStrings,
        owner,
        parser.CurrentLibrary,
        staticToken,
        finalToken,
        fileScope,
        classAnnotations);

    tokens.PopExpected("{");
    List<FunctionDefinition> methods = new List<FunctionDefinition>();
    List<FieldDeclaration> fields = new List<FieldDeclaration>();
    ConstructorDefinition constructorDef = null;
    ConstructorDefinition staticConstructorDef = null;

    while (!tokens.PopIfPresent("}"))
    {
        AnnotationCollection annotations = this.parser.AnnotationParser.ParseAnnotations(tokens);

        if (tokens.IsNext(this.parser.Keywords.FUNCTION) ||
            tokens.AreNext(this.parser.Keywords.STATIC, this.parser.Keywords.FUNCTION))
        {
            methods.Add(this.parser.ExecutableParser.ParseFunction(tokens, cd, fileScope, annotations));
        }
        else if (tokens.IsNext(this.parser.Keywords.CONSTRUCTOR))
        {
            if (constructorDef != null)
            {
                throw this.parser.GenerateParseError(
                    ErrorMessages.CLASS_CANNOT_HAVE_MULTIPLE_CONSTRUCTORS,
                    tokens.Pop());
            }

            constructorDef = this.parser.ExecutableParser.ParseConstructor(tokens, cd, annotations);
        }
        else if (tokens.AreNext(this.parser.Keywords.STATIC, this.parser.Keywords.CONSTRUCTOR))
        {
            tokens.Pop(); // static token
            if (staticConstructorDef != null)
            {
                throw new ParserException(tokens.Pop(), "Multiple static constructors are not allowed.");
            }

            staticConstructorDef = this.parser.ExecutableParser.ParseConstructor(tokens, cd, annotations);
        }
        else if (tokens.IsNext(this.parser.Keywords.FIELD) ||
            tokens.AreNext(this.parser.Keywords.STATIC, this.parser.Keywords.FIELD))
        {
            fields.Add(this.parser.ExecutableParser.ParseField(tokens, cd, annotations));
        }
        else
        {
            tokens.PopExpected("}");
        }

        TODO.CheckForUnusedAnnotations();
    }

    cd.Methods = methods.ToArray();
    cd.Constructor = constructorDef;
    cd.StaticConstructor = staticConstructorDef;
    cd.Fields = fields.ToArray();

    return cd;
}
private ConstStatement ParseConst(
    TokenStream tokens,
    TopLevelConstruct owner,
    FileScope fileScope,
    AnnotationCollection annotations)
{
    Token constToken = tokens.PopExpected(this.parser.Keywords.CONST);
    Token nameToken = tokens.Pop();
    ConstStatement constStatement = new ConstStatement(constToken, nameToken, owner, parser.CurrentLibrary, fileScope, annotations);
    this.parser.VerifyIdentifier(nameToken);
    tokens.PopExpected("=");
    constStatement.Expression = this.parser.ExpressionParser.Parse(tokens, constStatement);
    tokens.PopExpected(";");

    return constStatement;
}
public TopLevelConstruct ParseTopLevel(
    TokenStream tokens,
    TopLevelConstruct owner,
    FileScope fileScope)
{
    AnnotationCollection annotations = this.parser.AnnotationParser.ParseAnnotations(tokens);

    string value = tokens.PeekValue();

    // The returns are inline, so you'll have to refactor or put the check inside each parse call.
    // Or maybe a try/finally.
    TODO.CheckForUnusedAnnotations();

    Token staticToken = null;
    Token finalToken = null;
    while (value == this.parser.Keywords.STATIC || value == this.parser.Keywords.FINAL)
    {
        if (value == this.parser.Keywords.STATIC && staticToken == null)
        {
            staticToken = tokens.Pop();
            value = tokens.PeekValue();
        }

        if (value == this.parser.Keywords.FINAL && finalToken == null)
        {
            finalToken = tokens.Pop();
            value = tokens.PeekValue();
        }
    }

    if (staticToken != null || finalToken != null)
    {
        if (value != this.parser.Keywords.CLASS)
        {
            if (staticToken != null)
            {
                throw ParserException.ThrowException(this.parser.CurrentLocale, ErrorMessages.ONLY_CLASSES_METHODS_FIELDS_MAY_BE_STATIC, staticToken);
            }
            else
            {
                throw ParserException.ThrowException(this.parser.CurrentLocale, ErrorMessages.ONLY_CLASSES_MAY_BE_FINAL, finalToken);
            }
        }

        if (staticToken != null && finalToken != null)
        {
            throw ParserException.ThrowException(this.parser.CurrentLocale, ErrorMessages.CLASSES_CANNOT_BE_STATIC_AND_FINAL_SIMULTANEOUSLY, staticToken);
        }
    }

    if (value == parser.Keywords.IMPORT)
    {
        Token importToken = tokens.PopExpected(parser.Keywords.IMPORT);
        List<string> importPathBuilder = new List<string>();
        while (!tokens.PopIfPresent(";"))
        {
            if (importPathBuilder.Count > 0)
            {
                tokens.PopExpected(".");
            }

            Token pathToken = tokens.Pop();
            parser.VerifyIdentifier(pathToken);
            importPathBuilder.Add(pathToken.Value);
        }
        string importPath = string.Join(".", importPathBuilder);

        return new ImportStatement(importToken, importPath, parser.CurrentLibrary, fileScope);
    }

    if (value == this.parser.Keywords.NAMESPACE) { return this.ParseNamespace(tokens, owner, fileScope, annotations); }
    if (value == this.parser.Keywords.CONST) { return this.ParseConst(tokens, owner, fileScope, annotations); }
    if (value == this.parser.Keywords.FUNCTION) { return this.ParseFunction(tokens, owner, fileScope, annotations); }
    if (value == this.parser.Keywords.CLASS) { return this.ParseClassDefinition(tokens, owner, staticToken, finalToken, fileScope, annotations); }
    if (value == this.parser.Keywords.ENUM) { return this.ParseEnumDefinition(tokens, owner, fileScope, annotations); }
    if (value == this.parser.Keywords.CONSTRUCTOR) { return this.ParseConstructor(tokens, owner, annotations); }

    Token token = tokens.Peek();
    throw ParserException.ThrowException(
        this.parser.CurrentLocale,
        ErrorMessages.UNEXPECTED_TOKEN_NO_SPECIFIC_EXPECTATIONS,
        token,
        token.Value);
}
public FileScopedEntityLookup SetFileScope(FileScope fileScope)
{
    this.fileScope = fileScope;
    return this;
}
public static Token[] Tokenize(FileScope file)
{
    Localization.Locale locale = file.CompilationScope.Locale;
    string code = file.Content;

    // Add a newline and a dummy character at the end.
    // Set the length equal to the code with the newline but without the null terminator.
    // This makes dereferencing index + 1 simpler and also makes the check for the end
    // of word tokens and single-line comments easy.
    code += "\n\0";
    int length = code.Length - 1;

    int[] lineByIndex = new int[code.Length];
    int[] colByIndex = new int[code.Length];
    char c;
    int line = 0;
    int col = 0;
    for (int i = 0; i < code.Length; ++i)
    {
        c = code[i];
        lineByIndex[i] = line;
        colByIndex[i] = col;
        if (c == '\n')
        {
            ++line;
            col = -1;
        }
        ++col;
    }

    List<Token> tokens = new List<Token>();
    TokenMode mode = TokenMode.NORMAL;
    char modeSubtype = ' ';
    int tokenStart = 0;
    string tokenValue;
    char c2;
    bool isTokenEnd = false;
    bool stringIsRaw = false;

    for (int i = 0; i < length; ++i)
    {
        c = code[i];

        switch (mode)
        {
            case TokenMode.COMMENT:
                if (modeSubtype == '*')
                {
                    if (c == '*' && code[i + 1] == '/')
                    {
                        ++i;
                        mode = TokenMode.NORMAL;
                    }
                }
                else
                {
                    if (c == '\n')
                    {
                        mode = TokenMode.NORMAL;
                    }
                }
                break;

            case TokenMode.NORMAL:
                if (WHITESPACE.Contains(c))
                {
                    // do nothing
                }
                else if (c == '/' && (code[i + 1] == '/' || code[i + 1] == '*'))
                {
                    mode = TokenMode.COMMENT;
                    modeSubtype = code[++i];
                }
                else if (IsIdentifierChar(c))
                {
                    tokenStart = i;
                    mode = TokenMode.WORD;
                }
                else if (c == '"' || c == '\'')
                {
                    tokenStart = i;
                    mode = TokenMode.STRING;
                    modeSubtype = c;
                    stringIsRaw = tokens.Count > 0 && tokens[tokens.Count - 1].Value == "@";
                }
                else
                {
                    if (c == '.')
                    {
                        c2 = code[i + 1];
                        if (c2 >= '0' && c2 <= '9')
                        {
                            mode = TokenMode.WORD;
                            tokenStart = i++;
                        }
                    }

                    if (mode == TokenMode.NORMAL)
                    {
                        tokens.Add(new Token(c.ToString(), TokenType.PUNCTUATION, file, lineByIndex[i], colByIndex[i]));
                    }
                }
                break;

            case TokenMode.STRING:
                if (c == modeSubtype)
                {
                    tokenValue = code.Substring(tokenStart, i - tokenStart + 1);
                    tokens.Add(new Token(tokenValue, TokenType.STRING, file, lineByIndex[i], colByIndex[i]));
                    mode = TokenMode.NORMAL;
                }
                else if (!stringIsRaw && c == '\\')
                {
                    ++i;
                }
                break;

            case TokenMode.WORD:
                isTokenEnd = false;
                if (IsIdentifierChar(c))
                {
                    // do nothing
                }
                else if (c == '.')
                {
                    if (code[tokenStart] >= '0' && code[tokenStart] <= '9')
                    {
                        // do nothing
                    }
                    else
                    {
                        isTokenEnd = true;
                    }
                }
                else
                {
                    isTokenEnd = true;
                }

                if (isTokenEnd)
                {
                    tokenValue = code.Substring(tokenStart, i - tokenStart);
                    c = tokenValue[0];
                    TokenType type = TokenType.WORD;
                    if ((c >= '0' && c <= '9') || c == '.')
                    {
                        type = TokenType.NUMBER;
                    }
                    else if (!locale.Keywords.IsValidVariable(tokenValue))
                    {
                        type = TokenType.KEYWORD;
                    }
                    tokens.Add(new Token(tokenValue, type, file, lineByIndex[tokenStart], colByIndex[tokenStart]));
                    mode = TokenMode.NORMAL;
                    --i;
                }
                break;
        }
    }

    switch (mode)
    {
        case TokenMode.COMMENT:
            throw new ParserException(file, "There is an unclosed comment in this file.");
        case TokenMode.STRING:
            throw new ParserException(file, "There is an unclosed string in this file.");
        case TokenMode.WORD:
            throw new System.InvalidOperationException();
        default:
            break;
    }

    return tokens.ToArray();
}
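// --- Illustrative sketch (not part of the original source) ---------------------------------
// Tokenize above precomputes, for every character index, the line and column it falls on, so
// that Token construction can look positions up in O(1) instead of rescanning the source.
// The standalone helper below isolates that precomputation; BuildPositionTables is a
// hypothetical name used only for this example.
internal static class PositionTableSketch
{
    internal static void BuildPositionTables(string code, out int[] lineByIndex, out int[] colByIndex)
    {
        lineByIndex = new int[code.Length];
        colByIndex = new int[code.Length];
        int line = 0;
        int col = 0;
        for (int i = 0; i < code.Length; ++i)
        {
            lineByIndex[i] = line;  // line/column of the character at index i (both 0-based)
            colByIndex[i] = col;
            if (code[i] == '\n')
            {
                ++line;  // the character after a newline starts the next line at column 0
                col = 0;
            }
            else
            {
                ++col;
            }
        }
    }
}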
public TokenStream(FileScope file)
{
    this.file = file;
    this.innerStream = new InnerTokenStream(Tokenizer.Tokenize(file));
}
private void RegisterFileUsed(FileScope file, string code)
{
    this.filesUsed.Add(file.ID, file.Name + "\n" + code);
}