// Resolves the core function ID named by the given string constant.
// Raises a parser error (blaming the constant's first token) when the
// name does not correspond to any known core function.
internal static int GetId(StringConstant str, Localization.Locale locale)
{
    int id = GetId(str.Value);
    if (id != -1)
    {
        return id;
    }

    throw ParserException.ThrowException(
        locale,
        Localization.ErrorMessages.UNKNOWN_CORE_FUNCTION_ID,
        str.FirstToken,
        str.Value);
}
/// <summary>
/// Registers a dependency from this scope on the given localized assembly view.
/// Re-registering the same view is a no-op; importing the same assembly scope
/// through a view in a different locale is a parse error.
/// </summary>
/// <param name="throwToken">Token blamed in the error message on conflict.</param>
/// <param name="view">The localized view of the assembly being imported.</param>
public void AddDependency(Token throwToken, LocalizedAssemblyView view)
{
    // Single TryGetValue instead of ContainsKey followed by two indexer reads.
    LocalizedAssemblyView existingView;
    if (this.dependenciesAndViews.TryGetValue(view.Scope, out existingView) &&
        existingView != view)
    {
        throw ParserException.ThrowException(
            view.Locale,
            ErrorMessages.CANNOT_IMPORT_SAME_LIBRARY_FROM_DIFFERENT_LOCALES,
            throwToken);
    }
    this.dependenciesAndViews[view.Scope] = view;
    this.Metadata.RegisterDependencies(view.Scope.Metadata);
}
/// <summary>
/// Registers a dependency from this library scope on the given localized library view.
/// Re-registering the same view is a no-op; importing the same library scope
/// through a view in a different locale is a parse error.
/// </summary>
/// <param name="throwToken">Token blamed in the error message on conflict.</param>
/// <param name="libraryView">The localized view of the library being imported.</param>
public void AddDependency(Token throwToken, LocalizedLibraryView libraryView)
{
    if (this == libraryView.LibraryScope)
    {
        // This should not happen; message added for easier diagnosis if it ever does.
        throw new System.Exception("A library scope cannot declare a dependency on itself.");
    }

    // Single TryGetValue instead of ContainsKey followed by two indexer reads.
    LocalizedLibraryView existingView;
    if (this.dependenciesAndViews.TryGetValue(libraryView.LibraryScope, out existingView) &&
        existingView != libraryView)
    {
        throw ParserException.ThrowException(
            libraryView.Locale,
            ErrorMessages.CANNOT_IMPORT_SAME_LIBRARY_FROM_DIFFERENT_LOCALES,
            throwToken);
    }
    this.dependenciesAndViews[libraryView.LibraryScope] = libraryView;
}
// Builds (and returns, rather than throws) an end-of-file exception so call
// sites can write "throw this.ThrowEofException();" — which lets the compiler
// see that the calling codepath terminates.
public System.Exception ThrowEofException()
{
    return ParserException.ThrowEofException(this.fileName);
}
/// <summary>
/// Parses one top-level entity (namespace, const, function, class, enum, or
/// class constructor) from the token stream. Throws a parser error for a
/// misplaced import or an unrecognized token.
/// </summary>
internal virtual TopLevelEntity Parse(
    TokenStream tokens,
    TopLevelEntity owner,
    FileScope fileScope)
{
    AnnotationCollection annotations = this.parser.AnnotationParser.ParseAnnotations(tokens);
    ModifierCollection modifiers = ModifierCollection.Parse(tokens);
    string next = tokens.PeekValue();

    // Imports are only legal at the very top of a file; reaching one here is an error.
    if (next == this.parser.Keywords.IMPORT)
    {
        throw this.parser.GenerateParseError(
            ErrorMessages.ALL_IMPORTS_MUST_OCCUR_AT_BEGINNING_OF_FILE,
            tokens.Pop());
    }

    // The returns are inline, so you'll have to refactor or put the check inside
    // each parse call. Or maybe a try/finally.
    TODO.CheckForUnusedAnnotations();

    if (next == this.parser.Keywords.NAMESPACE)
    {
        return this.ParseNamespace(tokens, owner, fileScope, annotations);
    }
    else if (next == this.parser.Keywords.CONST)
    {
        return this.ParseConst(tokens, owner, fileScope, modifiers, annotations);
    }
    else if (next == this.parser.Keywords.FUNCTION)
    {
        return this.ParseFunction(tokens, owner, fileScope, modifiers, annotations);
    }
    else if (next == this.parser.Keywords.CLASS)
    {
        return this.ParseClassDefinition(tokens, owner, fileScope, modifiers, annotations);
    }
    else if (next == this.parser.Keywords.ENUM)
    {
        return this.ParseEnumDefinition(tokens, owner, fileScope, modifiers, annotations);
    }
    else if (next == this.parser.Keywords.CONSTRUCTOR && owner is ClassDefinition)
    {
        return this.ParseConstructor(tokens, (ClassDefinition)owner, modifiers, annotations);
    }

    // Last resort: some token sequences begin a function definition without the
    // "function" keyword; let that parser take a shot before giving up.
    FunctionDefinition functionDef = this.MaybeParseFunctionDefinition(tokens, owner, fileScope, annotations, modifiers);
    if (functionDef != null)
    {
        return functionDef;
    }

    Token unexpectedToken = tokens.Peek();
    throw ParserException.ThrowException(
        this.parser.CurrentLocale,
        ErrorMessages.UNEXPECTED_TOKEN_NO_SPECIFIC_EXPECTATIONS,
        unexpectedToken,
        unexpectedToken.Value);
}
// returns an exception so that you can throw this function call in situations where you want
// the compiler to think the codepath terminates
public System.Exception ThrowEofException()
{
    // Fixed: this previously threw directly instead of returning, contradicting
    // the comment above and the identical helper elsewhere in the codebase that
    // supports the "throw this.ThrowEofException();" idiom.
    return ParserException.ThrowEofException(this.file.Name);
}
/// <summary>
/// Parses one top-level construct (import, namespace, const, function, class,
/// enum, or constructor) from the token stream, handling optional "static" /
/// "final" class modifiers. Throws a parser error on anything unrecognized.
/// </summary>
public TopLevelConstruct ParseTopLevel(
    TokenStream tokens,
    TopLevelConstruct owner,
    FileScope fileScope)
{
    // Fixed: was "annotations = annotations = ..." (redundant double assignment).
    AnnotationCollection annotations = this.parser.AnnotationParser.ParseAnnotations(tokens);

    string value = tokens.PeekValue();

    // The returns are inline, so you'll have to refactor or put the check inside
    // each parse call. Or maybe a try/finally.
    TODO.CheckForUnusedAnnotations();

    // Consume at most one "static" and one "final" modifier, in either order.
    Token staticToken = null;
    Token finalToken = null;
    while (value == this.parser.Keywords.STATIC || value == this.parser.Keywords.FINAL)
    {
        bool consumedToken = false;
        if (value == this.parser.Keywords.STATIC && staticToken == null)
        {
            staticToken = tokens.Pop();
            value = tokens.PeekValue();
            consumedToken = true;
        }
        if (value == this.parser.Keywords.FINAL && finalToken == null)
        {
            finalToken = tokens.Pop();
            value = tokens.PeekValue();
            consumedToken = true;
        }
        if (!consumedToken)
        {
            // Fixed: a repeated modifier (e.g. "static static") previously spun
            // forever because neither branch could consume the token. Break out
            // so the checks below report a proper parse error instead.
            break;
        }
    }

    if (staticToken != null || finalToken != null)
    {
        if (value != this.parser.Keywords.CLASS)
        {
            if (staticToken != null)
            {
                throw ParserException.ThrowException(
                    this.parser.CurrentLocale,
                    ErrorMessages.ONLY_CLASSES_METHODS_FIELDS_MAY_BE_STATIC,
                    staticToken);
            }
            throw ParserException.ThrowException(
                this.parser.CurrentLocale,
                ErrorMessages.ONLY_CLASSES_MAY_BE_FINAL,
                finalToken);
        }
        if (staticToken != null && finalToken != null)
        {
            throw ParserException.ThrowException(
                this.parser.CurrentLocale,
                ErrorMessages.CLASSES_CANNOT_BE_STATIC_AND_FINAL_SIMULTANEOUSLY,
                staticToken);
        }
    }

    if (value == this.parser.Keywords.IMPORT)
    {
        // Accumulate a dotted import path (e.g. "import Foo.Bar;") up to the semicolon.
        Token importToken = tokens.PopExpected(this.parser.Keywords.IMPORT);
        List<string> importPathBuilder = new List<string>();
        while (!tokens.PopIfPresent(";"))
        {
            if (importPathBuilder.Count > 0)
            {
                tokens.PopExpected(".");
            }
            Token pathToken = tokens.Pop();
            this.parser.VerifyIdentifier(pathToken);
            importPathBuilder.Add(pathToken.Value);
        }
        string importPath = string.Join(".", importPathBuilder);
        return new ImportStatement(importToken, importPath, this.parser.CurrentLibrary, fileScope);
    }

    if (value == this.parser.Keywords.NAMESPACE)
    {
        return this.ParseNamespace(tokens, owner, fileScope, annotations);
    }
    if (value == this.parser.Keywords.CONST)
    {
        return this.ParseConst(tokens, owner, fileScope, annotations);
    }
    if (value == this.parser.Keywords.FUNCTION)
    {
        return this.ParseFunction(tokens, owner, fileScope, annotations);
    }
    if (value == this.parser.Keywords.CLASS)
    {
        return this.ParseClassDefinition(tokens, owner, staticToken, finalToken, fileScope, annotations);
    }
    if (value == this.parser.Keywords.ENUM)
    {
        return this.ParseEnumDefinition(tokens, owner, fileScope, annotations);
    }
    if (value == this.parser.Keywords.CONSTRUCTOR)
    {
        return this.ParseConstructor(tokens, owner, annotations);
    }

    Token token = tokens.Peek();
    throw ParserException.ThrowException(
        this.parser.CurrentLocale,
        ErrorMessages.UNEXPECTED_TOKEN_NO_SPECIFIC_EXPECTATIONS,
        token,
        token.Value);
}
// Converts raw source text into a flat Token array using a single-pass state
// machine (line/block comment state, string state, identifier accumulation).
// A '\n' is appended so line comments always terminate, and a '\0' is appended
// as an explicit end-of-stream sentinel.
public static Token[] Tokenize(string filename, string code, int fileID, bool useMultiCharTokens)
{
    code += '\n';
    code += '\0';

    // Precompute the line and column of every character index so tokens can be
    // stamped with the position where they started.
    int[] lineByIndex = new int[code.Length];
    int[] colByIndex = new int[code.Length];
    char c;
    int line = 0;
    int col = 0;
    for (int i = 0; i < code.Length; ++i)
    {
        c = code[i];
        lineByIndex[i] = line;
        colByIndex[i] = col;
        if (c == '\n')
        {
            ++line;
            col = -1; // bumped back to 0 by the ++col below
        }
        ++col;
    }

    List<Token> tokens = new List<Token>();

    // State registers:
    //   commentType  - null, "/" (line comment), or "*" (block comment)
    //   stringType   - null, or "\"" / "'" while inside a string literal
    //   normalToken  - identifier/keyword text being accumulated, or null
    string commentType = null;
    string stringType = null;
    StringBuilder stringToken = null;
    string normalToken = null;
    int stringStart = 0;
    int normalStart = 0;
    bool previousIsWhitespace = false;
    bool tokenStartHasPreviousWhitespace = false;
    string c2; // current char + next char, for two-character tokens like "==" or "/*"
    int length = code.Length;
    for (int i = 0; i < length; ++i)
    {
        c = code[i];
        c2 = (i >= (length - 1)) ? "" : ("" + c + code[i + 1]);

        if (c == '\0' && i == length - 1)
        {
            // Indicates the end of the stream. Throw an exception in cases where you left something lingering.
            if (commentType == "*")
            {
                ParserException.ThrowEofExceptionWithSuggestion(filename, "This file contains an unclosed comment somewhere.");
            }
            if (stringType != null)
            {
                // Try to point at the likely culprit: an already-emitted string
                // token that spans multiple lines. NOTE: 'c' is reused as a
                // scratch variable here; it is reassigned on the next iteration
                // (and there is none after the sentinel), so this is safe.
                Token suspiciousToken = null;
                foreach (Token suspiciousCheck in tokens)
                {
                    c = suspiciousCheck.Value[0];
                    if (c == '"' || c == '\'')
                    {
                        if (suspiciousCheck.Value.Contains("\n"))
                        {
                            suspiciousToken = suspiciousCheck;
                            break;
                        }
                    }
                }
                string unclosedStringError = "There is an unclosed string somewhere in this file.";
                if (suspiciousToken != null)
                {
                    unclosedStringError += " Line " + (suspiciousToken.Line + 1) + " is suspicious.";
                }
                else if (stringStart != 0)
                {
                    unclosedStringError += " Line " + (lineByIndex[stringStart] + 1) + " is suspicious.";
                }
                ParserException.ThrowEofExceptionWithSuggestion(filename, unclosedStringError);
            }
        }

        if (commentType == "/")
        {
            // Line comment: ends at the next newline.
            if (c == '\n')
            {
                commentType = null;
            }
            previousIsWhitespace = true;
        }
        else if (commentType == "*")
        {
            // Block comment: ends at "*/" (consume both characters).
            if (c2 == "*/")
            {
                commentType = null;
                ++i;
            }
            previousIsWhitespace = true;
        }
        else if (stringType != null)
        {
            if (c == '\\')
            {
                // Escape sequence: copy backslash + next char verbatim.
                stringToken.Append(c2);
                ++i;
            }
            else if (c == stringType[0])
            {
                // Closing quote: emit the completed string token (quotes included),
                // stamped with the position of the opening quote.
                stringToken.Append(c);
                stringType = null;
                tokens.Add(new Token(stringToken.ToString(), fileID, filename, lineByIndex[stringStart], colByIndex[stringStart], tokenStartHasPreviousWhitespace));
            }
            else
            {
                stringToken.Append(c);
            }
            previousIsWhitespace = false;
        }
        else if (normalToken != null)
        {
            if (IsIdentifierChar(c))
            {
                normalToken += c;
            }
            else
            {
                // Identifier ended: emit it, then back up one so the current
                // character is reprocessed as the start of the next token.
                tokens.Add(new Token(normalToken, fileID, filename, lineByIndex[normalStart], colByIndex[normalStart], tokenStartHasPreviousWhitespace));
                --i;
                normalToken = null;
            }
            previousIsWhitespace = false;
        }
        else if (useMultiCharTokens && TWO_CHAR_TOKENS.Contains(c2))
        {
            tokens.Add(new Token(c2, fileID, filename, lineByIndex[i], colByIndex[i], previousIsWhitespace));
            ++i;
            previousIsWhitespace = false;
        }
        else if (WHITESPACE.Contains(c))
        {
            previousIsWhitespace = true;
        }
        else if (c == '"')
        {
            // Begin a double-quoted string literal.
            stringType = "\"";
            stringToken = new StringBuilder("" + stringType);
            stringStart = i;
            tokenStartHasPreviousWhitespace = previousIsWhitespace;
            previousIsWhitespace = false;
        }
        else if (c == '\'')
        {
            // Begin a single-quoted string literal.
            stringType = "'";
            stringToken = new StringBuilder("" + stringType);
            stringStart = i;
            tokenStartHasPreviousWhitespace = previousIsWhitespace;
            previousIsWhitespace = false;
        }
        else if (IsIdentifierChar(c))
        {
            // Begin an identifier/keyword token.
            normalToken = "" + c;
            normalStart = i;
            tokenStartHasPreviousWhitespace = previousIsWhitespace;
            previousIsWhitespace = false;
        }
        else if (c2 == "//")
        {
            commentType = "/";
            i += 1;
        }
        else if (c2 == "/*")
        {
            commentType = "*";
            i += 1;
        }
        else
        {
            // Any other single character becomes its own punctuation token.
            tokens.Add(new Token("" + c, fileID, filename, lineByIndex[i], colByIndex[i], previousIsWhitespace));
            previousIsWhitespace = false;
        }
    }

    // Drops the final token — presumably the one produced by the appended '\0'
    // sentinel falling into the punctuation branch above; NOTE(review): confirm
    // IsIdentifierChar('\0') is false so the sentinel always ends up last.
    tokens.RemoveAt(tokens.Count - 1);

    return (tokens.ToArray());
}