public static Compilation Compile(string source)
		{
			Compilation compilation = new Compilation();
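			// Run both pipeline stages in order; each stage reports its diagnostics to the shared Compilation.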
			compilation.Tokenization = Tokenizer.Tokenize(source, compilation);
			compilation.Parsed = Parser.Parse(compilation.Tokenization, compilation);
			return compilation;
		}
Example #2
		public static List<ParserToken> Parse(List<Token> tokens, Compilation compilation)
		{
			List<ParserToken> parserTokens = new List<ParserToken>();

			int index = 0;
			// The loop condition also guards against an empty token list.
			while (index < tokens.Count)
			{
				int max = -1;
				ParserRuleId? maxId = null;
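				// Find the rule with the longest match at index; rule.Matches is assumed to return the match length, or -1 when the rule does not apply.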
				foreach(ParserRule rule in parserRules)
				{
					int matchLength = rule.Matches(tokens, index);
					if (matchLength > max)
					{
						max = matchLength;
						maxId = rule.Id;
					}
				}
				if (max == -1)
				{
					// No rule matched: report the unparseable token at its own location, then skip past it.
					compilation.ReportDiagnostic(tokens[index].Location, "Unparseable token.");
					++index;
					if (index >= tokens.Count)
						break;

					continue;
				}
				else
				{
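					// Wrap the matched run of tokens in a ParserToken and advance past it.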
					parserTokens.Add(
						new ParserToken(
							maxId.Value,
							tokens.Skip(index).Take(max)
						)
					);
					index += max;
					if (index >= tokens.Count)
						break;
				}
			}
			return parserTokens;
		}
		public static List<Token> Tokenize(string source, Compilation compilation)
		{
			int index = 0;
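			// Track the current source position as a 1-based line/column for token locations and diagnostics.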
			Location location = new Location()
			{
				Column = 1,
				Line = 1
			};
			List<Token> tokenization = new List<Token>();
			while (index < source.Length)
			{

				int max = -1;
				TokenPattern maxPattern = default(TokenPattern);
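				// Pick the pattern with the longest match at index; pattern.Matches is assumed to return the match length, or a negative value when the pattern does not match.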
				foreach(TokenPattern pattern in tokens)
				{
					int current = pattern.Matches(source, index);
					if (current > max)
					{
						max = current;
						maxPattern = pattern;
					}
				}
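				// maxPattern is still the default value when no pattern matched, so its Matches delegate is null.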
				if (maxPattern.Matches == null)
				{
					compilation.ReportDiagnostic(location, "No matching token.");
					if(source[index] == '\n')
					{
						location.Column = 1;
						++location.Line;
					}
					else
					{
						++location.Column;
					}
					index++;
					continue;
				}

				tokenization.Add(
					new Token(
						maxPattern.Id,
						source.Substring(index, max),
						location
					)
				);
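				// Advance the tracked line/column across every character the token consumed.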
				for(int i = 0; i < max; ++i)
				{
					if (source[index + i] == '\n')
					{
						location.Column = 1;
						++location.Line;
					}
					else
					{
						++location.Column;
					}
				}
				index += max;
			}
			return tokenization;
		}