public void TokenizeString()
{
    var lexer = new BaZicLexer();

    // A simple string literal produces a single String token.
    var code = "VARIABLE var1 = \"Hello\"";
    var tokens = lexer.Tokenize(code);
    Assert.AreEqual(6, tokens.Count);
    Assert.AreEqual(TokenType.String, tokens[4].TokenType);
    Assert.AreEqual("Hello", tokens[4].Value);

    // Two adjacent string literals separated by whitespace stay separate tokens.
    code = "VARIABLE var1 = \"Hello\" \"World\"";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(7, tokens.Count);
    Assert.AreEqual("Hello", tokens[4].Value);
    Assert.AreEqual("World", tokens[5].Value);

    // Concatenation with '+' produces String, Plus, String.
    code = "VARIABLE var1 = \"Hello\"+\"World\"";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(8, tokens.Count);
    Assert.AreEqual("Hello", tokens[4].Value);
    Assert.AreEqual(TokenType.Plus, tokens[5].TokenType);
    Assert.AreEqual("World", tokens[6].Value);

    // Escaped quotes remain inside one single String token.
    code = "VARIABLE var1 = \"Hello\\\"World\\\"\"";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(6, tokens.Count);
    Assert.AreEqual(TokenType.String, tokens[4].TokenType);
    Assert.AreEqual("Hello\\\"World\\\"", tokens[4].Value);
}
public void CommentTokenDefinitionMatch()
{
    var previousTokenType = TokenType.NotDefined;
    var tokenDefinition = new CommentTokenDefinition();
    var lexer = new BaZicLexer();

    // No match: the '#' is not located at the requested start index.
    lexer.InputCode = "Identifier # This is a comment\n" + "Another line of code";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);

    // Comment followed by another line: the token stops at the newline.
    lexer.InputCode = "# This is a comment\n" + "Another line of code";
    var result = tokenDefinition.Match(lexer, 0, previousTokenType);
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual(TokenType.Comment, result.TokenType);
    Assert.AreEqual("# This is a comment", result.Value);

    // Comment at the very end of the input (no trailing newline).
    lexer.InputCode = "# This is a comment";
    result = tokenDefinition.Match(lexer, 0, previousTokenType);
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual("# This is a comment", result.Value);
}
public void KeywordTokenDefinitionMatch()
{
    var previousTokenType = TokenType.NotDefined;
    var tokenDefinition = new TokenDefinition(TokenType.Async, "ASYNC");
    var lexer = new BaZicLexer();

    // No match: the keyword is not at the requested start index.
    lexer.InputCode = "Identifier Async";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);

    // No match: the keyword is immediately followed by an identifier character.
    lexer.InputCode = "Async2 Function2";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);

    // Exact-case keyword at the start of the input.
    lexer.InputCode = "ASYNC Identifier";
    var result = tokenDefinition.Match(lexer, 0, previousTokenType);
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual(TokenType.Async, result.TokenType);
    Assert.AreEqual("ASYNC", result.Value);

    // Matching is case-insensitive; the canonical keyword is returned as the value.
    lexer.InputCode = "Async function";
    result = tokenDefinition.Match(lexer, 0, previousTokenType);
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual("ASYNC", result.Value);
}
/// <inheritdoc/>
internal override TokenMatch Match(BaZicLexer lexer, int startIndex, TokenType previousTokenType)
{
    // A comment starts with '#' at the requested index and runs to the end of the line.
    if (lexer.InputCode.IndexOf("#", startIndex, StringComparison.OrdinalIgnoreCase) == startIndex)
    {
        var endLine = lexer.InputCode.IndexOf("\n", startIndex, StringComparison.OrdinalIgnoreCase);
        var value = string.Empty;
        if (endLine == -1)
        {
            // No newline after the comment: it runs to the end of the input.
            // Fixed: take the substring from startIndex instead of the whole input,
            // otherwise a comment that is not at position 0 would return text that
            // precedes it and report a ParsedLength longer than the remaining code.
            value = lexer.InputCode.Substring(startIndex);
        }
        else
        {
            value = lexer.InputCode.Substring(startIndex, endLine - startIndex);
        }

        return (new TokenMatch { IsMatch = true, TokenType = TokenType.Comment, Value = value, ParsedLength = value.Length });
    }

    // Not a comment at this position.
    return (new TokenMatch { IsMatch = false });
}
public void TokenizeBasicTest()
{
    var lexer = new BaZicLexer();
    var code = " VARIABLE var1";

    // By default, whitespace tokens are discarded.
    var tokens = lexer.Tokenize(code);
    Assert.AreEqual(4, tokens.Count);
    Assert.AreEqual(TokenType.StartCode, tokens[0].TokenType);
    Assert.AreEqual(TokenType.Variable, tokens[1].TokenType);
    Assert.AreEqual("VARIABLE", tokens[1].Value);
    Assert.AreEqual(TokenType.Identifier, tokens[2].TokenType);
    Assert.AreEqual("var1", tokens[2].Value);
    Assert.AreEqual(TokenType.EndCode, tokens[3].TokenType);

    // With keepWhitespaces the whitespace tokens shift the indexes.
    tokens = lexer.Tokenize(code, keepWhitespaces: true);
    Assert.AreEqual(7, tokens.Count);
    Assert.AreEqual(TokenType.StartCode, tokens[0].TokenType);
    Assert.AreEqual(TokenType.Variable, tokens[2].TokenType);
    Assert.AreEqual("VARIABLE", tokens[2].Value);
    Assert.AreEqual(TokenType.Identifier, tokens[5].TokenType);
    Assert.AreEqual("var1", tokens[5].Value);
    Assert.AreEqual(TokenType.EndCode, tokens[6].TokenType);
}
public void TokenizeInvalid1()
{
    var lexer = new BaZicLexer();

    // Unknown symbols are tokenized as NotDefined instead of failing.
    var code = "VARIABLE { @var1";
    var tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Variable, tokens[1].TokenType);
    Assert.AreEqual("VARIABLE", tokens[1].Value);
    Assert.AreEqual(TokenType.NotDefined, tokens[2].TokenType);
    Assert.AreEqual("{", tokens[2].Value);
    Assert.AreEqual(TokenType.NotDefined, tokens[3].TokenType);
    Assert.AreEqual("@var1", tokens[3].Value);
    Assert.AreEqual(TokenType.EndCode, tokens[4].TokenType);

    // A trailing semicolon is not part of the language.
    code = "VARIABLE var1 = 1;";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(6, tokens.Count);
    Assert.AreEqual(TokenType.NotDefined, tokens[4].TokenType);
}
public void TokenizeInvalid2()
{
    var lexer = new BaZicLexer();

    // "1;" is not a valid integer: the token is flagged NotDefined.
    var code = "VARIABLE var1 = 1;";
    var tokens = lexer.Tokenize(code);
    Assert.AreEqual(6, tokens.Count);
    Assert.AreEqual(TokenType.NotDefined, tokens[4].TokenType);
}
/// <summary>
/// Try to match the current token with the code
/// </summary>
/// <param name="lexer">The instance of the <see cref="BaZicLexer"/> that contains the input code.</param>
/// <param name="startIndex">The index in the input code where the search must start.</param>
/// <param name="previousTokenType">The token type that has been detected previously this match tentative</param>
/// <returns>A <see cref="TokenMatch"/> that contains information about the match result.</returns>
internal virtual TokenMatch Match(BaZicLexer lexer, int startIndex, TokenType previousTokenType)
{
    if (string.IsNullOrEmpty(_keyword))
    {
        throw new NotSupportedException(L.BaZic.Lexer.NoMatchImplemented);
    }

    if (lexer.InputCode.IndexOf(_keyword, startIndex, StringComparison.OrdinalIgnoreCase) == startIndex)
    {
        // When a separator is required after the keyword, reject matches that are
        // immediately followed by another identifier character (e.g. "ASYNC2").
        // The short-circuit order guarantees we never index past the end of the input.
        var separatorIndex = startIndex + _keywordLength;
        var hasValidBoundary = !_expectSpaceAfter
                               || lexer.InputCode.Length == separatorIndex
                               || SymbolHelper.Separators.Any(c => char.Parse(c) == lexer.InputCode[separatorIndex]);

        if (hasValidBoundary)
        {
            // Return the canonical keyword, unless the original casing must be preserved.
            var value = _keepOriginalValue ? lexer.InputCode.Substring(startIndex, _keyword.Length) : _keyword;
            return (new TokenMatch { IsMatch = true, TokenType = TokenType, Value = value, ParsedLength = _keywordLength });
        }
    }

    return (new TokenMatch { IsMatch = false });
}
public void DoubleTokenDefinitionMatch()
{
    var previousTokenType = TokenType.NotDefined;
    var tokenDefinition = new DoubleTokenDefinition();
    var lexer = new BaZicLexer();

    // Rejected inputs: not a double at the start index.
    lexer.InputCode = "Identifier111.2345";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);
    lexer.InputCode = "111.2345Identifier";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);
    lexer.InputCode = "11 123.45"; // The "11" is NOT considered as the double.
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);
    lexer.InputCode = "111,2345";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);
    lexer.InputCode = "11,123.45";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);
    lexer.InputCode = "100"; // An integer without a decimal point is not a double.
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);

    lexer.InputCode = "10.0";
    Assert.IsTrue(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);

    // A double followed by an identifier: only the number is consumed.
    lexer.InputCode = "111.1234 identifier";
    var result = tokenDefinition.Match(lexer, 0, previousTokenType);
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual(TokenType.Double, result.TokenType);
    Assert.AreEqual("111.1234", result.Value);

    // Sign handling: '-' is kept, '+' is dropped from the value.
    lexer.InputCode = "-111.1234 identifier";
    Assert.AreEqual("-111.1234", tokenDefinition.Match(lexer, 0, previousTokenType).Value);
    lexer.InputCode = "+111.1234 identifier";
    Assert.AreEqual("111.1234", tokenDefinition.Match(lexer, 0, previousTokenType).Value);

    lexer.InputCode = "111.1234";
    Assert.AreEqual("111.1234", tokenDefinition.Match(lexer, 0, previousTokenType).Value);

    // Values without a leading digit are normalized with a "0.".
    lexer.InputCode = ".45";
    Assert.AreEqual("0.45", tokenDefinition.Match(lexer, 0, previousTokenType).Value);
    lexer.InputCode = "-.45";
    Assert.AreEqual("-0.45", tokenDefinition.Match(lexer, 0, previousTokenType).Value);
}
public void TokenizeVariableArray()
{
    var lexer = new BaZicLexer();

    // Array declaration: the brackets become distinct tokens.
    var code = "VARIABLE var1[]";
    var tokens = lexer.Tokenize(code);
    Assert.AreEqual(6, tokens.Count);
    Assert.AreEqual(TokenType.Variable, tokens[1].TokenType);
    Assert.AreEqual(TokenType.Identifier, tokens[2].TokenType);
    Assert.AreEqual(TokenType.LeftBracket, tokens[3].TokenType);
    Assert.AreEqual(TokenType.RightBracket, tokens[4].TokenType);
    Assert.AreEqual(TokenType.EndCode, tokens[5].TokenType);
}
public void TokenizeVariableDefaultValue()
{
    var lexer = new BaZicLexer();

    // Declaration with a boolean default value.
    var code = "VARIABLE var1 = true";
    var tokens = lexer.Tokenize(code);
    Assert.AreEqual(6, tokens.Count);
    Assert.AreEqual(TokenType.Variable, tokens[1].TokenType);
    Assert.AreEqual(TokenType.Identifier, tokens[2].TokenType);
    Assert.AreEqual(TokenType.Equal, tokens[3].TokenType);
    Assert.AreEqual(TokenType.True, tokens[4].TokenType);
    Assert.AreEqual(TokenType.EndCode, tokens[5].TokenType);
}
/// <inheritdoc/>
internal override TokenMatch Match(BaZicLexer lexer, int startIndex, TokenType previousTokenType)
{
    // The number ends at the closest symbol separator — ignoring '.', which is part
    // of the decimal notation — or at the end of the input. The scan starts at
    // startIndex + 1 so a leading sign character is not treated as a separator.
    var closestSeparatorIndex = lexer.InputCode.Length;
    var index = lexer.InputCode.IndexOfAny(SymbolHelper.SymbolSeparators.Except(new char[] { '.' }).ToArray(), startIndex + 1);
    if (index > startIndex && index < closestSeparatorIndex)
    {
        closestSeparatorIndex = index;
    }

    if (closestSeparatorIndex > startIndex)
    {
        var valueString = lexer.InputCode.Substring(startIndex, closestSeparatorIndex - startIndex);
        var value = 0.0;
        if (double.TryParse(valueString, NumberStyles.AllowDecimalPoint | NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out value))
        {
            // valueString is already a string: the redundant ToString() call has been removed.
            if (!valueString.Contains("."))
            {
                // Without a decimal point the value is an integer, not a double.
                return (new TokenMatch { IsMatch = false });
            }
            else if ((previousTokenType == TokenType.Identifier || previousTokenType == TokenType.Integer || previousTokenType == TokenType.Double || previousTokenType == TokenType.String) && (valueString.StartsWith("+", StringComparison.Ordinal) || valueString.StartsWith("-", StringComparison.Ordinal)))
            {
                // It's probably a case where we have "1+1" and the current input code is "+1".
                // We want to consider a + as a Plus instead of a positive number.
                return (new TokenMatch { IsMatch = false });
            }

            // Re-format through double.ToString so e.g. ".45" is normalized to "0.45".
            return (new TokenMatch { IsMatch = true, TokenType = TokenType.Double, Value = value.ToString(CultureInfo.InvariantCulture), ParsedLength = closestSeparatorIndex - startIndex });
        }
    }

    return (new TokenMatch { IsMatch = false });
}
private void RunProgramReleaseButton_Click(object sender, RoutedEventArgs e)
{
    // Nothing to run without BaZic code.
    if (string.IsNullOrWhiteSpace(BaZicCodeTextBox.Text))
    {
        MessageBox.Show("There is no BaZic code to run.");
        return;
    }

    Logs = string.Empty;

    // Normalize whitespace-only XAML to an empty string before parsing.
    if (string.IsNullOrWhiteSpace(XamlCodeTextBox.Text))
    {
        XamlCodeTextBox.Text = string.Empty;
    }

    var lexer = new BaZicLexer();
    var parser = new BaZicParser();
    var tokens = lexer.Tokenize(BaZicCodeTextBox.Text);
    // IsChecked is a bool?: coalesce to false so a three-state (indeterminate)
    // checkbox cannot throw an InvalidOperationException here.
    var abstractSyntaxTree = parser.Parse(tokens, XamlCodeTextBox.Text, optimize: OptimizeCheckBox.IsChecked ?? false);

    // Report every parser issue, error or not.
    foreach (var issue in abstractSyntaxTree.Issues.InnerExceptions.OfType<BaZicParserException>())
    {
        Log(issue.ToString());
    }

    // Run only when a program was produced and no issue is an actual error.
    if (abstractSyntaxTree.Program != null && abstractSyntaxTree.Issues.InnerExceptions.OfType<BaZicParserException>().All(issue => issue.Level != Core.Enums.BaZicParserExceptionLevel.Error))
    {
        RunProgramButton.Visibility = Visibility.Collapsed;
        RunProgramReleaseButton.Visibility = Visibility.Collapsed;
        OptimizeCheckBox.Visibility = Visibility.Collapsed;
        PauseButton.Visibility = Visibility.Visible;
        StopButton.Visibility = Visibility.Visible;

        _interpreter = new BaZicInterpreter(abstractSyntaxTree.Program);
        _interpreter.StateChanged += Interpreter_StateChanged;

        // Fire-and-forget: progress is reported through the StateChanged event.
        var t = _interpreter.StartReleaseAsync(true);
    }
    else
    {
        // Fixed typo in the user-facing message: "build" -> "built".
        Log("The program has not been built.");
    }
}
public void StringTokenDefinitionMatch()
{
    var previousTokenType = TokenType.NotDefined;
    var tokenDefinition = new StringTokenDefinition();
    var lexer = new BaZicLexer();

    // No match: the opening quote is not at the start index.
    lexer.InputCode = "Identifier \"Hello\n" + "World\"";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);

    // No match: the string literal is never closed.
    lexer.InputCode = "\"Hello\n" + "World";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);
    lexer.InputCode = "\"Hello World";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);

    // A multi-line literal is matched, newline included.
    lexer.InputCode = "\"Hello\n" + "World\" Identifier";
    var result = tokenDefinition.Match(lexer, 0, previousTokenType);
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual(TokenType.String, result.TokenType);
    Assert.AreEqual("Hello\nWorld", result.Value);

    // Escape sequences are kept verbatim in the token value.
    lexer.InputCode = "\"Hello \\n World\"";
    result = tokenDefinition.Match(lexer, 0, previousTokenType);
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual("Hello \\n World", result.Value);
    lexer.InputCode = "\"Hello \\\" World\"";
    Assert.AreEqual("Hello \\\" World", tokenDefinition.Match(lexer, 0, previousTokenType).Value);

    // Only the first literal is consumed when several follow each other.
    lexer.InputCode = "\"Hello\"\"World\"";
    Assert.AreEqual("Hello", tokenDefinition.Match(lexer, 0, previousTokenType).Value);
    lexer.InputCode = "\"Hello\"Identifier\"World\"";
    Assert.AreEqual("Hello", tokenDefinition.Match(lexer, 0, previousTokenType).Value);
}
public void TokenizeVariableArrayWithValues()
{
    var lexer = new BaZicLexer();

    // Array declaration initialized with two string values.
    var code = "VARIABLE var1[] = [\"Hello\", \"World\"]";
    var tokens = lexer.Tokenize(code);
    Assert.AreEqual(12, tokens.Count);
    Assert.AreEqual(TokenType.Variable, tokens[1].TokenType);
    Assert.AreEqual(TokenType.Identifier, tokens[2].TokenType);
    Assert.AreEqual(TokenType.LeftBracket, tokens[3].TokenType);
    Assert.AreEqual(TokenType.RightBracket, tokens[4].TokenType);
    Assert.AreEqual(TokenType.Equal, tokens[5].TokenType);
    Assert.AreEqual(TokenType.LeftBracket, tokens[6].TokenType);
    Assert.AreEqual(TokenType.String, tokens[7].TokenType);
    Assert.AreEqual(TokenType.Comma, tokens[8].TokenType);
    Assert.AreEqual(TokenType.String, tokens[9].TokenType);
    Assert.AreEqual(TokenType.RightBracket, tokens[10].TokenType);
    Assert.AreEqual(TokenType.EndCode, tokens[11].TokenType);
}
/// <inheritdoc/>
internal override TokenMatch Match(BaZicLexer lexer, int startIndex, TokenType previousTokenType)
{
    // A string literal must open with a double quote exactly at startIndex.
    if (lexer.InputCode.IndexOf("\"", startIndex, StringComparison.OrdinalIgnoreCase) == startIndex)
    {
        var nextSearchStart = startIndex + 1;
        var endQuoteIndex = -1;
        // Scan for the closing quote, skipping quotes escaped with a backslash.
        // NOTE(review): a quote preceded by an escaped backslash (e.g. "...\\")
        // is also treated as escaped here and skipped — looks like a known
        // limitation of the escape handling; confirm intended behavior.
        do
        {
            endQuoteIndex = lexer.InputCode.IndexOf("\"", nextSearchStart, StringComparison.OrdinalIgnoreCase);
            if (endQuoteIndex > nextSearchStart)
            {
                if (lexer.InputCode[endQuoteIndex - 1] == '\\')
                {
                    // Escaped quote: resume the search after it. -1 keeps the loop going.
                    nextSearchStart = endQuoteIndex + 1;
                    endQuoteIndex = -1;
                }
            }
            else if (endQuoteIndex == -1)
            {
                // No closing quote anywhere: force the loop to exit.
                nextSearchStart = lexer.InputCode.Length;
            }
        }while (endQuoteIndex == -1 && nextSearchStart < lexer.InputCode.Length);

        if (endQuoteIndex > -1)
        {
            // Value excludes the surrounding quotes; ParsedLength includes both of them.
            return(new TokenMatch { IsMatch = true, TokenType = TokenType.String, Value = lexer.InputCode.Substring(startIndex + 1, endQuoteIndex - startIndex - 1), ParsedLength = 1 + endQuoteIndex - startIndex });
        }
    }

    // No opening quote at startIndex, or the literal is unterminated.
    return(new TokenMatch { IsMatch = false });
}
public void IdentifierTokenDefinitionMatch()
{
    var previousTokenType = TokenType.NotDefined;
    var tokenDefinition = new IdentifierTokenDefinition();
    var lexer = new BaZicLexer();

    // An identifier cannot start with whitespace, a symbol, '@' or a digit.
    lexer.InputCode = " Identifier1112345";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);
    lexer.InputCode = "+Identifier1112345";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);
    lexer.InputCode = "@dentifier";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);
    lexer.InputCode = "1dentifier";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);

    // The identifier stops at the first symbol separator.
    lexer.InputCode = "Identifier1[";
    var result = tokenDefinition.Match(lexer, 0, previousTokenType);
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual(TokenType.Identifier, result.TokenType);
    Assert.AreEqual("Identifier1", result.Value);

    lexer.InputCode = "Identifier";
    Assert.AreEqual("Identifier", tokenDefinition.Match(lexer, 0, previousTokenType).Value);

    // Accented letters and embedded '@' are allowed inside an identifier.
    lexer.InputCode = "édentifier";
    Assert.AreEqual("édentifier", tokenDefinition.Match(lexer, 0, previousTokenType).Value);
    lexer.InputCode = "éde@ntifier";
    Assert.AreEqual("éde@ntifier", tokenDefinition.Match(lexer, 0, previousTokenType).Value);

    // An operator inside the word ends the identifier.
    lexer.InputCode = "éde+ntifier";
    Assert.AreEqual("éde", tokenDefinition.Match(lexer, 0, previousTokenType).Value);
}
/// <inheritdoc/>
internal override TokenMatch Match(BaZicLexer lexer, int startIndex, TokenType previousTokenType)
{
    // An identifier must begin with a letter or an underscore.
    if (!char.IsLetter(lexer.InputCode[startIndex]) && lexer.InputCode[startIndex] != '_')
    {
        return (new TokenMatch { IsMatch = false });
    }

    // The identifier runs up to the closest symbol separator, or the end of the input.
    var closestSeparatorIndex = lexer.InputCode.Length;
    var index = lexer.InputCode.IndexOfAny(SymbolHelper.SymbolSeparators, startIndex);
    if (index > startIndex && index < closestSeparatorIndex)
    {
        closestSeparatorIndex = index;
    }

    if (closestSeparatorIndex > startIndex)
    {
        // value is already a string: the redundant ToString() call has been removed.
        var value = lexer.InputCode.Substring(startIndex, closestSeparatorIndex - startIndex);
        return (new TokenMatch { IsMatch = true, TokenType = TokenType.Identifier, Value = value, ParsedLength = value.Length });
    }

    return (new TokenMatch { IsMatch = false });
}
public void SpecialCharacterTokenDefinitionMatch()
{
    var previousTokenType = TokenType.NotDefined;
    // expectSpaceAfter: false — a separator after the symbol is not required.
    var tokenDefinition = new TokenDefinition(TokenType.Comma, ",", false);
    var lexer = new BaZicLexer();

    // No match: the comma is not at the requested start index.
    lexer.InputCode = "Identifier, Id2";
    Assert.IsFalse(tokenDefinition.Match(lexer, 0, previousTokenType).IsMatch);

    // Comma followed by a space.
    lexer.InputCode = ", Identifier";
    var result = tokenDefinition.Match(lexer, 0, previousTokenType);
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual(TokenType.Comma, result.TokenType);
    Assert.AreEqual(",", result.Value);

    // Comma glued to the next word still matches.
    lexer.InputCode = ",Identifier";
    result = tokenDefinition.Match(lexer, 0, previousTokenType);
    Assert.IsTrue(result.IsMatch);
    Assert.AreEqual(",", result.Value);
}
public void TokenizeIntegers()
{
    var lexer = new BaZicLexer();

    // Plain positive integer.
    var code = "var1 = 123";
    var tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("123", tokens[3].Value);

    // A leading '-' is kept in the value; a leading '+' is dropped.
    code = "var1 = -123";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("-123", tokens[3].Value);

    code = "var1 = +123";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("123", tokens[3].Value);

    // Unary minus before a parenthesized expression stays a Minus token.
    code = "var1 = -(-123 + 2)";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(10, tokens.Count);
    Assert.AreEqual(TokenType.Minus, tokens[3].TokenType);
    Assert.AreEqual(TokenType.LeftParenth, tokens[4].TokenType);
    Assert.AreEqual(TokenType.Integer, tokens[5].TokenType);
    Assert.AreEqual(TokenType.Plus, tokens[6].TokenType);
    Assert.AreEqual(TokenType.Integer, tokens[7].TokenType);
    Assert.AreEqual(TokenType.RightParenth, tokens[8].TokenType);
    Assert.AreEqual("-123", tokens[5].Value);
    Assert.AreEqual("2", tokens[7].Value);

    // Signed operands around a binary operator.
    code = "-1 + +4";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[1].TokenType);
    Assert.AreEqual("-1", tokens[1].Value);
    Assert.AreEqual(TokenType.Plus, tokens[2].TokenType);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("4", tokens[3].Value);

    // Operator immediately followed by a signed number: the first sign is the
    // binary operator, the second belongs to the number.
    code = "1+-1";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[1].TokenType);
    Assert.AreEqual("1", tokens[1].Value);
    Assert.AreEqual(TokenType.Plus, tokens[2].TokenType);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("-1", tokens[3].Value);

    code = "1-+1";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[1].TokenType);
    Assert.AreEqual("1", tokens[1].Value);
    Assert.AreEqual(TokenType.Minus, tokens[2].TokenType);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("1", tokens[3].Value);

    code = "1--1";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[1].TokenType);
    Assert.AreEqual("1", tokens[1].Value);
    Assert.AreEqual(TokenType.Minus, tokens[2].TokenType);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("-1", tokens[3].Value);

    code = "1++1";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[1].TokenType);
    Assert.AreEqual("1", tokens[1].Value);
    Assert.AreEqual(TokenType.Plus, tokens[2].TokenType);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("1", tokens[3].Value);

    // Simple binary expressions without any explicit sign.
    code = "1+1";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[1].TokenType);
    Assert.AreEqual("1", tokens[1].Value);
    Assert.AreEqual(TokenType.Plus, tokens[2].TokenType);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("1", tokens[3].Value);

    code = "1-1";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[1].TokenType);
    Assert.AreEqual("1", tokens[1].Value);
    Assert.AreEqual(TokenType.Minus, tokens[2].TokenType);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("1", tokens[3].Value);
}
public void TokenizeDouble()
{
    var lexer = new BaZicLexer();

    // Plain double literal.
    var code = "var1 = 123.45";
    var tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Double, tokens[3].TokenType);
    Assert.AreEqual("123.45", tokens[3].Value);

    // A leading '-' is kept; a leading '+' is dropped.
    code = "var1 = -123.45";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Double, tokens[3].TokenType);
    Assert.AreEqual("-123.45", tokens[3].Value);

    // ".45" is normalized to "0.45".
    code = "var1 = -.45";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Double, tokens[3].TokenType);
    Assert.AreEqual("-0.45", tokens[3].Value);

    code = "var1 = +123.45";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Double, tokens[3].TokenType);
    Assert.AreEqual("123.45", tokens[3].Value);

    // Unary minus before a parenthesized expression stays a Minus token.
    code = "var1 = -(-123.45 + 2.45)";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(10, tokens.Count);
    Assert.AreEqual(TokenType.Minus, tokens[3].TokenType);
    Assert.AreEqual(TokenType.LeftParenth, tokens[4].TokenType);
    Assert.AreEqual(TokenType.Double, tokens[5].TokenType);
    Assert.AreEqual(TokenType.Plus, tokens[6].TokenType);
    Assert.AreEqual(TokenType.Double, tokens[7].TokenType);
    Assert.AreEqual(TokenType.RightParenth, tokens[8].TokenType);
    Assert.AreEqual("-123.45", tokens[5].Value);
    Assert.AreEqual("2.45", tokens[7].Value);

    // Signed operands around a binary operator.
    code = "-1.45 + +4.45";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Double, tokens[1].TokenType);
    Assert.AreEqual("-1.45", tokens[1].Value);
    Assert.AreEqual(TokenType.Plus, tokens[2].TokenType);
    Assert.AreEqual(TokenType.Double, tokens[3].TokenType);
    Assert.AreEqual("4.45", tokens[3].Value);

    // A suffix makes the literal invalid: it falls apart into Integer, Dot, NotDefined.
    code = "var1 = -123.45f";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(7, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("-123", tokens[3].Value);
    Assert.AreEqual(TokenType.Dot, tokens[4].TokenType);
    Assert.AreEqual(TokenType.NotDefined, tokens[5].TokenType);

    // Two decimal points are not a double.
    code = "var1 = -123.45.6";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(7, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("-123", tokens[3].Value);

    // A comma is not a decimal separator.
    code = "var1 = -123,45";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(7, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("-123", tokens[3].Value);

    // Whitespace splits the number into two integers.
    code = "var1 = -123 45";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(6, tokens.Count);
    Assert.AreEqual(TokenType.Integer, tokens[3].TokenType);
    Assert.AreEqual("-123", tokens[3].Value);

    // Binary operators between doubles without whitespace.
    code = "1.2+1.2";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Double, tokens[1].TokenType);
    Assert.AreEqual("1.2", tokens[1].Value);
    Assert.AreEqual(TokenType.Plus, tokens[2].TokenType);
    Assert.AreEqual(TokenType.Double, tokens[3].TokenType);
    Assert.AreEqual("1.2", tokens[3].Value);

    code = "1.2-1.2";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(5, tokens.Count);
    Assert.AreEqual(TokenType.Double, tokens[1].TokenType);
    Assert.AreEqual("1.2", tokens[1].Value);
    Assert.AreEqual(TokenType.Minus, tokens[2].TokenType);
    Assert.AreEqual(TokenType.Double, tokens[3].TokenType);
    Assert.AreEqual("1.2", tokens[3].Value);
}
public void TokenizeCode()
{
    var lexer = new BaZicLexer();

    // A small but complete program exercising keywords, operators, strings and newlines.
    var code = @"VARIABLE var1 = 1

EXTERN FUNCTION Main(arg1, arg2)
    IF NOT arg1 <= arg2 THEN
        System.Console.WriteLine(""Hello ""+var1+1)
    END IF
END FUNCTION";

    var tokens = lexer.Tokenize(code);
    Assert.AreEqual(43, tokens.Count);

    var expectedTokens = new TokenType[]
    {
        TokenType.StartCode,
        TokenType.Variable, TokenType.Identifier, TokenType.Equal, TokenType.Integer, TokenType.NewLine,
        TokenType.NewLine,
        TokenType.Extern, TokenType.Function, TokenType.Identifier, TokenType.LeftParenth, TokenType.Identifier, TokenType.Comma, TokenType.Identifier, TokenType.RightParenth, TokenType.NewLine,
        TokenType.If, TokenType.Not, TokenType.Identifier, TokenType.LesserThan, TokenType.Equal, TokenType.Identifier, TokenType.Then, TokenType.NewLine,
        TokenType.Identifier, TokenType.Dot, TokenType.Identifier, TokenType.Dot, TokenType.Identifier, TokenType.LeftParenth, TokenType.String, TokenType.Plus, TokenType.Identifier, TokenType.Plus, TokenType.Integer, TokenType.RightParenth, TokenType.NewLine,
        TokenType.End, TokenType.If, TokenType.NewLine,
        TokenType.End, TokenType.Function,
        TokenType.EndCode,
    };

    // Compare the whole token stream against the expected sequence.
    for (var i = 0; i < tokens.Count; i++)
    {
        Assert.AreEqual(expectedTokens[i], tokens[i].TokenType);
    }
}
public void TokenizeIdentifiers()
{
    var lexer = new BaZicLexer();

    // "Variable" matches the VARIABLE keyword case-insensitively.
    var code = "VARIABLE Variable = 1";
    var tokens = lexer.Tokenize(code);
    Assert.AreEqual(TokenType.Variable, tokens[2].TokenType);

    // A longer word that merely starts with the keyword is an identifier.
    code = "VARIABLE Variables";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(TokenType.Identifier, tokens[2].TokenType);
    Assert.AreEqual("Variables", tokens[2].Value);

    // Underscores are allowed inside identifiers.
    code = "VARIABLE Variable_Name";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(TokenType.Identifier, tokens[2].TokenType);
    Assert.AreEqual("Variable_Name", tokens[2].Value);

    // A dash ends the identifier.
    code = "VARIABLE Variable-Name";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(TokenType.Identifier, tokens[2].TokenType);
    Assert.AreEqual("Variable", tokens[2].Value);

    // Single-letter identifier.
    code = "VARIABLE v";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(TokenType.Identifier, tokens[2].TokenType);
    Assert.AreEqual("v", tokens[2].Value);

    // An identifier cannot start with a digit.
    code = "VARIABLE 1v";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(TokenType.NotDefined, tokens[2].TokenType);

    // ...but may contain digits after the first character.
    code = "VARIABLE v1";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(TokenType.Identifier, tokens[2].TokenType);
    Assert.AreEqual("v1", tokens[2].Value);

    // A dot ends the identifier (member access).
    code = "VARIABLE v.ToString()";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(TokenType.Identifier, tokens[2].TokenType);
    Assert.AreEqual("v", tokens[2].Value);

    // '@' is allowed inside (and at the end of) an identifier.
    code = "VARIABLE v@";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(TokenType.Identifier, tokens[2].TokenType);
    Assert.AreEqual("v@", tokens[2].Value);

    code = "VARIABLE v@[]";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(TokenType.Identifier, tokens[2].TokenType);
    Assert.AreEqual("v@", tokens[2].Value);

    // A word that only begins with a keyword (AND) remains an identifier.
    code = "VARIABLE ANDHello";
    tokens = lexer.Tokenize(code);
    Assert.AreEqual(TokenType.Identifier, tokens[2].TokenType);
    Assert.AreEqual("ANDHello", tokens[2].Value);
}