// Lexes "(anObject !nativeMethod)": opening parenthesis, receiver name,
// native-method name (with its '!' prefix), closing parenthesis, end of input.
public void ParseDotNetMethodMethodWithParenthesis()
{
    Lexer lexer = new Lexer("(anObject !nativeMethod)");

    Token current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("(", current.Value);
    Assert.AreEqual(TokenType.Punctuation, current.Type);

    current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("anObject", current.Value);
    Assert.AreEqual(TokenType.Name, current.Type);

    current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("!nativeMethod", current.Value);
    Assert.AreEqual(TokenType.Name, current.Type);

    current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual(")", current.Value);
    Assert.AreEqual(TokenType.Punctuation, current.Type);

    // Stream must be exhausted after the closing parenthesis.
    Assert.IsNull(lexer.NextToken());
}
// Parses the block "[. 1. 2]" (a leading statement separator inside a block
// literal) and only checks that parsing yields a non-null result.
public void CompileBlockWithDot()
{
    Lexer lexer = new Lexer("[. 1. 2]");
    ModelParser parser = new ModelParser(lexer);

    var block = parser.ParseBlock();

    Assert.IsNotNull(block);
}
// Lexes "anObject !nativeMethod": a plain name followed by a native-method
// name (the '!' prefix is kept as part of the Name token), then end of input.
public void ParseDotNetMethodMethod()
{
    Lexer lexer = new Lexer("anObject !nativeMethod");

    Token current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("anObject", current.Value);
    Assert.AreEqual(TokenType.Name, current.Type);

    current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("!nativeMethod", current.Value);
    Assert.AreEqual(TokenType.Name, current.Type);

    Assert.IsNull(lexer.NextToken());
}
// Lexes "@System.IO.FileInfo !new: 'FooBar.txt'": a native type reference,
// a native keyword selector, and a string literal.
public void ParseDotNetObjectAndMethod()
{
    Lexer tokenizer = new Lexer("@System.IO.FileInfo !new: 'FooBar.txt'");
    Token token;

    token = tokenizer.NextToken();
    Assert.IsNotNull(token);
    Assert.AreEqual("@System.IO.FileInfo", token.Value);
    Assert.AreEqual(TokenType.Name, token.Type);

    token = tokenizer.NextToken();
    Assert.IsNotNull(token);
    Assert.AreEqual("!new:", token.Value);
    Assert.AreEqual(TokenType.Name, token.Type);

    token = tokenizer.NextToken();
    Assert.IsNotNull(token);
    Assert.AreEqual("FooBar.txt", token.Value);
    Assert.AreEqual(TokenType.String, token.Type);

    // Added for consistency with the sibling lexer tests: the input must be
    // fully consumed after the string literal.
    Assert.IsNull(tokenizer.NextToken());
}
// Lexes "10.234" as a single Real token (the dot is part of the number,
// not punctuation).
public void ProcessReal()
{
    Lexer tokenizer = new Lexer("10.234");
    Token token;

    token = tokenizer.NextToken();
    Assert.IsNotNull(token);
    Assert.AreEqual("10.234", token.Value);
    Assert.AreEqual(TokenType.Real, token.Type);

    // Added for consistency with the sibling lexer tests: nothing may follow.
    Assert.IsNull(tokenizer.NextToken());
}
// Lexes "#'hello world'": a quoted symbol keeps its embedded space and
// drops the '#' and the quotes from the token value.
public void ProcessQuotedSymbol()
{
    Lexer lexer = new Lexer("#'hello world'");

    Token symbol = lexer.NextToken();
    Assert.IsNotNull(symbol);
    Assert.AreEqual("hello world", symbol.Value);
    Assert.AreEqual(TokenType.Symbol, symbol.Type);

    Assert.IsNull(lexer.NextToken());
}
// An empty source yields no tokens at all.
public void ProcessEmptyString()
{
    Lexer lexer = new Lexer(string.Empty);

    Assert.IsNull(lexer.NextToken());
}
// A double-quoted comment spanning several lines is skipped entirely,
// producing no tokens.
public void SkipMultiLineComment()
{
    Lexer lexer = new Lexer("\"This is a \n a multi-line\ncomment\"");

    Assert.IsNull(lexer.NextToken());
}
// Lexes "token1 token2" into two Name tokens separated by whitespace,
// then end of input.
public void ProcessTwoTokens()
{
    Lexer lexer = new Lexer("token1 token2");

    Token current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("token1", current.Value);
    Assert.AreEqual(TokenType.Name, current.Type);

    current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("token2", current.Value);
    Assert.AreEqual(TokenType.Name, current.Type);

    Assert.IsNull(lexer.NextToken());
}
// Lexes "@self" into an '@' Operator token followed by a Name token.
// NOTE(review): the method name says "two names" but the first token is
// asserted to be an Operator — consider renaming (e.g. ProcessAtAndName),
// keeping in mind that renaming changes the reported test identity.
public void ProcessTwoNames()
{
    Lexer lexer = new Lexer("@self");

    Token current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("@", current.Value);
    Assert.AreEqual(TokenType.Operator, current.Type);

    current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("self", current.Value);
    Assert.AreEqual(TokenType.Name, current.Type);

    Assert.IsNull(lexer.NextToken());
}
// Lexes "'[''do nothing'']'": doubled single quotes inside a string literal
// collapse to one quote in the token value.
public void ProcessStringWithEscapedDelimiters()
{
    Lexer lexer = new Lexer("'[''do nothing'']'");

    Token literal = lexer.NextToken();
    Assert.IsNotNull(literal);
    Assert.AreEqual("['do nothing']", literal.Value);
    Assert.AreEqual(TokenType.String, literal.Type);

    Assert.IsNull(lexer.NextToken());
}
// Surrounding whitespace and double-quoted comments (before and after) are
// skipped; only the bare name in between is tokenized.
public void ProcessOneTokenWithSpacesAndComment()
{
    Lexer lexer = new Lexer("  \"This is a token \" token \"This another comment\"");

    Token name = lexer.NextToken();
    Assert.IsNotNull(name);
    Assert.AreEqual("token", name.Value);
    Assert.AreEqual(TokenType.Name, name.Type);

    Assert.IsNull(lexer.NextToken());
}
// "Smalltalk.myPackage" is NOT a dotted name because the second segment
// starts lowercase: it lexes as Name, '.' Punctuation, Name.
public void ProcessNotDottedNameSecondWordIsLower()
{
    Lexer lexer = new Lexer("Smalltalk.myPackage");

    Token current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("Smalltalk", current.Value);
    Assert.AreEqual(TokenType.Name, current.Type);

    current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual(".", current.Value);
    Assert.AreEqual(TokenType.Punctuation, current.Type);

    current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("myPackage", current.Value);
    Assert.AreEqual(TokenType.Name, current.Type);

    Assert.IsNull(lexer.NextToken());
}
// Feeds the lexer an unterminated string literal ("'string" with no closing quote).
// NOTE(review): this test performs no assertions and discards the token;
// presumably it relies on an expected-exception attribute that is not visible
// in this view, or merely checks that NextToken does not crash. Confirm the
// intent and either add the attribute or assert on the resulting token.
public void ProcessNotClosedString() { Lexer tokenizer = new Lexer("'string"); Token token; token = tokenizer.NextToken(); }
// Lexes the radix literal "16rFF" (hexadecimal FF) as one Integer token
// whose value keeps the full radix spelling.
public void ProcessIntegerNumberWithRadix()
{
    Lexer lexer = new Lexer("16rFF");

    Token number = lexer.NextToken();
    Assert.IsNotNull(number);
    Assert.AreEqual("16rFF", number.Value);
    Assert.AreEqual(TokenType.Integer, number.Type);

    Assert.IsNull(lexer.NextToken());
}
// Lexes "10" as a single Integer token.
public void ProcessInteger()
{
    Lexer tokenizer = new Lexer("10");
    Token token;

    token = tokenizer.NextToken();
    Assert.IsNotNull(token);
    Assert.AreEqual("10", token.Value);
    Assert.AreEqual(TokenType.Integer, token.Type);

    // Added for consistency with the sibling lexer tests (e.g. the radix
    // test): the input must be fully consumed.
    Assert.IsNull(tokenizer.NextToken());
}
// Lexes "#{Module.Submodule}": a brace-enclosed symbol keeps its dotted
// content and drops the "#{" and "}" delimiters from the token value.
public void ProcessEnclosedSymbol()
{
    Lexer lexer = new Lexer("#{Module.Submodule}");

    Token symbol = lexer.NextToken();
    Assert.IsNotNull(symbol);
    Assert.AreEqual("Module.Submodule", symbol.Value);
    Assert.AreEqual(TokenType.Symbol, symbol.Type);

    Assert.IsNull(lexer.NextToken());
}
// Lexes "^-1" as three tokens: '^' Operator, '-' Operator, '1' Integer
// (the minus is not folded into the number).
public void ProcessReturnNegativeNumber()
{
    Lexer lexer = new Lexer("^-1");

    Token current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("^", current.Value);
    Assert.AreEqual(TokenType.Operator, current.Type);

    current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("-", current.Value);
    Assert.AreEqual(TokenType.Operator, current.Type);

    current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("1", current.Value);
    Assert.AreEqual(TokenType.Integer, current.Type);

    Assert.IsNull(lexer.NextToken());
}
// Lexes "@System.IO.FileInfo)": the trailing parenthesis terminates the
// native type reference and becomes its own Punctuation token.
public void ProcessSpecialNameAndParenthesis()
{
    Lexer lexer = new Lexer("@System.IO.FileInfo)");

    Token current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("@System.IO.FileInfo", current.Value);
    Assert.AreEqual(TokenType.Name, current.Type);

    current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual(")", current.Value);
    Assert.AreEqual(TokenType.Punctuation, current.Type);

    Assert.IsNull(lexer.NextToken());
}
// Each space-separated operator lexeme in the list must come back as one
// Operator token with the exact same spelling, in order.
public void ProcessOperators()
{
    string operators = "== ~= ~~ >= <= := ^ < > : = - + * / &";
    Lexer lexer = new Lexer(operators);

    foreach (string expected in operators.Split(' '))
    {
        Token token = lexer.NextToken();
        Assert.IsNotNull(token);
        Assert.AreEqual(expected, token.Value);
        Assert.AreEqual(TokenType.Operator, token.Type);
    }

    Assert.IsNull(lexer.NextToken());
}
// Lexes "token 'string'": a Name token followed by a String token whose
// value has the quotes stripped.
public void ProcessTokenAndString()
{
    Lexer tokenizer = new Lexer("token 'string'");
    Token token;

    token = tokenizer.NextToken();
    Assert.IsNotNull(token);
    Assert.AreEqual("token", token.Value);
    Assert.AreEqual(TokenType.Name, token.Type);

    token = tokenizer.NextToken();
    Assert.IsNotNull(token);
    Assert.AreEqual("string", token.Value);
    Assert.AreEqual(TokenType.String, token.Type);

    // Added for consistency with the sibling lexer tests: nothing may follow
    // the closing quote.
    Assert.IsNull(tokenizer.NextToken());
}
// Lexes ":x" as a single Parameter token; the leading colon is dropped
// from the token value.
public void ProcessParameter()
{
    Lexer lexer = new Lexer(":x");

    Token parameter = lexer.NextToken();
    Assert.IsNotNull(parameter);
    Assert.AreEqual(TokenType.Parameter, parameter.Type);
    Assert.AreEqual("x", parameter.Value);

    Assert.IsNull(lexer.NextToken());
}
// Lexes "#aSymbol #anotherSymbol" as two Symbol tokens (the '#' prefix is
// stripped from each value), then end of input.
public void ProcessTwoSymbols()
{
    Lexer lexer = new Lexer("#aSymbol #anotherSymbol");

    Token current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("aSymbol", current.Value);
    Assert.AreEqual(TokenType.Symbol, current.Type);

    current = lexer.NextToken();
    Assert.IsNotNull(current);
    Assert.AreEqual("anotherSymbol", current.Value);
    Assert.AreEqual(TokenType.Symbol, current.Type);

    Assert.IsNull(lexer.NextToken());
}
// Lexes a lone "." as a Punctuation token.
public void ProcessPunctuation()
{
    Lexer tokenizer = new Lexer(".");
    Token token;

    token = tokenizer.NextToken();
    Assert.IsNotNull(token);
    Assert.AreEqual(".", token.Value);
    Assert.AreEqual(TokenType.Punctuation, token.Type);

    // Added for consistency with the sibling lexer tests: nothing may follow.
    Assert.IsNull(tokenizer.NextToken());
}
// A single-line double-quoted comment is skipped entirely, producing no tokens.
public void SkipComment()
{
    Lexer lexer = new Lexer("\"This is a comment\"");

    Assert.IsNull(lexer.NextToken());
}
// Lexes a lone "@" as an Operator token.
public void ProcessAtOperator()
{
    Lexer tokenizer = new Lexer("@");
    Token token;

    token = tokenizer.NextToken();
    Assert.IsNotNull(token);
    Assert.AreEqual("@", token.Value);
    Assert.AreEqual(TokenType.Operator, token.Type);

    // Added for consistency with the sibling lexer tests: nothing may follow.
    Assert.IsNull(tokenizer.NextToken());
}
// Lexes "#aSymbol:with:many>chars" as ONE Symbol token: colons and binary
// characters are legal inside a symbol and do not split it.
public void ProcessComplexSymbol()
{
    Lexer lexer = new Lexer("#aSymbol:with:many>chars");

    Token symbol = lexer.NextToken();
    Assert.IsNotNull(symbol);
    Assert.AreEqual("aSymbol:with:many>chars", symbol.Value);
    Assert.AreEqual(TokenType.Symbol, symbol.Type);

    Assert.IsNull(lexer.NextToken());
}
// Lexes "Smalltalk.MyPackage.MySubpackage" as a single DottedName token:
// every segment starts uppercase, so the dots are kept inside the token.
public void ProcessDoubleDottedName()
{
    Lexer lexer = new Lexer("Smalltalk.MyPackage.MySubpackage");

    Token dotted = lexer.NextToken();
    Assert.IsNotNull(dotted);
    Assert.AreEqual("Smalltalk.MyPackage.MySubpackage", dotted.Value);
    Assert.AreEqual(TokenType.DottedName, dotted.Type);

    Assert.IsNull(lexer.NextToken());
}
// Builds a parser that pulls tokens on demand from an already-constructed
// lexer. The lexer is stored as-is; no null check is performed here.
public Parser(Lexer tok) { this.tokenizer = tok; }
// Every character in "().|[];" must lex as its own one-character
// Punctuation token, in order.
public void ProcessPunctuations()
{
    string punct = "().|[];";
    Lexer tokenizer = new Lexer(punct);

    // Iterate the characters directly instead of indexing by position.
    foreach (char expected in punct)
    {
        Token token = tokenizer.NextToken();
        Assert.IsNotNull(token);
        Assert.AreEqual(expected, token.Value[0]);
        Assert.AreEqual(1, token.Value.Length);
        Assert.AreEqual(TokenType.Punctuation, token.Type);
    }

    // Added for consistency with the sibling lexer tests: the input must be
    // fully consumed.
    Assert.IsNull(tokenizer.NextToken());
}