// An exhausted tokenizer (empty source) must produce no word token at all.
public void ShouldReturnNullWhenAtTheEndOfTheTokenizer()
{
    // Arrange: empty input means the tokenizer is already at its end.
    var tokenizer = new StringTokenizer(string.Empty);
    var matcher = new MockMatchWord(new List<StringMatcherBase>());

    // Act
    var token = matcher.CallImplementation(tokenizer);

    // Assert: nothing to consume, so no token is created.
    Assert.IsNull(token);
}
// Leading whitespace is not part of a word, so matching must fail immediately.
public void ShouldReturnNullWhenAWhiteSpaceCharacterIsFound()
{
    // Arrange: the very first character is a space.
    var tokenizer = new StringTokenizer(" hello");
    var matcher = new MockMatchWord(new List<StringMatcherBase>());

    // Act
    var token = matcher.CallImplementation(tokenizer);

    // Assert: a word cannot start on whitespace.
    Assert.IsNull(token);
}
// The produced token must carry the matched text as its value.
public void ShouldSetTheValueOfTheToken()
{
    // Arrange: a single bare word with no delimiters.
    var tokenizer = new StringTokenizer("hello");
    var matcher = new MockMatchWord(new List<StringMatcherBase>());

    // Act
    var token = matcher.CallImplementation(tokenizer);

    // Assert: the entire input becomes the token's value.
    Assert.AreEqual("hello", token.TokenValue);
}
// A trailing space terminates the word, but everything before it still matches.
public void ShouldMatchAllCharactersUpToAWhiteSpaceCharacter()
{
    // Arrange: word followed by a space.
    var tokenizer = new StringTokenizer("hello ");
    var matcher = new MockMatchWord(new List<StringMatcherBase>());

    // Act
    var token = matcher.CallImplementation(tokenizer);

    // Assert: the word portion is matched successfully.
    Assert.IsNotNull(token);
}
// Input starting with a registered special character cannot begin a word.
public void ShouldReturnNullWhenASpecialCharacterIsFoundInstead()
{
    // Arrange: "," is registered as a keyword matcher, and the input starts with it.
    var specials = new List<StringMatcherBase>
    {
        new MatchKeyword(TokenType.Word, ",")
    };
    var tokenizer = new StringTokenizer(",hello");
    var matcher = new MockMatchWord(specials);

    // Act
    var token = matcher.CallImplementation(tokenizer);

    // Assert: the leading special character blocks the word match.
    Assert.IsNull(token);
}
// A registered special character ends the word; the characters before it match.
public void ShouldMatchAllCharacterUpToASpecialCharacter()
{
    // Arrange: "," is a registered keyword and appears after the word.
    var specials = new List<StringMatcherBase>
    {
        new MatchKeyword(TokenType.Word, ",")
    };
    var tokenizer = new StringTokenizer("hello,");
    var matcher = new MockMatchWord(specials);

    // Act
    var token = matcher.CallImplementation(tokenizer);

    // Assert: matching succeeded up to the delimiter.
    Assert.IsNotNull(token);
}
// Matching must snapshot the tokenizer's current location exactly once.
public void ShouldCaptureCurrentTheLocation()
{
    // Arrange: partial mock so real tokenizer behavior runs while calls are recorded.
    var tokenizerMock = new Mock<StringTokenizer>("hello") { CallBase = true };
    var matcher = new MockMatchWord(new List<StringMatcherBase>());

    // Act
    matcher.CallImplementation(tokenizerMock.Object);

    // Assert: the location was captured a single time.
    tokenizerMock.Verify(t => t.GetCurrentLocation(), Times.Once);
}
// Consumption must halt exactly at the end of input: one Consume per character,
// with the end condition checked along the way.
// NOTE(review): "Charcaters" in the name is a typo, kept to preserve the public identifier.
public void ShouldStopConsumingCharcatersAtTheEndOfTheTokenizer()
{
    // Arrange: partial mock over a five-character input.
    var tokenizerMock = new Mock<StringTokenizer>("hello") { CallBase = true };
    var matcher = new MockMatchWord(new List<StringMatcherBase>());

    // Act
    matcher.CallImplementation(tokenizerMock.Object);

    // Assert: exactly one Consume per character of "hello", the end check fired,
    // and the tokenizer finished in the at-end state.
    tokenizerMock.Verify(t => t.Consume(), Times.Exactly(5));
    tokenizerMock.Verify(t => t.IsAtEnd(), Times.AtLeastOnce);
    Assert.IsTrue(tokenizerMock.Object.IsAtEnd());
}
// The token must be created via the captured location object, exactly once.
// NOTE(review): "Loction" in the name is a typo, kept to preserve the public identifier.
public void ShouldCreateATokenFromCurrentTheLoction()
{
    // Arrange: stub the location so we can observe CreateToken being invoked.
    var locationMock = new Mock<ITokenLocation>(MockBehavior.Default);
    var tokenizerMock = new Mock<StringTokenizer>("hello") { CallBase = true };
    tokenizerMock
        .Setup(t => t.GetCurrentLocation())
        .Returns(locationMock.Object);
    var matcher = new MockMatchWord(new List<StringMatcherBase>());

    // Act
    matcher.CallImplementation(tokenizerMock.Object);

    // Assert: token construction was delegated to the location exactly once.
    locationMock.Verify(
        l => l.CreateToken(It.IsAny<TokenType>(), It.IsAny<string>()),
        Times.Once);
}