Example #1
0
        public void AcceptNextCharacter_WithDefaultFieldDelimiter_AddsCharacterToCurrentToken()
        {
            // Arrange: seed the state with an already-buffered token.
            const string bufferedToken = "read token";
            WithCurrentToken(bufferedToken);

            // Act: feed the default field delimiter (',').
            var result = _sut.AcceptNextCharacter(TokenizerStateContext, ',');

            // Assert: the buffered content is surfaced as a value token.
            Assert.That(result, Is.Not.Null);
            Assert.That(result.Value, IsA.ValueToken(bufferedToken));
        }
Example #2
0
        public void AcceptNextCharacter_OnIntermediateState_WithNewLine_ReturnsNewValueToken()
        {
            // Arrange: buffer a token and feed '\r' so the context transitions
            // to its intermediate state.
            const string bufferedToken = "read token";
            WithCurrentToken(bufferedToken);
            _sut.AcceptNextCharacter(TokenizerStateContext, '\r');

            // Act: complete the CRLF sequence from the current (intermediate) state.
            var result = TokenizerStateContext.State.AcceptNextCharacter(TokenizerStateContext, '\n');

            // Assert: the buffered content is emitted as a value token.
            Assert.That(result, IsA.ValueToken(bufferedToken));
        }
        public void AcceptNextCharacter_WithCharacterNotMatchingString_ReturnsTokenOfOriginalState()
        {
            // Arrange: the wrapped original state answers 'x' (for any context)
            // with a value token; the SUT is configured to match "\n".
            // NOTE: fixed the mangled FakeItEasy matcher formatting
            // (`A <ITokenizerStateContext> ._` -> `A<ITokenizerStateContext>._`).
            A.CallTo(() => _originalState.AcceptNextCharacter(A<ITokenizerStateContext>._, 'x'))
                .Returns(Token.CreateValueToken("x"));
            var sut = CreateFor("\n");

            // Act: feed a character that does not match the configured string.
            var result = sut.AcceptNextCharacter(TokenizerStateContext, 'x');

            // Assert: the token produced by the original state is passed through.
            Assert.That(result, IsA.ValueToken("x"));
        }
Example #4
0
        public void AcceptNextCharacter_WithNonDefaultFieldDelimiter_ReturnsNewValueToken()
        {
            // Arrange: buffer a token and reconfigure the field delimiter to ';'.
            const string bufferedToken = "read token";
            WithCurrentToken(bufferedToken);
            _configuration.FieldDelimiter = ';';

            // Act: feed the non-default delimiter.
            var result = _sut.AcceptNextCharacter(TokenizerStateContext, ';');

            // Assert: the buffered content is surfaced as a value token.
            Assert.That(result, Is.Not.Null);
            Assert.That(result.Value, IsA.ValueToken(bufferedToken));
        }
Example #5
0
        public void Tokenize_WithEscapedNewLine_ReturnsCorrectTokens()
        {
            // Act: tokenize a fixture whose first field contains an escaped CRLF.
            Token[] tokens;
            using (var reader = CreateReaderForTestFile("WithEscapedNewLine.csv"))
            {
                tokens = _sut.Tokenize(reader).ToArray();
            }

            // Assert: two value tokens followed by a single record delimiter.
            Assert.That(tokens.Length, Is.EqualTo(3));
            Assert.That(tokens[0], IsA.ValueToken("Hell\r\no"));
            Assert.That(tokens[1], IsA.ValueToken(@"this is one record"));
            Assert.That(tokens[2], IsA.RecordDelimiterToken());
        }
Example #6
0
        public void Tokenize_WithAllEscaped_ReturnsCorrectTokens()
        {
            // Act: tokenize a fixture in which every field is escaped.
            Token[] tokens;
            using (var reader = CreateReaderForTestFile("WithAllEscaped.csv"))
            {
                tokens = _sut.Tokenize(reader).ToArray();
            }

            // Assert: first record holds five values (the last is a lone quote),
            // the second record a single value; each record ends in a delimiter.
            Assert.That(tokens, Has.Length.EqualTo(8));
            var firstRecordValues = new[] { "Hello", "I", "contain", "a", @"""" };
            for (var i = 0; i < firstRecordValues.Length; i++)
            {
                Assert.That(tokens[i], IsA.ValueToken(firstRecordValues[i]));
            }
            Assert.That(tokens[5], IsA.RecordDelimiterToken());
            Assert.That(tokens[6], IsA.ValueToken("Test"));
            Assert.That(tokens[7], IsA.RecordDelimiterToken());
        }
Example #7
0
        public void Tokenize_SimpleCsvWithoutHeader_ReturnsCorrectTokens()
        {
            // Act: tokenize a simple, header-less two-record fixture.
            Token[] tokens;
            using (var reader = CreateReaderForTestFile("SimpleWithoutHeader.csv"))
            {
                tokens = _sut.Tokenize(reader).ToArray();
            }

            // Assert: two records of four values each, each terminated by a
            // record-delimiter token.
            Assert.That(tokens, Has.Length.EqualTo(10));
            var firstRecord = new[] { "Hello", "I", "am", "an" };
            for (var i = 0; i < firstRecord.Length; i++)
            {
                Assert.That(tokens[i], IsA.ValueToken(firstRecord[i]));
            }
            Assert.That(tokens[4], IsA.RecordDelimiterToken());
            var secondRecord = new[] { "very", "simple", "csv", "file" };
            for (var i = 0; i < secondRecord.Length; i++)
            {
                Assert.That(tokens[5 + i], IsA.ValueToken(secondRecord[i]));
            }
            Assert.That(tokens[9], IsA.RecordDelimiterToken());
        }