public void Tokenize_LineEndsWithSpace_ReturnsTokens()
        {
            var tokenizer = new LineSegmentTokenizer();

            Assert.That(tokenizer.TokenizeToStrings("This is the first sentence. \nThis is the second sentence."),
                        Is.EqualTo(new[] { "This is the first sentence. ", "This is the second sentence." }));
        }
Example #2
        private static void Tokenize_EndsWithNewLineAndSpace_ReturnsTokens(Assert assert)
        {
            var tokenizer = new LineSegmentTokenizer();

            assert.DeepEqual(tokenizer.TokenizeToStrings("This is a test.\n ").ToArray(),
                             new[] { "This is a test.", " " });
        }
        public void Tokenize_EmptyLine_ReturnsTokens()
        {
            var tokenizer = new LineSegmentTokenizer();

            Assert.That(tokenizer.TokenizeToStrings("This is the first sentence.\n\nThis is the third sentence."),
                        Is.EqualTo(new[] { "This is the first sentence.", "", "This is the third sentence." }));
        }
        public void Tokenize_EndsWithTextAndSpace_ReturnsTokens()
        {
            var tokenizer = new LineSegmentTokenizer();

            Assert.That(tokenizer.TokenizeToStrings("This is the first sentence.\nThis is a partial sentence "),
                        Is.EqualTo(new[] { "This is the first sentence.", "This is a partial sentence " }));
        }
        public void Tokenize_EndsWithNewLineAndSpace_ReturnsTokens()
        {
            var tokenizer = new LineSegmentTokenizer();

            Assert.That(tokenizer.TokenizeToStrings("This is a test.\n "),
                        Is.EqualTo(new[] { "This is a test.", " " }));
        }
Example #6
        private static void Tokenize_LineEndsWithSpace_ReturnsTokens(Assert assert)
        {
            var tokenizer = new LineSegmentTokenizer();

            assert.DeepEqual(tokenizer.TokenizeToStrings(
                                 "This is the first sentence. \nThis is the second sentence.").ToArray(),
                             new[] { "This is the first sentence. ", "This is the second sentence." });
        }
Example #7
        private static void Tokenize_EmptyLine_ReturnsTokens(Assert assert)
        {
            var tokenizer = new LineSegmentTokenizer();

            assert.DeepEqual(tokenizer.TokenizeToStrings(
                                 "This is the first sentence.\n\nThis is the third sentence.").ToArray(),
                             new[] { "This is the first sentence.", "", "This is the third sentence." });
        }
Example #8
        private static void Tokenize_SingleLine_ReturnsTokens(Assert assert)
        {
            var tokenizer = new LineSegmentTokenizer();

            assert.DeepEqual(tokenizer.TokenizeToStrings("This is a test.").ToArray(), new[] { "This is a test." });
        }
Example #9
        private static void Tokenize_Empty_ReturnsEmpty(Assert assert)
        {
            var tokenizer = new LineSegmentTokenizer();

            assert.DeepEqual(tokenizer.TokenizeToStrings("").ToArray(), new string[0]);
        }
        public void Tokenize_Empty_ReturnsEmpty()
        {
            var tokenizer = new LineSegmentTokenizer();

            Assert.That(tokenizer.TokenizeToStrings(""), Is.Empty);
        }
        public void Tokenize_SingleLine_ReturnsTokens()
        {
            var tokenizer = new LineSegmentTokenizer();

            Assert.That(tokenizer.TokenizeToStrings("This is a test."), Is.EqualTo(new[] { "This is a test." }));
        }