public void CharTokenizationHandler_EnsureFailure_Test()
        {
            // Verifies that input which is NOT a character literal ("asd") falls
            // through the char-literal handler and is classified as Unknown.
            // NOTE(review): no [Test]/[TestMethod] attribute is visible on this
            // method — confirm the framework discovers it.
            RequiredTokenDefinitions<TokenKind> definitions = new RequiredTokenDefinitions<TokenKind>(TokenKind.Whitespace, TokenKind.LineBreak, TokenKind.Unknown, TokenKind.EOF);
            Tokenizer<Token, TokenKind>         tokenizer   = new Tokenizer<Token, TokenKind>(definitions);

            // Register each handler once. (The original added the same
            // CharacterLiteralTokenizationHandler instance twice — a redundant
            // copy-paste duplicate, removed here.)
            tokenizer.Handlers.Add(new CharacterLiteralTokenizationHandler());
            tokenizer.Handlers.Add(new WhitespaceTokenizationHandler<Token, TokenKind>());

            // Valid char literals followed by a trailing non-literal "asd".
            Token[] tokens = tokenizer.Tokenize("\'r\' \'\\u0066\' \'\\\'\' \'\\x000a\' asd");

            // The last non-whitespace token before the final (EOF) token must be
            // the Unknown definition. Assert.IsTrue keeps the original
            // Definition == TokenKind comparison operator semantics.
            Assert.IsTrue(tokens[tokens.Length - 1].PreviousNWS.Definition == TokenKind.Unknown);
        }
        public void CharTokenizationHandler_EnsureSuccess_Test()
        {
            // Verifies that an input consisting solely of valid character
            // literals (plain, \u escape, escaped quote, \x escape) tokenizes so
            // that every non-whitespace token before EOF is a char literal.
            // NOTE(review): no [Test]/[TestMethod] attribute is visible on this
            // method — confirm the framework discovers it.
            RequiredTokenDefinitions<TokenKind> definitions = new RequiredTokenDefinitions<TokenKind>(TokenKind.Whitespace, TokenKind.LineBreak, TokenKind.Unknown, TokenKind.EOF);
            Tokenizer<Token, TokenKind>         tokenizer   = new Tokenizer<Token, TokenKind>(definitions);

            // Register each handler once. (The original added the same
            // CharacterLiteralTokenizationHandler instance twice — a redundant
            // copy-paste duplicate, removed here.)
            tokenizer.Handlers.Add(new CharacterLiteralTokenizationHandler());
            tokenizer.Handlers.Add(new WhitespaceTokenizationHandler<Token, TokenKind>());

            Token[] tokens = tokenizer.Tokenize("\'r\' \'\\u0066\' \'\\\'\' \'\\x000a\' ");

            // Walk the non-whitespace chain from the first token until EOF;
            // every visited token must be a char literal. (Assumes the tokenizer
            // always emits at least an EOF token, so tokens[0] exists — holds
            // for this fixed input.)
            for (Token current = tokens[0]; !current.Definition.IsEndOfFile; current = current.NextNWS)
            {
                Assert.IsTrue(current.Definition.IsCharLiteral);
            }
        }