/// <summary>
/// Verifies that lexing a completely empty stream immediately yields an EOF token.
/// </summary>
public void EmptyStreamReturnsEOF()
{
    using(var stream = new StreamReader(new MemoryStream()))
    {
        var lexerUnderTest = new JsonLexer(stream);

        var token = lexerUnderTest.GetNextToken();

        // xUnit convention is Assert.Equal(expected, actual); the original had the
        // arguments reversed, which makes failure messages report the values backwards.
        Assert.Equal(TokenTypes.EOF, token.Type);
    }
}
        /// <summary>
        /// Verifies that the lexer recognizes the three JSON literals (false/true/null)
        /// as LITERAL tokens and skips the whitespace separating them.
        /// </summary>
        public void CanIdentifyLiteralTokensAndIgnoresAllWhitespace()
        {
            var tokenString = "false true null";
            // The literals in the order they appear in the input.
            var expectedLiterals = new[] { "false", "true", "null" };

            using(var stream = new StreamReader(new MemoryStream(Encoding.UTF8.GetBytes(tokenString))))
            {
                var lexerUnderTest = new JsonLexer(stream);

                foreach(var expected in expectedLiterals)
                {
                    var token = lexerUnderTest.GetNextToken();

                    // Assert.Equal(expected, actual) — the original had the arguments
                    // reversed, producing misleading failure output.
                    Assert.Equal(TokenTypes.LITERAL, token.Type);
                    // token.Value is a char[]; materialize it for comparison.
                    Assert.Equal(expected, new string(token.Value));
                }
            }
        }
// Example #3
 /// <summary>
 /// Micro-benchmark driver: times raw tokenization of a fixed JSON fragment, then the
 /// project parser and an existing reference parser, printing per-batch averages.
 /// Each benchmark runs 5 batches of 1000 iterations; the average is per batch.
 /// </summary>
 /// <param name="args">Unused command-line arguments.</param>
 public static void Main(string[] args)
 {
     Console.WriteLine("TokenizerTest: START");
     Console.WriteLine("TokenizerTest: RawTokenTest: START");
     var testString = "\"This is a string\" null false true \"another string\" { } [ ] ,";
     var times = new List<double>();
     for(int i = 0; i < 5; i++)
     {
         times.Clear();
         for (int j = 0;  j < 1000; j++)
         {
             using(var stream = new StreamReader(new MemoryStream(Encoding.UTF8.GetBytes(testString))))
             {
                 var lexerUnderTest = new JsonLexer(stream);
                 // Time a full drain of the token stream until EOF.
                 var time = Time(() => {
                     Token token = null;
                     do
                     {
                         token = lexerUnderTest.GetNextToken();
                     } while(token.Type != TokenTypes.EOF);
                 });
                 times.Add(time.TotalMilliseconds);
             }
         }
         Console.WriteLine($"TokenizerTest: {times.Average()}");
     }
     Console.WriteLine("TokenizerTest: RawTokenTest: STOP");
     Console.WriteLine("TokenizerTest: STOP");
     Console.WriteLine("ParserTest: START");
     Console.WriteLine("ParserTest: ProjectParse START");
     for(int i = 0; i < 5; i++)
     {
         times.Clear();
         for (int j = 0;  j < 1000; j++)
         {
             var time = Time(RunProjectParse);
             times.Add(time.TotalMilliseconds);
         }
         Console.WriteLine($"ParserTest: ProjectParse: {times.Average()}");
     }
     // BUGFIX: close the ProjectParse section before opening the next one
     // (the original never printed this STOP).
     Console.WriteLine("ParserTest: ProjectParse STOP");
     Console.WriteLine("ParserTest: ExistingParser START");
     for(int i = 0; i < 5; i++)
     {
         times.Clear();
         for (int j = 0;  j < 1000; j++)
         {
             var time = Time(RunExistingProjectParse);
             times.Add(time.TotalMilliseconds);
         }
         // BUGFIX: this loop benchmarks the existing parser, but the original
         // labeled its output "ProjectParse".
         Console.WriteLine($"ParserTest: ExistingParser: {times.Average()}");
     }
     // BUGFIX: original printed "ParseTest" (typo) and closed with
     // "ProjectParse STOP" instead of mirroring "ParserTest: START".
     Console.WriteLine("ParserTest: ExistingParser STOP");
     Console.WriteLine("ParserTest: STOP");
 }
        /// <summary>
        /// Verifies that each JSON structural character ({ } [ ] : ,) is lexed into its
        /// corresponding structural token type, in input order.
        /// </summary>
        public void CanIdentifyStructuralChars()
        {
            var tokenString = "{}[]:,";
            // Expected token types, positionally matching the characters in tokenString.
            var structuralTokenTypes = new int[] { TokenTypes.BEGIN_OBJECT, TokenTypes.END_OBJECT, TokenTypes.BEGIN_ARRAY, TokenTypes.END_ARRAY, TokenTypes.NAME_SEPARATOR, TokenTypes.VALUE_SEPARATOR };

            using(var stream = new StreamReader(new MemoryStream(Encoding.UTF8.GetBytes(tokenString))))
            {
                var lexerUnderTest = new JsonLexer(stream);

                for(int i = 0; i < structuralTokenTypes.Length; i++)
                {
                    var token = lexerUnderTest.GetNextToken();

                    // Assert.Equal(expected, actual) — the original had the arguments
                    // reversed, producing misleading failure output.
                    Assert.Equal(structuralTokenTypes[i], token.Type);
                }
            }
        }
// Example #5
 /// <summary>
 /// Creates a parser over the given lexer and primes the first lookahead token.
 /// </summary>
 /// <param name="lexer">The token source to parse from; must not be null.</param>
 /// <exception cref="ArgumentNullException">Thrown when <paramref name="lexer"/> is null.</exception>
 public JsonParser(JsonLexer lexer)
 {
     // Fail fast with a clear message instead of an NRE on the GetNextToken call below.
     _lexer = lexer ?? throw new ArgumentNullException(nameof(lexer));
     // Prime the lookahead so parsing methods can inspect the current token immediately.
     _currentToken = _lexer.GetNextToken();
 }