public void CanParseSimpleishProjectJson()
{
    // A trimmed-down project.json document, representative of real-world input.
    var json = @"{
        ""version"": ""1.0.0-*"",
        ""compilationOptions"": {
            ""emitEntryPoint"": false
        },
        ""dependencies"": {
            ""JsonConfigParser"": """",
            ""Microsoft.NETCore.Runtime"": ""1.0.1-beta-*"",
            ""System.IO"": ""4.0.10-beta-*"",
            ""System.Console"": ""4.0.0-beta-*"",
            ""System.Runtime"": ""4.0.21-beta-*"",
            ""System.IO.FileSystem"": ""4.0.1-beta-*"",
            ""xunit"": ""2.1.0"",
            ""xunit.runner.console"": ""2.1.0""
        },
        ""frameworks"": {
            ""dnxcore50"": { }
        }
    }";

    using(var stream = new StreamReader(new MemoryStream(Encoding.UTF8.GetBytes(json))))
    {
        var lexer = new JsonLexer(stream);
        var parser = new JsonParser(lexer);
        var parsedJson = parser.ParseObject();

        foreach(var item in parsedJson.Items)
        {
            Console.WriteLine($"Key: {item.Key}, Value: {item.Value}");
        }
    }
}
public void EmptyStreamReturnsEOF()
{
    using(var stream = new StreamReader(new MemoryStream()))
    {
        var lexerUnderTest = new JsonLexer(stream);
        var token = lexerUnderTest.GetNextToken();

        Assert.Equal(TokenTypes.EOF, token.Type);
    }
}
public static void Main(string[] args)
{
    Console.WriteLine("TokenizerTest: START");
    Console.WriteLine("TokenizerTest: RawTokenTest: START");

    // Raw tokenization benchmark: lex a small document 1000 times and report the
    // average time per run, repeated five times.
    var testString = "\"This is a string\" null false true \"another string\" { } [ ] ,";
    var times = new List<double>();

    for(int i = 0; i < 5; i++)
    {
        times.Clear();
        for(int j = 0; j < 1000; j++)
        {
            using(var stream = new StreamReader(new MemoryStream(Encoding.UTF8.GetBytes(testString))))
            {
                var lexerUnderTest = new JsonLexer(stream);
                var time = Time(() =>
                {
                    Token token = null;
                    do
                    {
                        token = lexerUnderTest.GetNextToken();
                    } while(token.Type != TokenTypes.EOF);
                });
                times.Add(time.TotalMilliseconds);
            }
        }
        Console.WriteLine($"TokenizerTest: {times.Average()}");
    }

    Console.WriteLine("TokenizerTest: RawTokenTest: STOP");
    Console.WriteLine("TokenizerTest: STOP");

    Console.WriteLine("ParserTest: START");
    Console.WriteLine("ParserTest: ProjectParse START");

    // Full parse benchmark against this parser.
    for(int i = 0; i < 5; i++)
    {
        times.Clear();
        for(int j = 0; j < 1000; j++)
        {
            var time = Time(RunProjectParse);
            times.Add(time.TotalMilliseconds);
        }
        Console.WriteLine($"ParserTest: ProjectParse: {times.Average()}");
    }

    Console.WriteLine("ParserTest: ProjectParse STOP");
    Console.WriteLine("ParserTest: ExistingParser START");

    // Same benchmark against the existing parser, for comparison.
    for(int i = 0; i < 5; i++)
    {
        times.Clear();
        for(int j = 0; j < 1000; j++)
        {
            var time = Time(RunExistingProjectParse);
            times.Add(time.TotalMilliseconds);
        }
        Console.WriteLine($"ParserTest: ExistingParser: {times.Average()}");
    }

    Console.WriteLine("ParserTest: ExistingParser STOP");
    Console.WriteLine("ParserTest: STOP");
}
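// Main above relies on a Time helper that runs a delegate and returns a TimeSpan (and on a
// RunExistingProjectParse counterpart that is not shown here). A minimal sketch of such a
// helper, based on Stopwatch; the name and signature are taken from the calls above, but the
// class name and body are assumptions, not the repository's actual implementation.
public static class BenchmarkHelpers
{
    public static TimeSpan Time(Action action)
    {
        // Run the supplied action once and return the elapsed wall-clock time.
        var stopwatch = Stopwatch.StartNew();
        action();
        stopwatch.Stop();
        return stopwatch.Elapsed;
    }
}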
public void CanParseSimpleJson(string jsonString, Type jsonType)
{
    using(var stream = new StreamReader(new MemoryStream(Encoding.UTF8.GetBytes(jsonString))))
    {
        var lexer = new JsonLexer(stream);
        var parser = new JsonParser(lexer);
        var parsedJson = parser.ParseObject();

        // The parsed value should match the type supplied for this case.
        Assert.IsAssignableFrom(jsonType, parsedJson.Items["simpleKey"]);
        Assert.True(parsedJson.Items["simpleKey"].GetType() == jsonType);
    }
}
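// The (jsonString, jsonType) parameters above suggest a data-driven test. One plausible way to
// wire it up is with xUnit's [Theory]/[InlineData] attributes (xunit 2.1.0 is already a
// dependency of the project). The example below is illustrative only: the method name and the
// inline data are not taken from the source, and the body just restates the assertion above.
[Theory]
[InlineData("{ \"simpleKey\" : \"simpleValue\" }", typeof(JsonString))]
public void CanParseSimpleJsonTheoryExample(string jsonString, Type jsonType)
{
    using(var stream = new StreamReader(new MemoryStream(Encoding.UTF8.GetBytes(jsonString))))
    {
        var parsedJson = new JsonParser(new JsonLexer(stream)).ParseObject();
        Assert.IsAssignableFrom(jsonType, parsedJson.Items["simpleKey"]);
    }
}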
public void CanIdentifyStructuralChars()
{
    var tokenString = "{}[]:,";

    // Expected token types, in the same order as the characters above.
    var structuralTokenTypes = new int[]
    {
        TokenTypes.BEGIN_OBJECT,
        TokenTypes.END_OBJECT,
        TokenTypes.BEGIN_ARRAY,
        TokenTypes.END_ARRAY,
        TokenTypes.NAME_SEPARATOR,
        TokenTypes.VALUE_SEPARATOR
    };

    using(var stream = new StreamReader(new MemoryStream(Encoding.UTF8.GetBytes(tokenString))))
    {
        var lexerUnderTest = new JsonLexer(stream);

        for(int i = 0; i < structuralTokenTypes.Length; i++)
        {
            var token = lexerUnderTest.GetNextToken();
            Assert.Equal(structuralTokenTypes[i], token.Type);
        }
    }
}
public void CanParseSimpleJson()
{
    var jsonString = "{ \"simpleKey\" : \"simpleValue\" }";

    using(var stream = new StreamReader(new MemoryStream(Encoding.UTF8.GetBytes(jsonString))))
    {
        var lexer = new JsonLexer(stream);
        var parser = new JsonParser(lexer);
        var parsedJson = parser.ParseObject();

        foreach(var item in parsedJson.Items)
        {
            Console.WriteLine($"Key: {item.Key}, Value: {item.Value}");
        }

        Assert.True(parsedJson.Items.Keys.Count == 1);
        Assert.IsType<JsonString>(parsedJson.Items["simpleKey"]);
    }
}
public void CanIdentifyLiteralTokensAndIgnoresAllWhitespace()
{
    var tokenString = "false true null";

    using(var stream = new StreamReader(new MemoryStream(Encoding.UTF8.GetBytes(tokenString))))
    {
        var lexerUnderTest = new JsonLexer(stream);

        var token = lexerUnderTest.GetNextToken();
        Assert.Equal(TokenTypes.LITERAL, token.Type);
        Assert.Equal("false", new string(token.Value));

        token = lexerUnderTest.GetNextToken();
        Assert.Equal(TokenTypes.LITERAL, token.Type);
        Assert.Equal("true", new string(token.Value));

        token = lexerUnderTest.GetNextToken();
        Assert.Equal(TokenTypes.LITERAL, token.Type);
        Assert.Equal("null", new string(token.Value));
    }
}
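// The lexer tests above assume only a small token surface: a Token with an int Type and a
// char[] Value, plus integer TokenTypes constants. A minimal sketch of those shapes, inferred
// from the calls above; the actual definitions in the lexer (including the numeric values and
// any additional token types, such as one for strings) may differ.
public class Token
{
    public int Type { get; set; }
    public char[] Value { get; set; }
}

public static class TokenTypes
{
    public const int EOF = 0;
    public const int BEGIN_OBJECT = 1;
    public const int END_OBJECT = 2;
    public const int BEGIN_ARRAY = 3;
    public const int END_ARRAY = 4;
    public const int NAME_SEPARATOR = 5;
    public const int VALUE_SEPARATOR = 6;
    public const int LITERAL = 7;
}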
public JsonParser(JsonLexer lexer)
{
    _lexer = lexer;
    _currentToken = lexer.GetNextToken();
}
public static void RunProjectParse()
{
    var json = @"{
        ""version"": ""1.0.0-*"",
        ""compilationOptions"": {
            ""emitEntryPoint"": false
        },
        ""dependencies"": {
            ""JsonConfigParser"": """",
            ""Microsoft.NETCore.Runtime"": ""1.0.1-beta-*"",
            ""System.IO"": ""4.0.10-beta-*"",
            ""System.Console"": ""4.0.0-beta-*"",
            ""System.Runtime"": ""4.0.21-beta-*"",
            ""System.IO.FileSystem"": ""4.0.1-beta-*"",
            ""xunit"": ""2.1.0"",
            ""xunit.runner.console"": ""2.1.0""
        },
        ""frameworks"": {
            ""dnxcore50"": { }
        }
    }";

    using(var stream = new StreamReader(new MemoryStream(Encoding.UTF8.GetBytes(json))))
    {
        var lexer = new JsonLexer(stream);
        var parser = new JsonParser(lexer);
        var parsedJson = parser.ParseObject();
    }
}