/// <summary>
/// Verifies the initial state of a freshly constructed <see cref="JsonTokenizer"/>:
/// line number and position both start at 1 and no tokens have been extracted yet.
/// </summary>
private void Create()
{
    // Arrange + Act: construction is the operation under test.
    var tokenizer = new JsonTokenizer();

    // Assert: positions are 1-based and the token queue starts empty.
    Assert.Equal(1, tokenizer.CurrentLineNumber);
    Assert.Equal(1, tokenizer.CurrentPosition);
    Assert.Empty(tokenizer.Tokens);
}
/// <summary>
/// Feeds a JSON snippet that should yield exactly one token into the tokenizer
/// and checks that the expected token is extracted.
/// </summary>
/// <param name="json">JSON text expected to produce a single token.</param>
/// <param name="expected">The token the tokenizer is expected to emit.</param>
private void Process_SingleTokens(string json, JsonToken expected)
{
    // Arrange
    var tokenizer = new JsonTokenizer();

    // Act
    int tokenCount = tokenizer.Process(json);

    // Assert: exactly one token was reported and it matches the expectation.
    Assert.Equal(1, tokenCount);
    Assert.Equal(expected, tokenizer.Tokens.Dequeue());
}
/// <summary>
/// Stress test: concatenates randomly selected single-token snippets, separated by
/// random whitespace, and verifies the tokenizer extracts exactly the expected token
/// sequence. The same tokenizer instance is reused across runs via <c>Reset()</c>,
/// so tokenizer reuse is exercised as well.
/// </summary>
private void Process_TokenSets()
{
    const int iterations = 10000;

    // Project the shared (json, token) test-case table into an indexable array.
    var data = ProcessTestData_SingleTokens
        .Select(x => new Tuple<string, JsonToken>((string)x[0], (JsonToken)x[1]))
        .ToArray();

    var tokenizer = new JsonTokenizer();
    var random = new Random(0); // fixed seed keeps the test deterministic
    var json = new StringBuilder();
    var tokens = new List<JsonToken>();

    for (int run = 0; run < iterations; run++)
    {
        int tokenCount = random.Next(2, 30); // 2..29 tokens per run (upper bound exclusive)
        json.Clear();
        tokens.Clear();

        for (int j = 0; j < tokenCount; j++)
        {
            // BUGFIX: Random.Next(min, max) treats max as EXCLUSIVE. The original code
            // passed Length - 1, which meant the last test-case snippet and the last
            // whitespace character could never be selected.
            int selectedTokenIndex = random.Next(0, data.Length);
            json.Append(data[selectedTokenIndex].Item1);
            json.Append(WhiteSpaceCharacters[random.Next(0, WhiteSpaceCharacters.Length)]);
            tokens.Add(data[selectedTokenIndex].Item2);
        }

        // --- actual test start ---
        int extractedTokenCount = tokenizer.Process(json.ToString());
        Assert.Equal(tokens.Count, extractedTokenCount);
        Assert.Equal(tokens.ToArray(), tokenizer.Tokens.ToArray());
        // --- actual test end ---

        tokenizer.Reset();
    }
}