public void LineMappingStreamReader_Basics()
{
    // Verifies LineMappingStreamReader.LineAndCharToOffset by independently
    // scanning the sample file for '\n' bytes and checking that (line, 1)
    // maps to the byte immediately after the preceding newline.
    LogModelSampleBuilder.EnsureSamplesBuilt();
    string samplePath = LogModelSampleBuilder.SampleLogPath;

    // Scan the whole file up front and record the byte offset of every newline.
    // Two -1 sentinels: index 0 is unused (lines are 1-based) and index 1
    // represents line 1, which starts one byte after "offset -1".
    byte[] allBytes = File.ReadAllBytes(samplePath);
    List<long> newlineOffsets = new List<long> { -1, -1 };
    for (long index = 0; index < allBytes.Length; ++index)
    {
        if (allBytes[index] == (byte)'\n')
        {
            newlineOffsets.Add(index);
        }
    }

    char[] chunk = new char[1024];
    int lineToCheck = 1;
    long bytesConsumed = 0;

    using (LineMappingStreamReader reader = new LineMappingStreamReader(File.OpenRead(samplePath)))
    {
        int charsRead;
        while ((charsRead = reader.Read(chunk, 0, chunk.Length)) != 0)
        {
            // Track how many file bytes correspond to the characters read so far.
            bytesConsumed += reader.CurrentEncoding.GetByteCount(chunk, 0, charsRead);

            // For each line whose start now falls within the consumed range,
            // ask the reader where (line, 1) is and compare with our own scan.
            while (lineToCheck < newlineOffsets.Count)
            {
                long newlineAt = newlineOffsets[lineToCheck];
                if (newlineAt > bytesConsumed)
                {
                    break;
                }

                long reportedLineStart = reader.LineAndCharToOffset(lineToCheck, 1) - 1;

                // (N, 1) must land exactly one byte past the newline we found.
                Assert.Equal(newlineAt, reportedLineStart);
                lineToCheck++;
            }
        }
    }
}
public void LineMappingStreamReader_WithJsonReader()
{
    // Verifies that byte offsets computed from JsonTextReader (line, char)
    // positions via LineMappingStreamReader can be used to seek back to and
    // re-read the same JSON objects.
    LogModelSampleBuilder.EnsureSamplesBuilt();
    string path = LogModelSampleBuilder.SampleLogPath;
    JsonSerializer serializer = new JsonSerializer();

    // A second, independent stream used to seek directly to computed offsets.
    using (Stream randomAccessStream = File.OpenRead(path))
    using (LineMappingStreamReader mappingReader = new LineMappingStreamReader(File.OpenRead(path)))
    using (JsonTextReader json = new JsonTextReader(mappingReader))
    {
        // Advance past the document root.
        json.Read();

        while (json.Read())
        {
            if (json.TokenType != JsonToken.StartObject)
            {
                continue;
            }

            // Translate the reader's (line, char) to an absolute byte offset.
            long byteOffset = mappingReader.LineAndCharToOffset(json.LineNumber, json.LinePosition);

            // Deserialize the object from the sequential read ...
            JObject fromSequentialRead = (JObject)serializer.Deserialize(json);

            // ... and again by seeking to the computed offset; both must match.
            JObject fromSeek = ReadAtPosition(serializer, randomAccessStream, byteOffset);
            Assert.Equal(fromSequentialRead.ToString(), fromSeek.ToString());
        }
    }
}
// Verifies that deserializing with deferred (lazily-materialized) collections
// produces the same Log as a normal in-memory deserialization, then exercises
// the DeferredList / DeferredDictionary API surface for code coverage.
private static void CompareReadNormalToReadDeferred(string filePath)
{
    LogModelSampleBuilder.EnsureSamplesBuilt();
    JsonSerializer serializer = new JsonSerializer();
    Log expected;
    Log actual;

    // Read normally (JsonSerializer -> JsonTextReader -> StreamReader)
    using (JsonTextReader reader = new JsonTextReader(new StreamReader(filePath)))
    {
        expected = serializer.Deserialize<Log>(reader);

        // A normal read materializes ordinary in-memory collection types.
        Assert.IsType<Dictionary<string, CodeContext>>(expected.CodeContexts);
        Assert.IsType<List<LogMessage>>(expected.Messages);
    }

    // Read with Deferred collections; the contract resolver swaps in the
    // deferred collection types during deserialization.
    serializer.ContractResolver = new LogModelDeferredContractResolver();
    using (JsonPositionedTextReader reader = new JsonPositionedTextReader(filePath))
    {
        actual = serializer.Deserialize<Log>(reader);
        Assert.IsType<DeferredDictionary<CodeContext>>(actual.CodeContexts);
        Assert.IsType<DeferredList<LogMessage>>(actual.Messages);
    }

    // Deep compare objects which were returned
    AssertEqual(expected, actual);

    // DeferredList Code Coverage - CopyTo()
    // Copy starting at index 1 to exercise the non-zero arrayIndex path.
    LogMessage[] messages = new LogMessage[actual.Messages.Count + 1];
    actual.Messages.CopyTo(messages, 1);
    if (actual.Messages.Count > 0)
    {
        Assert.Equal<LogMessage>(actual.Messages[0], messages[1]);
    }

    // DeferredDictionary Code Coverage
    CodeContext context;

    // TryGetValue
    // NOTE(review): assumes the sample log always contains a "load" context
    // when non-empty — confirm against LogModelSampleBuilder's sample data.
    Assert.False(actual.CodeContexts.TryGetValue("missing", out context));
    if (actual.CodeContexts.Count > 0)
    {
        Assert.True(actual.CodeContexts.TryGetValue("load", out context));
    }

    // ContainsKey
    Assert.False(actual.CodeContexts.ContainsKey("missing"));
    if (actual.CodeContexts.Count > 0)
    {
        Assert.True(actual.CodeContexts.ContainsKey("load"));
    }

    // Contains
    // Built to match the "load" entry's value in the sample data, so the
    // same pair fails on a missing key and on a key with a different value.
    context = new CodeContext() { Name = "LoadRules()", Type = CodeContextType.Method, ParentContextID = "run" };
    Assert.False(actual.CodeContexts.Contains(new KeyValuePair<string, CodeContext>("missing", context))); // Missing Key
    Assert.False(actual.CodeContexts.Contains(new KeyValuePair<string, CodeContext>("run", context))); // Different Value
    if (actual.CodeContexts.Count > 0)
    {
        Assert.True(actual.CodeContexts.Contains(new KeyValuePair<string, CodeContext>("load", context))); // Match
        Assert.False(actual.CodeContexts.Contains(new KeyValuePair<string, CodeContext>("load", null))); // Match vs. Null
    }

    // CopyTo
    KeyValuePair<string, CodeContext>[] contexts = new KeyValuePair<string, CodeContext>[actual.CodeContexts.Count + 1];
    actual.CodeContexts.CopyTo(contexts, 1);
    if (actual.CodeContexts.Count > 0)
    {
        Assert.Equal(actual.CodeContexts.First(), contexts[1]);
    }

    // Enumeration: snapshot every pair, then verify the count matches.
    Dictionary<string, CodeContext> contextsCopy = new Dictionary<string, CodeContext>();
    foreach (KeyValuePair<string, CodeContext> pair in actual.CodeContexts)
    {
        contextsCopy[pair.Key] = pair.Value;
    }
    Assert.Equal(actual.CodeContexts.Count, contextsCopy.Count);

    // Enumerate Keys: every key must appear in the snapshot, with no extras.
    int keyCount = 0;
    foreach (string key in actual.CodeContexts.Keys)
    {
        Assert.True(contextsCopy.ContainsKey(key));
        keyCount++;
    }
    Assert.Equal(contextsCopy.Count, keyCount);

    // Enumerate Values: every value must appear in the snapshot, with no extras.
    int valueCount = 0;
    foreach (CodeContext value in actual.CodeContexts.Values)
    {
        Assert.True(contextsCopy.ContainsValue(value));
        valueCount++;
    }
    Assert.Equal(contextsCopy.Count, valueCount);
}