public void CreateAndReloadCombinedFile()
{
    // Deliberately tiny buffer so every file's content spans multiple chunks.
    int maxBackingBufferBytes = 3;

    FakeFile fileA = CreateFakeFile(R("foo", "bar1.txt"), "bar1.txt");
    FakeFile fileB = CreateFakeFile(R("foo", "bar2.txt"), "bar2.txt");
    FakeFile fileC = CreateFakeFile(R("foo", "bar3.txt"), "bar3.txt");

    // Each fake file must be big enough to be split across several backing buffers.
    XAssert.IsTrue(fileA.Content.Length > maxBackingBufferBytes * 3);
    XAssert.IsTrue(fileB.Content.Length > maxBackingBufferBytes * 3);
    XAssert.IsTrue(fileC.Content.Length > maxBackingBufferBytes * 3);

    Logger.FileCombinerStats stats = new Logger.FileCombinerStats();

    // Write the three files into a fresh combiner (added in reverse order on purpose).
    using (FileCombiner writer = CreateFileCombiner(m_loggingContext, m_path, 1))
    {
        writer.GetStatsRefForTest(ref stats);
        AddFile(writer, fileC);
        AddFile(writer, fileB);
        AddFile(writer, fileA);
    }

    XAssert.AreEqual(3, stats.EndCount);
    XAssert.AreEqual(0, stats.CompactingTimeMs, "FileCombiner should not have been compacted");

    // Sanity check: the combined file on disk is longer than one backing buffer,
    // so the reads below will have to cross buffer boundaries.
    FileInfo combinedFileInfo = new FileInfo(m_path);
    XAssert.IsTrue(combinedFileInfo.Length > maxBackingBufferBytes);

    // Reload the combined file using the tiny backing buffer.
    using (FileCombiner reader = CreateFileCombiner(m_loggingContext, m_path, 1, maxBackingBufferBytes))
    {
        // Known path with the matching hash: the original content round-trips.
        using (MemoryStream ms = reader.RequestFile(fileA.Path, fileA.Hash))
        {
            XAssert.IsNotNull(ms);
            AssertContentMatches(ms, fileA);
        }

        // Known path but mismatched hash: no content may be returned.
        using (MemoryStream ms = reader.RequestFile(fileA.Path, fileB.Hash))
        {
            XAssert.IsNull(ms);
        }

        // Unknown path: no content may be returned.
        using (MemoryStream ms = reader.RequestFile(R("foo", "bar4"), fileB.Hash))
        {
            XAssert.IsNull(ms);
        }
    }
}
public void CreateAndReloadCombinedFileAlignmentBug()
{
    // Build a fake file whose content is exactly one compact-encoded -1.
    FakeFile testFile = default(FakeFile);
    testFile.Path = R("foo", "bar1.txt");

    using (var contentStream = new MemoryStream())
    {
        using (var binaryWriter = new BuildXLWriter(debug: false, stream: contentStream, leaveOpen: true, logStats: false))
        {
            binaryWriter.WriteCompact(-1);
        }

        testFile.Content = contentStream.ToArray();
        testFile.Hash = ContentHashingUtilities.HashBytes(testFile.Content);
    }

    // magic size that used to trigger a bug
    int maxBackingBufferBytes = 4 + 6 + 8 + testFile.Content.Length;

    Logger.FileCombinerStats stats = new Logger.FileCombinerStats();

    // Write the single file into a fresh combiner.
    using (FileCombiner writer = CreateFileCombiner(m_loggingContext, m_path, 1))
    {
        writer.GetStatsRefForTest(ref stats);
        AddFile(writer, testFile);
    }

    XAssert.AreEqual(1, stats.EndCount);
    XAssert.AreEqual(0, stats.CompactingTimeMs, "FileCombiner should not have been compacted");

    // Reload with the buffer size that previously hit the alignment bug
    // and verify the stored content still round-trips.
    using (FileCombiner reader = CreateFileCombiner(m_loggingContext, m_path, 1, maxBackingBufferBytes))
    {
        using (MemoryStream ms = reader.RequestFile(testFile.Path, testFile.Hash))
        {
            XAssert.IsNotNull(ms);
            AssertContentMatches(ms, testFile);
        }
    }
}