public void BrokenEventsDoNotStopReaderAndAreReported()
{
    // Arrange: a zip that interleaves valid events with plain-text junk.
    var zipPath = Path.Combine(RawDir, "a.zip");
    using (var archive = new WritingArchive(zipPath))
    {
        archive.Add(new CommandEvent { IDESessionUUID = "s1" });
        archive.AddAsPlainText("xxx");
        archive.Add(new CommandEvent { IDESessionUUID = "s2" });
        archive.AddAsPlainText("xxx");
        archive.Add(new CommandEvent { IDESessionUUID = "s3" });
    }

    // All readable events survive the broken entries...
    AssertIds("sid:s1", "sid:s2", "sid:s3");

    // ...and each broken entry is reported exactly once.
    Mock.Get(_log)
        .Verify(l => l.DeserializationError(zipPath, "1.json", It.IsAny<JsonReaderException>()), Times.Once);
    Mock.Get(_log)
        .Verify(l => l.DeserializationError(zipPath, "3.json", It.IsAny<JsonReaderException>()), Times.Once);
}
public void SetUp()
{
    // Creates the test fixture archive: a valid event, one unparseable
    // plain-text entry, then another valid event.
    _zip = Path.Combine(DirTestRoot, "a.zip");
    using (var archive = new WritingArchive(_zip))
    {
        archive.Add(Event(1));
        archive.AddAsPlainText("xxx"); // not valid JSON -> broken entry
        archive.Add(Event(2));
    }
}
public void ShouldCountAdditions()
{
    // NumItemsAdded counts only non-null additions and stays intact
    // after the archive is disposed.
    var archive = new WritingArchive(_zipPath);
    Assert.AreEqual(0, archive.NumItemsAdded);

    archive.Add("a");
    Assert.AreEqual(1, archive.NumItemsAdded);

    archive.Add((string) null); // nulls must be ignored
    Assert.AreEqual(1, archive.NumItemsAdded);

    archive.Add("a");
    Assert.AreEqual(2, archive.NumItemsAdded);

    archive.Dispose(); // deliberate manual dispose: counter is checked afterwards
    Assert.AreEqual(2, archive.NumItemsAdded);
}
public void ExistingFilesAreNotOverwritten()
{
    _sut.Dispose();

    // simulate leftovers of a previous run
    Directory.CreateDirectory(Path.Combine(_root, "a"));
    var metaFile = Path.Combine(_root, @"a\.zipfolder");
    var zip0 = Path.Combine(_root, @"a\0.zip");
    var zip1 = Path.Combine(_root, @"a\1.zip");
    File.WriteAllText(metaFile, "test");
    using (var existingZip = new WritingArchive(zip0))
    {
        existingZip.Add("test");
    }

    // re-initialize the cache and write into the same folder
    _sut = new ZipFolderLRUCache<string>(_root, 2);
    var archive = _sut.GetArchive("a");
    archive.Add("x");
    _sut.Dispose();

    // pre-existing files stay untouched; new content lands in a fresh zip
    Assert.True(File.Exists(metaFile));
    Assert.True(File.Exists(zip0));
    Assert.True(File.Exists(zip1));
    Assert.AreEqual("test", File.ReadAllText(metaFile));
    AssertZipContent(zip0, "test");
    AssertZipContent(zip1, "x");
}
private void RunAnalysis(Lifetime lifetime, ISolution solution)
{
    // Runs the context analysis over every project in the solution,
    // streaming each found context into the output zip.
    Console.WriteLine("Starting analysis... ({0})", DateTime.Now);
    using (var archive = new WritingArchive(_zipName))
    {
        var numContextsWithMethods = 0;
        Action<Context> onContextFound = ctx =>
        {
            _logger.Info("\t> {0}".FormatEx(ctx.SST.EnclosingType));
            // ReSharper disable once AccessToDisposedClosure
            archive.Add(ctx);
            if (ctx.SST.Methods.Count > 0)
            {
                numContextsWithMethods++;
            }
        };

        new ContextSolutionAnalysis(solution, _logger, onContextFound).AnalyzeAllProjects();
        _logger.EndPossibleErrorBlock();

        Console.WriteLine("Analysis finished! ({0})", DateTime.Now);
        Console.WriteLine(
            "found {0} context(s), {1} contain(s) method declarations",
            archive.NumItemsAdded,
            numContextsWithMethods);
    }
}
public string Merge(IKaVESet<string> relZips)
{
    // Merges all events from the given relative zip paths into the merged
    // location of the first entry and returns that relative path.
    Asserts.NotNull(relZips);
    Asserts.That(relZips.Count > 0);

    // every input archive must exist before we start writing
    foreach (var relZip in relZips)
    {
        var fullPath = _io.GetFullPath_In(relZip);
        Asserts.That(File.Exists(fullPath));
    }

    var relZipOut = relZips.First();
    _log.NextGroup(relZips.Count, relZipOut);

    var zipOut = _io.GetFullPath_Merged(relZipOut);
    _io.EnsureParentExists(zipOut);

    var numEvents = 0;
    using (var archive = new WritingArchive(zipOut))
    {
        foreach (var e in ReadArchives(relZips))
        {
            numEvents++;
            archive.Add(e);
        }
    }

    _log.Result(numEvents);
    return relZipOut;
}
public void NoExceptionNoReport()
{
    // a fully valid archive...
    using (var archive = new WritingArchive(_zip))
    {
        archive.Add(Event(1));
        archive.Add(Event(2));
    }

    // ...must never trigger the failure callback while reading
    Exception reported = null;
    using (var sut = new FailsafeIDEEventReadingArchive(_zip, (f, ex) => { reported = ex; }))
    {
        // ReSharper disable once ReturnValueOfPureMethodIsNotUsed
        sut.ReadAllLazy().ToList();
    }
    Assert.Null(reported);
}
private void Write(params string[] entries)
{
    // test helper: writes all given entries into the fixture zip
    using (var archive = new WritingArchive(_zipPath))
    {
        foreach (var entry in entries)
        {
            archive.Add(entry);
        }
    }
}
public void NothingHappensOnSecondDispose()
{
    var sut = new WritingArchive(_zipPath);
    Assert.False(File.Exists(_zipPath)); // file is created lazily on first Add
    sut.Add("x");
    Assert.True(File.Exists(_zipPath));

    sut.Dispose();
    Assert.True(File.Exists(_zipPath));

    // a second Dispose must not recreate (or otherwise touch) the file
    File.Delete(_zipPath);
    sut.Dispose();
    Assert.False(File.Exists(_zipPath));
}
public void DeserializationIssuesNoNotCrashTheCleanerAndAreReported()
{
    // NOTE(review): "NoNot" in the name looks like a typo for "DoNot";
    // renaming would change the public test name, so it is only flagged here.
    var zipPath = Path.Combine(MergedDir, "a.zip");
    using (var archive = new WritingArchive(zipPath))
    {
        archive.Add(E("a", 10));
        archive.AddAsPlainText("xxx");
        archive.Add(E("a", 20));
        archive.AddAsPlainText("yyy");
        archive.Add(E("a", 30));
    }

    Clean("a.zip");

    // all valid events survive the cleaning step...
    AssertEvents("a.zip", E("a", 10), E("a", 20), E("a", 30));

    // ...and both broken entries are reported exactly once
    Mock.Get(_log)
        .Verify(l => l.DeserializationError(zipPath, "1.json", It.IsAny<JsonReaderException>()), Times.Once);
    Mock.Get(_log)
        .Verify(l => l.DeserializationError(zipPath, "3.json", It.IsAny<JsonReaderException>()), Times.Once);
}
public void BrokenEventsDoNotStopMergingAndAreReported()
{
    Add(@"a.zip", Event("a"));

    // b.zip interleaves valid events with unparseable plain text
    var zipPath = Path.Combine(RawDir, "b.zip");
    using (var archive = new WritingArchive(zipPath))
    {
        archive.Add(Event("b1"));
        archive.AddAsPlainText("xxx");
        archive.Add(Event("b2"));
        archive.AddAsPlainText("xxx");
        archive.Add(Event("b3"));
    }

    Merge(@"a.zip", @"b.zip");

    // merging keeps every readable event...
    Expect(@"a.zip", Event("a"), Event("b1"), Event("b2"), Event("b3"));

    // ...and reports each broken entry exactly once
    Mock.Get(_log)
        .Verify(l => l.DeserializationError(zipPath, "1.json", It.IsAny<JsonReaderException>()), Times.Once);
    Mock.Get(_log)
        .Verify(l => l.DeserializationError(zipPath, "3.json", It.IsAny<JsonReaderException>()), Times.Once);
}
public void ReadArchive_HappyPath()
{
    // write two known entries...
    var zipFileName = Path.Combine(_testRoot, "a.zip");
    var expecteds = new List<string> { "a", "b" };
    using (var archive = new WritingArchive(zipFileName))
    {
        foreach (var entry in expecteds)
        {
            archive.Add(entry);
        }
    }

    // ...and expect to read back exactly the same set
    using (var ra = _sut.ReadArchive(zipFileName))
    {
        var actuals = ra.GetAll<string>();
        CollectionAssert.AreEquivalent(expecteds, actuals);
    }
}
private void AddFile(string fileName, params IDEEvent[] events)
{
    // Test helper: writes the given events into a zip at fileName
    // (relative to _dirIn), creating intermediate directories as needed.
    var fullName = Path.Combine(_dirIn, fileName);

    // Directory.CreateDirectory is a documented no-op when the path already
    // exists, so the previous Directory.Exists() pre-check was redundant.
    // Also guard against the empty string that GetDirectoryName returns for
    // root paths, which would make CreateDirectory throw.
    var dir = Path.GetDirectoryName(fullName);
    if (!string.IsNullOrEmpty(dir))
    {
        Directory.CreateDirectory(dir);
    }

    using (var archive = new WritingArchive(fullName))
    {
        foreach (var e in events)
        {
            archive.Add(e);
        }
    }
}