public void MultipleEntries()
{
    // Each ZipStreamWriter instance appends its own, distinctly named entry to the same archive
    var zipPath = Guid.NewGuid().ToString();

    var firstWriter = new ZipStreamWriter(zipPath, "pepe");
    firstWriter.WriteLine("grillo");
    firstWriter.DisposeSafely();

    var secondWriter = new ZipStreamWriter(zipPath, "pepeBis");
    secondWriter.WriteLine("the");
    secondWriter.WriteLine("best");
    secondWriter.DisposeSafely();

    // Unzip yields one key/value pair per entry: entry name -> lines of that entry
    var entries = QuantConnect.Compression.Unzip(zipPath).ToList();
    Assert.AreEqual(2, entries.Count);
    Assert.AreEqual("pepe", entries[0].Key);
    Assert.AreEqual("pepeBis", entries[1].Key);

    var firstEntryLines = entries[0].Value.ToList();
    Assert.AreEqual(1, firstEntryLines.Count);
    Assert.AreEqual("grillo", firstEntryLines[0]);

    var secondEntryLines = entries[1].Value.ToList();
    Assert.AreEqual(2, secondEntryLines.Count);
    Assert.AreEqual("the", secondEntryLines[0]);
    Assert.AreEqual("best", secondEntryLines[1]);

    // Clean up the temporary archive
    File.Delete(zipPath);
}
/// <summary>
/// Write this file to disk with the given data.
/// </summary>
/// <param name="filePath">The full path to the new file</param>
/// <param name="data">The data to write as a list of dates and strings</param>
/// <remarks>The reason we have the data as IEnumerable(DateTime, string) is to support
/// a generic write that works for all resolutions. In order to merge in hour/daily case I need the
/// date of the data to correctly merge the two. In order to support writing ticks I need to allow
/// two data points to have the same time. Thus I cannot use a single list of just strings nor
/// a sorted dictionary of DateTimes and strings.</remarks>
private void WriteFile(string filePath, List<TimedLine> data)
{
    // Nothing to write: empty or missing payload is a no-op
    if (data == null || data.Count == 0)
    {
        return;
    }
    var date = data[0].Time;
    // Generate this csv entry name
    var entryName = LeanData.GenerateZipEntryName(_symbol, date, _resolution, _tickType);

    // Check disk once for this file ahead of time, reuse where possible
    var fileExists = File.Exists(filePath);

    // If our file doesn't exist it's possible the directory doesn't exist, make sure at least the directory exists
    if (!fileExists)
    {
        Directory.CreateDirectory(Path.GetDirectoryName(filePath));
    }

    // Handle merging of files
    // Only merge on files with hour/daily resolution, that exist, and can be loaded
    string finalData = null;
    if (_writePolicy == WritePolicy.Append)
    {
        // Append writes go straight through the zip writer; guard with try/finally so the
        // writer (and the underlying zip handle) is always released even if a write throws.
        // DisposeSafely is kept (rather than 'using') to preserve its best-effort dispose semantics.
        var streamWriter = new ZipStreamWriter(filePath, entryName);
        try
        {
            foreach (var tuple in data)
            {
                streamWriter.WriteLine(tuple.Line);
            }
        }
        finally
        {
            streamWriter.DisposeSafely();
        }
    }
    else if (_writePolicy == WritePolicy.Merge && fileExists && TryLoadFile(filePath, entryName, date, out var rows))
    {
        // Perform merge on loaded rows: incoming data replaces any existing row with the same time
        foreach (var timedLine in data)
        {
            rows[timedLine.Time] = timedLine.Line;
        }

        // Final merged data product
        finalData = string.Join("\n", rows.Values);
    }
    else
    {
        // Otherwise just extract the data from the given list
        finalData = string.Join("\n", data.Select(x => x.Line));
    }

    if (finalData != null)
    {
        // Merge/overwrite paths store the full serialized entry through the cache provider
        var bytes = Encoding.UTF8.GetBytes(finalData);
        _dataCacheProvider.Store($"{filePath}#{entryName}", bytes);
    }

    Log.Debug($"LeanDataWriter.Write(): Appended: {filePath} @ {entryName}");
}
public void Create()
{
    // A single writer produces one entry containing every line written, in order
    var zipPath = Guid.NewGuid().ToString();

    var writer = new ZipStreamWriter(zipPath, "pepe");
    writer.WriteLine("grillo");
    writer.WriteLine("the");
    writer.WriteLine("best");
    writer.DisposeSafely();

    // Read the archive back and compare against the expected lines
    var readBack = QuantConnect.Compression.ReadLines(zipPath).ToList();
    var expected = new[] { "grillo", "the", "best" };
    Assert.AreEqual(expected.Length, readBack.Count);
    for (var i = 0; i < expected.Length; i++)
    {
        Assert.AreEqual(expected[i], readBack[i]);
    }

    // Clean up the temporary archive
    File.Delete(zipPath);
}