public void Reader_Roundtrip(Func<string, bool, ITabularReader> buildReader, Func<Stream, ITabularWriter> buildWriter)
{
    // Produce a sample file containing values which require CSV escaping.
    string sourcePath = "ValidSample.xsv";
    WriteValidSample(new FileStream(sourcePath, FileMode.Create, FileAccess.ReadWrite), buildWriter);

    string copyPath = sourcePath + ".new";

    // Stream every cell from the sample into a second file; each value is
    // unescaped on read and re-escaped on write.
    using (ITabularReader reader = buildReader(sourcePath, true))
    {
        using (ITabularWriter writer = buildWriter(new FileStream(copyPath, FileMode.Create, FileAccess.ReadWrite)))
        {
            writer.SetColumns(reader.Columns);

            while (reader.NextRow())
            {
                for (int column = 0; column < reader.CurrentRowColumns; ++column)
                {
                    writer.Write(reader.Current(column).ToString8());
                }

                writer.NextRow();
            }
        }
    }

    // A faithful roundtrip produces byte-for-byte identical file content.
    string fileBefore = File.ReadAllText(sourcePath);
    string fileAfter = File.ReadAllText(copyPath);
    Assert.AreEqual(fileBefore, fileAfter);
}
public void Writer_WriteValidUsingAllOverloads(Stream stream, Func<Stream, ITabularWriter> buildWriter)
{
    // Four names, one containing backslashes which the writer must handle.
    String8Set names = String8Set.Split(String8.Convert("Jeff,Bill,Todd,\\Barry\\", new byte[30]), UTF8.Comma, new int[5]);

    using (ITabularWriter w = buildWriter(stream))
    {
        Assert.AreEqual(0, w.RowCountWritten);
        w.SetColumns(new string[] { "ID", "IsEven", "Backslash", "Today", "Name", "Description" });
        Assert.AreEqual(0, w.RowCountWritten);

        for (int row = 0; row < 10; ++row)
        {
            // Whole-value overloads: int, bool, byte, DateTime, String8.
            w.Write(row);
            w.Write(row % 2 == 0);
            w.Write(UTF8.Backslash);
            w.Write(new DateTime(2017, 05, 03, 0, 0, 0, DateTimeKind.Utc));
            w.Write(names[row % names.Count]);

            // Partial-value overloads assemble one cell from several parts.
            w.WriteValueStart();
            w.WriteValuePart(row + 1);
            w.WriteValuePart(row % 2 == 1);
            w.WriteValuePart(UTF8.Quote);
            w.WriteValuePart(new DateTime(2017, 05, 01, 0, 0, 0, DateTimeKind.Utc));
            w.WriteValuePart(names[row % names.Count]);
            w.WriteValueEnd();

            // RowCountWritten advances only when NextRow completes the row.
            Assert.AreEqual(row, w.RowCountWritten);
            w.NextRow();
            Assert.AreEqual(row + 1, w.RowCountWritten);

            // BytesWritten must track the underlying stream position.
            Assert.AreEqual(stream.Position, w.BytesWritten);
        }
    }
}
private static void WriteSizeSummary(ITabularReader reader, ITabularWriter writer)
{
    // Trace a one-line summary for the reader, omitting the byte count
    // when the source doesn't report one (BytesRead <= 0).
    if (reader != null)
    {
        long bytesRead = reader.BytesRead;
        Trace.WriteLine(bytesRead <= 0
            ? String.Format("Read: {0:n0} rows.", reader.RowCountRead)
            : String.Format("Read: {0}, {1:n0} rows.", bytesRead.SizeString(), reader.RowCountRead));
    }

    // Same for the writer: byte count only when known.
    if (writer != null)
    {
        long bytesWritten = writer.BytesWritten;
        Trace.WriteLine(bytesWritten <= 0
            ? String.Format("Wrote: {0:n0} rows.", writer.RowCountWritten)
            : String.Format("Wrote: {0}, {1:n0} rows.", bytesWritten.SizeString(), writer.RowCountWritten));
    }
}
public void WriteTo(ITabularWriter writer, String8Block block)
{
    // Writes this user as one row. Emits the column header only before the
    // first row (RowCountWritten == 0) so repeated calls share one header.
    if (writer.RowCountWritten == 0)
    {
        writer.SetColumns(new string[] { "UserID", "UserGuid", "EmailAddress", "Region", "Browser", "OS", "IsPremiumUser", "JoinDate" });
    }

    // Reuse the caller's block for this row's string copies; clearing it
    // means copies from previous rows are invalidated each call.
    block.Clear();

    // Column order must match SetColumns above.
    writer.Write(this.ID);
    writer.Write(block.GetCopy(this.Guid.ToString()));
    writer.Write(block.GetCopy(this.EmailAddress));
    writer.Write(block.GetCopy(this.Region));
    writer.Write(block.GetCopy(this.Browser));
    writer.Write(block.GetCopy(this.OS));
    writer.Write(this.IsPremiumUser);
    writer.Write(this.JoinDate);
    writer.NextRow();
}
private static void Compare(string oldFilePath, string newFilePath, string outputFilePath, string columnIdentifier)
{
    String8Block block = new String8Block();

    // Collect the distinct values of the identified column from each file.
    HashSet<String8> oldValues = new HashSet<String8>();
    HashSet<String8> newValues = new HashSet<String8>();

    using (ITabularReader oldReader = TabularFactory.BuildReader(oldFilePath))
    {
        int oldColumnIndex = oldReader.ColumnIndex(columnIdentifier);

        while (oldReader.NextRow())
        {
            oldValues.Add(block.GetCopy(oldReader.Current(oldColumnIndex)));
        }

        Trace.WriteLine(String.Format("Old: {0:n0} values for \"{1}\" in {2:n0} rows.", oldValues.Count, columnIdentifier, oldReader.RowCountRead));
    }

    using (ITabularReader newReader = TabularFactory.BuildReader(newFilePath))
    {
        int newColumnIndex = newReader.ColumnIndex(columnIdentifier);

        while (newReader.NextRow())
        {
            newValues.Add(block.GetCopy(newReader.Current(newColumnIndex)));
        }

        Trace.WriteLine(String.Format("New: {0:n0} values for \"{1}\" in {2:n0} rows.", newValues.Count, columnIdentifier, newReader.RowCountRead));
    }

    // Compute the one-sided differences.
    HashSet<String8> oldOnly = new HashSet<String8>(oldValues);
    oldOnly.ExceptWith(newValues);

    HashSet<String8> newOnly = new HashSet<String8>(newValues);
    newOnly.ExceptWith(oldValues);

    Trace.WriteLine(String.Format("{0:n0} values were only in \"{1}\".\r\n{2:n0} values were only in \"{3}\".", oldOnly.Count, oldFilePath, newOnly.Count, newFilePath));

    // Write each difference, marked '-' (old only) or '+' (new only).
    String8 oldMarker = String8.Convert("-", new byte[1]);
    String8 newMarker = String8.Convert("+", new byte[1]);

    using (ITabularWriter writer = TabularFactory.BuildWriter(outputFilePath))
    {
        writer.SetColumns(new string[] { "In", columnIdentifier });

        foreach (String8 value in oldOnly)
        {
            writer.Write(oldMarker);
            writer.Write(value);
            writer.NextRow();
        }

        foreach (String8 value in newOnly)
        {
            writer.Write(newMarker);
            writer.Write(value);
            writer.NextRow();
        }
    }
}
public void Initialize()
{
    String8Block block = new String8Block();
    String8[] names = new String8[] { block.GetCopy("Scott"), block.GetCopy("Mike"), block.GetCopy("Jeff"), block.GetCopy("Sophie") };

    // Build a 1,000 row sample with id, parity, running sum, timestamp, and name.
    using (ITabularWriter sample = TabularFactory.BuildWriter(s_sampleFilePath))
    {
        sample.SetColumns(new string[] { "ID", "IsEven", "Count", "WhenAdded", "Name" });

        int runningSum = 0;
        for (int id = 0; id < 1000; ++id)
        {
            runningSum += id;

            sample.Write(id);
            sample.Write((id & 0x1) == 0);
            sample.Write(runningSum);
            sample.Write(new DateTime(2017, 05, 23).AddMinutes(id));
            sample.Write(names[id % names.Length]);
            sample.NextRow();
        }
    }
}
public void Dispose()
{
    // Release the source first, once.
    if (_source != null)
    {
        _source.Dispose();
        _source = null;
    }

    if (_writer != null)
    {
        try
        {
            _writer.Dispose();

            // On Dispose, tell the StreamProvider to publish the table.
            _streamProvider?.Publish(_outputFilePath);
        }
        finally
        {
            // Null the field even if Publish throws, so Dispose is not retried.
            _writer = null;
        }
    }
}
private void WriteException(ErrorContext context, bool isValid, ITabularWriter writer)
{
    String8Block block = new String8Block();

    writer.SetColumns(new string[] { "Valid", "Usage", "ItemCategory", "ErrorMessage", "Values", "InvalidToken", "InvalidTokenIndex" });

    writer.Write(isValid);
    writer.Write(block.GetCopy(context.Usage));
    writer.Write(block.GetCopy(context.InvalidValueCategory));
    writer.Write(block.GetCopy(context.ErrorMessage));

    // Join the valid values (if any) into a single delimited cell.
    String8 joinedValues = String8.Empty;
    if (context.ValidValues != null)
    {
        foreach (string validValue in context.ValidValues)
        {
            joinedValues = block.Concatenate(joinedValues, s_delimiter, block.GetCopy(validValue));
        }
    }
    writer.Write(joinedValues);

    // InvalidValue may be null; write an empty cell in that case.
    writer.Write(block.GetCopy(context.InvalidValue ?? ""));
    writer.Write(context.InvalidTokenIndex);

    writer.NextRow();
}
private static void MatchBoolCompare(ITabularWriter writer, ITabularReader reader, WhereResult result) { }
private static void HtmlInnerText(string inputFilePath, string outputFilePath, string columnsDelimited)
{
    using (ITabularReader reader = TabularFactory.BuildReader(inputFilePath))
    {
        // Resolve the comma-delimited column names to indices once, up front.
        // A HashSet gives O(1) membership checks in the per-cell loop below
        // (List<int>.Contains was O(columns) for every cell of every row).
        HashSet<int> columnIndicesToEscape = new HashSet<int>(columnsDelimited.Split(',').Select((col) => reader.ColumnIndex(col.Trim())));

        using (ITabularWriter writer = TabularFactory.BuildWriter(outputFilePath))
        {
            writer.SetColumns(reader.Columns);

            while (reader.NextRow())
            {
                for (int i = 0; i < reader.CurrentRowColumns; ++i)
                {
                    if (columnIndicesToEscape.Contains(i))
                    {
                        // Strip HTML tags from the selected columns' values.
                        WriteHtmlEscaped(reader.Current(i).ToString8(), writer);
                    }
                    else
                    {
                        writer.Write(reader.Current(i).ToString8());
                    }
                }

                writer.NextRow();
            }

            WriteSizeSummary(reader, writer);
        }
    }
}
public static void Write(IStreamProvider streamProvider, string tableRootPath, TableMetadata metadata)
{
    // Persists table metadata as three artifacts under tableRootPath:
    // a schema file, a metadata file, and the query text; then caches
    // the metadata keyed by provider and root path.
    String8Block block = new String8Block();

    // Schema file: one (Name, Type) row per column.
    using (ITabularWriter sw = TabularFactory.BuildWriter(streamProvider.OpenWrite(Path.Combine(tableRootPath, SchemaFileName)), SchemaFileName))
    {
        sw.SetColumns(new string[] { "Name", "Type" });

        foreach (ColumnDetails column in metadata.Schema)
        {
            sw.Write(block.GetCopy(column.Name));
            sw.Write(block.GetCopy(column.Type.Name.ToString()));
            sw.NextRow();
        }
    }

    // Metadata file: (Name, Context, Value) triples; currently just RowCount
    // with an empty context.
    using (ITabularWriter mw = TabularFactory.BuildWriter(streamProvider.OpenWrite(Path.Combine(tableRootPath, MetadataFileName)), MetadataFileName))
    {
        mw.SetColumns(new string[] { "Name", "Context", "Value" });

        mw.Write(block.GetCopy("RowCount"));
        mw.Write(String8.Empty);
        mw.Write(metadata.RowCount);
        mw.NextRow();
    }

    // The query which produced the table, then cache by location.
    streamProvider.WriteAllText(Path.Combine(tableRootPath, ConfigQueryPath), metadata.Query);
    s_Cache.Add($"{streamProvider}|{tableRootPath}", metadata);
}
private static void Copy(string inputFilePath, string outputFilePath, string columnsDelimited)
{
    // Parse and trim the requested column names.
    List<string> columns = new List<string>();
    foreach (string columnName in columnsDelimited.Split(','))
    {
        columns.Add(columnName.Trim());
    }

    using (ITabularReader reader = TabularFactory.BuildReader(inputFilePath))
    {
        // Map each requested column to its index in the input file.
        int[] columnIndices = new int[columns.Count];
        for (int i = 0; i < columnIndices.Length; ++i)
        {
            columnIndices[i] = reader.ColumnIndex(columns[i]);
        }

        using (ITabularWriter writer = TabularFactory.BuildWriter(outputFilePath))
        {
            writer.SetColumns(columns);

            // Copy only the selected columns, in the requested order.
            while (reader.NextRow())
            {
                foreach (int columnIndex in columnIndices)
                {
                    writer.Write(reader.Current(columnIndex).ToString8());
                }

                writer.NextRow();
            }

            WriteSizeSummary(reader, writer);
        }
    }
}
private static void HtmlInnerText(string inputFilePath, string outputFilePath, string columnIdentifier)
{
    using (ITabularReader reader = TabularFactory.BuildReader(inputFilePath))
    {
        int escapedColumnIndex = reader.ColumnIndex(columnIdentifier);

        using (ITabularWriter writer = TabularFactory.BuildWriter(outputFilePath))
        {
            writer.SetColumns(reader.Columns);

            while (reader.NextRow())
            {
                for (int i = 0; i < reader.CurrentRowColumns; ++i)
                {
                    String8 cell = reader.Current(i).ToString8();

                    if (i == escapedColumnIndex)
                    {
                        // Strip HTML tags from the identified column only.
                        WriteHtmlEscaped(cell, writer);
                    }
                    else
                    {
                        writer.Write(cell);
                    }
                }

                writer.NextRow();
            }

            WriteSizeSummary(reader, writer);
        }
    }
}
public static void WriteHtmlEscaped(String8 value, ITabularWriter writer)
{
    // Writes 'value' as one cell with every '<'...'>' span removed, using the
    // writer's partial-value API so the surviving text fragments form one value.
    writer.WriteValueStart();

    int writeFrom = 0;
    while (true)
    {
        // Look for an Html Tag
        int startOfTag = value.IndexOf((byte)'<', writeFrom);
        if (startOfTag == -1) break;

        // Write up to the tag
        writer.WriteValuePart(value.Substring(writeFrom, startOfTag - writeFrom));

        // Find the end of the tag
        int endOfTag = value.IndexOf((byte)'>', startOfTag + 1);
        if (endOfTag == -1)
        {
            // Error: Unclosed tag, don't write anything else.
            // Setting writeFrom past the end makes the trailing Substring empty.
            writeFrom = value.Length;
            break;
        }

        // Resume scanning just past the tag's closing '>'.
        writeFrom = endOfTag + 1;
    }

    // Write the value after the last tag
    writer.WriteValuePart(value.Substring(writeFrom));
    writer.WriteValueEnd();
}
private static void RowId(string inputFilePath, string outputFilePath, int firstId = 1)
{
    int nextId = firstId;

    using (ITabularReader reader = TabularFactory.BuildReader(inputFilePath))
    {
        using (ITabularWriter writer = TabularFactory.BuildWriter(outputFilePath))
        {
            // Output schema: an "ID" column followed by every input column.
            List<string> outputColumns = new List<string>();
            outputColumns.Add("ID");
            outputColumns.AddRange(reader.Columns);
            writer.SetColumns(outputColumns);

            while (reader.NextRow())
            {
                // Assign sequential IDs starting at firstId.
                writer.Write(nextId++);

                for (int i = 0; i < reader.CurrentRowColumns; ++i)
                {
                    writer.Write(reader.Current(i).ToString8());
                }

                writer.NextRow();
            }

            WriteSizeSummary(reader, writer);
        }
    }
}
private static void Distinct(string inputFilePath, string outputFilePath, string columnIdentifier)
{
    String8Block block = new String8Block();
    HashSet<String8> seenValues = new HashSet<String8>();

    using (ITabularReader reader = TabularFactory.BuildReader(inputFilePath))
    {
        int columnIndex = reader.ColumnIndex(columnIdentifier);

        using (ITabularWriter writer = TabularFactory.BuildWriter(outputFilePath))
        {
            // Single-column output named after the source column.
            writer.SetColumns(new string[] { reader.Columns[columnIndex] });

            while (reader.NextRow())
            {
                String8 value = reader.Current(columnIndex).ToString8();

                // Write first occurrences only. Membership is checked before
                // copying so duplicate values don't grow the block.
                if (!seenValues.Contains(value))
                {
                    seenValues.Add(block.GetCopy(value));
                    writer.Write(value);
                    writer.NextRow();
                }
            }

            WriteSizeSummary(reader, writer);
        }
    }
}
private void Suggest(IHttpRequest request, IHttpResponse response)
{
    using (ITabularWriter writer = WriterForFormat("json", response))
    {
        try
        {
            string query = Require(request, "q");
            DateTime asOfDate = ParseOrDefault(request.QueryString["asof"], _xDatabaseContext.RequestedAsOfDateTime);

            SuggestResult result = _suggester.Suggest(query, asOfDate);

            if (result.IsValid == true && result.Context == null)
            {
                // Valid query with nothing further to suggest: report success only.
                writer.SetColumns(new string[] { "Valid" });
                writer.Write(true);
                writer.NextRow();
            }
            else
            {
                // Otherwise report the error context (and any valid next values).
                WriteException(result, writer);
            }
        }
        catch (Exception ex)
        {
            WriteException(ex, writer, false);
        }
    }
}
private static void Append(string inputFileOrFolderPath, string outputFilePath, string inputFileNamePattern = null)
{
    // Resolve the input to a set of files: everything in the folder matching
    // the (optional) pattern, or the single named file.
    string[] inputFilePaths;
    if (Directory.Exists(inputFileOrFolderPath))
    {
        if (String.IsNullOrEmpty(inputFileNamePattern)) inputFileNamePattern = "*.*";
        inputFilePaths = Directory.GetFiles(inputFileOrFolderPath, inputFileNamePattern);
    }
    else
    {
        inputFilePaths = new string[] { inputFileOrFolderPath };
    }

    ITabularWriter writer = null;
    string writerColumns = null;

    try
    {
        foreach (string inputFilePath in inputFilePaths)
        {
            using (ITabularReader reader = TabularFactory.BuildReader(inputFilePath))
            {
                // Build the writer, if this is the first file
                if (writer == null)
                {
                    writer = TabularFactory.AppendWriter(outputFilePath, reader.Columns);
                    writerColumns = String.Join(", ", reader.Columns);
                }

                // Validate columns match.
                // FIX: use ordinal case-insensitive comparison; the previous
                // string.Compare(a, b, true) was culture-sensitive, so the
                // match could differ with the machine's current culture.
                string sourceColumns = String.Join(", ", reader.Columns);
                if (!String.Equals(writerColumns, sourceColumns, StringComparison.OrdinalIgnoreCase))
                {
                    throw new InvalidOperationException(string.Format("Can't append to \"{0}\" because the column names don't match.\r\nExpect: {1}\r\nActual: {2}", outputFilePath, writerColumns, sourceColumns));
                }

                // Copy the rows
                CopyRows(reader, writer);

                // Write a summary for this input file
                Trace.WriteLine($"  {inputFilePath}, {reader.RowCountRead:n0} rows; {reader.BytesRead.SizeString()}");
            }
        }

        // Write a summary for the output file
        WriteSizeSummary(null, writer);
    }
    finally
    {
        if (writer != null)
        {
            writer.Dispose();
            writer = null;
        }
    }
}
private static void Copy(string inputFilePath, string outputFilePath, int rowLimit = -1)
{
    using (ITabularReader reader = TabularFactory.BuildReader(inputFilePath))
    {
        using (ITabularWriter writer = TabularFactory.BuildWriter(outputFilePath))
        {
            writer.SetColumns(reader.Columns);

            while (reader.NextRow())
            {
                for (int column = 0; column < reader.CurrentRowColumns; ++column)
                {
                    writer.Write(reader.Current(column).ToString8());
                }

                writer.NextRow();

                // Stop once the limit is reached. The default of -1 means
                // unlimited, since RowCountWritten never equals -1.
                if (writer.RowCountWritten == rowLimit) break;
            }

            WriteSizeSummary(reader, writer);
        }
    }
}
private static void MatchContains(ITabularReader reader, ITabularWriter writer, WhereResult result)
{
    // Convert the search term to String8 once, up front.
    string valueString = (string)result.Value;
    String8 searchTerm = String8.Convert(valueString, new byte[String8.GetLength(valueString)]);

    while (reader.NextRow())
    {
        // Skip rows too short to contain the target column.
        if (reader.CurrentRowColumns <= result.ColumnIndex) continue;

        // Skip rows whose cell doesn't contain the term.
        if (reader.Current(result.ColumnIndex).ToString8().IndexOf(searchTerm) == -1) continue;

        // Matching row: count it and echo it to the writer.
        result.MatchCount++;
        EchoRow(reader, writer);
    }
}
private static void NotStartsWith(string inputFilePath, string outputFilePath, string valueColumnIdentifier, string nameColumnIdentifier)
{
    using (ITabularReader reader = TabularFactory.BuildReader(inputFilePath))
    {
        int valueColumnIndex = reader.ColumnIndex(valueColumnIdentifier);
        int nameColumnIndex = reader.ColumnIndex(nameColumnIdentifier);

        using (ITabularWriter writer = TabularFactory.BuildWriter(outputFilePath))
        {
            writer.SetColumns(reader.Columns);

            while (reader.NextRow())
            {
                String8 name = reader.Current(nameColumnIndex).ToString8();
                String8 value = reader.Current(valueColumnIndex).ToString8();

                // Echo only rows where the value does NOT start with the name.
                if (value.StartsWith(name)) continue;

                for (int i = 0; i < reader.CurrentRowColumns; ++i)
                {
                    writer.Write(reader.Current(i).ToString8());
                }

                writer.NextRow();
            }

            WriteSizeSummary(reader, writer);
        }
    }
}
public void Reader_NewlineVariations(Func <Stream, ITabularWriter> buildWriter, Func <string, bool, ITabularReader> buildReader)
{
    // Exercises the reader against rows whose terminators were rewritten
    // after the fact: one row's terminator replaced with a bare newline and
    // the final row's terminator truncated away entirely.
    string xsvPath = "NewlineVariations.xsv";
    Stream stream = new FileStream(xsvPath, FileMode.Create, FileAccess.ReadWrite);

    using (ITabularWriter w = buildWriter(stream))
    {
        w.SetColumns(new string[] { "One", "Two", "Three" });

        for (int row = 0; row < 3; ++row)
        {
            w.Write(3 * row + 1);
            w.Write(3 * row + 2);
            w.Write(3 * row + 3);

            // Write the end of row but then override it
            long position = stream.Position;
            w.NextRow();

            if (row == 0)
            {
                // Row 0 - newline only
                stream.Seek(position, SeekOrigin.Begin);
                stream.WriteByte(UTF8.Newline);
            }
            else if (row == 2)
            {
                // Row 2 - no end of line (truncate the terminator off)
                stream.SetLength(position);
            }
        }
    }

    using (ITabularReader r = buildReader(xsvPath, true))
    {
        // Verify column heading not clipped even though no '\r'
        Assert.AreEqual("Three", r.Columns[2]);

        Assert.IsTrue(r.NextRow());
        Assert.AreEqual(3, r.CurrentRowColumns);
        // Verify last column doesn't have extra '\r' when terminated with '\r\n'
        Assert.AreEqual("3", r.Current(2).ToString());

        Assert.IsTrue(r.NextRow());
        Assert.AreEqual(3, r.CurrentRowColumns);
        // Verify last column not clipped when terminated with '\n'
        Assert.AreEqual("6", r.Current(2).ToString());

        Assert.IsTrue(r.NextRow());
        Assert.AreEqual(3, r.CurrentRowColumns);
        // Verify last column not clipped when unterminated [EOF]
        Assert.AreEqual("9", r.Current(2).ToString());

        Assert.IsFalse(r.NextRow(), "Reader didn't stop after last line without newline");
    }
}
private static void Generate_WebRequestSample(string basePath, int randomSeed, int userCount, int eventCount, int numberOfDays)
{
    // Generates a family of related sample datasets: a Users table plus
    // several WebRequest batches, some seeded with injected "issue" windows
    // (outage, blocked ports, uncached slowness) for analysis scenarios.
    Random r = new Random(randomSeed);
    DateTime asOfDate = DateTime.UtcNow.Date;

    String8Block block = new String8Block();
    WebRequestGenerator generator;
    string path;

    // Generate a set of users and write them out [for a week ago]
    asOfDate = asOfDate.AddDays(-8);
    path = Path.Combine(basePath, $"Users.{asOfDate:yyyyMMdd}.r{randomSeed}.{userCount}.csv");
    Console.WriteLine($"Writing {path}...");

    UserGenerator userGenerator = new UserGenerator(r, asOfDate);
    List <User> users = userGenerator.Next(userCount);

    using (ITabularWriter writer = TabularFactory.BuildWriter(path))
    {
        foreach (User user in users)
        {
            user.WriteTo(writer, block);
        }
    }

    // Stamp the file so its timestamp matches the date it represents.
    File.SetLastWriteTimeUtc(path, asOfDate);

    // Generate WebRequest Data [for a week ago]
    // (small event counts get a finer generator granularity: 10 vs 100)
    generator = new WebRequestGenerator(users, r, asOfDate, (eventCount < 1001 ? 10 : 100));
    BuildWebRequests(basePath, generator, eventCount, WebRequestWriteMode.All);
    asOfDate = asOfDate.AddDays(8);

    // Generate batches of WebRequest sample data [current]
    for (int day = 0; day < numberOfDays; ++day)
    {
        generator = new WebRequestGenerator(users, r, asOfDate, (eventCount < 1001 ? 10 : 100));

        // The first (most recent) day gets a premium-user outage window.
        if (day == 0)
        {
            generator.Issue = new PremiumUserOutage(asOfDate.AddMinutes(18), asOfDate.AddMinutes(104), r);
        }

        BuildWebRequests(basePath, generator, eventCount, WebRequestWriteMode.UserIdentityOnly);
        asOfDate = asOfDate.AddDays(-1);
    }

    // Generate one big joinable batch (note: overrides the eventCount argument)
    eventCount = 10 * 1000 * 1000;
    generator = new WebRequestGenerator(users, r, asOfDate, 1000);
    generator.Issue = new PortRangeBlocked(asOfDate.AddMinutes(1), asOfDate.AddMinutes(180), 11450, 11480);
    BuildWebRequests(basePath, generator, eventCount, WebRequestWriteMode.UserIdentityOnly);
    asOfDate = asOfDate.AddDays(-1);

    // Generate one huge minimal batch
    eventCount = 100 * 1000 * 1000;
    generator = new WebRequestGenerator(users, r, asOfDate, 1000);
    generator.Issue = new UncachedSlowness(asOfDate.AddMinutes(4), asOfDate.AddMinutes(36), r);
    BuildWebRequests(basePath, generator, eventCount, WebRequestWriteMode.Minimal);

    Console.WriteLine("Done.");
}
public void Dispose()
{
    // Dispose the writer exactly once; later calls are no-ops.
    if (_writer == null) return;

    _writer.Dispose();
    _writer = null;
}
public TabularFileWriter(IXTable source, ITabularWriter writer)
{
    _source = source;
    _writer = writer;

    // The output's columns mirror the source table's column names.
    var columnNames = _source.Columns.Select((column) => column.ColumnDetails.Name);
    _writer.SetColumns(columnNames);

    Initialize();
}
private static void Concatenate(string inputFilePath, string outputFilePath, String8 delimiter)
{
    // Groups consecutive rows sharing the same first-column value and writes
    // one output row per group, concatenating each other column's values
    // (skipping immediate repeats) with 'delimiter'.
    using (ITabularReader reader = TabularFactory.BuildReader(inputFilePath))
    {
        using (ITabularWriter writer = TabularFactory.BuildWriter(outputFilePath))
        {
            writer.SetColumns(reader.Columns);

            String8Block block = new String8Block();
            // NOTE(review): arrays are sized from CurrentRowColumns before any
            // NextRow() call — assumes the reader reports the column count
            // from the header at this point; confirm against the reader.
            String8[] lastValues = new String8[reader.CurrentRowColumns];
            String8[] combinedValues = new String8[reader.CurrentRowColumns];

            while (reader.NextRow())
            {
                String8 firstColumn = reader.Current(0).ToString8();

                if (reader.RowCountRead == 2)
                {
                    // First Row - Get the first ID only
                    // (RowCountRead == 2 on the first data row — presumably the
                    // header counted as row one; verify against the reader.)
                    combinedValues[0] = block.GetCopy(firstColumn);
                }
                else if (firstColumn.CompareTo(combinedValues[0], true) != 0)
                {
                    // If we have a new ID (and not first row)
                    // Write concatenated values for previous ID
                    WriteCombinedRow(writer, combinedValues);

                    // Reset for this ID
                    block.Clear();
                    combinedValues[0] = block.GetCopy(firstColumn);
                    for (int i = 1; i < combinedValues.Length; ++i)
                    {
                        combinedValues[i] = String8.Empty;
                    }
                }

                // Concatenate non-duplicate values to "row in progress"
                for (int i = 1; i < reader.CurrentRowColumns; ++i)
                {
                    String8 value = reader.Current(i).ToString8();
                    if (lastValues[i] != value)
                    {
                        lastValues[i] = value;
                        combinedValues[i] = block.Concatenate(combinedValues[i], delimiter, value);
                    }
                }
            }

            // After last row, write out values so far
            WriteCombinedRow(writer, combinedValues);

            WriteSizeSummary(reader, writer);
        }
    }
}
public Logger(IStreamProvider streamProvider, string outputFilePath)
{
    // Log rows are written to "Log.csv" under the given output folder,
    // via a writer opened through the stream provider.
    string logFilePath = Path.Combine(outputFilePath, "Log.csv");
    _writer = TabularFactory.BuildWriter(streamProvider.OpenWrite(logFilePath), logFilePath);
    _writer.SetColumns(new string[] { "WhenUtc", "MessageType", "SourceComponent", "Message" });

    // Shared conversion buffer for String8 copies when logging.
    _block = new String8Block();
}
private static void WriteCombinedRow(ITabularWriter writer, String8[] values)
{
    // Emit each value as one cell, then complete the row.
    foreach (String8 value in values)
    {
        writer.Write(value);
    }

    writer.NextRow();
}
/// <summary>
/// Initializes a new instance of the <see cref="DataRetrievalInfoTabular"/> class.
/// Base-class construction handles the mapping set, query, and connection;
/// this class additionally records the tabular writer the retrieval targets.
/// </summary>
/// <param name="mappingSet">
/// The mapping set of the dataflow found in the sdmx query
/// </param>
/// <param name="query">
/// The current SDMX Query object
/// </param>
/// <param name="connectionStringSettings">
/// The Mapping Store connection string settings
/// </param>
/// <param name="tabularWriter">
/// The tabular writer which will receive the retrieved data.
/// </param>
public DataRetrievalInfoTabular(
    MappingSetEntity mappingSet,
    IDataQuery query,
    ConnectionStringSettings connectionStringSettings,
    ITabularWriter tabularWriter)
    : base(mappingSet, query, connectionStringSettings)
{
    this._tabularWriter = tabularWriter;
}
public void Writer_Performance(Func <Stream, ITabularWriter> buildWriter)
{
    // Measures write throughput by repeatedly rewriting a four-column table
    // (two ints, two fixed String8 values per row) into one reused MemoryStream.
    String8Block block = new String8Block();
    String8 d1 = block.GetCopy("Description 1");
    String8 d2 = block.GetCopy("Description 2");
    String8 s1 = block.GetCopy("Source: Internal");
    String8 s2 = block.GetCopy("Source: External");

    using (MemoryStream s = new MemoryStream())
    {
        int iterations = 50;
        long bytesWritten = 0;
        int rowsWritten = 0;

        // Tsv Write goal: 100MB/sec [Surface Book]
        // NOTE: Tsv Write performance is very sensitive the mix of text and numbers written. Writing integers is slower.
        Verify.PerformanceByBytes(50 * LongExtensions.Megabyte, () =>
        {
            for (int iteration = 0; iteration < iterations; ++iteration)
            {
                // Rewind and overwrite the same in-memory buffer each pass.
                s.Seek(0, SeekOrigin.Begin);

                ITabularWriter writer = buildWriter(s);
                writer.SetColumns(new string[] { "LineNumber", "Count", "Description", "Source" });

                int sum = 0;
                for (int row = 1; row < 10000; ++row)
                {
                    sum += row;

                    writer.Write(row);
                    writer.Write(sum);

                    // Alternate the string pair per row to mix the content.
                    if (row % 2 == 0)
                    {
                        writer.Write(d1);
                        writer.Write(s1);
                    }
                    else
                    {
                        writer.Write(d2);
                        writer.Write(s2);
                    }

                    writer.NextRow();
                }

                bytesWritten += writer.BytesWritten;
                rowsWritten += writer.RowCountWritten;
            }

            // Total bytes produced across all iterations drives the MB/sec figure.
            return(bytesWritten);
        });
    }
}
private static void CopyRows(ITabularReader reader, ITabularWriter writer)
{
    // Echo every cell of every remaining row from reader to writer.
    while (reader.NextRow())
    {
        for (int column = 0; column < reader.CurrentRowColumns; ++column)
        {
            writer.Write(reader.Current(column).ToString8());
        }

        writer.NextRow();
    }
}