/// <summary>
/// Verifies that no schema header record is emitted when the writer is
/// constructed from options alone (no schema object), even though
/// IsFirstRecordSchema is set.
/// </summary>
public void ShouldNotWriteSchemaIfNoSchemaProvided()
{
    var stringWriter = new StringWriter();
    var options = new SeparatedValueOptions() { IsFirstRecordSchema = true };
    var writer = new SeparatedValueWriter(stringWriter, options);

    writer.Write(new string[] { "a" });

    // Only the data record should appear; there is no schema to write.
    string actual = stringWriter.ToString();
    Assert.Equal("a" + Environment.NewLine, actual);
}
/// <summary>
/// Serializes the given values through a SeparatedValueWriter backed by an
/// in-memory stream and returns the resulting text (default encoding).
/// </summary>
/// <param name="values">The record values to write.</param>
/// <returns>The encoded output produced by the writer.</returns>
private static string writeValues(object[] values)
{
    var schema = getSchema();
    using (var buffer = new MemoryStream())
    {
        using (var writer = new SeparatedValueWriter(buffer, schema))
        {
            writer.Write(values);
        }
        // ToArray snapshots the whole buffer regardless of position.
        buffer.Position = 0;
        return Encoding.Default.GetString(buffer.ToArray());
    }
}
/// <summary>
/// Gets a typed writer for writing the objects to the file.
/// </summary>
/// <param name="writer">The writer to use.</param>
/// <param name="options">The separate value options to use.</param>
/// <returns>The typed writer.</returns>
public ITypedWriter <object> GetWriter(TextWriter writer, SeparatedValueOptions options = null)
{
    var injector = new SeparatedValueSchemaInjector();
    var valueWriter = new SeparatedValueWriter(writer, injector, options);
    var multiWriter = new MultiplexingTypedWriter(valueWriter, this);
    // Register each matcher's schema. The predicate ignores the record values and
    // relies solely on the matcher's IsMatch state at write time.
    // NOTE(review): C# 5+ scopes the foreach variable per iteration, so capturing
    // 'matcher' in the lambda is safe here.
    foreach (var matcher in matchers)
    {
        injector.When((values) => matcher.IsMatch).Use(matcher.TypeMapper.GetSchema());
    }
    // Only install a fallback schema when a real default matcher was configured.
    if (defaultMatcher != nonMatcher)
    {
        injector.WithDefault(defaultMatcher.TypeMapper.GetSchema());
    }
    return(multiWriter);
}
/// <summary>
/// Verifies that IgnoredColumn entries in the schema are emitted as empty
/// fields (in both the header and the data rows), interleaved with the
/// DataTable's real columns, which are mapped by name.
/// </summary>
public void TestWriteFlatFile_IgnoredColumns()
{
    // Arrange: a keyed table with three data rows.
    DataTable table = new DataTable();
    DataColumn idColumn = table.Columns.Add("id", typeof(int));
    DataColumn nameColumn = table.Columns.Add("name", typeof(string));
    DataColumn createdColumn = table.Columns.Add("created", typeof(DateTime));
    DataColumn avgColumn = table.Columns.Add("avg", typeof(decimal));
    table.Constraints.Add("PK_blah", idColumn, true);
    DataRow bobRow = table.Rows.Add(new object[] { 1, "Bob", new DateTime(2018, 07, 16), 12.34m });
    DataRow johnRow = table.Rows.Add(new object[] { 2, "John", new DateTime(2018, 07, 17), 23.45m });
    DataRow susanRow = table.Rows.Add(new object[] { 3, "Susan", new DateTime(2018, 07, 18), 34.56m });
    // Invariant culture keeps date/decimal formatting deterministic across machines.
    var options = new SeparatedValueOptions() { IsFirstRecordSchema = true, RecordSeparator = "\r\n", FormatProvider = CultureInfo.InvariantCulture };
    var stringWriter = new StringWriter();
    // The schema alternates ignored placeholders (i0..i4) with the real columns.
    var schema = new SeparatedValueSchema();
    schema.AddColumn(new IgnoredColumn("i0"));
    schema.AddColumn(new Int32Column("id"));
    schema.AddColumn(new IgnoredColumn("i1"));
    schema.AddColumn(new StringColumn("name"));
    schema.AddColumn(new IgnoredColumn("i2"));
    schema.AddColumn(new DateTimeColumn("created") { OutputFormat = "MM/dd/yyyy" });
    schema.AddColumn(new IgnoredColumn("i3"));
    schema.AddColumn(new DecimalColumn("avg"));
    schema.AddColumn(new IgnoredColumn("i4"));
    var csvWriter = new SeparatedValueWriter(stringWriter, schema, options);
    // Act
    table.WriteFlatFile(csvWriter);
    // Assert: ignored columns appear as empty fields in every record.
    string output = stringWriter.ToString();
    Assert.AreEqual(@"i0,id,i1,name,i2,created,i3,avg,i4 ,1,,Bob,,07/16/2018,,12.34, ,2,,John,,07/17/2018,,23.45, ,3,,Susan,,07/18/2018,,34.56, ", output);
}
/// <summary>
/// Round-trips a record containing a fixed-length complex column nested in a
/// separated-value schema: parses it, checks each extracted value, then
/// re-serializes and expects the original text back.
/// </summary>
public void ShouldRoundTrip()
{
    const string message = @"Tom,Hanselman,2016-06-0426 Walking Ice,Ace ";
    StringReader stringReader = new StringReader(message);
    // Outer CSV schema: FirstName, LastName, PlayerStats (fixed-length blob), Nickname.
    SeparatedValueSchema outerSchema = new SeparatedValueSchema();
    outerSchema.AddColumn(new StringColumn("FirstName"));
    outerSchema.AddColumn(new StringColumn("LastName"));
    // Inner fixed-length layout for the PlayerStats field:
    // 10-char date, 2-char age, 20-char right-aligned stage name.
    FixedLengthSchema innerSchema = new FixedLengthSchema();
    innerSchema.AddColumn(new DateTimeColumn("StartDate") { InputFormat = "yyyy-MM-dd", OutputFormat = "yyyy-MM-dd" }, 10);
    innerSchema.AddColumn(new Int32Column("Age"), 2);
    innerSchema.AddColumn(new StringColumn("StageName"), new Window(20) { Alignment = FixedAlignment.RightAligned });
    outerSchema.AddColumn(new FixedLengthComplexColumn("PlayerStats", innerSchema));
    outerSchema.AddColumn(new StringColumn("Nickname"));
    SeparatedValueReader reader = new SeparatedValueReader(stringReader, outerSchema);
    Assert.True(reader.Read(), "A record should have been read.");
    object[] values = reader.GetValues();
    Assert.Equal("Tom", values[0]);
    Assert.Equal("Hanselman", values[1]);
    // The complex column surfaces as a nested object[] of its own parsed values.
    Assert.IsType <object[]>(values[2]);
    object[] playerValues = (object[])values[2];
    Assert.Equal(new DateTime(2016, 06, 04), playerValues[0]);
    Assert.Equal(26, playerValues[1]);
    Assert.Equal("Walking Ice", playerValues[2]);
    Assert.Equal("Ace", values[3]);
    // Write the parsed values back out and expect byte-identical text.
    StringWriter stringWriter = new StringWriter();
    SeparatedValueWriter writer = new SeparatedValueWriter(stringWriter, outerSchema);
    writer.Write(values);
    string output = stringWriter.GetStringBuilder().ToString();
    Assert.Equal(message, output);
}
/// <summary>
/// Verifies that a DataTable whose columns exactly match the schema writes
/// every row in schema order, including a row whose values are all null.
/// </summary>
public void TestWriteFlatFile_MatchingSchema()
{
    DataTable table = new DataTable();
    DataColumn idColumn = table.Columns.Add("id", typeof(int));
    table.Columns.Add("name", typeof(string));
    table.Columns.Add("created", typeof(DateTime));
    table.Columns.Add("avg", typeof(decimal));
    table.Constraints.Add("PK_blah", idColumn, true);
    table.Rows.Add(new object[] { 1, "Bob", new DateTime(2018, 07, 16), 12.34m });
    table.Rows.Add(new object[] { 2, "John", new DateTime(2018, 07, 17), 23.45m });
    table.Rows.Add(new object[] { 3, "Susan", new DateTime(2018, 07, 18), 34.56m });
    // Null values should serialize as empty fields.
    table.Rows.Add(new object[] { 4, null, null, null });
    // Invariant culture keeps date/decimal formatting deterministic across machines.
    var options = new SeparatedValueOptions() { IsFirstRecordSchema = true, RecordSeparator = "\r\n", FormatProvider = CultureInfo.InvariantCulture };
    var stringWriter = new StringWriter();
    var schema = new SeparatedValueSchema();
    schema.AddColumn(new Int32Column("id"));
    schema.AddColumn(new StringColumn("name"));
    schema.AddColumn(new DateTimeColumn("created") { OutputFormat = "MM/dd/yyyy" });
    schema.AddColumn(new DecimalColumn("avg"));
    var csvWriter = new SeparatedValueWriter(stringWriter, schema, options);
    table.WriteFlatFile(csvWriter);
    string output = stringWriter.ToString();
    Assert.AreEqual(@"id,name,created,avg 1,Bob,07/16/2018,12.34 2,John,07/17/2018,23.45 3,Susan,07/18/2018,34.56 4,,, ", output);
}
/// <summary>
/// Appends every row of the data source to the given file, selecting a
/// per-record schema by matching each row's identifier against the schema's
/// record definitions.
/// </summary>
/// <param name="outputFilePath">Path of the file to append to.</param>
/// <param name="dataSource">Rows to write, each tagged with a record identifier.</param>
/// <param name="schema">The file schema; either fixed-length or separated.</param>
public void Write(string outputFilePath, DataSource dataSource, FileSchemaBase schema)
{
    // 'true' opens the file in append mode rather than overwriting.
    using (var fileStreamWriter = new StreamWriter(outputFilePath, true))
    {
        switch (schema)
        {
            case FixedLengthFileSchema fixedLengthFileSchema:
                foreach (var rowData in dataSource.Data)
                {
                    // Resolve the record schema whose identifier matches this row.
                    var rowSchema = fixedLengthFileSchema.FixedLengthRecordSchemas.FirstOrDefault(x => x.RecordIdentifier.Equals(rowData.Identifier));
                    if (rowSchema == null)
                    {
                        throw new Exception($"No such identifier defined: '{rowData.Identifier}'");
                    }
                    // NOTE(review): a new writer is constructed per row over the same
                    // underlying stream — confirm this is intentional rather than
                    // hoisting one writer per schema outside the loop.
                    var fixedLengthWriter = new FixedLengthWriter(fileStreamWriter, rowSchema.FixedLengthSchema);
                    fixedLengthWriter.Write(rowData.DataArray);
                }
                break;
            case SeparatedFileSchema separatedFileSchema:
                foreach (var rowData in dataSource.Data)
                {
                    var rowSchema = separatedFileSchema.SeparatedRecordSchemas.FirstOrDefault(x => x.RecordIdentifier.Equals(rowData.Identifier));
                    if (rowSchema == null)
                    {
                        throw new Exception($"No such identifier defined: '{rowData.Identifier}'");
                    }
                    var separatedWriter = new SeparatedValueWriter(fileStreamWriter, rowSchema.SeparatedValueSchema, new SeparatedValueOptions { Separator = separatedFileSchema.Delimeter });
                    separatedWriter.Write(rowData.DataArray);
                }
                break;
            // Any other schema type falls through silently and writes nothing.
        }
    }
}
/// <summary>
/// Write a single statistics row to the separated-value output.
/// </summary>
/// <param name="svw">SeparatedValueWriter to write to</param>
/// <param name="stat">DatStatistics object to write out</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private void WriteIndividual(SeparatedValueWriter svw, DatStatistics stat, bool baddumpCol, bool nodumpCol)
{
    // Column order must match the header written by WriteHeader.
    var row = new string[]
    {
        stat.DisplayName,
        stat.Statistics.TotalSize.ToString(),
        stat.MachineCount.ToString(),
        stat.Statistics.RomCount.ToString(),
        stat.Statistics.DiskCount.ToString(),
        stat.Statistics.CRCCount.ToString(),
        stat.Statistics.MD5Count.ToString(),
        stat.Statistics.SHA1Count.ToString(),
        stat.Statistics.SHA256Count.ToString(),
        stat.Statistics.SHA384Count.ToString(),
        stat.Statistics.SHA512Count.ToString(),
        // Optional columns are still emitted, just left blank when disabled.
        baddumpCol ? stat.Statistics.BaddumpCount.ToString() : string.Empty,
        nodumpCol ? stat.Statistics.NodumpCount.ToString() : string.Empty,
    };

    svw.WriteValues(row);
    svw.Flush();
}
/// <summary>
/// Write out the column-header row for the statistics output.
/// </summary>
/// <param name="svw">SeparatedValueWriter to write to</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private void WriteHeader(SeparatedValueWriter svw, bool baddumpCol, bool nodumpCol)
{
    // Column order must match the rows produced by WriteIndividual.
    var headerRow = new string[]
    {
        "File Name",
        "Total Size",
        "Games",
        "Roms",
        "Disks",
        "# with CRC",
        "# with MD5",
        "# with SHA-1",
        "# with SHA-256",
        "# with SHA-384",
        "# with SHA-512",
        // Optional columns keep their slot but get an empty caption when disabled.
        baddumpCol ? "BadDumps" : string.Empty,
        nodumpCol ? "Nodumps" : string.Empty,
    };

    svw.WriteHeader(headerRow);
    svw.Flush();
}
/// <summary>
/// Verifies that a schema column absent from the DataTable ("created") is
/// written as an empty field in every record.
/// </summary>
public void TestWriteFlatFile_MissingColumn_WritesNull()
{
    DataTable table = new DataTable();
    DataColumn idColumn = table.Columns.Add("id", typeof(int));
    DataColumn nameColumn = table.Columns.Add("name", typeof(string));
    // Deliberately omitted: the "created" column exists only in the schema below.
    //DataColumn createdColumn = table.Columns.Add("created", typeof(DateTime));
    DataColumn avgColumn = table.Columns.Add("avg", typeof(decimal));
    table.Constraints.Add("PK_blah", idColumn, true);
    DataRow bobRow = table.Rows.Add(new object[] { 1, "Bob", 12.34m });
    DataRow johnRow = table.Rows.Add(new object[] { 2, "John", 23.45m });
    DataRow susanRow = table.Rows.Add(new object[] { 3, "Susan", 34.56m });
    var options = new SeparatedValueOptions() { IsFirstRecordSchema = true };
    var stringWriter = new StringWriter();
    var schema = new SeparatedValueSchema();
    schema.AddColumn(new Int32Column("id"));
    schema.AddColumn(new StringColumn("name"));
    schema.AddColumn(new DateTimeColumn("created") { OutputFormat = "MM/dd/yyyy" });
    schema.AddColumn(new DecimalColumn("avg"));
    var csvWriter = new SeparatedValueWriter(stringWriter, schema, options);
    table.WriteFlatFile(csvWriter);
    // The "created" slot is blank in every row since the table has no such column.
    string output = stringWriter.ToString();
    Assert.AreEqual(@"id,name,created,avg 1,Bob,,12.34 2,John,,23.45 3,Susan,,34.56 ", output);
}
/// <summary>
/// Write out a DatItem using the supplied SeparatedValueWriter.
/// Only Rom items produce a record; all other item types only get the
/// name-trimming/pre-processing side effects and a flush.
/// </summary>
/// <param name="svw">SeparatedValueWriter to output to</param>
/// <param name="datItem">DatItem object to be output</param>
private void WriteDatItem(SeparatedValueWriter svw, DatItem datItem)
{
    // No game should start with a path separator
    datItem.Machine.Name = datItem.Machine.Name.TrimStart(Path.DirectorySeparatorChar);

    // Pre-process the item name
    ProcessItemName(datItem, true);

    // Build the state
    switch (datItem.ItemType)
    {
        case ItemType.Rom:
            var rom = datItem as Rom;
            // Field order defines the output column layout; do not reorder.
            string[] fields = new string[]
            {
                rom.Machine.Name,
                rom.Machine.Description,
                Header.FileName,
                rom.Machine.CloneOf,
                rom.Machine.Year,
                rom.Machine.Manufacturer,
                rom.Machine.Category,
                rom.Machine.Players,
                rom.Machine.Rotation,
                rom.Machine.Control,
                rom.ItemStatus.ToString(),
                rom.Machine.DisplayCount,
                rom.Machine.DisplayType,
                rom.AltName,
                rom.AltTitle,
                rom.Machine.Comment,
                rom.Machine.Buttons,
            };
            svw.WriteValues(fields);
            break;
    }

    svw.Flush();
}
/// <summary>
/// Round-trips two records with an interleaved raw comment line and verifies
/// the reader can skip the comment via the RecordRead event.
/// </summary>
public void TestReadWrite_Comments()
{
    // Write two data records with a raw "#" comment between them.
    var output = new StringWriter();
    var writer = new SeparatedValueWriter(output);
    writer.Write(new[] { "a", "b", "c" });
    writer.WriteRaw("# Hello, world!!!", true);
    writer.Write(new[] { "d", "e", "f" });

    // Read it back, skipping any record whose first field starts with "#".
    var input = new StringReader(output.ToString());
    var reader = new SeparatedValueReader(input);
    reader.RecordRead += (sender, e) =>
    {
        e.IsSkipped = e.Values.Length > 0 && e.Values[0].StartsWith("#");
    };

    Assert.IsTrue(reader.Read());
    CollectionAssert.AreEqual(new[] { "a", "b", "c" }, reader.GetValues());
    Assert.IsTrue(reader.Read());
    CollectionAssert.AreEqual(new[] { "d", "e", "f" }, reader.GetValues());
    Assert.IsFalse(reader.Read());
}
/// <summary>
/// Writes one record (with a schema header) to a MemoryStream, reads it back
/// via ReadFlatFile, and verifies the DataTable picks up both the column
/// names and the single data row. (MSTest variant.)
/// </summary>
public void TestReadFlatFile_ExtractsSchema_PopulatesTable()
{
    SeparatedValueSchema schema = new SeparatedValueSchema();
    schema.AddColumn(new Int32Column("id"))
    .AddColumn(new StringColumn("name"))
    .AddColumn(new DateTimeColumn("created") { InputFormat = "MM/dd/yyyy", OutputFormat = "MM/dd/yyyy" })
    .AddColumn(new DecimalColumn("avg"));
    SeparatedValueOptions options = new SeparatedValueOptions() { IsFirstRecordSchema = true };
    using (MemoryStream stream = new MemoryStream())
    {
        using (SeparatedValueWriter builder = new SeparatedValueWriter(stream, schema, options))
        {
            builder.Write(new object[] { 123, "Bob", new DateTime(2012, 12, 31), 3.14159m });
        }
        stream.Position = 0;
        DataTable table = new DataTable();
        // The reader is built from options only, so column names come from the
        // file's header record rather than the writing schema.
        IReader parser = new SeparatedValueReader(stream, options);
        table.ReadFlatFile(parser);
        Assert.AreEqual(4, table.Columns.Count, "The wrong number of columns were extracted.");
        Assert.IsTrue(table.Columns.Contains("id"), "The ID column was not extracted.");
        Assert.IsTrue(table.Columns.Contains("name"), "The name column was not extracted.");
        Assert.IsTrue(table.Columns.Contains("created"), "The created column was not extracted.");
        Assert.IsTrue(table.Columns.Contains("avg"), "The AVG column was not extracted.");
        Assert.AreEqual(1, table.Rows.Count, "Not all of the records were extracted.");
        // Without a typed reading schema, every value comes back as a string.
        DataRow row = table.Rows[0];
        object[] expected = new object[] { "123", "Bob", "12/31/2012", "3.14159" };
        object[] values = row.ItemArray;
        CollectionAssert.AreEqual(expected, values, "The wrong values were extracted");
    }
}
/// <summary>
/// Writes one record (with a schema header) to a string, reads it back via
/// ReadFlatFile, and verifies the DataTable picks up both the column names
/// and the single data row. (xUnit variant.)
/// </summary>
public void TestReadFlatFile_ExtractsSchema_PopulatesTable()
{
    SeparatedValueSchema schema = new SeparatedValueSchema();
    schema.AddColumn(new Int32Column("id"))
    .AddColumn(new StringColumn("name"))
    .AddColumn(new DateTimeColumn("created") { InputFormat = "MM/dd/yyyy", OutputFormat = "MM/dd/yyyy" })
    .AddColumn(new DecimalColumn("avg"));
    SeparatedValueOptions options = new SeparatedValueOptions() { IsFirstRecordSchema = true };
    StringWriter stringWriter = new StringWriter();
    SeparatedValueWriter builder = new SeparatedValueWriter(stringWriter, schema, options);
    builder.Write(new object[] { 123, "Bob", new DateTime(2012, 12, 31), 3.14159m });
    StringReader stringReader = new StringReader(stringWriter.ToString());
    DataTable table = new DataTable();
    // The reader is built from options only, so column names come from the
    // file's header record rather than the writing schema.
    IReader parser = new SeparatedValueReader(stringReader, options);
    table.ReadFlatFile(parser);
    Assert.Equal(4, table.Columns.Count);
    Assert.True(table.Columns.Contains("id"), "The ID column was not extracted.");
    Assert.True(table.Columns.Contains("name"), "The name column was not extracted.");
    Assert.True(table.Columns.Contains("created"), "The created column was not extracted.");
    Assert.True(table.Columns.Contains("avg"), "The AVG column was not extracted.");
    Assert.Equal(1, table.Rows.Count);
    // Without a typed reading schema, every value comes back as a string.
    DataRow row = table.Rows[0];
    object[] expected = new object[] { "123", "Bob", "12/31/2012", "3.14159" };
    object[] values = row.ItemArray;
    Assert.Equal(expected, values);
}
/// <summary>
/// Verifies that DataTable columns not present in the schema ("created") are
/// silently skipped when writing.
/// </summary>
public void TestWriteFlatFile_ExtraColumn_Ignores()
{
    DataTable table = new DataTable();
    DataColumn idColumn = table.Columns.Add("id", typeof(int));
    table.Columns.Add("name", typeof(string));
    // "created" exists in the table but not in the schema below.
    table.Columns.Add("created", typeof(DateTime));
    table.Columns.Add("avg", typeof(decimal));
    table.Constraints.Add("PK_blah", idColumn, true);
    table.Rows.Add(new object[] { 1, "Bob", new DateTime(2018, 07, 16), 12.34m });
    table.Rows.Add(new object[] { 2, "John", new DateTime(2018, 07, 17), 23.45m });
    table.Rows.Add(new object[] { 3, "Susan", new DateTime(2018, 07, 18), 34.56m });
    var options = new SeparatedValueOptions() { IsFirstRecordSchema = true };
    var stringWriter = new StringWriter();
    var schema = new SeparatedValueSchema();
    schema.AddColumn(new Int32Column("id"));
    schema.AddColumn(new StringColumn("name"));
    schema.AddColumn(new DecimalColumn("avg"));
    var csvWriter = new SeparatedValueWriter(stringWriter, schema, options);
    table.WriteFlatFile(csvWriter);
    // Output contains only the three schema columns.
    string output = stringWriter.ToString();
    Assert.AreEqual(@"id,name,avg 1,Bob,12.34 2,John,23.45 3,Susan,34.56 ", output);
}
/// <summary>
/// Verifies that a blank trailing string column round-trips as null when the
/// record is read back through a schema-aware reader.
/// </summary>
public void TestGetValues_BlankTrailingSection_ReturnsNull()
{
    SeparatedValueOptions options = new SeparatedValueOptions() { IsFirstRecordSchema = true };
    SeparatedValueSchema schema = new SeparatedValueSchema();
    schema.AddColumn(new Int32Column("id"))
    .AddColumn(new StringColumn("name"))
    .AddColumn(new DateTimeColumn("created") { InputFormat = "M/d/yyyy", OutputFormat = "M/d/yyyy" })
    .AddColumn(new StringColumn("trailing"));
    // The trailing column is deliberately empty.
    object[] sources = new object[] { 123, "Bob", new DateTime(2013, 1, 19), "" };

    StringWriter stringWriter = new StringWriter();
    SeparatedValueWriter builder = new SeparatedValueWriter(stringWriter, schema, options);
    builder.Write(sources);

    StringReader stringReader = new StringReader(stringWriter.ToString());
    SeparatedValueReader parser = new SeparatedValueReader(stringReader, schema, options);
    Assert.True(parser.Read(), "No records were found.");
    object[] values = parser.GetValues();
    Assert.Equal(schema.ColumnDefinitions.Count, values.Length);
    Assert.Equal(sources[0], values[0]);
    Assert.Equal(sources[1], values[1]);
    Assert.Equal(sources[2], values[2]);
    // Assert.Null instead of Assert.Equal(null, ...) — fixes xUnit2003 and makes
    // the intent (blank field surfaces as null, not "") explicit.
    Assert.Null(values[3]);
    Assert.False(parser.Read(), "Too many records were found.");
}
/// <summary>
/// Write out DatItem using the supplied StreamWriter
/// </summary>
/// <param name="svw">SeparatedValueWriter to output to</param>
/// <param name="datItem">DatItem object to be output</param>
private void WriteDatItem(SeparatedValueWriter svw, DatItem datItem)
{
    // Separated values should only output Rom and Disk
    if (datItem.ItemType != ItemType.Disk && datItem.ItemType != ItemType.Rom)
    {
        return;
    }

    // Build the state
    // TODO: Can we have some way of saying what fields to write out? Support for read extends to all fields now
    // Fields 0-4 are common header/machine columns; 5-13 vary by item type.
    // The commented-out indices below record the wider 18-column layout that
    // was trimmed to 14 columns.
    string[] fields = new string[14]; // 18;
    fields[0] = Header.FileName;
    fields[1] = Header.Name;
    fields[2] = Header.Description;
    fields[3] = datItem.Machine.Name;
    fields[4] = datItem.Machine.Description;

    switch (datItem.ItemType)
    {
        case ItemType.Disk:
            var disk = datItem as Disk;
            fields[5] = "disk";
            fields[6] = string.Empty;
            fields[7] = disk.Name;
            fields[8] = string.Empty;
            fields[9] = string.Empty;
            fields[10] = disk.MD5?.ToLowerInvariant();
            //fields[11] = string.Empty;
            fields[11] = disk.SHA1?.ToLowerInvariant();
            fields[12] = string.Empty;
            //fields[13] = string.Empty;
            //fields[14] = string.Empty;
            //fields[15] = string.Empty;
            fields[13] = disk.ItemStatus.ToString();
            break;

        // NOTE(review): Media is filtered out by the guard at the top of this
        // method, so this branch is currently unreachable — confirm whether
        // Media items should be allowed through that guard.
        case ItemType.Media:
            var media = datItem as Media;
            fields[5] = "media";
            fields[6] = string.Empty;
            fields[7] = media.Name;
            fields[8] = string.Empty;
            fields[9] = string.Empty;
            fields[10] = media.MD5?.ToLowerInvariant();
            //fields[11] = string.Empty;
            fields[11] = media.SHA1?.ToLowerInvariant();
            fields[12] = media.SHA256?.ToLowerInvariant();
            //fields[13] = string.Empty;
            //fields[14] = string.Empty;
            //fields[15] = media.SpamSum?.ToLowerInvariant();
            fields[13] = string.Empty;
            break;

        case ItemType.Rom:
            var rom = datItem as Rom;
            fields[5] = "rom";
            fields[6] = rom.Name;
            fields[7] = string.Empty;
            fields[8] = rom.Size?.ToString();
            fields[9] = rom.CRC?.ToLowerInvariant();
            fields[10] = rom.MD5?.ToLowerInvariant();
            //fields[11] = rom.RIPEMD160?.ToLowerInvariant();
            fields[11] = rom.SHA1?.ToLowerInvariant();
            fields[12] = rom.SHA256?.ToLowerInvariant();
            //fields[13] = rom.SHA384?.ToLowerInvariant();
            //fields[14] = rom.SHA512?.ToLowerInvariant();
            //fields[15] = rom.SpamSum?.ToLowerInvariant();
            fields[13] = rom.ItemStatus.ToString();
            break;
    }

    // Surround the record with any configured prefix/postfix text; 'false'
    // on WriteValues suppresses the writer's own line ending so the postfix
    // lands on the same line before the explicit WriteLine.
    svw.WriteString(CreatePrefixPostfix(datItem, true));
    svw.WriteValues(fields, false);
    svw.WriteString(CreatePrefixPostfix(datItem, false));
    svw.WriteLine();
    svw.Flush();
}
/// <summary>
/// Create and open an output file for writing direct from a dictionary
/// </summary>
/// <param name="outfile">Name of the file to write to</param>
/// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
/// <returns>True if the DAT was written correctly, false otherwise</returns>
public override bool WriteToFile(string outfile, bool ignoreblanks = false, bool throwOnError = false)
{
    try
    {
        logger.User($"Opening file for writing: {outfile}");
        FileStream fs = FileExtensions.TryCreate(outfile);

        // If we get back null for some reason, just log and return
        if (fs == null)
        {
            logger.Warning($"File '{outfile}' could not be created for writing! Please check to see if the file is writable");
            return false;
        }

        // 'using' disposes the stream and writer even when an exception is
        // thrown mid-write; the original only disposed on the success path.
        using (fs)
        using (SeparatedValueWriter svw = new SeparatedValueWriter(fs, new UTF8Encoding(false))
        {
            Quotes = true,
            Separator = this._delim,
            VerifyFieldCount = true
        })
        {
            // Write out the header
            WriteHeader(svw);

            // Use a sorted list of games to output
            foreach (string key in Items.SortedKeys)
            {
                List<DatItem> datItems = Items.FilteredItems(key);

                // If this machine doesn't contain any writable items, skip
                if (!ContainsWritable(datItems))
                    continue;

                // Resolve the names in the block
                datItems = DatItem.ResolveNames(datItems);

                for (int index = 0; index < datItems.Count; index++)
                {
                    // Check for a "null" item
                    DatItem datItem = ProcessNullifiedItem(datItems[index]);

                    // Write out the item if we're not ignoring
                    if (!ShouldIgnore(datItem, ignoreblanks))
                        WriteDatItem(svw, datItem);
                }
            }

            logger.Verbose("File written!" + Environment.NewLine);
        }
    }
    catch (Exception ex)
    {
        logger.Error(ex);
        if (throwOnError)
            throw; // 'throw;' preserves the stack trace; 'throw ex;' would reset it

        return false;
    }

    return true;
}
/// <summary>
/// Exports the configured data source to a delimited text file. The output
/// schema is built either from the explicit Columns list or, when none is
/// given, from the reader's own columns.
/// </summary>
private void ExportText()
{
    var fileName = Project.Current.MapPath(FileName);
    if (string.IsNullOrWhiteSpace(fileName))
    {
        // NOTE(review): the message string lands in ArgumentNullException's
        // ParamName slot — consider ArgumentException with a real message.
        throw new ArgumentNullException("Filename is not provided.");
    }
    if (!Overwrite && File.Exists(fileName))
    {
        throw new Exception($"File '{FileName}' already exists.");
    }
    if (Overwrite && File.Exists(fileName))
    {
        File.Delete(fileName);
    }
    using var dataReader = GetDataSourceReader(_Output, DataSource, new DataSourceParameters() { IgnoreErrors = this.IgnoreErrors, Columns = this.SelectColumns, SkipColumns = this.SkipColumns });
    var schema = new SeparatedValueSchema();
    // columnIndexes maps output position -> source-reader ordinal.
    int colCount = Columns != null ? Columns.Length : dataReader.FieldCount;
    int[] columnIndexes = new int[colCount];
    if (Columns != null)
    {
        // Explicit column list: resolve each by name against the reader.
        for (int i = 0; i < Columns.Length; i++)
        {
            var column = Columns[i];
            int columnIndex = dataReader.GetOrdinal(column.ColumnName);
            if (columnIndex < 0)
            {
                throw new Exception($"Cannot find column '{column.ColumnName}'.");
            }
            columnIndexes[i] = columnIndex;
            var definition = CreateColumnDefinition(column);
            schema.AddColumn(definition);
        }
    }
    else
    {
        // No explicit list: take every reader column in order.
        for (int i = 0; i < dataReader.FieldCount; i++)
        {
            columnIndexes[i] = i;
            var column = new TextColumnDefinition() { ColumnName = dataReader.GetName(i), ColumnType = dataReader.GetFieldType(i) };
            var definition = CreateColumnDefinition(column);
            schema.AddColumn(definition);
        }
    }
    // Translate the exporter's settings into writer options; null settings
    // leave the writer's defaults in place.
    var options = new SeparatedValueOptions();
    if (Separator != null)
    {
        options.Separator = Separator;
    }
    if (RecordSeparator != null)
    {
        options.RecordSeparator = RecordSeparator;
    }
    options.Quote = Quote;
    options.IsFirstRecordSchema = !NoHeaderRow;
    options.PreserveWhiteSpace = PreserveWhiteSpace;
    if (!string.IsNullOrWhiteSpace(Culture))
    {
        options.FormatProvider = new CultureInfo(Culture);
    }
    options.QuoteBehavior = QuoteBehavior;
    using var writer = new StreamWriter(File.OpenWrite(fileName));
    var csvWriter = new SeparatedValueWriter(writer, schema, options);
    int columnCount = schema.ColumnDefinitions.Count;
    object[] values = new object[columnCount];
    bool isHeader = options.IsFirstRecordSchema;
    while (dataReader.Read())
    {
        for (int i = 0; i < columnCount; i++)
        {
            object value = dataReader.GetValue(columnIndexes[i]);
            if (value == DBNull.Value)
            {
                value = null;
            }
            if (!isHeader)
            {
                value = Utils.ChangeType(schema.ColumnDefinitions[i].ColumnType, value, null);
            }
            values[i] = value;
            // NOTE(review): isHeader is cleared inside the column loop, so only
            // the very first column of the first record skips ChangeType —
            // confirm whether this was meant to be cleared once per record.
            isHeader = false;
        }
        csvWriter.Write(values);
    }
}
/// <summary>
/// Write out the footer-separator to the stream, if any exists
/// </summary>
/// <param name="svw">SeparatedValueWriter to write to</param>
private void WriteFooterSeparator(SeparatedValueWriter svw)
{
    // Emits a literal LF (not Environment.NewLine) between statistics blocks.
    svw.WriteString("\n");
    svw.Flush();
}
/// <inheritdoc/>
public override bool WriteToFile(string outfile, bool baddumpCol, bool nodumpCol, bool throwOnError = false)
{
    // Fixed: the timer label was missing its closing quote ("...to '{outfile}").
    InternalStopwatch watch = new InternalStopwatch($"Writing statistics to '{outfile}'");

    try
    {
        // Try to create the output file
        FileStream fs = File.Create(outfile);
        if (fs == null)
        {
            logger.Warning($"File '{outfile}' could not be created for writing! Please check to see if the file is writable");
            return false;
        }

        // 'using' disposes the stream and writer even when an exception is
        // thrown mid-write; the original only disposed on the success path.
        using (fs)
        using (SeparatedValueWriter svw = new SeparatedValueWriter(fs, Encoding.UTF8)
        {
            Separator = _separator,
            Quotes = true,
        })
        {
            // Write out the header
            WriteHeader(svw, baddumpCol, nodumpCol);

            // Now process each of the statistics
            for (int i = 0; i < Statistics.Count; i++)
            {
                DatStatistics stat = Statistics[i];
                WriteIndividual(svw, stat, baddumpCol, nodumpCol);

                // Directory statistics are followed by a separator, except
                // after the final entry.
                if (stat.IsDirectory && i < Statistics.Count - 1)
                    WriteFooterSeparator(svw);
            }
        }
    }
    catch (Exception ex) when (!throwOnError)
    {
        logger.Error(ex);
        return false;
    }
    finally
    {
        watch.Stop();
    }

    return true;
}
/// <summary>
/// Write out DatItem using the supplied SeparatedValueWriter
/// </summary>
/// <param name="svw">SeparatedValueWriter to output to</param>
/// <param name="datItem">DatItem object to be output</param>
private void WriteDatItem(SeparatedValueWriter svw, DatItem datItem)
{
    // Build the state: a two-column record whose layout depends on _hash.
    string[] fields = new string[2];

    // Get the name field, optionally prefixed with the machine name.
    string name = string.Empty;
    switch (datItem.ItemType)
    {
        case ItemType.Disk:
            var disk = datItem as Disk;
            if (Header.GameName)
            {
                name = $"{disk.Machine.Name}{Path.DirectorySeparatorChar}";
            }
            name += disk.Name;
            break;

        case ItemType.Media:
            var media = datItem as Media;
            if (Header.GameName)
            {
                name = $"{media.Machine.Name}{Path.DirectorySeparatorChar}";
            }
            name += media.Name;
            break;

        case ItemType.Rom:
            var rom = datItem as Rom;
            if (Header.GameName)
            {
                name = $"{rom.Machine.Name}{Path.DirectorySeparatorChar}";
            }
            name += rom.Name;
            break;
    }

    // Get the hash field and set final fields. Item types that do not carry
    // the selected hash leave both fields empty and produce no output.
    // NOTE(review): CRC writes (name, hash) while every other hash writes
    // (hash, name) — confirm the asymmetric column order is intentional.
    switch (_hash)
    {
        case Hash.CRC:
            switch (datItem.ItemType)
            {
                case ItemType.Rom:
                    var rom = datItem as Rom;
                    fields[0] = name;
                    fields[1] = rom.CRC;
                    break;
            }
            break;

        case Hash.MD5:
            switch (datItem.ItemType)
            {
                case ItemType.Disk:
                    var disk = datItem as Disk;
                    fields[0] = disk.MD5;
                    fields[1] = name;
                    break;

                case ItemType.Media:
                    var media = datItem as Media;
                    fields[0] = media.MD5;
                    fields[1] = name;
                    break;

                case ItemType.Rom:
                    var rom = datItem as Rom;
                    fields[0] = rom.MD5;
                    fields[1] = name;
                    break;
            }
            break;

        case Hash.SHA1:
            switch (datItem.ItemType)
            {
                case ItemType.Disk:
                    var disk = datItem as Disk;
                    fields[0] = disk.SHA1;
                    fields[1] = name;
                    break;

                case ItemType.Media:
                    var media = datItem as Media;
                    fields[0] = media.SHA1;
                    fields[1] = name;
                    break;

                case ItemType.Rom:
                    var rom = datItem as Rom;
                    fields[0] = rom.SHA1;
                    fields[1] = name;
                    break;
            }
            break;

        case Hash.SHA256:
            switch (datItem.ItemType)
            {
                case ItemType.Media:
                    var media = datItem as Media;
                    fields[0] = media.SHA256;
                    fields[1] = name;
                    break;

                case ItemType.Rom:
                    var rom = datItem as Rom;
                    fields[0] = rom.SHA256;
                    fields[1] = name;
                    break;
            }
            break;

        case Hash.SHA384:
            switch (datItem.ItemType)
            {
                case ItemType.Rom:
                    var rom = datItem as Rom;
                    fields[0] = rom.SHA384;
                    fields[1] = name;
                    break;
            }
            break;

        case Hash.SHA512:
            switch (datItem.ItemType)
            {
                case ItemType.Rom:
                    var rom = datItem as Rom;
                    fields[0] = rom.SHA512;
                    fields[1] = name;
                    break;
            }
            break;

        case Hash.SpamSum:
            switch (datItem.ItemType)
            {
                case ItemType.Media:
                    var media = datItem as Media;
                    fields[0] = media.SpamSum;
                    fields[1] = name;
                    break;

                case ItemType.Rom:
                    var rom = datItem as Rom;
                    fields[0] = rom.SpamSum;
                    fields[1] = name;
                    break;
            }
            break;
    }

    // If we had at least one field filled in
    if (!string.IsNullOrEmpty(fields[0]) || !string.IsNullOrEmpty(fields[1]))
    {
        svw.WriteValues(fields);
    }

    svw.Flush();
}
/// <summary>
/// Writes the given record through a schema-backed SeparatedValueWriter over
/// an in-memory stream and returns the text in the default encoding.
/// </summary>
/// <param name="values">The record values to serialize.</param>
/// <returns>The writer's output as a string.</returns>
private static string writeValues(object[] values)
{
    var schema = getSchema();
    using (var stream = new MemoryStream())
    {
        using (var writer = new SeparatedValueWriter(stream, schema))
        {
            writer.Write(values);
        }
        stream.Position = 0;
        byte[] bytes = stream.ToArray();
        return Encoding.Default.GetString(bytes);
    }
}
/// <summary>
/// Serializes the given record with a schema-backed SeparatedValueWriter
/// over a StringWriter and returns the resulting text.
/// </summary>
/// <param name="values">The record values to serialize.</param>
/// <returns>The writer's output as a string.</returns>
private static string writeValues(object[] values)
{
    var schema = getSchema();
    var buffer = new StringWriter();
    var writer = new SeparatedValueWriter(buffer, schema);
    writer.Write(values);
    return buffer.ToString();
}
/// <summary>
/// Verifies that a blank trailing string column round-trips as null when the
/// record is read back through a schema-aware reader. (MSTest variant using
/// a MemoryStream.)
/// </summary>
public void TestGetValues_BlankTrailingSection_ReturnsNull()
{
    using (MemoryStream stream = new MemoryStream())
    {
        SeparatedValueOptions options = new SeparatedValueOptions() { IsFirstRecordSchema = true };
        SeparatedValueSchema schema = new SeparatedValueSchema();
        schema.AddColumn(new Int32Column("id"))
        .AddColumn(new StringColumn("name"))
        .AddColumn(new DateTimeColumn("created") { InputFormat = "M/d/yyyy", OutputFormat = "M/d/yyyy" })
        .AddColumn(new StringColumn("trailing"));
        // The trailing column is deliberately empty.
        object[] sources = new object[] { 123, "Bob", new DateTime(2013, 1, 19), "" };
        using (SeparatedValueWriter builder = new SeparatedValueWriter(stream, schema, options))
        {
            builder.Write(sources);
        }
        stream.Position = 0;
        SeparatedValueReader parser = new SeparatedValueReader(stream, schema, options);
        Assert.IsTrue(parser.Read(), "No records were found.");
        object[] values = parser.GetValues();
        Assert.AreEqual(schema.ColumnDefinitions.Count, values.Length, "The wrong number of values were read.");
        Assert.AreEqual(sources[0], values[0], "The first column was not parsed correctly.");
        Assert.AreEqual(sources[1], values[1], "The second column was not parsed correctly.");
        Assert.AreEqual(sources[2], values[2], "The third column was not parsed correctly.");
        // The empty trailing field surfaces as null, not "".
        Assert.AreEqual(null, values[3], "The forth column was not interpreted as null.");
        Assert.IsFalse(parser.Read(), "Too many records were found.");
    }
}