/// <summary>
/// Rewrites the CSV file backing <paramref name="tableFile"/>: preserves the existing
/// header record on disk and replaces all data rows with the records currently held
/// in <paramref name="tableFile"/>.
/// </summary>
/// <param name="tableFile">The table whose records are written; the target path is derived from its runtime type name.</param>
public static void WriteTable(TableFileBase tableFile)
{
    string path = "./Assets/Resources/Tables/" + tableFile.GetType().Name + ".csv";

    // File.ReadAllLines already strips line terminators, so the previous
    // Replace("\r\n", "\n") on the first line was a no-op and has been removed.
    string[] lines = File.ReadAllLines(path, Encoding.Default);

    // Re-parse the header record so the rewritten file keeps the same column titles.
    List<string> titles = new List<string>();
    using (StringReader rdr = new StringReader(string.Join("\n", lines)))
    using (var reader = new CsvReader(rdr))
    {
        HeaderRecord header = reader.ReadHeaderRecord();
        for (int i = 0; i < header.Count; ++i)
        {
            titles.Add(header[i]);
        }
    }

    File.Delete(path);

    using (StreamWriter sourceStream = new StreamWriter(path, false, System.Text.Encoding.Default))
    // Dispose the CsvWriter so any internally buffered records are flushed
    // before the underlying stream is closed.
    using (CsvWriter writer = new CsvWriter(sourceStream))
    {
        writer.WriteRecord(titles.ToArray());
        foreach (var record in tableFile.Records)
        {
            writer.WriteRecord(record.Value.GetRecordStr());
        }
    }
}
/// <summary>
/// Persists <paramref name="file"/> to its CSV on disk: removes any file left under a
/// previous name (if the construct was renamed), then — only when the content is dirty —
/// rewrites the CSV with the current column titles and rows and marks it clean.
/// </summary>
/// <param name="file">The content file to write.</param>
public static void WriteFile(ContentFile file)
{
    // The construct was renamed: delete the CSV stored under the old name and clear the marker.
    if (!string.IsNullOrEmpty(file.ConstructFile.OldName))
    {
        string oldPath = TableGlobalConfig.Instance.ResTablePath + "/" + file.ConstructFile.OldName + ".csv";
        // Check file existence rather than the concatenated path string,
        // which is always non-empty (the old check was dead code).
        if (File.Exists(oldPath))
        {
            File.Delete(oldPath);
        }

        file.WriteFlag = true;
        WriteConstruct.WriteFileOldName(file.ConstructFile, "");
    }

    if (!file.IsNeedWrite())
    {
        return;
    }

    string path = TableGlobalConfig.Instance.ResTablePath + "/" + file.ConstructFile.Name + ".csv";
    // Same fix as above: IsNullOrEmpty(path) could never be true; existence is what matters.
    if (File.Exists(path))
    {
        File.Delete(path);
    }

    using (StreamWriter sourceStream = new StreamWriter(path, false, System.Text.Encoding.UTF8))
    // Dispose the CsvWriter so internally buffered records are flushed before the stream closes.
    using (CsvWriter writer = new CsvWriter(sourceStream))
    {
        writer.WriteRecord(file.ConstructFile.GetColumnTitles());
        foreach (ContentRow row in file.ContentRow)
        {
            writer.WriteRecord(row.GetItemsStr());
        }
    }

    file.AlreadyWrite();
}
/// <summary>
/// Verifies that <c>WriteCsvAsync</c> reports the number of items written, both with
/// the default header behavior and with the header explicitly suppressed.
/// </summary>
public async Task write_csv_async_returns_number_of_items_written()
{
    using (var csvWriter = new CsvWriter(new StringWriter()))
    {
        csvWriter.WriteRecord("some", "record");

        var items = new TestType1[]
        {
            new TestType1 { Property1 = "1", Property2 = "2", Property3 = "3" },
            new TestType1 { Property1 = "4", Property2 = "5", Property3 = "6" }
        };

        Assert.Equal(2, await items.WriteCsvAsync(csvWriter));
        Assert.Equal(2, await items.WriteCsvAsync(csvWriter, false));
    }
}
/// <summary>
/// Writes WaterOneFlow data response as CSV file.
/// </summary>
/// <param name="data">WaterOneFlow data response to write.</param>
/// <param name="ms">Memory stream to write to; left open after writing.</param>
/// <returns>A task that completes once the CSV content has been flushed to the stream.</returns>
private async Task WriteDataToMemoryStreamAsCsv(SeriesData data, MemoryStream ms)
{
    using (var csvWriter = new CsvWriter(ms, Encoding.UTF8, true))
    {
        // Use a char literal instead of parsing a one-character string at runtime.
        csvWriter.ValueSeparator = ',';

        csvWriter.WriteRecord(new List<string>
        {
            "TimeStamp", "Value", "OffsetType", "OffsetValue", "ValueAccuracy", "Qualifier", "CensorCode"
        });

        foreach (DataValue value in data.values)
        {
            // One CSV record per data value, columns matching the header above.
            csvWriter.WriteRecord(new List<string>
            {
                value.TimeStamp.ToString("yyyy-MM-dd HH:mm:ss"),
                value.Value.ToString(),
                value.OffsetType,
                value.OffsetValue.ToString(),
                value.ValueAccuracy.ToString(),
                value.Qualifier,
                value.CensorCode
            });
        }

        await csvWriter.FlushAsync();
    }
}
/// <summary>
/// Verifies that <c>WriteCsvAsync</c> reports the number of data rows written,
/// regardless of whether the header record is included.
/// </summary>
public async Task write_csv_async_returns_number_of_records_written()
{
    using (var csvWriter = new CsvWriter(new StringWriter()))
    {
        csvWriter.WriteRecord("some", "record");

        var table = new DataTable();
        table.Columns.Add("First");
        table.Columns.Add("Second");

        foreach (var row in new[] { new object[] { "1", "2" }, new object[] { "3", "4" }, new object[] { "5", "6" } })
        {
            table.Rows.Add(row);
        }

        Assert.Equal(3, await table.WriteCsvAsync(csvWriter, false));
        Assert.Equal(3, await table.WriteCsvAsync(csvWriter, true));
    }
}
/// <summary>
/// Verifies that <c>FillAsync</c> behaves correctly on a large CSV input when asked
/// for fewer than, exactly, and more than the number of available records.
/// </summary>
public async Task fill_data_table_async_works_with_large_csv_input()
{
    var csv = string.Empty;

    // Build a 1000-record CSV document in memory.
    using (var stringWriter = new StringWriter())
    using (var csvWriter = new CsvWriter(stringWriter))
    {
        csvWriter.WriteRecord("Header1", "Header2");

        for (var index = 0; index < 1000; ++index)
        {
            csvWriter.WriteRecord("value0_" + index, "value1_" + index);
        }

        csvWriter.Flush();
        csv = stringWriter.ToString();
    }

    // read less than all available records
    using (var reader = CsvReader.FromCsvString(csv))
    {
        reader.ReadHeaderRecord();
        var table = new DataTable();

        Assert.Equal(913, await table.FillAsync(reader, 913));
        Assert.Equal(913, table.Rows.Count);
        Assert.True(reader.HasMoreRecords);
    }

    // read exactly available records
    using (var reader = CsvReader.FromCsvString(csv))
    {
        reader.ReadHeaderRecord();
        var table = new DataTable();

        Assert.Equal(1000, await table.FillAsync(reader, 1000));
        Assert.Equal(1000, table.Rows.Count);
        Assert.False(reader.HasMoreRecords);
    }

    // attempt to read more than available records
    using (var reader = CsvReader.FromCsvString(csv))
    {
        reader.ReadHeaderRecord();
        var table = new DataTable();

        Assert.Equal(1000, await table.FillAsync(reader, 1500));
        Assert.Equal(1000, table.Rows.Count);
        Assert.False(reader.HasMoreRecords);
    }
}
/// <summary>
/// Writes all rows in <paramref name="this"/> to <paramref name="csvWriter"/>.
/// </summary>
/// <remarks>
/// Rows are batched into a small fixed-size buffer and written via
/// <c>WriteRecords</c> to reduce per-row writer calls.
/// </remarks>
/// <param name="this">
/// The <see cref="DataTable"/>.
/// </param>
/// <param name="csvWriter">
/// The <see cref="CsvWriter"/>.
/// </param>
/// <param name="writeHeaderRecord">
/// If <see langword="true"/>, a header record will also be written, which will be comprised of the column names defined for <paramref name="this"/>.
/// </param>
/// <param name="maximumRows">
/// The maximum number of rows from <paramref name="this"/> that should be written to <paramref name="csvWriter"/>.
/// </param>
/// <param name="objectToStringConverter">
/// Provides a means of converting values in the <see cref="DataRow"/>s to <see cref="String"/>s.
/// </param>
/// <returns>
/// The actual number of rows from <paramref name="this"/> written to <paramref name="csvWriter"/>.
/// </returns>
public static int WriteCsv(this DataTable @this, CsvWriter csvWriter, bool writeHeaderRecord, int? maximumRows, Func<object, string> objectToStringConverter)
{
    @this.AssertNotNull("@this");
    csvWriter.AssertNotNull("csvWriter");
    objectToStringConverter.AssertNotNull("objectToStringConverter");

    var num = 0;

    if (writeHeaderRecord)
    {
        var columnNames = new string[@this.Columns.Count];

        for (var i = 0; i < columnNames.Length; ++i)
        {
            columnNames[i] = @this.Columns[i].ColumnName;
        }

        csvWriter.WriteRecord(columnNames);
    }

    var maximum = maximumRows.GetValueOrDefault(int.MaxValue);

    // Previously a maximum of zero (or less) was silently ignored because the
    // post-increment equality check could never fire; honor it explicitly.
    if (maximum <= 0)
    {
        return 0;
    }

    var buffer = new DataRecord[16];
    var bufferOffset = 0;

    foreach (DataRow row in @this.Rows)
    {
        // DataRow.ItemArray allocates a fresh copy of the row's values on every
        // access, so grab it once per row instead of once per cell.
        var items = row.ItemArray;
        var record = new DataRecord();

        for (var i = 0; i < items.Length; ++i)
        {
            record.Add(objectToStringConverter(items[i]));
        }

        buffer[bufferOffset++] = record;

        if (bufferOffset == buffer.Length)
        {
            // buffer full
            csvWriter.WriteRecords(buffer, 0, buffer.Length);
            bufferOffset = 0;
        }

        if (++num == maximum)
        {
            break;
        }
    }

    // write any outstanding data in buffer
    csvWriter.WriteRecords(buffer, 0, bufferOffset);

    return num;
}