public void GetName()
{
    CsvDataReader reader = new CsvDataReader(@"..\..\SimpleCsv.txt");
    Assert.AreEqual("Header1", reader.GetName(0));
    reader.Dispose();
}
public void GetValuesTest()
{
    var s = new StringBuilder();
    s.AppendLine("Boolean,Byte,Bytes,Char,Chars,DateTime,Decimal,Double,Float,Guid,Short,Int,Long,Null");
    s.AppendLine("true,1,0x0102,a,ab,1/1/2019,1.23,4.56,7.89,eca0c8c6-9a2a-4e6c-8599-3561abda13f1,1,2,3,null");

    using (var reader = new StringReader(s.ToString()))
    using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
    {
        csv.Configuration.Delimiter = ",";
        csv.Configuration.TypeConverterOptionsCache.GetOptions<string>().NullValues.Add("null");

        var dataReader = new CsvDataReader(csv);
        dataReader.Read();

        Assert.AreEqual(true, dataReader.GetBoolean(0));
        Assert.AreEqual(1, dataReader.GetByte(1));

        byte[] byteBuffer = new byte[2];
        dataReader.GetBytes(2, 0, byteBuffer, 0, byteBuffer.Length);
        Assert.AreEqual(0x1, byteBuffer[0]);
        Assert.AreEqual(0x2, byteBuffer[1]);

        Assert.AreEqual('a', dataReader.GetChar(3));

        char[] charBuffer = new char[2];
        dataReader.GetChars(4, 0, charBuffer, 0, charBuffer.Length);
        Assert.AreEqual('a', charBuffer[0]);
        Assert.AreEqual('b', charBuffer[1]);

        Assert.IsNull(dataReader.GetData(0));
        Assert.AreEqual(DateTime.Parse("1/1/2019"), dataReader.GetDateTime(5));
        Assert.AreEqual(typeof(string).Name, dataReader.GetDataTypeName(0));
        Assert.AreEqual(1.23m, dataReader.GetDecimal(6));
        Assert.AreEqual(4.56d, dataReader.GetDouble(7));
        Assert.AreEqual(typeof(string), dataReader.GetFieldType(0));
        Assert.AreEqual(7.89f, dataReader.GetFloat(8));
        Assert.AreEqual(Guid.Parse("eca0c8c6-9a2a-4e6c-8599-3561abda13f1"), dataReader.GetGuid(9));
        Assert.AreEqual(1, dataReader.GetInt16(10));
        Assert.AreEqual(2, dataReader.GetInt32(11));
        Assert.AreEqual(3, dataReader.GetInt64(12));
        Assert.AreEqual("Boolean", dataReader.GetName(0));
        Assert.AreEqual(0, dataReader.GetOrdinal("Boolean"));
        Assert.AreEqual("true", dataReader.GetString(0));
        Assert.AreEqual("true", dataReader.GetValue(0));

        var objectBuffer = new object[14];
        dataReader.GetValues(objectBuffer);
        Assert.AreEqual("true", objectBuffer[0]);
        Assert.AreEqual(DBNull.Value, objectBuffer[13]);
        Assert.IsTrue(dataReader.IsDBNull(13));
    }
}
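Because CsvHelper's CsvDataReader implements IDataReader, the same typed-getter surface exercised by the test above can also be consumed by a generic ADO.NET-style loop. A minimal self-contained sketch (the sample data and class name are illustrative, not from the original tests):

using System;
using System.Globalization;
using System.IO;
using CsvHelper;

public static class CsvDataReaderDemo
{
    // Sketch: iterate every row generically through the IDataReader surface.
    public static void PrintAllRows()
    {
        using (var reader = new StringReader("A,B\n1,2\n3,4"))
        using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
        using (var dataReader = new CsvDataReader(csv))
        {
            while (dataReader.Read())
            {
                var values = new object[dataReader.FieldCount];
                dataReader.GetValues(values);
                Console.WriteLine(string.Join(" | ", values));
            }
        }
    }
}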
/// <summary> /// 读取工作簿 /// </summary> /// <param name="csv">CSV文件读取器</param> /// <param name="firstRowIsTitle">第一行是否为标题</param> /// <param name="offsetRow">位移行数</param> /// <returns></returns> public static DataTable ReadCSV(this CsvReader csv, bool firstRowIsTitle = true, int offsetRow = 0) { using (var dr = new CsvDataReader(csv)) { var table = new DataTable(); if (firstRowIsTitle) { for (int i = offsetRow; i < dr.FieldCount; i++) { table.Columns.Add(dr.GetName(i)); } } table.Load(dr); return(table); } }
private async Task <int> UploadFromStream(Stream stream, DateTime dateFrom, DateTime dateTo, CancellationToken token) { int recordCount; var startTime = DateTime.UtcNow; TimeSpan processingTime; using (var reader = new StreamReader(stream, Encoding.UTF8)) using (var connection = GetSqlConnection()) { await connection.OpenAsync(token).ConfigureAwait(false); var bulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.FireTriggers | SqlBulkCopyOptions.TableLock, null); bulkCopy.DestinationTableName = "dbo.AzureUsageRecords_Stage"; bulkCopy.BatchSize = BatchSize; bulkCopy.NotifyAfter = BatchSize; bulkCopy.BulkCopyTimeout = BatchCopyTimeout; bulkCopy.SqlRowsCopied += BulkCopy_SqlRowsCopied; startTime = DateTime.UtcNow; string line1 = await reader.ReadLineAsync().ConfigureAwait(false); // skip the first line - contains billing period using (var recReader = new CsvDataReader <DetailedUsage>(reader, x => { return(Sink(x, dateFrom, dateTo)); }, TrackMaxLenghts)) { _batchStartTime = DateTime.UtcNow; // note: by default SqlBulkCopy relies on column ordinal only - create mappings for (int sourceColumnOrdinal = 0; sourceColumnOrdinal < recReader.FieldCount; sourceColumnOrdinal++) { string destinationColumnName = recReader.GetName(sourceColumnOrdinal); bulkCopy.ColumnMappings.Add(new SqlBulkCopyColumnMapping(sourceColumnOrdinal, destinationColumnName)); } try { await bulkCopy.WriteToServerAsync(recReader, token).ConfigureAwait(false); } catch (SqlException ex) { // Note: error 40197 with code 4815 indicates some text column length is too short if (TrackMaxLenghts && ex.Number == 40197) { string dataLengths = "Max field lengths registered:"; foreach (var field in recReader.MaxLenghts) { dataLengths += $"\n{field.Key} : {field.Value}"; } _logger.LogDebug(dataLengths); } throw; } catch (Exception ex) { throw; } recordCount = recReader.RecordsAffected; } } processingTime = DateTime.UtcNow.Subtract(startTime); if (recordCount != 0 && processingTime.TotalSeconds != 0) { _logger.LogInformation($"total {recordCount:n0} records uploaded in {processingTime.TotalSeconds:n1} s ({recordCount / processingTime.TotalSeconds:n1} rec/s)"); } else { _logger.LogInformation($"total {recordCount:n0} records uploaded in {processingTime.TotalSeconds:n1} s"); } return(recordCount); }
protected override void BeginProcessing()
{
    var csvHelperConfig = Configuration ?? new Configuration();

    csvHelperConfig.IncludePrivateMembers = true;
    csvHelperConfig.MemberTypes = MemberTypes.Fields | MemberTypes.Properties;
    csvHelperConfig.AllowComments = AllowComments;
    csvHelperConfig.BufferSize = BufferSize;
    csvHelperConfig.Comment = CommentChar;
    csvHelperConfig.HasHeaderRecord = !NoHeaderRecord;
    csvHelperConfig.IgnoreBlankLines = !KeepBlankLines;
    csvHelperConfig.IgnoreQuotes = IgnoreQuote;
    csvHelperConfig.TrimOptions = TrimOption;

    var p = MyInvocation.BoundParameters;
    if (p.ContainsKey("Delimiter"))
    {
        csvHelperConfig.Delimiter = Delimiter;
    }
    if (p.ContainsKey("EscapeChar"))
    {
        csvHelperConfig.Escape = EscapeChar;
    }
    if (p.ContainsKey("QuoteChar"))
    {
        csvHelperConfig.Quote = QuoteChar;
    }

    _config = new Config()
    {
        CsvHelperConfiguration = csvHelperConfig,
        InitialCapacity = InitialCapacity,
        ColumnNames = ColumnNames,
        ColumnNameMap = ColumnNameMap,
        ColumnTypes = ColumnTypes,
        Strict = Strict,
        Culture = Culture
    };

    if (AsDataTable)
    {
        // The current version does not support combining -AsDataTable with an input stream.
        if (string.IsNullOrEmpty(Path))
        {
            WriteError(new ErrorRecord(new ArgumentException("-Path is required when -AsDataTable is set"), "", ErrorCategory.InvalidArgument, null));
            return;
        }

        using (var reader = new StreamReader(Path, Encoding))
        using (var csvReader = new CsvReader(reader, _config.CsvHelperConfiguration))
        using (var csvDataReader = new CsvDataReader(csvReader))
        {
            var dt = new DataTable();
            if (ColumnTypes != null)
            {
                foreach (DictionaryEntry entry in ColumnTypes)
                {
                    dt.Columns.Add((string)entry.Key, (Type)entry.Value);
                }
            }
            else
            {
                for (int i = 0; i < csvDataReader.FieldCount; i++)
                {
                    dt.Columns.Add(csvDataReader.GetName(i), csvDataReader.GetFieldType(i));
                }
            }

            if (ReadCount > 0)
            {
                int rowCount = 0;
                while (csvDataReader.Read())
                {
                    if (rowCount % ReadCount == 0)
                    {
                        WriteVerbose($"Starting batch of {ReadCount} ({rowCount} records processed)");
                    }

                    // Use object[] rather than string[]: GetValues() takes object[], and a
                    // DBNull field would otherwise throw ArrayTypeMismatchException.
                    var row = new object[csvDataReader.FieldCount];
                    csvDataReader.GetValues(row);
                    dt.LoadDataRow(row, true);
                    rowCount++;

                    if (rowCount % ReadCount == 0)
                    {
                        WriteObject(dt.Copy());
                        dt.Clear();
                    }
                }

                // Write out remaining rows, if any.
                if (dt.Rows.Count > 0)
                {
                    WriteObject(dt);
                }
            }
            else
            {
                dt.Load(csvDataReader);
                WriteObject(dt);
            }
        }
        return;
    }

    if (!string.IsNullOrEmpty(Path))
    {
        using (var reader = new StreamReader(Path, Encoding))
        using (var loader = new CsvLoader(reader, _config))
        {
            LoadFile(loader);
        }
        return;
    }

    _output = new BlockingCollection<object>();
    _completeEvent = new ManualResetEvent(false);
    _loader = new CsvLoader(null, _config);

    var thread = new Thread(() =>
    {
        try
        {
            LoadFile(_loader);
        }
        catch (Exception e)
        {
            _exception = e;
        }
        finally
        {
            _completeEvent.Set();
        }
    });
    thread.Name = "Import-Csv2 loader thread";
    thread.Start();
}