internal static void CheckReferenceData()
{
    // load the reference CSV from the embedded resource...
    string data = ResourceHelper.GetString("AmxMobile.Services.Resources.Countries.csv");
    using (StringReader reader = new StringReader(data))
    {
        CsvDataReader csv = new CsvDataReader(reader, true);

        // run the import inside a single transaction...
        using (TransactionState txn = Database.StartTransaction())
        {
            try
            {
                while (csv.Read())
                {
                    // get the country name...
                    string name = csv.GetString("Common Name");
                    if (name == null)
                        throw new InvalidOperationException("'name' is null.");
                    if (name.Length == 0)
                        throw new InvalidOperationException("'name' is zero-length.");

                    // create the country if it doesn't exist yet...
                    Country country = Country.GetByName(name);
                    if (country == null)
                    {
                        country = new Country();
                        country.Name = name;

                        // 'else if' is required here: with two separate 'if' statements,
                        // the United Kingdom's ordinal was immediately overwritten by
                        // the trailing 'else' branch.
                        if (string.Compare(name, "United Kingdom", true, Cultures.System) == 0)
                            country.Ordinal = 1000;
                        else if (string.Compare(name, "United States", true, Cultures.System) == 0)
                            country.Ordinal = 1001;
                        else
                            country.Ordinal = 9999;

                        // save...
                        country.SaveChanges();
                    }
                }

                // ok...
                txn.Commit();
            }
            catch (Exception ex)
            {
                txn.Rollback(ex);
                throw new InvalidOperationException("The operation failed", ex);
            }
        }
    }
}
public void GetValuesTest()
{
    var s = new StringBuilder();
    s.AppendLine("Boolean,Byte,Bytes,Char,Chars,DateTime,Decimal,Double,Float,Guid,Short,Int,Long,Null");
    s.AppendLine("true,1,0x0102,a,ab,1/1/2019,1.23,4.56,7.89,eca0c8c6-9a2a-4e6c-8599-3561abda13f1,1,2,3,null");
    using (var reader = new StringReader(s.ToString()))
    using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
    {
        csv.Context.TypeConverterOptionsCache.GetOptions<string>().NullValues.Add("null");

        var dataReader = new CsvDataReader(csv);
        dataReader.Read();

        Assert.True(dataReader.GetBoolean(0));
        Assert.Equal(1, dataReader.GetByte(1));

        byte[] byteBuffer = new byte[2];
        dataReader.GetBytes(2, 0, byteBuffer, 0, byteBuffer.Length);
        Assert.Equal(0x1, byteBuffer[0]);
        Assert.Equal(0x2, byteBuffer[1]);

        Assert.Equal('a', dataReader.GetChar(3));

        char[] charBuffer = new char[2];
        dataReader.GetChars(4, 0, charBuffer, 0, charBuffer.Length);
        Assert.Equal('a', charBuffer[0]);
        Assert.Equal('b', charBuffer[1]);

        Assert.Null(dataReader.GetData(0));
        Assert.Equal(DateTime.Parse("1/1/2019"), dataReader.GetDateTime(5));
        Assert.Equal(typeof(string).Name, dataReader.GetDataTypeName(0));
        Assert.Equal(1.23m, dataReader.GetDecimal(6));
        Assert.Equal(4.56d, dataReader.GetDouble(7));
        Assert.Equal(typeof(string), dataReader.GetFieldType(0));
        Assert.Equal(7.89f, dataReader.GetFloat(8));
        Assert.Equal(Guid.Parse("eca0c8c6-9a2a-4e6c-8599-3561abda13f1"), dataReader.GetGuid(9));
        Assert.Equal(1, dataReader.GetInt16(10));
        Assert.Equal(2, dataReader.GetInt32(11));
        Assert.Equal(3, dataReader.GetInt64(12));
        Assert.Equal("Boolean", dataReader.GetName(0));
        Assert.Equal(0, dataReader.GetOrdinal("Boolean"));
        Assert.Equal("true", dataReader.GetString(0));
        Assert.Equal("true", dataReader.GetValue(0));

        var objectBuffer = new object[14];
        dataReader.GetValues(objectBuffer);
        Assert.Equal("true", objectBuffer[0]);
        Assert.Equal(DBNull.Value, objectBuffer[13]);

        Assert.True(dataReader.IsDBNull(13));
    }
}
public void ReadWithNoHeaderTest()
{
    var s = new StringBuilder();
    s.AppendLine("1,one");
    s.AppendLine("2,two");
    using (var reader = new StringReader(s.ToString()))
    using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
    {
        csv.Configuration.HasHeaderRecord = false;
        csv.Configuration.Delimiter = ",";

        var dataReader = new CsvDataReader(csv);

        dataReader.Read();
        Assert.AreEqual(1, dataReader.GetInt32(0));
        Assert.AreEqual("one", dataReader.GetString(1));

        dataReader.Read();
        Assert.AreEqual(2, dataReader.GetInt32(0));
        Assert.AreEqual("two", dataReader.GetString(1));
    }
}
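Because CsvHelper's CsvDataReader implements IDataReader, the same reader can be handed to APIs that consume data readers directly, such as SqlBulkCopy. A minimal sketch, assuming a SQL Server target; the connection string, file path, and destination table name are placeholders.

using System.Data.SqlClient;
using System.Globalization;
using System.IO;
using CsvHelper;

public static class CsvBulkLoader
{
    // Streams a CSV file straight into a SQL Server table without
    // materializing the rows in memory first.
    public static void BulkLoadCsv(string csvPath, string connectionString)
    {
        using (var reader = new StreamReader(csvPath))
        using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
        using (var dataReader = new CsvDataReader(csv))
        using (var connection = new SqlConnection(connectionString))
        {
            connection.Open();
            using (var bulkCopy = new SqlBulkCopy(connection))
            {
                bulkCopy.DestinationTableName = "dbo.Destination"; // placeholder table
                bulkCopy.WriteToServer(dataReader);
            }
        }
    }
}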
public void AddStaticValue()
{
    Dictionary<string, string> staticColumns = new Dictionary<string, string>();
    staticColumns.Add("Column1", "Value");

    CsvDataReader reader = new CsvDataReader(@"..\..\SimpleCsv.txt", staticColumns);
    Assert.AreEqual(3, reader.GetOrdinal("Column1"));
    while (reader.Read())
    {
        Assert.AreEqual("Value", reader.GetValue(reader.GetOrdinal("Column1")));
    }
    reader.Close();
    reader.Dispose();
}
public void EndToEnd()
{
    var expected = CelestialBody.All.ToArray();
    var records = expected.Select(Flatten);

    var writer = new SimpleCsvWriter();
    string csv = null;
    using (var target = new StringWriter())
    {
        writer.Write(records, target);
        csv = target.ToString();
    }

    var schema = new List<FieldDefinition> { "Name" };
    schema.AddEnum<CelestialBodyType>("Type");
    schema.Add("Mass", float.Parse);
    schema.Add("Distance to Primary", float.Parse);

    var actual = new List<CelestialBody>();
    using (var source = new StringReader(csv))
    using (var reader = new CsvDataReader(source, schema, Parser.Create()))
    {
        while (reader.Read())
        {
            actual.Add(new CelestialBody
            {
                Name = reader.GetFieldValue<string>(0),
                Type = reader.GetFieldValue<CelestialBodyType>(1),
                Mass = reader.GetFieldValue<float>(2),
                Orbit = new OrbitalInfo
                {
                    DistanceToPrimary = reader.GetFieldValue<float>(3)
                }
            });
        }
    }

    actual.ShouldBeSameSequenceAs(expected);
}
public void Read_ShouldProcessHeaderCorrectly(CsvDataReader sut)
{
    // arrange..
    var stream = new MemoryStream();
    var streamWriter = new StreamWriter(stream);
    streamWriter.Write(HeaderString);
    streamWriter.Flush();
    stream.Position = 0;

    // act..
    var actual = sut.Read(stream);

    // assert..
    actual.ShouldNotBeNull();
    actual.ShouldBeEmpty();

    // teardown..
    streamWriter.Dispose();
    stream.Dispose();
}
public void Read_ShouldReturnSuccessfulResultWithExpectedData(string fileName, char delimiter)
{
    var path = TestUtils.BuildPathFor(fileName);
    var expectedData = new[]
    {
        new Dictionary<string, double> { { "one", 0.4 }, { "two", 11.03 }, { "three", 13.333 } },
        new Dictionary<string, double> { { "one", 0.5 }, { "two", 12.05 }, { "three", 22.33 } },
        new Dictionary<string, double> { { "one", 0.6 }, { "two", 0.06 }, { "three", -40.23 } }
    };

    var reader = new CsvDataReader(delimiter);
    var result = reader.Read(path);

    result.Data.ShouldBeEquivalentTo(expectedData);
}
private async Task<Either<IEnumerable<ValidationError>, Unit>> ValidateCsvFile(BlobInfo blob, bool isMetaFile)
{
    var errors = new List<ValidationError>();
    if (!await IsCsvFile(blob))
    {
        errors.Add(isMetaFile
            ? new ValidationError($"{MetaFileMustBeCsvFile.GetEnumLabel()}")
            : new ValidationError($"{DataFileMustBeCsvFile.GetEnumLabel()}"));
    }
    else
    {
        await using var stream = await _fileStorageService.StreamBlob(blob);
        using var reader = new StreamReader(stream);
        using var csv = new CsvReader(reader, CultureInfo.InvariantCulture);
        csv.Configuration.HasHeaderRecord = false;
        using var dr = new CsvDataReader(csv);

        // Every row must have the same number of columns as the rows before it.
        var colCount = -1;
        var idx = 0;
        while (dr.Read())
        {
            idx++;
            if (colCount >= 0 && dr.FieldCount != colCount)
            {
                errors.Add(isMetaFile
                    ? new ValidationError($"error at row {idx}: {MetaFileHasInvalidNumberOfColumns.GetEnumLabel()}")
                    : new ValidationError($"error at row {idx}: {DataFileHasInvalidNumberOfColumns.GetEnumLabel()}"));
                break;
            }
            colCount = dr.FieldCount;
        }
    }

    if (errors.Count > 0)
    {
        return errors;
    }
    return Unit.Instance;
}
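The column-count check above, distilled into a standalone helper with the service plumbing removed. A minimal sketch assuming CsvHelper; the class and method names are hypothetical, and depending on the CsvHelper version the header setting may need to be applied via csv.Configuration rather than a CsvConfiguration instance.

using System.Globalization;
using System.IO;
using CsvHelper;
using CsvHelper.Configuration;

public static class CsvShape
{
    // Returns the 1-based index of the first row whose width differs from
    // the rows before it, or null when every row has the same width.
    public static int? FindFirstRaggedRow(TextReader reader)
    {
        var config = new CsvConfiguration(CultureInfo.InvariantCulture)
        {
            HasHeaderRecord = false
        };
        using (var csv = new CsvReader(reader, config))
        using (var dr = new CsvDataReader(csv))
        {
            var colCount = -1;
            var row = 0;
            while (dr.Read())
            {
                row++;
                if (colCount >= 0 && dr.FieldCount != colCount)
                {
                    return row;
                }
                colCount = dr.FieldCount;
            }
        }
        return null;
    }
}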
private static void Main(string[] args)
{
    using (var textReader = new StreamReader(@".\test.csv"))
    {
        var config = new CsvConfiguration(CultureInfo.InvariantCulture)
        {
            HasHeaderRecord = false
        };
        var csvReader = new CsvReader(textReader, config);
        var csvDataReader = new CsvDataReader(csvReader);
        while (csvDataReader.Read())
        {
            for (int i = 0; i < csvDataReader.FieldCount; i++)
            {
                Console.Write(csvDataReader.GetString(i) + " ");
            }
            Console.WriteLine(" ");
        }
    }
    Console.Read();
}
public void Read()
{
    reader.Read();
}
public override DataTable ToDataTable(DataTableConvertParams context, Stream stream)
{
    if (context == null)
    {
        context = new DataTableConvertParams();
    }

    StreamReader sr = new StreamReader(stream);
    DataTable dt = new DataTable();

    if (context.RecordDelim != '\n')
    {
        #region If the record delimiter is overridden, iterate through all characters and replace them with new lines

        // https://stackoverflow.com/questions/1232443/writing-to-then-reading-from-a-memorystream
        MemoryStream ms = new MemoryStream();
        StreamWriter sw = new StreamWriter(ms);
        while (sr.Peek() >= 0)
        {
            Char c = (Char)sr.Read();
            if (c == context.RecordDelim)
            {
                sw.Write('\n');
            }
            else
            {
                sw.Write(c);
            }
        }
        sw.Flush();
        ms.Position = 0;
        sr = new StreamReader(ms, Encoding.UTF8);

        #endregion If the record delimiter is overridden, iterate through all characters and replace them with new lines
    }

    //_conf.BadDataFound = cxt =>
    //{
    //    For debugging (put breakpoints here)
    //};
    _conf.Delimiter = context.FieldDelim.ToString();
    if (_conf.Delimiter != ",")
    {
        _conf.IgnoreQuotes = true;
    }
    _conf.HasHeaderRecord = context.HasHeaders;
    _conf.MissingFieldFound = null;

    CsvReader csvReader = new CsvReader(sr, _conf);
    CsvDataReader dataReader = new CsvDataReader(csvReader);

    if (!context.HasHeaders)
    {
        #region If there are no headers, loop through all records and add columns as they are found

        while (dataReader.Read())
        {
            while (dt.Columns.Count < dataReader.FieldCount)
            {
                dt.Columns.Add($"Col{dt.Columns.Count}");
            }
            DataRow row = dt.NewRow();
            for (int i = 0; i < dataReader.FieldCount; i++)
            {
                row[i] = dataReader.GetValue(i);
            }
            dt.Rows.Add(row);
        }
        dataReader.Close();
        sr.Close();

        #endregion If there are no headers, loop through all records and add columns as they are found
    }
    else
    {
        #region If there are headers, DataTable.Load will suffice

        try
        {
            dt.Load(dataReader, LoadOption.Upsert);
        }
        catch (Exception)
        {
            // Rethrow without resetting the stack trace ('throw ex;' would).
            throw;
        }
        finally
        {
            dataReader.Close();
            sr.Close();
        }

        #endregion If there are headers, DataTable.Load will suffice
    }

    // CsvHelper seems to make DataTable columns read-only.
    foreach (DataColumn dc in dt.Columns)
    {
        dc.ReadOnly = false;
    }
    return dt;
}
// Read in rows from a .csv file in a stream
static async Task<long> importCSV(string src, IMongoCollection<BsonDocument> collection, int import_chunk_size)
{
    long count = 0;
    var documents = new List<BsonDocument>();

    using (var fin = new StreamReader(src))
    using (var csv = new CsvReader(fin))
    using (var cr = new CsvDataReader(csv))
    {
        // Create utility arrays that can hold all elements of the header row and any given data row
        int amt_cols = cr.FieldCount;
        Debug.Assert(amt_cols > 0);
        var headers = new object[amt_cols];
        var records = new object[amt_cols];
        logger.Debug($"Parsed {amt_cols} columns in file {src}");

        var ret = cr.GetValues(headers);
        Debug.Assert(ret > 0);
        logger.Debug($"Returned {ret} : " + string.Join(',', headers));

        // Keep reading until EOF
        while (cr.Read())
        {
            // Read record line
            ret = cr.GetValues(records);
            Debug.Assert(ret > 0);

            // Create a dictionary mapping each header element to its respective record element;
            // weed out any empty string elements
            var zipped = headers.Zip(records, (h, r) => new { h, r })
                .Where(item => item.r.ToString() != "")
                .ToDictionary(item => item.h, item =>
                {
                    int i;
                    double d;
                    string r = item.r.ToString();
                    if (r.StartsWith('$'))
                    {
                        r = r.Substring(1);
                    }
                    if (int.TryParse(r, out i))
                    {
                        return i;
                    }
                    if (double.TryParse(r, out d))
                    {
                        return d;
                    }
                    return item.r;
                });

            // Add dictionary to import buffer
            documents.Add(zipped.ToBsonDocument());
            ++count;

            // Add documents in batches to the db
            if (count % import_chunk_size == 0)
            {
                await collection.InsertManyAsync(documents);
                logger.Debug($"Uploaded {import_chunk_size} records to {collection.CollectionNamespace.CollectionName}...");
                documents.Clear();
            }
        }

        // Add any remaining docs to the db
        if (documents.Count != 0)
        {
            await collection.InsertManyAsync(documents);
        }
    }
    return count;
}
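The value coercion inside the lambda above parses with the current culture, so "1.5" can come back as 15 on machines where ',' is the decimal separator. A variant that pins parsing to the invariant culture might look like the sketch below; the class and method names are hypothetical.

using System.Globalization;

static class FieldCoercion
{
    // Coerces a raw CSV field to int, then double, then leaves it as a string.
    // Culture is pinned so "1.5" parses the same way on every machine.
    public static object Coerce(string raw)
    {
        var r = raw.StartsWith("$") ? raw.Substring(1) : raw; // strip currency prefix
        if (int.TryParse(r, NumberStyles.Integer, CultureInfo.InvariantCulture, out var i))
        {
            return i;
        }
        if (double.TryParse(r, NumberStyles.Float, CultureInfo.InvariantCulture, out var d))
        {
            return d;
        }
        return raw;
    }
}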
protected override void BeginProcessing()
{
    var csvHelperConfig = Configuration ?? new Configuration();
    csvHelperConfig.IncludePrivateMembers = true;
    csvHelperConfig.MemberTypes = MemberTypes.Fields | MemberTypes.Properties;
    csvHelperConfig.AllowComments = AllowComments;
    csvHelperConfig.BufferSize = BufferSize;
    csvHelperConfig.Comment = CommentChar;
    csvHelperConfig.HasHeaderRecord = !NoHeaderRecord;
    csvHelperConfig.IgnoreBlankLines = !KeepBlankLines;
    csvHelperConfig.IgnoreQuotes = IgnoreQuote;
    csvHelperConfig.TrimOptions = TrimOption;

    var p = MyInvocation.BoundParameters;
    if (p.ContainsKey("Delimiter"))
    {
        csvHelperConfig.Delimiter = Delimiter;
    }
    if (p.ContainsKey("EscapeChar"))
    {
        csvHelperConfig.Escape = EscapeChar;
    }
    if (p.ContainsKey("QuoteChar"))
    {
        csvHelperConfig.Quote = QuoteChar;
    }

    _config = new Config()
    {
        CsvHelperConfiguration = csvHelperConfig,
        InitialCapacity = InitialCapacity,
        ColumnNames = ColumnNames,
        ColumnNameMap = ColumnNameMap,
        ColumnTypes = ColumnTypes,
        Strict = Strict,
        Culture = Culture
    };

    if (AsDataTable)
    {
        // The current version does not support the combination of -AsDataTable and an input stream.
        if (string.IsNullOrEmpty(Path))
        {
            WriteError(new ErrorRecord(new ArgumentException("-Path is required when -AsDataTable is set"), "", ErrorCategory.InvalidArgument, null));
            return;
        }

        using (var reader = new StreamReader(Path, Encoding))
        using (var csvReader = new CsvReader(reader, _config.CsvHelperConfiguration))
        using (var csvDataReader = new CsvDataReader(csvReader))
        {
            var dt = new DataTable();
            if (ColumnTypes != null)
            {
                foreach (DictionaryEntry entry in ColumnTypes)
                {
                    dt.Columns.Add((string)entry.Key, (Type)entry.Value);
                }
            }
            else
            {
                for (int i = 0; i < csvDataReader.FieldCount; i++)
                {
                    dt.Columns.Add(csvDataReader.GetName(i), csvDataReader.GetFieldType(i));
                }
            }

            if (ReadCount > 0)
            {
                int rowCount = 0;
                while (csvDataReader.Read())
                {
                    if (rowCount % ReadCount == 0)
                    {
                        WriteVerbose($"Starting batch of {ReadCount} ({rowCount} records processed)");
                    }

                    string[] row = new string[csvDataReader.FieldCount];
                    csvDataReader.GetValues(row);
                    dt.LoadDataRow(row, true);
                    rowCount++;

                    if (rowCount % ReadCount == 0)
                    {
                        WriteObject(dt.Copy());
                        dt.Clear();
                    }
                }

                // Write out remaining rows, if any.
                if (dt.Rows.Count > 0)
                {
                    WriteObject(dt);
                }
            }
            else
            {
                dt.Load(csvDataReader);
                WriteObject(dt);
            }
        }
        return;
    }

    if (!string.IsNullOrEmpty(Path))
    {
        using (var reader = new StreamReader(Path, Encoding))
        using (var loader = new CsvLoader(reader, _config))
        {
            LoadFile(loader);
        }
        return;
    }

    _output = new BlockingCollection<object>();
    _completeEvent = new ManualResetEvent(false);
    _loader = new CsvLoader(null, _config);

    var thread = new Thread(() =>
    {
        try
        {
            LoadFile(_loader);
        }
        catch (Exception e)
        {
            _exception = e;
        }
        finally
        {
            _completeEvent.Set();
        }
    });
    thread.Name = "Import-Csv2 loader thread";
    thread.Start();
}