public void ArgumentTestIndexer1()
{
    // Indexing a record with a negative field index: expected to throw
    // (the expected-exception attribute is presumably on this test, outside this view).
    using (var csv = new CsvRecordReader(new StringReader(CsvReaderSampleData.SampleData1), false))
    {
        record = csv.Read();
        string s = record[-1];
    }
}
/// <summary>
/// Builds the capture-mapper view model from a CSV record: field 4 plus the
/// memo portion of field 5 form the suggested regex pattern.
/// </summary>
/// <param name="record">The CSV record being mapped.</param>
public CaptureMapper(CsvRecord record)
{
    InitializeComponent();
    Model = new CaptureMapperModel();
    Model.CsvRecord = record.ToString();

    // Field 5 appears to hold a memo/description; anything from the " REF "
    // marker on is stripped (presumably a per-transaction reference number).
    // The original called IndexOf(" REF ") twice; compute it once.
    string field5 = record.Fields[5];
    int refIndex = field5.IndexOf(" REF ");
    string memo = refIndex > 0 ? field5.Substring(0, refIndex) : field5;

    Model.RegexPattern = record.Fields[4] + ".*" + memo.Trim();
    this.DataContext = Model;
}
/// <summary>
/// Persists the mappers as a two-column CSV (RegexPattern, Category),
/// writing to a temporary file first and then swapping it into place.
/// </summary>
/// <param name="mappers">The mappers to save.</param>
public static void SetMappers(List<Mapper> mappers)
{
    var csvFile = new CsvFile();
    csvFile.Headers.Add("RegexPattern");
    csvFile.Headers.Add("Category");
    foreach (var mapper in mappers)
    {
        var record = new CsvRecord();
        record.Fields.Add(mapper.RegexPattern);
        record.Fields.Add(mapper.Category);
        csvFile.Records.Add(record);
    }

    // The original computed the temp name as "t" + path, which prefixes the
    // WHOLE path and breaks when it contains a directory component
    // ("dir/file.csv" -> "tdir/file.csv"). Prefix the file name only, keeping
    // the temp file in the same directory. For a bare file name this yields
    // exactly the original "t" + name.
    string tempPath = Path.Combine(Path.GetDirectoryName(path) ?? string.Empty, "t" + Path.GetFileName(path));
    using (CsvWriter writer = new CsvWriter())
    {
        writer.WriteCsv(csvFile, tempPath);
    }
    File.Delete(path);
    File.Move(tempPath, path);
}
public void WriteFileTest()
{
    // Two records under a Name/Age header must round-trip to the exact CRLF text.
    var header = CsvHeader.FromValues("Name", "Age");
    var records = new[]
    {
        new CsvRecord(header, new[] { "Karl", "26" }),
        new CsvRecord(header, new[] { "Elena", "17" }),
    };

    using (var tempFile = new TempFile())
    {
        CsvWriter.WriteFile(header, records, tempFile.FullName);

        using (var reader = new StreamReader(tempFile.FullName))
        {
            var expected = "Name,Age\r\n" + "Karl,26\r\n" + "Elena,17\r\n";
            Assert.AreEqual(expected, reader.ReadToEnd());
        }
    }
}
public void UnicodeParsingTest3()
{
    // Round-trip a non-ASCII value through a UTF-16 (Encoding.Unicode) stream.
    const string test = "München";
    byte[] buffer;
    using (var stream = new MemoryStream())
    {
        using (TextWriter writer = new StreamWriter(stream, Encoding.Unicode))
        {
            writer.Write(test);
        }
        buffer = stream.ToArray();
    }

    using (var csv = new CsvRecordReader(new StreamReader(new MemoryStream(buffer), Encoding.Unicode, false), false))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual(test, record[0]);
        Assert.IsNull(csv.Read());
    }
}
public void CSVWirteReadLargeFile()
{
    // A field full of characters that need quoting/escaping, written 80 times
    // into each of 10,000 records, must survive a write/read round trip.
    var field = "\"'|\\/?>,<";
    var filePath = RootFileNameAndEnsureTargetFolderExists("App_Data/csv" + DateTimeOffset.UtcNow.ToFileTime() + ".csv");

    var csvFile = new CsvFile();
    foreach (var rec in Enumerable.Range(1, 10000).Select(i =>
    {
        var newRecord = new CsvRecord();
        foreach (var value in Enumerable.Range(1, 80).Select(s => field))
        {
            newRecord.Fields.Add(value);
        }
        return newRecord;
    }))
    {
        csvFile.Records.Add(rec);
    }

    using (var wr = new CsvWriter())
    {
        wr.WriteCsv(csvFile, filePath);
    }

    var field1 = "";
    using (var reader = new CsvReader(filePath))
    {
        if (reader.ReadNextRecord())
        {
            field1 = reader.Fields[0];
        }
    }

    // NUnit's Assert.AreEqual signature is (expected, actual); the original
    // passed them reversed, producing a misleading failure message.
    Assert.AreEqual(field, field1);
}
/// <summary>
/// Advances to the next data row, pairing each field with its header into
/// <c>CurrentRecord</c>. Consumes the header row lazily on the first call.
/// </summary>
/// <returns>true when a row was read; false at end of stream.</returns>
/// <exception cref="CsvReadingException">The row's field count differs from the header count.</exception>
private bool Read()
{
    if (!_isHeaderReaded)
    {
        ReadHeaderRow();
        _isHeaderReaded = true;
    }

    if (Reader.EndOfStream)
    {
        return false;
    }

    var fields = ReadRow();
    if (fields.Length != _headers.Count)
    {
        // Original message was ungrammatical ("Fields are not accepts to headers").
        throw new CsvReadingException("Invalid row: field count does not match header count.");
    }

    // Pair each field with its header, preserving column order.
    CurrentRecord = new CsvRecord();
    for (var i = 0; i < _headers.Count; i++)
    {
        CurrentRecord.Add(_headers.ElementAt(i), fields[i]);
    }

    return true;
}
public void ParsingTest34()
{
    // Two records of four quoted fields each. Quoting protects the embedded
    // commas, and a doubled quote ("") inside a quoted field decodes to a
    // single quote character.
    const string data = "\"Chicane\", \"Love on the Run\", \"Knight Rider\", \"This field contains a comma, but it doesn't matter as the field is quoted\"\r\n" +
        "\"Samuel Barber\", \"Adagio for Strings\", \"Classical\", \"This field contains a double quote character, \"\", but it doesn't matter as it is escaped\"";
    // no header; delimiter ',', quote '"', escape '"', comment '#';
    // trimming is applied to unquoted values only.
    using (CsvRecordReader csv = new CsvRecordReader(new System.IO.StringReader(data), false, ',', '\"', '\"', '#', ValueTrimmingOptions.UnquotedOnly))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("Chicane", record[0]);
        Assert.AreEqual("Love on the Run", record[1]);
        Assert.AreEqual("Knight Rider", record[2]);
        Assert.AreEqual("This field contains a comma, but it doesn't matter as the field is quoted", record[3]);
        Assert.AreEqual(0, csv.CurrentRecordIndex);
        Assert.AreEqual(4, csv.FieldCount);

        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("Samuel Barber", record[0]);
        Assert.AreEqual("Adagio for Strings", record[1]);
        Assert.AreEqual("Classical", record[2]);
        // the doubled "" in the input comes back as a single "
        Assert.AreEqual("This field contains a double quote character, \", but it doesn't matter as it is escaped", record[3]);
        Assert.AreEqual(1, csv.CurrentRecordIndex);
        Assert.AreEqual(4, csv.FieldCount);

        Assert.IsNull(csv.Read());
    }
}
public void ParsingTest32()
{
    // The quoted "2\n" field carries a literal newline without ending its
    // record; ValueTrimmingOptions.None keeps every value byte-for-byte.
    const string data = "\"1\",Bruce\r\n\"2\n\",Toni\r\n\"3\",Brian\r\n";
    using (CsvRecordReader csv = new CsvRecordReader(new System.IO.StringReader(data), false, ',', '\"', '\"', '#', ValueTrimmingOptions.None))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("1", record[0]);
        Assert.AreEqual("Bruce", record[1]);
        Assert.AreEqual(0, csv.CurrentRecordIndex);
        Assert.AreEqual(2, csv.FieldCount);

        record = csv.Read();
        Assert.NotNull(record);
        // the embedded newline survives inside the quoted field
        Assert.AreEqual("2\n", record[0]);
        Assert.AreEqual("Toni", record[1]);
        Assert.AreEqual(1, csv.CurrentRecordIndex);
        Assert.AreEqual(2, csv.FieldCount);

        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("3", record[0]);
        Assert.AreEqual("Brian", record[1]);
        Assert.AreEqual(2, csv.CurrentRecordIndex);
        Assert.AreEqual(2, csv.FieldCount);

        Assert.IsNull(csv.Read());
    }
}
/// <summary>
/// Writes a DataTable to a CSV file: column names become the header row,
/// each data row becomes one record with all values stringified.
/// </summary>
/// <param name="data">The table to export.</param>
/// <param name="destFile">Destination file path.</param>
public void SaveDatabaseToFile(DataTable data, string destFile)
{
    var file = new CsvFile();
    foreach (DataColumn col in data.Columns)
    {
        file.Headers.Add(col.ColumnName);
    }

    int columnCount = data.Columns.Count;
    foreach (DataRow row in data.Rows)
    {
        var record = new CsvRecord();
        for (int i = 0; i < columnCount; i++)
        {
            record.Fields.Add(row[i].ToString());
        }
        file.Records.Add(record);
    }

    using (var writer = new CsvWriter())
    {
        writer.WriteCsv(file, destFile);
    }
}
/// <summary>
/// Checks whether the entity selected by <paramref name="c"/> already exists in the database:
/// 's' = segment, 'b' = brand, 'm' = market, 'a' = advertisement,
/// 'n' = advert-aired link (advertisement x market).
/// </summary>
/// <param name="record">The CSV record supplying the lookup values.</param>
/// <param name="c">Selector character for which entity to check.</param>
/// <returns>true when a matching row exists; false otherwise (including unknown selectors).</returns>
private bool ValueAlreadyPresent(CsvRecord record, char c)
{
    bool result = false;
    using (var dbContext = new AdScoreShowDbContext())
    {
        switch (c)
        {
            case 's':
                Segment segment = dbContext.Segments.SingleOrDefault(s => s.Category == record.Segment);
                if (segment != null) { result = true; }
                break;
            case 'b':
                // (removed an unused local that copied record.Brand)
                Brand brand = dbContext.Brands.SingleOrDefault(b => b.Name == record.Brand);
                if (brand != null) { result = true; }
                break;
            case 'm':
                Market market = dbContext.Markets.SingleOrDefault(m => m.Country == record.Market);
                if (market != null) { result = true; }
                break;
            case 'a':
                Advertisement advert = dbContext.Advertisements
                    .SingleOrDefault(a => a.Copy_Name == record.Copy_Name && a.Copy_Duration == record.Copy_Duration);
                if (advert != null) { result = true; }
                break;
            case 'n':
                // The original used bitwise '&' here (every other case uses '&&')
                // and dereferenced ads.Id / mkt.Id without null checks, throwing
                // NullReferenceException when either lookup found nothing.
                Advertisement ads = dbContext.Advertisements
                    .SingleOrDefault(a => a.Copy_Name == record.Copy_Name && a.Copy_Duration == record.Copy_Duration);
                Market mkt = dbContext.Markets.SingleOrDefault(m => m.Country == record.Market);
                if (ads != null && mkt != null)
                {
                    AdvertAired adv = dbContext.AdvertAireds.SingleOrDefault(a => a.AdvertisementID == ads.Id && a.MarketID == mkt.Id);
                    if (adv != null) { result = true; }
                }
                break;
            default:
                break;
        }
    }
    return result;
}
// Fill must reject a null source array: expected to throw (the
// expected-exception attribute is presumably on this test, outside this view).
public void FillTest4()
{
    var rec = new CsvRecord(2, false, true);
    rec.Fill(null !);
}
// Builds a CsvTable from the dynamically generated grid of TextBoxes and
// hands it to the CSV service for persistence, then redirects home.
// Bails out silently when the grid is empty, the title is blank, or the
// dynamic table control cannot be found.
protected void btnFinishCreate_Click(object sender, EventArgs e)
{
    if (csvTableGrid.Controls.Count <= 0)
    {
        return;
    }
    try
    {
        bool hasHeader = bool.Parse(csvMakeFirstRowHeader.SelectedValue);
        string title = csvTitle.Text;
        if (string.IsNullOrWhiteSpace(title))
        {
            return;
        }
        // creating the table definition; it is stored under ~/App_Data/csv/<title>.csv
        var newCsv = new CsvTable();
        newCsv.Location = new Uri(Server.MapPath("~/App_Data/csv") + "/" + title + ".csv");
        newCsv.Settings = new CsvSettings()
        {
            Header = hasHeader, Separator = csvSeparator.SelectedValue
        };
        Table table = (Table)csvTableGrid.FindControl("dynamicTable");
        if (table == null)
        {
            return;
        }
        // One CsvRecord per table row; each cell's value comes from the
        // TextBox placed in that cell. Ids mirror row/column positions.
        List <CsvRecord> records = new List <CsvRecord>();
        int i = 0;
        foreach (TableRow row in table.Rows)
        {
            var record = new CsvRecord();
            record.Id = i;
            int j = 0;
            foreach (TableCell cell in row.Cells)
            {
                TextBox textBox = (TextBox)cell.Controls[0];
                record.Fields.Add(new CsvField()
                {
                    Id = j, Value = textBox.Text
                });
                j++;
            }
            records.Add(record);
            i++;
        }
        // When the first row is the header, split it off from the data records.
        if (hasHeader)
        {
            newCsv.Header = records[0];
            newCsv.Records = records.Skip(1).ToList();
        }
        else
        {
            newCsv.Records = records;
        }
        var service = CustomServiceLocator.Instance.GetCsvService();
        service.SaveTable(newCsv);
        Response.Redirect("~/", true);
    }
    catch (Exception ex)
    {
        // alert here and log error
        // NOTE(review): this swallows every exception with no logging —
        // including the ThreadAbortException that Redirect(..., true) can
        // raise on classic ASP.NET. Confirm this is intended.
    }
}
/// <summary>
/// Parses raw CSV text into records via a single character-by-character scan.
/// Handles quoted fields (with "" as an escaped quote), bare fields
/// (whitespace-trimmed), empty fields, and \r / \n / \r\n record terminators.
/// A trailing '\n' is appended so the final record is always flushed.
/// </summary>
/// <param name="content">The raw CSV text.</param>
/// <param name="delimiter">NOTE(review): never used — the scanner is
/// hard-coded to ','. Confirm whether this parameter should be honored.</param>
/// <returns>The parsed records; a trailing "empty" record is dropped.</returns>
public static CsvRecord[] GetRecords(string content, string delimiter)
{
    List <CsvRecord> records = new List <CsvRecord>();
    //List<string> fields = new List<string>();
    // Append a newline so the loop always flushes the last record.
    content = content + "\n";
    var charArray = content.ToArray();
    //the following variables are used in the for loop.
    var currentField = string.Empty;
    var concatenating = true; // we are inside a field: its start was seen but not its end
    var startWithDoubleQuote = false;
    var startRecord = true;   // positioned at the start of a new record
    var currentRecord = new CsvRecord();
    records.Add(currentRecord);
    for (int i = 0; i < charArray.Length; i++)
    {
        var currentChar = charArray[i];
        var nextChar = i + 1 < charArray.Length? charArray[i + 1] : '\0';
        if (startRecord)
        {
            // At the start of a record: classify its first character.
            switch (currentChar)
            {
                case '\r':
                    continue;
                case '\n':
                    continue;
                case ',':
                    // record begins with a delimiter: first field is empty
                    currentRecord.Add(string.Empty);
                    concatenating = false;
                    break;
                case '"':
                    startWithDoubleQuote = true;
                    concatenating = true;
                    break;
                default:
                    currentField += currentChar;
                    startWithDoubleQuote = false;
                    concatenating = true;
                    break;
            }
            startRecord = false;
        }
        else if (concatenating)
        {
            if (startWithDoubleQuote)
            {
                if (currentChar == '"')
                {
                    //if this is the last char
                    if (i + 1 == charArray.Length)
                    {
                        currentField += currentChar;
                        AddField(currentRecord, ref currentField, ref concatenating);
                        continue;
                    }
                    if (nextChar == '"')
                    {
                        // "" inside a quoted field is an escaped quote: keep one, skip the other
                        currentField += currentChar;
                        i++;
                    }
                    else //nextChar != '"'
                    {
                        // closing quote ends the field; a following ',' is consumed too
                        AddField(currentRecord, ref currentField, ref concatenating);
                        if (nextChar == ',')
                        {
                            i++;
                        }
                    }
                }
                else // currentChar!='"'
                {
                    if (i + 1 == charArray.Length)
                    {
                        // input ends inside an open quoted field
                        throw new CsvFormatException();
                    }
                    currentField += currentChar;
                }
            }
            else //not startWithDoubleQuote
            {
                //if this is the last char
                if (i + 1 == charArray.Length)
                {
                    currentField += currentChar;
                    currentField = currentField.Trim();
                    currentRecord.Add(currentField);
                    continue;
                }
                switch (currentChar)
                {
                    case '\r':
                    case '\n':
                        // Unquoted field ended by a line break: flush it and start a new record.
                        // NOTE(review): unlike AddField, this path does not Trim() — confirm intentional.
                        currentRecord.Add(currentField);
                        currentRecord = new CsvRecord();
                        currentField = string.Empty;
                        records.Add(currentRecord);
                        startRecord = true;
                        continue;
                    case ',':
                        AddField(currentRecord, ref currentField, ref concatenating);
                        break;
                    default:
                        currentField += currentChar;
                        break;
                }
            }
        }
        else // not concatenating
        {
            // Between fields: waiting for a delimiter, a terminator, or the next field's first char.
            switch (currentChar)
            {
                case '\r':
                case '\n':
                    var previousChar = i > 0? charArray[i - 1]:'\0';
                    if (previousChar == ',')
                    {
                        // trailing comma right before the break means a final empty field
                        currentRecord.Add(string.Empty);
                        currentRecord = new CsvRecord();
                        records.Add(currentRecord);
                        startRecord = true;
                    }
                    else
                    {
                        currentRecord = new CsvRecord();
                        records.Add(currentRecord);
                        startRecord = true;
                    }
                    continue;
                case ',':
                    currentRecord.Add(string.Empty);
                    break;
                case '"':
                    startWithDoubleQuote = true;
                    concatenating = true;
                    break;
                default:
                    startWithDoubleQuote = false;
                    concatenating = true;
                    currentField += currentChar;
                    break;
            }
        }
    }
    //checked if the last record is empty
    // NOTE(review): "empty" here also matches a record where ANY field is
    // null/empty, not just an all-empty trailing artifact — confirm intentional.
    var last = records.Last();
    var isEmpty = false;
    if (last.Count == 0)
    {
        isEmpty = true;
    }
    else
    {
        foreach (string s in last)
        {
            if (string.IsNullOrEmpty(s))
            {
                isEmpty = true;
                break;
            }
        }
    }
    if (isEmpty)
    {
        records.Remove(last);
    }
    return(records.ToArray());
}
// ContainsColumn must reject a null column name: expected to throw (the
// expected-exception attribute is presumably on this test, outside this view).
public void ContainsColumnTest2()
{
    var rec = new CsvRecord(new string[0], false, false, true, false);
    _ = rec.ContainsColumn(null !);
}
public void ParsingTest41()
{
    // The trailing 16 is presumably an internal buffer size smaller than the
    // input line (TODO confirm), so parsing must span a refill; trimming is
    // UnquotedOnly, dropping the space after "First record".
    using (var csv = new CsvRecordReader(new StringReader("First record ,Second record"), false, CsvReader.DefaultDelimiter, CsvReader.DefaultQuote, CsvReader.DefaultEscape, CsvReader.DefaultComment, ValueTrimmingOptions.UnquotedOnly, 16))
    {
        Assert.AreEqual(2, csv.FieldCount);

        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("First record", record[0]);
        Assert.AreEqual("Second record", record[1]);

        Assert.IsNull(csv.Read());
    }
}
public void ParsingTest16()
{
    // '\r' is the delimiter here, so "1\r2\n" is one record of two fields.
    const string data = "1\r2\n";
    using (var csv = new CsvRecordReader(new StringReader(data), false, '\r', '"', '\"', '#', ValueTrimmingOptions.UnquotedOnly))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("1", record[0]);
        Assert.AreEqual("2", record[1]);
        Assert.AreEqual(2, csv.FieldCount);
        Assert.AreEqual(0, csv.CurrentRecordIndex);
        Assert.IsNull(csv.Read());
    }
}
public void ParsingTest30()
{
    // The blank line between the two "1" rows is skipped: only two records come back.
    const string data = "1\r\n\r\n1";
    using (var csv = new CsvRecordReader(new StringReader(data), false))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("1", record[0]);
        Assert.AreEqual(1, csv.FieldCount);
        Assert.AreEqual(0, csv.CurrentRecordIndex);

        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("1", record[0]);
        Assert.AreEqual(1, csv.FieldCount);
        Assert.AreEqual(1, csv.CurrentRecordIndex);

        Assert.IsNull(csv.Read());
    }
}
public void ParsingTest31()
{
    // The middle line starts with '#' (the comment character), so the reader
    // drops it entirely and the two data rows are adjacent.
    const string data = "1\r\n# bunch of crazy stuff here\r\n1";
    using (var csv = new CsvRecordReader(new StringReader(data), false, ',', '\"', '\"', '#', ValueTrimmingOptions.None))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("1", record[0]);
        Assert.AreEqual(1, csv.FieldCount);
        Assert.AreEqual(0, csv.CurrentRecordIndex);

        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("1", record[0]);
        Assert.AreEqual(1, csv.FieldCount);
        Assert.AreEqual(1, csv.CurrentRecordIndex);

        Assert.IsNull(csv.Read());
    }
}
public void ParsingTest29()
{
    // Two long (75-char) fields in one record must come back intact.
    var left = new String('a', 75);
    var right = new String('b', 75);
    string data = left + "," + right;
    using (var csv = new CsvRecordReader(new StringReader(data), false))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual(left, record[0]);
        Assert.AreEqual(right, record[1]);
        Assert.AreEqual(2, csv.FieldCount);
        Assert.AreEqual(0, csv.CurrentRecordIndex);
        Assert.IsNull(csv.Read());
    }
}
public void ParsingTest3()
{
    // Both '\r' and '\n' act as record terminators: two one-field records.
    const string data = "1\r2\n";
    using (var csv = new CsvRecordReader(new StringReader(data), false))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("1", record[0]);

        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("2", record[0]);

        Assert.IsNull(csv.Read());
    }
}
public void ParsingTest22()
{
    // Delimiter is '\r'. The first line "\r\r" (terminated by '\n') parses as
    // three empty fields, fixing FieldCount at 3; the second record starts
    // with "1" and its remaining fields are empty.
    const string data = "\r\r\n1\r\r";
    using (CsvRecordReader csv = new CsvRecordReader(new System.IO.StringReader(data), false, '\r', '\"', '\"', '#', ValueTrimmingOptions.None))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual(3, csv.FieldCount);
        Assert.AreEqual(String.Empty, record[0]);
        Assert.AreEqual(String.Empty, record[1]);
        Assert.AreEqual(String.Empty, record[2]);
        Assert.AreEqual(0, csv.CurrentRecordIndex);

        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("1", record[0]);
        Assert.AreEqual(String.Empty, record[1]);
        Assert.AreEqual(1, csv.CurrentRecordIndex);

        Assert.IsNull(csv.Read());
    }
}
public void ParsingTest20()
{
    // With the header flag set, the first row supplies field names, so values
    // are reachable both by index and by name.
    const string data = "user_id,name\r\n1,Bruce";
    using (var csv = new CsvRecordReader(new StringReader(data), true))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("1", record[0]);
        Assert.AreEqual("Bruce", record[1]);
        Assert.AreEqual("1", record["user_id"]);
        Assert.AreEqual("Bruce", record["name"]);
        Assert.AreEqual(0, csv.CurrentRecordIndex);
        Assert.AreEqual(2, csv.FieldCount);

        // Reading past the end keeps returning null.
        Assert.IsNull(csv.Read());
        Assert.IsNull(csv.Read());
    }
}
public void ParsingTest2()
{
    // ["Bob said, ""Hey!""",2, 3 ]
    const string data = "\"Bob said, \"\"Hey!\"\"\",2, 3 ";

    // Default construction: the unquoted " 3 " is trimmed down to "3".
    using (var csv = new CsvRecordReader(new StringReader(data), false))
    {
        CsvRecord record;
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual(@"Bob said, ""Hey!""", record[0]);
        Assert.AreEqual("2", record[1]);
        Assert.AreEqual("3", record[2]);
        Assert.IsNull(csv.Read());
    }

    // With trimming disabled, the surrounding spaces survive.
    using (var csv = new CsvRecordReader(new StringReader(data), false, ',', '"', '"', '#', ValueTrimmingOptions.None))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual(@"Bob said, ""Hey!""", record[0]);
        Assert.AreEqual("2", record[1]);
        Assert.AreEqual(" 3 ", record[2]);
        Assert.IsNull(csv.Read());
    }
}
public void UnicodeParsingTest1()
{
    // control characters and comma are skipped
    // Builds one giant field from chars U+000E upward; the slot that would
    // hold ',' (U+002C) is replaced with a space so the whole string stays a
    // single field and must round-trip unchanged through the reader.
    // NOTE(review): with length 65536 - 13 the final element is
    // (char)(65522 + 14) = (char)65536, which wraps to '\0' in an unchecked
    // cast; 65536 - 14 looks intended — confirm.
    char[] raw = new char[65536 - 13];
    for (int i = 0; i < raw.Length; i++)
        raw[i] = (char)(i + 14);
    raw[44 - 14] = ' '; // skip comma: raw[i] holds (char)(i + 14), so index 44-14 maps to ','
    string data = new string(raw);
    using (CsvRecordReader csv = new CsvRecordReader(new StringReader(data), false))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual(data, record[0]);
        Assert.IsNull(csv.Read());
    }
}
public void ParsingTest7()
{
    // A leading blank line is skipped; the first real row is still record index 0.
    using (var csv = new CsvRecordReader(new System.IO.StringReader("\r\n1\r\n"), false))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("1", record[0]);
        Assert.AreEqual(1, csv.FieldCount);
        Assert.AreEqual(0, csv.CurrentRecordIndex);
        Assert.AreEqual(',', csv.Delimiter);
        Assert.IsNull(csv.Read());
    }
}
public void ParsingTest40()
{
    // A trailing comma (before '\n' and before EOF) yields an empty second field.
    using (var csv = new CsvRecordReader(new StringReader("\"00\",\n\"10\","), false))
    {
        Assert.AreEqual(2, csv.FieldCount);

        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("00", record[0]);
        Assert.AreEqual(string.Empty, record[1]);

        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("10", record[0]);
        Assert.AreEqual(string.Empty, record[1]);

        Assert.IsNull(csv.Read());
    }
}
/// <summary>
/// Builds a single "upsert" command for one table: an UPDATE by primary key
/// (when there are non-PK columns to set) followed by an INSERT guarded by
/// NOT EXISTS on the same key. One SqlParameter is added per CSV header,
/// typed from the matching column definition.
/// </summary>
private static SqlCommand MakeCommand(SqlConnection connection, IEnumerable <ColumnDef> columnDefs, CsvRecord headers)
{
    //TODO: ensure that all the not-nullable columns have corresponding headers (this will include the primary key fields).
    //TODO: ensure that all the columns in headers are actually in the table.
    var toReturn = connection.CreateCommand();
    var tableName = columnDefs.First().TableName;

    /*
     * update [dbo].[DinerType]
     * set [Description] = @Description,
     * [Code] = @Code
     * where [Id] = @Id
     *
     * insert into [dbo].[DinerType] ([Id], [Description], [Code]) select @Id, @Description, @Code where not exists(select * from [dbo].[DinerType] where [Id] = @Id)
     */
    string setClause = GetNonPkSetClause(columnDefs, headers);
    string pkWhereClause = GetPkWhereClause(columnDefs);
    string insertColumnListClause = GetInsertColumnListClause(headers);
    string insertSelectColumnListClause = GetInsertSelectColumnListClause(headers);

    //the setClause might be an empty string when this is a linking table
    string commandText = "";
    if (!string.IsNullOrEmpty(setClause))
    {
        commandText = string.Format("update [dbo].[{0}] {1} {2}", tableName, setClause, pkWhereClause);
    }
    commandText += string.Format( @" insert into [dbo].[{0}] ({2}) select {3} where not exists (select * from [dbo].[{0}] {1})", tableName, pkWhereClause, insertColumnListClause, insertSelectColumnListClause);

    // Identity columns require identity_insert toggled on around the statement.
    if (columnDefs.Any(cd => cd.IsIdentity))
    {
        commandText = "set identity_insert [dbo].[" + tableName + "] on\r\n" + commandText + "\r\nset identity_insert [dbo].[" + tableName + "] off\r\n";
    }
    toReturn.CommandText = commandText;

    // One parameter per CSV header; the header's value is the column name
    // (matched case-insensitively against the column definitions).
    foreach (CsvField header in headers.Fields)
    {
        CsvField header1 = header; // avoid capturing the loop variable in the lambda
        var columnDef = columnDefs.First(cd => string.Equals(cd.ColumnName, header1.Value, StringComparison.OrdinalIgnoreCase));
        toReturn.Parameters.Add(MakeParameter(columnDef));
    }
    toReturn.CommandType = CommandType.Text;
    return(toReturn);
}
public void ParsingTest42()
{
    // A record containing only a single space parses as one empty field.
    using (var csv = new CsvRecordReader(new StringReader(" "), false))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual(1, csv.FieldCount);
        Assert.AreEqual(string.Empty, record[0]);
        Assert.IsNull(csv.Read());
    }
}
public void ParsingTest8()
{
    // Quoted field keeps its comma, doubled quotes decode to one quote,
    // and the unquoted " 3 " is trimmed (UnquotedOnly).
    const string data = "\"bob said, \"\"Hey!\"\"\",2, 3 ";
    using (var csv = new CsvRecordReader(new StringReader(data), false, ',', '\"', '\"', '#', ValueTrimmingOptions.UnquotedOnly))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("bob said, \"Hey!\"", record[0]);
        Assert.AreEqual("2", record[1]);
        Assert.AreEqual("3", record[2]);
        Assert.AreEqual(3, csv.FieldCount);
        Assert.AreEqual(0, csv.CurrentRecordIndex);
        Assert.AreEqual(',', csv.Delimiter);
        Assert.IsNull(csv.Read());
    }
}
/// <summary>
/// Commits the field being accumulated to <paramref name="currentRecord"/>
/// (trimmed), then resets the accumulator and clears the in-field flag.
/// </summary>
/// <param name="currentRecord">Record receiving the finished field.</param>
/// <param name="currentField">Accumulated field text; reset to empty.</param>
/// <param name="concatenating">In-field flag; cleared to false.</param>
private static void AddField(CsvRecord currentRecord, ref string currentField, ref bool concatenating)
{
    currentRecord.Add(currentField.Trim());
    currentField = string.Empty;
    concatenating = false;
}
public void ParsingTest33()
{
    // Escape char is '\', so each escaped backslash pair in the quoted input
    // collapses to a single backslash in the parsed value.
    const string data = "\"double\\\\\\\\double backslash\"";
    using (var csv = new CsvRecordReader(new StringReader(data), false, ',', '\"', '\\', '#', ValueTrimmingOptions.None))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("double\\\\double backslash", record[0]);
        Assert.AreEqual(1, csv.FieldCount);
        Assert.AreEqual(0, csv.CurrentRecordIndex);
        Assert.IsNull(csv.Read());
    }
}
/// <summary>
/// True when the record's ProgType is non-blank and one of the values
/// this code treats as a "framework" programme type.
/// </summary>
private static bool FrameworkProgTypeSelected(CsvRecord record)
{
    var frameworkProgTypes = new[] { "2", "3", "20", "21", "22", "23" };
    return !string.IsNullOrWhiteSpace(record.ProgType) && InList(record.ProgType, frameworkProgTypes);
}
// Fill with more values (3) than the record apparently has columns (2):
// expected to throw (the expected-exception attribute is presumably on this
// test, outside the visible chunk).
public void FillTest3()
{
    var rec = new CsvRecord(2, false, true);
    rec.Fill(new string[] { "1", "2", "3" });
}
/// <summary>
/// Imports author records from a tab-separated dump via a TPL Dataflow
/// pipeline: CSV rows -> parallel JSON deserialization into AuthorRecord ->
/// batches of 100 -> parallel bulk upserts through the client, with a
/// running success/failed counter redrawn in place on the console.
/// </summary>
/// <param name="stream">The raw dump to read.</param>
public async Task ImportAsync(Stream stream)
{
    // Stage 1: parse each row's JSON payload into AuthorData (only the
    // fields this importer cares about are copied over).
    var deserializeBlock = new TransformBlock <CsvRecord, AuthorRecord>(x =>
    {
        var data = new AuthorData();
        var json = JObject.Parse(x.Json);
        if (json.TryGetValue("name", out var name))
        {
            data.Name = GetString(name);
        }
        if (json.TryGetValue("birth_date", out var birthdate))
        {
            data.Birthdate = GetString(birthdate);
        }
        if (json.TryGetValue("bio", out var bio))
        {
            data.Bio = GetString(bio);
        }
        if (json.TryGetValue("personal_name", out var personalName))
        {
            data.PersonalName = GetString(personalName);
        }
        if (json.TryGetValue("wikipedia", out var wikipedia))
        {
            data.Wikipedia = GetString(wikipedia);
        }
        return(new AuthorRecord(x.Id, data));
    }, new ExecutionDataflowBlockOptions
    {
        MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 1, BoundedCapacity = 100
    });

    // Stage 2: group records into batches of 100 for the bulk API.
    var batchBlock = new BatchBlock <AuthorRecord>(100, new GroupingDataflowBlockOptions
    {
        BoundedCapacity = 100
    });

    var totalFailed = 0;
    var totalSuccess = 0;
    Console.Write("Importing (success/failed)...");
    // Remember the cursor position so the counters can be redrawn in place.
    var y = Console.CursorTop;
    var x = Console.CursorLeft;
    var lockObject = new object();
    // Stage 3: send each batch as one bulk upsert and tally the per-item results.
    var importBlock = new ActionBlock <AuthorRecord[]>(async authors =>
    {
        var request = new BulkUpdate
        {
            OptimizeValidation = true, DoNotScript = true, DoNotValidate = false, DoNotValidateWorkflow = true, Jobs = authors.Select(x =>
            {
                return(new BulkUpdateJob
                {
                    Id = x.Id, Data = x.Author, Type = BulkUpdateType.Upsert
                });
            }).ToList()
        };
        var response = await client.BulkUpdateAsync(request);
        // Counter updates and console writes are serialized across parallel batches.
        lock (lockObject)
        {
            totalFailed += response.Count(x => x.Error != null);
            totalSuccess += response.Count(x => x.Error == null);
            Console.SetCursorPosition(x, y);
            Console.Write("{0}/{1}", totalSuccess, totalFailed);
        }
    }, new ExecutionDataflowBlockOptions
    {
        MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 1, BoundedCapacity = Environment.ProcessorCount * 2
    });

    deserializeBlock.LinkTo(batchBlock, new DataflowLinkOptions
    {
        PropagateCompletion = true
    });
    batchBlock.LinkTo(importBlock, new DataflowLinkOptions
    {
        PropagateCompletion = true
    });

    using (var streamReader = new StreamReader(stream))
    {
        var configuration = new CsvConfiguration(CultureInfo.InvariantCulture)
        {
            BadDataFound = null, Delimiter = "\t"
        };
        using (var csvReader = new CsvReader(streamReader, configuration))
        {
            // Columns 1 and 4 feed the CsvRecord's id and JSON payload —
            // presumably fixed positions in the dump format (confirm against the source file).
            while (await csvReader.ReadAsync())
            {
                var record = new CsvRecord(
                    csvReader.GetField(1),
                    csvReader.GetField(4));
                await deserializeBlock.SendAsync(record);
            }
        }
    }
    // Signal end of input and wait for the tail of the pipeline to drain.
    deserializeBlock.Complete();
    await importBlock.Completion;
    Console.WriteLine();
}
/// <summary>
/// Gets or sets the field value at the specified record index for the supplied field name.
/// </summary>
/// <param name="recordIndex">Record index.</param>
/// <param name="fieldName">Field name, matched against <c>Headers</c>.</param>
/// <exception cref="IndexOutOfRangeException">No record at <paramref name="recordIndex"/>, or the record has no field at the header's column index.</exception>
/// <exception cref="ArgumentException">No header named <paramref name="fieldName"/>.</exception>
public string this[int recordIndex, string fieldName]
{
    get
    {
        // The original duplicated record/header/field validation between the
        // getter and setter; both now share the helpers below (same checks,
        // same exception types and messages, same order).
        CsvRecord record = GetRecordAt(recordIndex);
        int fieldIndex = GetFieldIndex(fieldName);
        EnsureFieldExists(record, fieldIndex, recordIndex);
        return record.Fields[fieldIndex];
    }
    set
    {
        CsvRecord record = GetRecordAt(recordIndex);
        int fieldIndex = GetFieldIndex(fieldName);
        EnsureFieldExists(record, fieldIndex, recordIndex);
        record.Fields[fieldIndex] = value;
    }
}

// Returns the record at the index, or throws with the message callers expect.
private CsvRecord GetRecordAt(int recordIndex)
{
    if (recordIndex > (Records.Count - 1))
    {
        throw new IndexOutOfRangeException(string.Format("There is no record at index {0}.", recordIndex));
    }
    return Records[recordIndex];
}

// Resolves a header name to its column index; throws when no header matches.
// Note: string.Compare without a StringComparison is culture-sensitive;
// preserved from the original so matching behavior is unchanged.
private int GetFieldIndex(string fieldName)
{
    for (int i = 0; i < Headers.Count; i++)
    {
        if (string.Compare(Headers[i], fieldName) == 0)
        {
            return i;
        }
    }
    throw new ArgumentException(string.Format("There is no field header with the name '{0}'", fieldName));
}

// Verifies the record actually has a field at the resolved column index.
private static void EnsureFieldExists(CsvRecord record, int fieldIndex, int recordIndex)
{
    if (fieldIndex > (record.Fields.Count - 1))
    {
        throw new IndexOutOfRangeException(string.Format("There is no field at index {0} in record {1}.", fieldIndex, recordIndex));
    }
}
public void ParsingTest9()
{
    // A lone comma means one record with two empty fields.
    using (var csv = new CsvRecordReader(new StringReader(","), false))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual(String.Empty, record[0]);
        Assert.AreEqual(String.Empty, record[1]);
        Assert.AreEqual(2, csv.FieldCount);
        Assert.AreEqual(0, csv.CurrentRecordIndex);
        Assert.AreEqual(',', csv.Delimiter);
        Assert.IsNull(csv.Read());
    }
}
/// <summary>
/// Exports the Games list to a CSV file. The columns (and their order) come
/// from the entries currently in listCSV; each entry is parsed back into a
/// CsvItems enum value that drives the per-column switch below. After writing,
/// settings are saved and the dialog is closed.
/// </summary>
/// <param name="fname">Destination file path.</param>
private void SaveCSV(string fname)
{
    CsvData data = new CsvData();
    CsvRecord header = new CsvRecord();
    List <CsvItems> items = new List <CsvItems>();
    foreach (string s in listCSV.Items)
    {
        header.Fields.Add(s);
        items.Add((CsvItems)Enum.Parse(typeof(CsvItems), s));
    }
    data.Header = header;
    foreach (Game g in Games)
    {
        CsvRecord record = new CsvRecord();
        data.Records.Add(record);
        foreach (CsvItems c in items)
        {
            switch (c)
            {
                case CsvItems.Title: record.Fields.Add(g.Title); break;
                case CsvItems.TitleId: record.Fields.Add(g.TitleId); break;
                case CsvItems.Genres: record.Fields.Add(g.Genre); break;
                case CsvItems.Date: record.Fields.Add(g.Date.ToString(CultureInfo.CurrentCulture)); break;
                case CsvItems.Type: record.Fields.Add(g.Type); break;
                // disc number 9999 is the sentinel for an expansion
                case CsvItems.Disc: record.Fields.Add(g.Disc == 9999 ? "Expansion" : g.Disc.ToString(CultureInfo.CurrentCulture)); break;
                case CsvItems.Developer: record.Fields.Add(g.Developer); break;
                case CsvItems.Publisher: record.Fields.Add(g.Publisher); break;
                // booleans are exported as "1"/"0"
                case CsvItems.WebPopulated: record.Fields.Add(g.WebPopulated ? "1" : "0"); break;
                case CsvItems.AbgxStatus:
                    // numeric abgx status code -> human-readable label
                    switch (g.AbgxStats)
                    {
                        case 0: record.Fields.Add("Not Checked"); break;
                        case 1: record.Fields.Add("Ok"); break;
                        case 2: record.Fields.Add("Unverified"); break;
                        default: record.Fields.Add("Error"); break;
                    }
                    break;
                case CsvItems.IsActive: record.Fields.Add(g.Active ? "1" : "0"); break;
                case CsvItems.Regions: record.Fields.Add(g.Regions); break;
                // CRCs are formatted as 8-digit uppercase hex; empty when no abgx data
                case CsvItems.XEXCrc: record.Fields.Add(g.Abgx != null ? g.Abgx.XEXCRC.Crc.ToString("X8") : string.Empty); break;
                case CsvItems.GameCrc: record.Fields.Add(g.Abgx != null ? g.Abgx.GameCRC.Crc.ToString("X8") : string.Empty); break;
                case CsvItems.MediaId: record.Fields.Add(g.MediaId); break;
                case CsvItems.XKeyId: record.Fields.Add(g.ID); break;
                case CsvItems.FullPath: record.Fields.Add(g.FullIsoPath); break;
                // path relative to the game directory (the +1 skips the separator)
                case CsvItems.PartialPath: record.Fields.Add(g.FullIsoPath.Substring(g.GameDirectoy.Length + 1)); break;
                case CsvItems.DateDay: record.Fields.Add(g.Date.Day.ToString(CultureInfo.CurrentCulture)); break;
                case CsvItems.DateMonth: record.Fields.Add(g.Date.Month.ToString(CultureInfo.CurrentCulture)); break;
                case CsvItems.DateYear: record.Fields.Add(g.Date.Year.ToString(CultureInfo.CurrentCulture)); break;
                // values below 1 and the 9999 sentinel both normalize to "1"
                case CsvItems.NumberOfDiscs: record.Fields.Add(g.NumberOfDiscs < 1 ? "1" : (g.NumberOfDiscs == 9999) ? "1" : g.NumberOfDiscs.ToString(CultureInfo.CurrentCulture)); break;
                case CsvItems.Size: record.Fields.Add(g.Size.ToString(CultureInfo.CurrentCulture)); break;
                case CsvItems.RegionCode: record.Fields.Add(g.RegionCode); break;
                case CsvItems.Trailer: record.Fields.Add(g.Trailer); break;
            }
        }
    }
    File.WriteAllText(fname, CsvEncoder.Encode(data));
    SaveFromSetting();
    this.Close();
}
/// <summary>
/// Entry point: args[0] is a CSV "tables file" mapping table name -> data
/// file (relative to the tables file); args[1] is the connection string.
/// Each data file's header row drives a per-table upsert command, and every
/// subsequent row is inserted or updated. Backtick-wrapped field values mean
/// "load this column's bytes from the named file".
/// </summary>
static int Main(string[] args)
{
    //TODO: argument validation
    var tablesFileName = args[0];
    var connectionString = args[1];
    var fi = new FileInfo(tablesFileName);
    var tablesFileDirectory = fi.DirectoryName;

    List <KeyValuePair <string, string> > tables;
    using (var fs = File.OpenRead(tablesFileName))
    {
        tables = fs.GetCsvRecords().Select(rec => new KeyValuePair <string, string>(rec[0], rec[1])).ToList();
    }

    using (var connection = new SqlConnection(connectionString))
    {
        connection.Open();
        var getColumnsCmd = CreateGetColumnsCommand(connection);
        ILookup <string, ColumnDef> tableDefs = GetTableDefs(getColumnsCmd);

        foreach (KeyValuePair <string, string> keyValuePair in tables)
        {
            int tableRowCount = 0;
            string tableName = keyValuePair.Key;
            string tableDataFileName = keyValuePair.Value;
            string dataFile = Path.Combine(tablesFileDirectory, tableDataFileName);
            fi = new FileInfo(dataFile);
            var tableDataFileDirectory = fi.DirectoryName;

            using (var fs = File.OpenRead(dataFile))
            {
                // The first CSV record is the header row; it determines the
                // upsert command's parameter list.
                var recordEnumerator = fs.GetCsvRecords().GetEnumerator();
                recordEnumerator.MoveNext();
                CsvRecord headers = recordEnumerator.Current;
                SqlCommand insertOrUpdateCommand = MakeCommand(connection, tableDefs[tableName], headers);

                while (recordEnumerator.MoveNext())
                {
                    var currentRecord = recordEnumerator.Current;
                    // put this row in the database either as an insert or an update.
                    ClearParameters(insertOrUpdateCommand);
                    foreach (CsvField field in currentRecord.Fields)
                    {
                        string parameterName = "@" + headers[field.Index].ToLower();
                        // handle backtick field values... which mean to retrieve the bytes from another file.
                        if (field.Value.StartsWith("`") && field.Value.EndsWith("`"))
                        {
                            var fieldValueFilePath = Path.Combine(tableDataFileDirectory, field.Value.Trim('`'));
                            // File.ReadAllBytes replaces the original single Stream.Read
                            // call, whose return value was ignored: Read is not
                            // guaranteed to fill the buffer, so large files could be
                            // written to the database partially zeroed.
                            insertOrUpdateCommand.Parameters[parameterName].Value = File.ReadAllBytes(fieldValueFilePath);
                        }
                        else
                        {
                            insertOrUpdateCommand.Parameters[parameterName].Value = field.Value;
                        }
                    }
                    tableRowCount += insertOrUpdateCommand.ExecuteNonQuery();
                }
                // report progress to the console.
                Console.Out.WriteLine("Table [{0}]: {1}", tableName, tableRowCount);
            }
            //TODO: delete extra rows (without failing) if the tables file says to.
        }
    }
    return 0;
}
public void CsvWriter_VerifyThatCarriageReturnsAreHandledCorrectlyInFieldValues()
{
    // Headers and fields containing commas, embedded quotes, CR and LF.
    CsvFile csvFile = new CsvFile();
    csvFile.Headers.Add("header ,1");
    csvFile.Headers.Add("header\r\n2");
    csvFile.Headers.Add("header 3");
    CsvRecord record = new CsvRecord();
    record.Fields.Add("da,ta 1");
    record.Fields.Add("\"data\" 2");
    record.Fields.Add("data\n3");
    csvFile.Records.Add(record);

    // Default writer behavior (per the expected string): CR/LF inside values
    // are replaced with commas, which then forces those values to be quoted;
    // embedded quotes are doubled.
    string content = string.Empty;
    using (CsvWriter writer = new CsvWriter())
    {
        content = writer.WriteCsv(csvFile, Encoding.Default);
    }
    Assert.IsTrue(string.Compare(content, "\"header ,1\",\"header,2\",header 3\r\n\"da,ta 1\",\"\"\"data\"\" 2\",\"data,3\"\r\n") == 0);

    // With replacement disabled, the raw CR/LF characters survive unquoted.
    using (CsvWriter writer = new CsvWriter()
    {
        ReplaceCarriageReturnsAndLineFeedsFromFieldValues = false
    })
    {
        content = writer.WriteCsv(csvFile, Encoding.Default);
    }
    Assert.IsTrue(string.Compare(content, "\"header ,1\",header\r\n2,header 3\r\n\"da,ta 1\",\"\"\"data\"\" 2\",data\n3\r\n") == 0);
}
public void SkipEmptyLinesTest2()
{
    // With SkipEmptyLines enabled, the blank middle line disappears entirely.
    using (var csv = new CsvRecordReader(new StringReader("00\n\n10"), false))
    {
        csv.SkipEmptyLines = true;
        Assert.AreEqual(1, csv.FieldCount);

        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("00", record[0]);

        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual("10", record[0]);

        Assert.IsNull(csv.Read());
    }
}
/// <summary>
/// A lone tab with tab as the delimiter parses as one record of two empty fields.
/// </summary>
public void ParsingTest35()
{
    using (CsvRecordReader csv = new CsvRecordReader(new StringReader("\t"), false, '\t'))
    {
        Assert.AreEqual(2, csv.FieldCount);

        record = csv.Read();
        Assert.NotNull(record);

        // Both sides of the delimiter are empty.
        for (int fieldIndex = 0; fieldIndex < 2; fieldIndex++)
            Assert.AreEqual(string.Empty, record[fieldIndex]);

        Assert.IsNull(csv.Read());
    }
}
/// <summary>
/// Builds a <c>CsvFile</c> from a <see cref="DataTable"/>: one header per column name
/// and one record per row, with every cell converted to a string field.
/// </summary>
/// <param name="table">The source table; never modified.</param>
/// <returns>The populated CSV file model.</returns>
private CsvFile CreateCsvFileFromDataTable(DataTable table)
{
    CsvFile file = new CsvFile();

    foreach (DataColumn column in table.Columns)
        file.Headers.Add(column.ColumnName);

    foreach (DataRow row in table.Rows)
    {
        CsvRecord record = new CsvRecord();

        foreach (object o in row.ItemArray)
        {
            if (o is DateTime dateTime)
                // "HH" (24-hour) instead of the original "hh": 12-hour formatting
                // without an AM/PM designator wrote 13:05 as "01:05", losing the
                // afternoon distinction. InvariantCulture keeps the text
                // machine-readable regardless of the host locale.
                record.Fields.Add(dateTime.ToString(
                    "yyyy-MM-dd HH:mm:ss",
                    System.Globalization.CultureInfo.InvariantCulture));
            else
                record.Fields.Add(o.ToString());
        }

        file.Records.Add(record);
    }

    return file;
}
/// <summary>
/// A single line ending in '\n' parses as exactly one three-field record.
/// </summary>
public void ParsingTest38()
{
    using (CsvRecordReader csv = new CsvRecordReader(new StringReader("abc,def,ghi\n"), false))
    {
        // Read FieldCount as the original did (may trigger the initial parse).
        int fieldCount = csv.FieldCount;

        record = csv.Read();
        Assert.NotNull(record);

        string[] expected = { "abc", "def", "ghi" };
        for (int i = 0; i < expected.Length; i++)
            Assert.AreEqual(expected[i], record[i]);

        // The trailing newline must not yield a second (empty) record.
        Assert.IsNull(csv.Read());
    }
}
/// <summary>
/// With ValueTrimmingOptions.UnquotedOnly, the trailing " " in the last column of each
/// row is trimmed to an empty string.
/// </summary>
public void ParsingTest39()
{
    using (CsvRecordReader csv = new CsvRecordReader(
        new StringReader("00,01, \n10,11, "),
        false,
        CsvReader.DefaultDelimiter,
        CsvReader.DefaultQuote,
        CsvReader.DefaultEscape,
        CsvReader.DefaultComment,
        ValueTrimmingOptions.UnquotedOnly,
        1))
    {
        // Read FieldCount as the original did (may trigger the initial parse).
        int fieldCount = csv.FieldCount;

        string[][] expectedRows =
        {
            new[] { "00", "01", "" },
            new[] { "10", "11", "" },
        };

        foreach (string[] expectedRow in expectedRows)
        {
            record = csv.Read();
            Assert.NotNull(record);

            for (int column = 0; column < expectedRow.Length; column++)
                Assert.AreEqual(expectedRow[column], record[column]);
        }

        Assert.IsNull(csv.Read());
    }
}
/// <summary>
/// Builds a <c>CsvFile</c> with the given headers and a single record containing the
/// given field values.
/// </summary>
/// <param name="headers">Header names, copied in order.</param>
/// <param name="fields">Field values for the one record, copied in order.</param>
/// <returns>The populated CSV file model.</returns>
private CsvFile CreateCsvFile(List<string> headers, List<string> fields)
{
    var csvFile = new CsvFile();

    foreach (string header in headers)
        csvFile.Headers.Add(header);

    var record = new CsvRecord();
    foreach (string field in fields)
        record.Fields.Add(field);

    csvFile.Records.Add(record);
    return csvFile;
}
/// <summary>
/// A quoted field containing raw CR/LF sequences keeps them verbatim; the three
/// fields after it are all empty.
/// </summary>
public void ParsingTest4()
{
    const string data = "\"\n\r\n\n\r\r\",,\t,\n";

    using (CsvRecordReader csv = new CsvRecordReader(new StringReader(data), false))
    {
        record = csv.Read();
        Assert.NotNull(record);
        Assert.AreEqual(4, csv.FieldCount);

        // The quoted newlines survive unchanged in the first field.
        Assert.AreEqual("\n\r\n\n\r\r", record[0]);

        // Remaining fields (including the "\t" one) come back empty.
        for (int i = 1; i < 4; i++)
            Assert.AreEqual("", record[i]);

        Assert.IsNull(csv.Read());
    }
}
/// <summary>
/// Parses a single CSV line into a <c>CsvRecord</c> of string values using a
/// character-by-character state machine with three states: waiting for a field,
/// reading an unquoted field, and reading a quoted field. Doubled quotes ("")
/// are unescaped to a single quote in both reading states.
/// </summary>
/// <param name="line">The raw line to parse (must not be null; no newline handling here).</param>
/// <returns>The parsed record; always contains at least one (possibly empty) value.</returns>
public static CsvRecord ParseLine(string line)
{
    var values = new CsvRecord();
    var currentValue = new StringBuilder(1024);
    char currentChar;
    // nextChar is null on the last character of the line (one-character lookahead).
    Nullable<char> nextChar = null;
    var currentState = ReadState.WaitingForNewField;

    for (var charIndex = 0; charIndex < line.Length; charIndex++)
    {
        // Get the current and next character
        currentChar = line[charIndex];
        nextChar = charIndex < line.Length - 1 ? line[charIndex + 1] : new Nullable<char>();

        // Perform logic based on state and decide on next state
        switch (currentState)
        {
            case ReadState.WaitingForNewField:
                {
                    // Start of a field: an opening quote begins a quoted field, a comma
                    // emits an empty field, anything else begins an unquoted field.
                    currentValue.Clear();
                    if (currentChar == DoubleQuote)
                    {
                        currentState = ReadState.PushingQuoted;
                        continue;
                    }
                    else if (currentChar == Comma)
                    {
                        // Empty field (",," or a leading ","): push "" as-is, no trim.
                        values.Add(currentValue.ToString());
                        currentState = ReadState.WaitingForNewField;
                        continue;
                    }
                    else
                    {
                        currentValue.Append(currentChar);
                        currentState = ReadState.PushingNormal;
                        continue;
                    }
                }
            case ReadState.PushingNormal:
                {
                    // Handle field content delimiter by comma
                    if (currentChar == Comma)
                    {
                        currentState = ReadState.WaitingForNewField;
                        // Unquoted values are trimmed of surrounding whitespace.
                        values.Add(currentValue.ToString().Trim());
                        currentValue.Clear();
                        continue;
                    }

                    // Handle double quote escaping
                    if (currentChar == DoubleQuote && nextChar == DoubleQuote)
                    {
                        // advance 1 character now. The loop will advance one more.
                        currentValue.Append(currentChar);
                        charIndex++;
                        continue;
                    }

                    currentValue.Append(currentChar);
                    break;
                }
            case ReadState.PushingQuoted:
                {
                    // Handle field content delimiter by ending double quotes
                    // (a quote NOT followed by another quote closes the quoted section;
                    // the remainder of the field is then read in PushingNormal).
                    if (currentChar == DoubleQuote && nextChar != DoubleQuote)
                    {
                        currentState = ReadState.PushingNormal;
                        continue;
                    }

                    // Handle double quote escaping
                    if (currentChar == DoubleQuote && nextChar == DoubleQuote)
                    {
                        // advance 1 character now. The loop will advance one more.
                        currentValue.Append(currentChar);
                        charIndex++;
                        continue;
                    }

                    // Inside quotes: commas and all other characters are literal.
                    currentValue.Append(currentChar);
                    break;
                }
        }
    }

    // push anything that has not been pushed (flush)
    // NOTE(review): this final Trim() (and the one on the PushingNormal comma path)
    // also strips leading/trailing whitespace from QUOTED fields, e.g. "  a  " -> "a".
    // RFC 4180 would preserve quoted whitespace — confirm this is intentional before
    // relying on it.
    values.Add(currentValue.ToString().Trim());
    return (values);
}
//#if NET40 //[SuppressMessage("Usage", "CA1801:Nicht verwendete Parameter überprüfen", Justification = "<Ausstehend>")] //#endif public ColumnAliasesLookup(CsvRecord record, ReadOnlyCollection <string> aliases, int wildcardTimeout) { this.CsvRecordIdentifier = record.Identifier; IEqualityComparer <string>? comparer = record.Comparer; ReadOnlyCollection <string>?columnNames = record.ColumnNames; //this.Aliases = aliases.Intersect(record.Keys, comparer).Distinct(comparer).ToList(); for (int i = 0; i < aliases.Count; i++) { string alias = aliases[i]; if (alias is null) { continue; } if (HasWildcard(alias)) { #if NET40 Regex regex = InitRegex(comparer, alias); #else Regex regex = InitRegex(comparer, alias, wildcardTimeout); #endif for (int k = 0; k < columnNames.Count; k++) // Die Wildcard könnte auf alle keys passen. { string columnName = columnNames[k]; try { if (regex.IsMatch(columnName)) { this.ColumnName = columnName; return; } } catch (TimeoutException) { #if !NET40 Debug.WriteLine(nameof(RegexMatchTimeoutException)); #endif } } } else { for (int j = 0; j < columnNames.Count; j++) { string columnName = columnNames[j]; if (comparer.Equals(columnName, alias)) // Es kann in columnNames keine 2 Strings geben, auf die das zutrifft. { this.ColumnName = columnName; return; } } } }