public ActionResult Index(HttpPostedFileBase file)
{
    // Imports menu items from an uploaded CSV into the Menus table,
    // replacing all existing rows. Sets ViewBag.ImportSuccess on success.
    if (file.ContentLength > 0)
    {
        var fileName = Path.GetFileName(file.FileName);
        var path = Path.Combine(Server.MapPath("~/App_Data"), fileName);
        if (System.IO.File.Exists(path))
            System.IO.File.Delete(path);
        file.SaveAs(path);
        // FIX: the StreamReader was never disposed (file-handle leak on any
        // exception); using-blocks guarantee cleanup on all paths.
        using (var sr = new StreamReader(path))
        using (var csvread = new CsvReader(sr))
        {
            IEnumerable<MenuItem> records = csvread.GetRecords<MenuItem>();
            // Wipe the existing menu before re-importing.
            context.Database.ExecuteSqlCommand("delete from Menus");
            context.SaveChanges();
            foreach (MenuItem item in records)
            {
                Menu m = new Menu();
                m.Name = item.Name;
                m.DayOfWeek = item.DayOfWeek;
                m.Price = item.Price;
                context.Menus.Add(m);
            }
            // FIX: SaveChanges was called once per row; one call after the
            // loop persists the same data with far fewer round-trips.
            context.SaveChanges();
        }
        ViewBag.ImportSuccess = "Import CSV file successfully!";
    }
    return View();
}
public void SecurityControllerTest()
{
    // Seeds StockPrices from an embedded CSV resource, inserting three rows
    // (ids 1-3) copied from the parsed data with a fixed trading date.
    //http://www.davepaquette.com/archive/2014/03/18/seeding-entity-framework-database-from-csv.aspx
    //http://adrianmejia.com/blog/2011/07/18/cs-getmanifestresourcestream-gotcha
    Assembly assembly = Assembly.GetExecutingAssembly();
    string resourceName = "ShareWealth.Web.Test.SeedData.stockPrice.csv";
    // NOTE(review): the context is never disposed here — confirm intended.
    ApplicationDbContext context = new ApplicationDbContext();
    using (Stream stream = assembly.GetManifestResourceStream(resourceName))
    {
        using (StreamReader reader = new StreamReader(stream, Encoding.UTF8))
        {
            CsvReader csvReader = new CsvReader(reader);
            // Tolerate CSV columns missing for some StockPrice properties.
            csvReader.Configuration.WillThrowOnMissingField = false;
            var stockPrice = csvReader.GetRecords<StockPrice>().ToArray();
            // Rows 1..3 of the parsed file (row 0 is skipped) become seed rows.
            for (int i = 1; i < 4 ; i++)
            {
                var sp = new StockPrice
                {
                    Id = i,
                    TradingDate = new DateTime(2013,12,5),
                    SecurityId = stockPrice[i].SecurityId,
                    Open = stockPrice[i].Open,
                    High = stockPrice[i].High,
                    Low = stockPrice[i].Low,
                    Close = stockPrice[i].Close,
                };
                context.StockPrices.Add(sp);
            }
            context.SaveChanges();
        }
    }
}
public async Task<List<JobDetails>> GetJobDetailsAsync(string pipelineName, string stageName, string jobName)
{
    // Queries the search endpoint for job details and parses the CSV response.
    // On any failure the error message is written to the console and an empty
    // list is returned.
    try
    {
        var searchUrl = Urls.SearchUrl;
        var query = new NameValueCollection
        {
            {nameof(searchUrl), searchUrl},
            {nameof(pipelineName), pipelineName},
            {nameof(stageName), stageName},
            {nameof(jobName), jobName},
            {@"limitCount", jobLimitCount.ToString()},
            {@"limitPipeline", "latest"}
        };
        var requestUri = new Uri(searchUrl).AttachParameters(query);
        var responseStream = await Dispatch.GetRawStreamAsync(requestUri);
        using (var streamReader = new StreamReader(responseStream))
        using (var parser = new CsvReader(streamReader))
        {
            return parser.GetRecords<JobDetails>().ToList();
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        return Enumerable.Empty<JobDetails>().ToList();
    }
}
private static int? BatchProcessGraceCc(CsvReader csv, DateTime date, int? fundid)
{
    // Imports Grace credit-card contribution rows into a single bundle.
    // Returns the new bundle's id, or null when the file had no usable rows.
    BundleHeader bh = null;
    var fid = fundid ?? BatchImportContributions.FirstFundId();
    while (csv.ReadNextRecord())
    {
        var dt = csv[3].ToDate();
        var amount = csv[13];
        // Skip rows lacking either an amount or a parseable date.
        if (!amount.HasValue() || !dt.HasValue)
            continue;
        var routing = csv[10];
        var account = csv[9];
        var checkno = csv[12];
        // The bundle header is created lazily from the first valid row's date.
        if (bh == null)
            bh = BatchImportContributions.GetBundleHeader(dt.Value, DateTime.Now);
        var bd = BatchImportContributions.AddContributionDetail(date, fid, amount, checkno, routing, account);
        bh.BundleDetails.Add(bd);
    }
    // FIX: a file with no valid rows previously reached FinishBundle(null) and
    // bh.BundleHeaderId, throwing NullReferenceException. Return null instead,
    // matching BatchProcessFbcFayetteville.
    if (bh == null)
        return null;
    BatchImportContributions.FinishBundle(bh);
    return bh.BundleHeaderId;
}
private void dataGridViewMaker_CellMouseClick(object sender, DataGridViewCellMouseEventArgs e)
{
    // When a maker (brand) row is clicked, reloads the model grid with every
    // row of the models CSV whose brand-id column (csv[1]) matches the id in
    // cell 1 of the clicked row.
    try
    {
        selectIndex = e.RowIndex;
        // selectRow = dataGridViewMaker.Rows[selectIndex].Cells[2].Value.ToString();
        selectIdBrand = int.Parse(dataGridViewMaker.Rows[selectIndex].Cells[1].Value.ToString());
        // ';'-delimited CSV with a header row; filePathMoels points at the models file.
        using (CsvReader csv = new CsvReader(new StreamReader(filePathMoels), true, ';'))
        {
            int i = 0;
            dataGridViewModel.Rows.Clear();
            while (csv.ReadNextRecord())
            {
                if (int.Parse(csv[1]) == selectIdBrand)
                {
                    dataGridViewModel.Rows.Add();
                    dataGridViewModel.Rows[i].Cells[0].Value = i + 1; // 1-based display index
                    dataGridViewModel.Rows[i].Cells[1].Value = csv[0];
                    dataGridViewModel.Rows[i].Cells[2].Value = csv[2];
                    i++;
                }
            }
        }
    }
    catch
    {
        // NOTE(review): every failure (header click with RowIndex -1,
        // unparsable ids, missing file) is silently swallowed — confirm this
        // is intentional UI behavior.
    }
}
private static int? BatchProcessFbcFayetteville(CsvReader csv, DateTime date, int? fundid)
{
    // Imports FBC Fayetteville contribution rows into a single bundle and
    // returns its id, or null when the file contained no data rows.
    var headerNames = csv.GetFieldHeaders(); // consumes the header row
    BundleHeader bundle = null;
    var defaultFund = FirstFundId();
    var fund = fundid ?? defaultFund;
    while (csv.ReadNextRecord())
    {
        // Summary rows ("Total Checks...") carry no contribution data.
        if (csv[6].StartsWith("Total Checks"))
            continue;
        var routingNo = csv[4];
        var accountNo = csv[5];
        var checkNo = csv[6];
        var amt = csv[7];
        if (bundle == null)
            bundle = GetBundleHeader(date, DateTime.Now);
        var detail = AddContributionDetail(date, fund, amt, checkNo, routingNo, accountNo);
        bundle.BundleDetails.Add(detail);
    }
    if (bundle == null)
        return null;
    FinishBundle(bundle);
    return bundle.BundleHeaderId;
}
public void LastFieldEmptyFollowedByMissingFieldsOnNextRecord()
{
    // Regression test: a record ending with an empty last field ("a,b,c,d,")
    // must yield "" for that field, while a genuinely short record ("a,b,")
    // under MissingFieldAction.ReplaceByNull yields "" for its trailing empty
    // field and null for the fields that are missing entirely.
    const string Data = "a,b,c,d,e" + "\na,b,c,d," + "\na,b,";
    using (var csv = new CsvReader(new StringReader(Data), false))
    {
        csv.MissingFieldAction = MissingFieldAction.ReplaceByNull;
        var record = new string[5];
        Assert.IsTrue(csv.ReadNextRecord());
        csv.CopyCurrentRecordTo(record);
        Assert.AreEqual(new string[] { "a", "b", "c", "d", "e" }, record);
        Assert.IsTrue(csv.ReadNextRecord());
        csv.CopyCurrentRecordTo(record);
        // Trailing comma => present-but-empty field, not a missing one.
        Assert.AreEqual(new string[] { "a", "b", "c", "d", "" }, record);
        Assert.IsTrue(csv.ReadNextRecord());
        csv.CopyCurrentRecordTo(record);
        // Fields 3 and 4 never appeared in the input, so they become null.
        Assert.AreEqual(new string[] { "a", "b", "", null, null }, record);
        Assert.IsFalse(csv.ReadNextRecord());
    }
}
public bool ParseData(string filename)
{
    // Loads time/roll/pitch/yaw input columns from a CSV flight log into the
    // preallocated f_* arrays, capped at n_fileRowCount rows. Returns false
    // when any required ratio column is missing from the header.
    using (CsvReader csv = new CsvReader(new StreamReader(filename), true))
    {
        int fieldCount = csv.FieldCount;
        int rollFieldIndex = csv.GetFieldIndex("Roll Input (ratio)");
        int pitchFieldIndex = csv.GetFieldIndex("Pitch Input (ratio)");
        int yawFieldIndex = csv.GetFieldIndex("Yaw Input (ratio)");
        if (rollFieldIndex == -1 || pitchFieldIndex == -1 || yawFieldIndex == -1)
        {
            return false;
        }
        int currentIndex = 0;
        while (csv.ReadNextRecord() && currentIndex < n_fileRowCount)
        {
            // Column 0 is treated as the timestamp — TODO confirm header name.
            // NOTE(review): float.Parse is culture-sensitive; a comma-decimal
            // locale would misread these values — confirm the data's format.
            f_timeInputArray[currentIndex] = float.Parse(csv[0]);
            f_rollInputArray[currentIndex] = float.Parse(csv[rollFieldIndex]);
            f_pitchInputArray[currentIndex] = float.Parse(csv[pitchFieldIndex]);
            f_yawInputArray[currentIndex] = float.Parse(csv[yawFieldIndex]);
            // The last row processed determines the max time.
            f_maxTime = f_timeInputArray[currentIndex];
            currentIndex++;
        }
        // NOTE(review): with an empty file this reads an unwritten slot 0 —
        // presumably the arrays are zero-initialised; verify.
        f_startTime = f_timeInputArray[0];
        n_dataPoints = currentIndex;
    }
    return true;
}
public List<DictionaryMapped> GetUsersFromCsv()
{
    // Loads the user CSV into dictionary-backed records. Throws
    // FileNotFoundException when the file is absent; any parse failure is
    // logged and rethrown.
    if (!File.Exists(CsvFilePath))
    {
        throw new FileNotFoundException("Could not locate CSV at path " + CsvFilePath, CsvFilePath);
    }
    try
    {
        using (var streamReader = new StreamReader(CsvFilePath))
        {
            Log.Debug(string.Format("Attempting to parse user CSV sheet {0}.", CsvFilePath));
            using (var csv = new CsvReader(streamReader))
            {
                var users = new List<DictionaryMapped>();
                foreach (var row in csv.GetRecords<dynamic>())
                {
                    users.Add(new DictionaryMapped(row as IDictionary<string, object>));
                }
                return users;
            }
        }
    }
    catch (Exception fl)
    {
        Log.Error("An error occured loading the CSV file " + CsvFilePath, fl, this);
        throw;
    }
}
public void BooleanTypeConverterTest()
{
    // Verifies the boolean converter accepts "true"/"True"/"1" as true and
    // "false"/"False"/"0" as false, for both bool and Nullable<bool> columns.
    var stream = new MemoryStream();
    var writer = new StreamWriter( stream );
    writer.WriteLine( "BoolColumn,BoolNullableColumn,StringColumn" );
    writer.WriteLine( "true,true,1" );
    writer.WriteLine( "True,True,2" );
    writer.WriteLine( "1,1,3" );
    writer.WriteLine( "false,false,4" );
    writer.WriteLine( "False,False,5" );
    writer.WriteLine( "0,0,6" );
    writer.Flush();
    // Rewind so the reader sees everything just written.
    stream.Position = 0;
    var reader = new StreamReader( stream );
    var csvReader = new CsvReader( reader );
    var records = csvReader.GetRecords<TestBoolean>().ToList();
    Assert.IsTrue( records[0].BoolColumn );
    Assert.IsTrue( records[0].BoolNullableColumn);
    Assert.IsTrue( records[1].BoolColumn);
    Assert.IsTrue( records[1].BoolNullableColumn );
    Assert.IsTrue( records[2].BoolColumn );
    Assert.IsTrue( records[2].BoolNullableColumn );
    Assert.IsFalse( records[3].BoolColumn );
    Assert.IsFalse( records[3].BoolNullableColumn);
    Assert.IsFalse( records[4].BoolColumn );
    Assert.IsFalse( records[4].BoolNullableColumn );
    Assert.IsFalse( records[5].BoolColumn );
    Assert.IsFalse( records[5].BoolNullableColumn );
}
public static List<IPRangeCountry> Download()
{
    // Downloads the IP2Location LITE DB1 archive, extracts the contained CSV
    // and parses it into IP-range/country records (no header row in the data).
    using (var client = new WebClient())
    {
        byte[] zippedData = client.DownloadData("http://download.ip2location.com/lite/IP2LOCATION-LITE-DB1.CSV.ZIP");
        using (var zippedFileStream = new MemoryStream(zippedData))
        using (ZipFile zipFile = ZipFile.Read(zippedFileStream))
        {
            ZipEntry compressedFileInZip = zipFile.SelectEntries("*.csv").FirstOrDefault();
            // FIX: fail with a clear message instead of a NullReferenceException
            // if the archive unexpectedly contains no CSV entry.
            if (compressedFileInZip == null)
                throw new InvalidOperationException("Downloaded archive does not contain a CSV entry.");
            using (var csvStream = new MemoryStream())
            {
                compressedFileInZip.Extract(csvStream);
                csvStream.Position = 0;
                using (var textReader = new StreamReader(csvStream))
                using (var csv = new CsvReader(textReader))
                {
                    csv.Configuration.RegisterClassMap<IPRangeCountryMap>();
                    csv.Configuration.HasHeaderRecord = false;
                    csv.Read();
                    return csv.GetRecords<IPRangeCountry>().ToList();
                }
            }
        }
    }
}
public void ArgumentTestCopyCurrentRecordTo3()
{
    // Argument-validation test: copying into a 1-element array starting at
    // index 1 leaves no room for any field, so CopyCurrentRecordTo is
    // expected to reject the arguments.
    // NOTE(review): the expected-exception attribute is outside this excerpt
    // — confirm it declares the intended exception type.
    using (CsvReader csv = new CsvReader(new StringReader(CsvReaderSampleData.SampleData1), false))
    {
        csv.CopyCurrentRecordTo(new string[1], 1);
    }
}
private void btnConfigMandosImportFromCSV_Click(object sender, EventArgs e)
{
    // Lets the user pick a CSV file and loads keypad (id, name) rows into the
    // grid; a malformed file is reported via a message box.
    DialogResult dr = this.openFileDialog1.ShowDialog();
    if (dr == System.Windows.Forms.DialogResult.OK)
    {
        try
        {
            string file = openFileDialog1.FileName;
            List<KeyPadList> lst = new List<KeyPadList>();
            // The first character of the delimiter textbox is the separator.
            using (CsvReader csv = new CsvReader(new StreamReader(file), true, txtCsvDelimiter.Text.ToCharArray().First()))
            {
                string[] headers = csv.GetFieldHeaders();
                while (csv.ReadNextRecord())
                {
                    KeyPadList field = new KeyPadList();
                    field.Id = Convert.ToInt32( csv[0]);
                    field.Nombre = csv[1];
                    lst.Add(field);
                }
            }
            gridKeyPads.AutoGenerateColumns = true;
            gridKeyPads.DataSource = lst;
        }
        catch (ArgumentOutOfRangeException)
        {
            // Thrown when a row lacks the expected columns.
            MessageBox.Show("Archivo CSV con formato incorrecto", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
        // FIX: removed a redundant `catch (Exception) { throw; }` handler —
        // it only rethrew, so other exceptions propagate exactly as before.
    }
}
public void Blah()
{
    // Exercises re-reading with the same CsvReader after rewinding the
    // underlying stream: the header was consumed on the first pass, so the
    // second pass over data appended at `position` parses "2,two" as a data
    // row, giving exactly one record.
    using( var stream = new MemoryStream() )
    using( var reader = new StreamReader( stream ) )
    using( var writer = new StreamWriter( stream ) )
    using( var csv = new CsvReader( reader ) )
    {
        writer.WriteLine( "Id,Name" );
        writer.WriteLine( "1,one" );
        writer.Flush();
        stream.Position = 0;
        var records = csv.GetRecords<Test>().ToList();
        // Remember where the first read stopped (end of "1,one").
        var position = stream.Position;
        writer.WriteLine( "2,two" );
        writer.Flush();
        stream.Position = position;
        records = csv.GetRecords<Test>().ToList();
        writer.WriteLine( "2,two" );
        writer.Flush();
        stream.Position = position;
        Assert.AreEqual( 1, records.Count );
        Assert.AreEqual( 2, records[0].Id );
        Assert.AreEqual( "two", records[0].Name );
    }
}
public static List<Tuple<int, string, int, string>> GetStudentData()
{
    // Reads Students.csv (relative to the working directory) and projects
    // each row into a (user_id, user_name, course_id, state) tuple.
    using (var sr = new StreamReader("Students.csv"))
    {
        var reader = new CsvReader(sr);
        return reader.GetRecords<StudentDataReader>()
            .Select(s => new Tuple<int, string, int, string>(s.user_id, s.user_name, s.course_id, s.state))
            .ToList();
    }
}
private static async Task LoadFlightsAsync(string csvFilename)
{
    // Streams flight rows from a CSV file into the "flights" collection in
    // fixed-size batches, printing a dot per committed batch.
    Console.WriteLine("Loading {0}", Path.GetFileName(csvFilename));
    var collection = __database.GetCollection<Flight>("flights");
    const int batchSize = 1000;
    using (var csvReader = new CsvReader(new StreamReader(csvFilename)))
    {
        var batch = new List<Flight>();
        foreach (var flight in csvReader.GetRecords<Flight>())
        {
            PreprocessFlight(flight);
            batch.Add(flight);
            if (batch.Count != batchSize)
                continue;
            await collection.InsertManyAsync(batch);
            batch.Clear();
            Console.Write(".");
        }
        // Flush any partial final batch.
        if (batch.Count > 0)
        {
            await collection.InsertManyAsync(batch);
        }
    }
    Console.WriteLine();
}
protected void btnUpload_Click(object sender, EventArgs e)
{
    // Imports uploaded CSV rows (six address columns) as department-order
    // folders, counting successful inserts; any failure is reported to the
    // user as a single generic message.
    try
    {
        using (CsvReader csv = new CsvReader(new StreamReader(fileUpload.FileContent), true))
        {
            int count = 0;
            DepartmentOrderTableAdapters.DepartmentOrderRowTableAdapter orderTA = new DepartmentOrderTableAdapters.DepartmentOrderRowTableAdapter();
            while (csv.ReadNextRecord())
            {
                // Columns 0..5 are the address parts; concatenated (no
                // separators) into the full-address field.
                string fullAddress = csv[0] + csv[1] + csv[2] + csv[3] + csv[4] + csv[5];
                // NOTE(review): large positional parameter list — argument
                // order must match the generated InsertAndReturn signature.
                IEnumerator iEnumerator = orderTA.InsertAndReturn(false,
                    (int)Session[WebConstants.Session.REG_DEPT_ID],
                    (int)Session[WebConstants.Session.REG_CO_ID],
                    WebConstants.Default.NOT_SET,
                    WebConstants.Default.NOT_SET,
                    WebConstants.Default.NOT_SET,
                    DateTime.Now, 0, DateTime.Now.AddYears(1), false, "",
                    csv[0], csv[1], csv[2], csv[3], csv[4], csv[5],
                    fullAddress, "", false, false, false,
                    null, null, null, null,
                    (int)Session[WebConstants.Session.REG_USER_ID], DateTime.Now,
                    (int)Session[WebConstants.Session.REG_USER_ID], DateTime.Now).GetEnumerator();
                // A returned row indicates the insert succeeded.
                if (iEnumerator.MoveNext())
                {
                    count++;
                }
            }
            SetInfoMessage(count + " folders added to the system.");
        }
    }
    catch
    {
        // Deliberate catch-all: the user only needs to know the file was bad.
        SetErrorMessage("Error processing the file. Please check its contents. File should be CSV and in the format shown below");
    }
}
private static int? BatchProcessAbundantLife(CsvReader csv, DateTime date, int? fundid)
{
    // Imports Abundant Life contribution rows, starting a new bundle whenever
    // the batch column (csv[5]) changes. The first row of each batch is a
    // total row and is skipped. Returns the final bundle's id, or null when
    // the file had no rows.
    BundleHeader bh = null;
    var fid = fundid ?? BatchImportContributions.FirstFundId();
    var prevbatch = "";
    while (csv.ReadNextRecord())
    {
        var batch = csv[5];
        if (bh == null || batch != prevbatch)
        {
            // Close the previous bundle (if any) and open one for the new batch.
            if (bh != null)
                BatchImportContributions.FinishBundle(bh);
            bh = BatchImportContributions.GetBundleHeader(date, DateTime.Now);
            prevbatch = batch;
            continue; // the first row of a batch is a total row
        }
        var amount = csv[3];
        var routing = csv[8];
        var account = csv[9];
        var checkno = csv[10];
        var bd = BatchImportContributions.AddContributionDetail(date, fid, amount, checkno, routing, account);
        bh.BundleDetails.Add(bd);
    }
    // FIX: FinishBundle was previously invoked even when bh was null (empty
    // file), inconsistent with the null-propagating return below. Short-circuit
    // the null case like the sibling importers do.
    if (bh == null)
        return null;
    BatchImportContributions.FinishBundle(bh);
    return bh.BundleHeaderId;
}
public ActionResult Upload(HttpPostedFileBase productFile)
{
    // Imports products from an uploaded CSV (mapped via ProductMap), stamping
    // each row with the import time before saving.
    string path = null;
    if (productFile.ContentLength > 0)
    {
        var filename = Path.GetFileName(productFile.FileName);
        path = AppDomain.CurrentDomain.BaseDirectory + "Upload\\" + filename;
        productFile.SaveAs(path);
        // FIX: the StreamReader/CsvReader were never disposed, leaking the
        // file handle; using-blocks guarantee cleanup even when parsing throws.
        using (var reader = new StreamReader(path))
        using (var csv = new CsvReader(reader))
        {
            csv.Configuration.RegisterClassMap<ProductMap>();
            var productList = csv.GetRecords<Product>();
            foreach (var product in productList)
            {
                product.Date = DateTime.Now;
                db.Products.Add(product);
            }
            // Save while still inside using: GetRecords is lazy, so the
            // reader must stay open until the enumeration completes.
            db.SaveChanges();
        }
    }
    return View();
}
public ShowInfoCache[] FetchShowNamesFromEpGuides()
{
    // Downloads epguides' show list (CSV) and maps it to ShowInfoCache
    // entries, dropping duplicate source ids (the feed contains dupes).
    var request = WebRequest.Create("http://epguides.com/common/allshows.txt");
    // FIX: the WebResponse was never disposed; wrap it (and the reader) in
    // using-blocks so the connection is released even when parsing throws.
    using (var response = request.GetResponse())
    using (var r = new StreamReader(response.GetResponseStream()))
    {
        //title,directory,tvrage,start date,end date,number of episodes,run time,network,country
        var parser = new CsvReader(r);
        var ids = new HashSet<string>(); //The datasource is a bit crap so we need to remove their dupes
        var result = new List<ShowInfoCache>();
        while (parser.Read())
        {
            DateTimeOffset d;
            var hasEnded = false;
            // A parseable "end date" (format like "Mar 2012") marks the show as ended.
            if (DateTimeOffset.TryParseExact(
                parser.GetField("end date"),
                "MMM yyyy",
                CultureInfo.GetCultureInfo("en-US"),
                DateTimeStyles.None,
                out d))
            {
                hasEnded = true;
            }
            var id = ShowInfoCache.IdFromSourceId(parser.GetField("tvrage"));
            if (!ids.Contains(id))
                result.Add(new ShowInfoCache { Id = id, Name = parser.GetField("title"), HasEnded = hasEnded });
            ids.Add(id);
        }
        return result.ToArray();
    }
}
private static int? Import(CsvReader csv, DateTime date, int? fundid)
{
    // Imports contribution rows into a single bundle, skipping Credit rows
    // and rows without an amount/date. Details are collected then reversed so
    // they are stored in the opposite order of the file. Returns the bundle
    // id, or null when no usable rows were found.
    BundleHeader bundleHeader = null;
    var fid = fundid ?? BatchImportContributions.FirstFundId();
    var details = new List<BundleDetail>();
    while (csv.ReadNextRecord())
    {
        var batchDate = csv[0].ToDate();
        var amount = csv[14];
        var type = csv[13];
        if (!amount.HasValue() || !batchDate.HasValue || type == "Credit")
            continue;
        var routingNumber = csv[17];
        var accountNumber = csv[16];
        var checkNumber = csv[15];
        // Header is created lazily from the first valid row's date.
        if (bundleHeader == null)
            bundleHeader = BatchImportContributions.GetBundleHeader(batchDate.Value, DateTime.Now);
        details.Add(BatchImportContributions.AddContributionDetail(date, fid, amount, checkNumber, routingNumber, accountNumber));
    }
    // FIX: a file with no valid rows previously fell through to
    // FinishBundle(null)/bundleHeader.BundleHeaderId and threw
    // NullReferenceException; return null instead, like the sibling importers.
    if (bundleHeader == null)
        return null;
    details.Reverse();
    foreach (var bd in details)
    {
        bundleHeader.BundleDetails.Add(bd);
    }
    BatchImportContributions.FinishBundle(bundleHeader);
    return bundleHeader.BundleHeaderId;
}
internal static CsvData ParseFileAndGetCsvData(string filePath, UploadConfig config)
{
    // Reads an entire CSV file (with header row) into memory as raw string
    // rows plus the header array.
    CsvConfiguration configuration = new CsvConfiguration();
    configuration.HasHeaderRecord = true;
    string[] header = default(string[]);
    List<string[]> rows = new List<string[]>();
    // FIX: the reader chain was never disposed, leaking the file handle.
    using (CsvReader csvReader = new CsvReader(new StreamReader(filePath), configuration))
    {
        while (csvReader.Read())
        {
            header = csvReader.FieldHeaders;
            var row = new string[header.Length];
            for (int j = 0; j < header.Length; j++)
            {
                row[j] = csvReader.GetField(j);
            }
            rows.Add(row);
        }
    }
    // NOTE(review): for a file with no data rows, header remains null —
    // callers apparently tolerate this; confirm before tightening.
    return new CsvData { Header = header, Rows = rows };
}
public CsvImporter(string csvFilePath, string schemaFilePath, IUserRepository repository)
{
    // Builds an importer over the given CSV (first row treated as headers)
    // and validates the file against the schema before any import runs.
    _csvFilePath = csvFilePath;
    _repository = repository;
    // NOTE(review): the reader is opened here; confirm this class disposes it
    // (e.g. in its Dispose) — otherwise the file handle leaks.
    _reader = new CsvReader(File.OpenText(csvFilePath), true);
    ValidateAgainstSchema(_reader, schemaFilePath);
}
public static int? BatchProcessSilverdale(CsvReader csv, DateTime date, int? fundid)
{
    // Imports Silverdale contribution rows; a "Virtual Credit Item" marker in
    // column 12 closes the current bundle and starts a new one. Returns the
    // last bundle's id, or null when the file produced no bundle at all.
    var cols = csv.GetFieldHeaders(); // consumes the header row
    BundleHeader bh = null;
    var firstfund = FirstFundId();
    var fund = fundid ?? firstfund;
    while (csv.ReadNextRecord())
    {
        var excludecol = csv[12] == "Virtual Credit Item";
        var routing = csv[18];
        var account = csv[19];
        var amount = csv[20];
        var checkno = csv[24];
        if (excludecol)
        {
            // Marker row: finish the open bundle and begin a fresh one.
            if (bh != null)
                FinishBundle(bh);
            bh = GetBundleHeader(date, DateTime.Now);
            continue;
        }
        if (bh == null)
            bh = GetBundleHeader(date, DateTime.Now);
        var bd = AddContributionDetail(date, fund, amount, checkno, routing, account);
        bh.BundleDetails.Add(bd);
    }
    // FIX: FinishBundle(bh)/bh.BundleHeaderId previously threw a
    // NullReferenceException on an empty file; return null instead, matching
    // BatchProcessFbcFayetteville.
    if (bh == null)
        return null;
    FinishBundle(bh);
    return bh.BundleHeaderId;
}
public void GetRecordGenericTest()
{
    // The exception raised for an unconvertible field ("a" into Simple.Id)
    // must carry a fully-populated context block in ex.Data["CsvHelper"].
    var parser = new ParserMock
    {
        { "Id", "Name" },
        { "a", "b" },
        null
    };
    var reader = new CsvReader( parser );
    reader.Read();
    try
    {
        reader.GetRecord<Simple>();
        // Reaching here means no exception was thrown — the test must fail.
        Assert.Fail();
    }
    catch( CsvTypeConverterException ex )
    {
        // Exact multi-line diagnostic text (CRLF line endings) is asserted.
        var expected = "Row: '2' (1 based)\r\n" +
            "Type: 'CsvHelper.Tests.Exceptions.ExceptionMessageTests+Simple'\r\n" +
            "Field Index: '0' (0 based)\r\n" +
            "Field Name: 'Id'\r\n" +
            "Field Value: 'a'\r\n";
        Assert.AreEqual( expected, ex.Data["CsvHelper"] );
    }
}
public void ArgumentTestCtor4()
{
    // The buffer size passed to the constructor must be surfaced unchanged
    // through the BufferSize property.
    using (var reader = new CsvReader(new StringReader(""), false, 123))
    {
        Assert.AreEqual(123, reader.BufferSize);
    }
}
private List<BondCalculatorModel> ReadTestInput(string testfile,bool expectYield)
{
    // Parses bond test vectors from a CSV file. Columns 4 and 5 hold either
    // (PresentValue, Yield) or (Yield, PresentValue) depending on expectYield.
    var models = new List<BondCalculatorModel>();
    using (StreamReader fileReader = File.OpenText(testfile))
    {
        var csv = new CsvReader(fileReader);
        while (csv.Read())
        {
            var model = new BondCalculatorModel
            {
                CouponRate = csv.GetField<decimal>(0),
                YearsToMaturity = csv.GetField<int>(1),
                Frequency = csv.GetField<int>(2),
                FaceValue = csv.GetField<decimal>(3)
            };
            if (expectYield)
            {
                model.PresentValue = csv.GetField<decimal>(4);
                model.Yield = csv.GetField<decimal>(5);
            }
            else
            {
                model.Yield = csv.GetField<decimal>(4);
                model.PresentValue = csv.GetField<decimal>(5);
            }
            models.Add(model);
        }
    }
    return models;
}
public static void CheckSampleData1(CsvReader csv, bool readToEnd)
{
    // Validates a reader positioned on SampleData1. When readToEnd is set:
    // checks field count and header indexes, walks every record (validating
    // each via the per-record overload), and confirms the record count
    // (+1 when headers are absent, since the header line counts as data).
    // Otherwise only the current record is validated.
    if (readToEnd)
    {
        Assert.AreEqual(CsvReaderSampleData.SampleData1FieldCount, csv.FieldCount);
        if (csv.HasHeaders)
        {
            Assert.AreEqual(0, csv.GetFieldIndex(SampleData1Header0));
            Assert.AreEqual(1, csv.GetFieldIndex(SampleData1Header1));
            Assert.AreEqual(2, csv.GetFieldIndex(SampleData1Header2));
            Assert.AreEqual(3, csv.GetFieldIndex(SampleData1Header3));
            Assert.AreEqual(4, csv.GetFieldIndex(SampleData1Header4));
            Assert.AreEqual(5, csv.GetFieldIndex(SampleData1Header5));
        }
        // Nothing has been read yet at this point.
        Assert.AreEqual(-1, csv.CurrentRecordIndex);
        int recordCount = 0;
        while (csv.ReadNextRecord())
        {
            CheckSampleData1(csv.CurrentRecordIndex, csv);
            recordCount++;
        }
        if (csv.HasHeaders)
            Assert.AreEqual(CsvReaderSampleData.SampleData1RecordCount, recordCount);
        else
            Assert.AreEqual(CsvReaderSampleData.SampleData1RecordCount + 1, recordCount);
    }
    else
        CheckSampleData1(csv.CurrentRecordIndex, csv);
}
bool loadTrivia()
{
    // Loads the trivia CSV database, dropping rows with an empty question or
    // answer, normalising empty "wrong" answers to null, then shuffling.
    // Returns false when the configured file does not exist.
    var config = app.Settings.Configs[configTrivia];
    var fileName = config.Get(keyDatabase, fileDatabase);
    if ( !File.Exists(fileName) )
    {
        Log.Warn(tag, "Could not load database; '{0}' is missing", fileName);
        return false;
    }
    // FIX: the reader/file were disposed manually and leaked when parsing
    // threw; using-blocks guarantee disposal on every path.
    using (var file = new StreamReader(fileName))
    using (var reader = new CsvReader(file))
    {
        var fileEntries = reader.GetRecords<TriviaEntry>();
        var list = new List<TriviaEntry>();
        foreach ( var entry in fileEntries )
        {
            if ( entry.Question.Trim() == "" || entry.Answer.Trim() == "" )
                continue;
            // An empty "wrong answer" column means no decoy is available.
            if ( entry.Wrong.Trim() == "" )
                entry.Wrong = null;
            list.Add(entry);
        }
        entries = shuffleEntries(list);
    }
    Log.Debug(tag, "Loaded trivia database '{0}', {1} entries", fileName, entries.Length);
    return true;
}
public ActionResult Index(HttpPostedFileBase file)
{
    // Parses an uploaded, headerless CSV (mapped by RegisterClassMapping)
    // into Register records; any failure surfaces via ViewBag.Error.
    List<Register> register = new List<Register>();
    //get data from csv
    try
    {
        if (file.ContentLength > 0)
        {
            IEnumerable<Register> records;
            // FIX: read the uploaded content through file.InputStream.
            // `new StreamReader(file.FileName)` tried to open the *client's*
            // file name as a server-side path, which does not exist on the
            // server and always failed.
            using (var reader = new CsvReader(new StreamReader(file.InputStream)))
            {
                //no header record. This property is true by default.
                reader.Configuration.HasHeaderRecord = false;
                //use a mapping file to pull the data into the class correctly by index
                reader.Configuration.RegisterClassMap<RegisterClassMapping>();
                //Fill Class from comma delimited file
                records = reader.GetRecords<Register>();
                // materialise while the reader is still open (GetRecords is lazy)
                register = records.ToList();
            }
        }
    }
    catch (Exception ex)
    {
        ViewBag.Error = "Upload Failed: " + ex.Message;
    }
    return View(register);
}
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
public void Dispose()
{
    // NOTE(review): this only clears the references; if _reader or _current
    // is IDisposable it is never actually disposed here — confirm that
    // ownership (and disposal) lies elsewhere before relying on this.
    _reader = null;
    _current = null;
}
public ActionResult FindTagPeople(string text, string tagname)
{
    // Matches pasted tab-separated people data (First/Last/Birthday/Email/
    // CellPhone/HomePhone columns, header in row 0) against the database and
    // tags every uniquely-matched person with `tagname`.
    if (!tagname.HasValue())
    {
        return(Content("no tag"));
    }
    // Tab-delimited; the reader does not skip headers, so row 0 is the header.
    var csv = new CsvReader(new StringReader(text), false, '\t').ToList();
    if (!csv.Any())
    {
        return(Content("no data"));
    }
    var line0 = csv.First().ToList();
    // Map trimmed header name -> column index within the row.
    var names = line0.ToDictionary(i => i.TrimEnd(), i => line0.FindIndex(s => s == i));
    var ActiveNames = new List <string>
    {
        "First",
        "Last",
        "Birthday",
        "Email",
        "CellPhone",
        "HomePhone",
    };
    // At least one recognised column must be present to proceed.
    var hasvalidcolumn = false;
    foreach (var name in names.Keys)
    {
        if (ActiveNames.Contains(name))
        {
            hasvalidcolumn = true;
            break;
        }
    }
    if (!hasvalidcolumn)
    {
        return(Content("no valid column"));
    }
    var list = new List <FindInfo>();
    foreach (var a in csv.Skip(1))
    {
        var row = new FindInfo();
        row.First = FindColumn(names, a, "First");
        row.Last = FindColumn(names, a, "Last");
        row.Birthday = FindColumnDate(names, a, "Birthday");
        row.Email = FindColumn(names, a, "Email");
        row.CellPhone = FindColumnDigits(names, a, "CellPhone");
        row.HomePhone = FindColumnDigits(names, a, "HomePhone");
        var pids = DbUtil.Db.FindPerson3(row.First, row.Last, row.Birthday, row.Email, row.CellPhone, row.HomePhone, null);
        row.Found = pids.Count();
        // Only an unambiguous (exactly one) match yields a PeopleId.
        if (row.Found == 1)
        {
            row.PeopleId = pids.Single().PeopleId.Value;
        }
        list.Add(row);
    }
    var q = from pi in list
            where pi.PeopleId.HasValue
            select pi.PeopleId;
    // Tag each distinct matched person once.
    foreach (var pid in q.Distinct())
    {
        Person.Tag(DbUtil.Db, pid.Value, tagname, Util.UserPeopleId, DbUtil.TagTypeId_Personal);
    }
    DbUtil.Db.SubmitChanges();
    return(View(list));
}
// NOTE(review): hard-coded credential committed to source — move to
// configuration/secret storage.
private const string USPSWebtoolUserID = "738STUDE3658"; // enter your USPS userID here

// Validates every address in the cohort CSV against the USPS Web Tools API,
// splitting results into correct / error / action-required CSVs, then prints
// timing and count totals.
public static void Main(string[] args)
{
    Stopwatch stopWatch = new Stopwatch();
    stopWatch.Start();
    Validator validator = new Validator(USPSWebtoolUserID, true);
    Address addr = new Address();
    var file = "/Users/nideshchitrakar/Documents/CHE database/address verification/2017 PC Mail list/2017/Cohort 2017-Table 1.csv";
    int totalScanned = 0;
    int totalErrors = 0;
    int totalActionReq = 0;
    // replace the following with directory to write the error and correct addresses files in
    // files will be created if no files already exists
    var errorFile = "/Users/nideshchitrakar/Documents/CHE database/address verification/2017 PC Mail list/2017/error-list.csv";
    var correctFile = "/Users/nideshchitrakar/Documents/CHE database/address verification/2017 PC Mail list/2017/correct-list.csv";
    var actionFile = "/Users/nideshchitrakar/Documents/CHE database/address verification/2017 PC Mail list/2017/action-list.csv";
    CsvWriter errorWriter = new CsvWriter(errorFile);
    CsvWriter correctWriter = new CsvWriter(correctFile);
    CsvWriter actionWriter = new CsvWriter(actionFile);
    // open a file which is a CSV file with headers to read from
    using (CsvReader csv = new CsvReader(new StreamReader(file), true))
    {
        int fieldCount = csv.FieldCount;
        //var errorHeaders = csv.GetFieldHeaders().ToList();
        //errorHeaders.Add("Error");
        //var correctHeaders = csv.GetFieldHeaders().ToList();
        //correctHeaders.Add("Action Required");
        //errorWriter.WriteHeader(errorHeaders);
        //correctWriter.WriteHeader(correctHeaders);
        // Expected input columns; each output file echoes these plus extras.
        var headers = new List <string> { "StudentID", "FirstName", "LastName", "Address1", "Address2", "City", "State", "ZipCode" };
        var correctHeaders = new List <string>();
        correctHeaders.AddRange(headers);
        correctHeaders.Add("FormattedAddress");
        var errorHeaders = new List <string>();
        errorHeaders.AddRange(headers);
        errorHeaders.Add("Error");
        var actionHeaders = new List <string>();
        actionHeaders.AddRange(headers);
        actionHeaders.Add("FormattedAddress");
        actionHeaders.Add("Action Required");
        correctWriter.WriteHeader(correctHeaders);
        errorWriter.WriteHeader(errorHeaders);
        actionWriter.WriteHeader(actionHeaders);
        while (csv.ReadNextRecord())
        {
            totalScanned += 1;
            // the csv file must have headers Address, City, State, and ZipCode
            // else change the following to match the csv headers
            // NOTE(review): Address1/Address2 are deliberately swapped when
            // populating the USPS request — confirm this matches the API's
            // expectation for street vs. secondary line.
            addr.Address1 = csv["Address2"];
            addr.Address2 = csv["Address1"];
            addr.City = csv["City"];
            addr.State = csv["State"];
            addr.Zip = csv["ZipCode"];
            var result = validator.ValidateAddress(addr);
            if (result.ContainsKey("Error"))
            {
                totalErrors += 1;
                List <string> row = new List <string>();
                //for (int i = 0; i < fieldCount; i++)
                for (int i = 0; i < headers.Count(); i++)
                {
                    row.Add(csv[headers[i]]);
                }
                row.Add(result["Error"]);
                errorWriter.AppendRow(row);
            }
            else if (result.ContainsKey("Action Required"))
            {
                totalActionReq += 1;
                List <string> row = new List <string>();
                //for (int i = 0; i < fieldCount; i++)
                for (int i = 0; i < headers.Count(); i++)
                {
                    row.Add(csv[headers[i]]);
                }
                row.Add(result["Formatted Address"]);
                row.Add(result["Action Required"]);
                actionWriter.AppendRow(row);
            }
            else
            {
                List <string> row = new List <string>();
                //for (int i = 0; i < fieldCount; i++)
                for (int i = 0; i < headers.Count(); i++)
                {
                    row.Add(csv[headers[i]]);
                }
                row.Add(result["Formatted Address"]);
                //if (result.ContainsKey("Action Required"))
                //{
                //    totalActionReq += 1;
                //    row.Add(result["Action Required"]);
                //}
                correctWriter.AppendRow(row);
            }
        }
    }
    stopWatch.Stop();
    // get the elapsed time as a TimeSpan value
    TimeSpan ts = stopWatch.Elapsed;
    // format and display the TimeSpan value
    string elapsedTime = String.Format("{0:00}:{1:00}:{2:00}.{3:00}", ts.Hours, ts.Minutes, ts.Seconds, ts.Milliseconds / 10);
    Console.WriteLine();
    Console.WriteLine("Run Time: " + elapsedTime);
    Console.WriteLine("Total records scanned: " + totalScanned);
    Console.WriteLine("Total errors detected: " + totalErrors);
    Console.WriteLine("Total actions required: " + totalActionReq);
}
public void ProcessInboundSalesFile()
{
    // Ingests the Broadridge sales CSV at _path: truncates the staging table,
    // saves each parsed row via the staging stored procedure, then runs the
    // staging-to-production transform for the configured reporting quarter.
    try
    {
        using (var reader = new StreamReader(_path))
        using (var csv = new CsvReader(reader))
        {
            //Truncate inbound sales staging file
            var dal = new DapperDatabaseAccess <BroadridgeSalesInbound>(_connectionString);
            dal.SqlExecute("dbo.prc_BroadridgeSales_Staging_Truncate", null);
            //Ingest sales into staging table
            List <BroadridgeSalesInbound> inboundSales = csv.GetRecords <BroadridgeSalesInbound>().ToList();
            foreach (var item in inboundSales)
            {
                // One named parameter per staging column; names must match
                // the stored procedure's signature.
                var p = new DynamicParameters();
                p.Add("@TradeID", item.TradeID);
                p.Add("@TransactionCodeOverrideDescription", item.TransactionCodeOverrideDescription);
                p.Add("@TradeDate", item.TradeDate);
                p.Add("@SettledDate", item.SettledDate);
                p.Add("@SuperSheetDate", item.SuperSheetDate);
                p.Add("@TradeAmount", item.TradeAmount);
                p.Add("@System", item.System);
                p.Add("@DealerNum", item.DealerNum);
                p.Add("@DealerBranchBranchCode", item.DealerBranchBranchCode);
                p.Add("@RepCode", item.RepCode);
                p.Add("@FirmId", item.FirmId);
                p.Add("@FirmName", item.FirmName);
                p.Add("@OfficeAddressLine1", item.OfficeAddressLine1);
                p.Add("@OfficeCity", item.OfficeCity);
                p.Add("@OfficeRegionRefCode", item.OfficeRegionRefCode);
                p.Add("@OfficePostalCode", item.OfficePostalCode);
                p.Add("@PersonFirstName", item.PersonFirstName);
                p.Add("@PersonLastName", item.PersonLastName);
                p.Add("@LineOfBusiness", item.LineOfBusiness);
                p.Add("@Channel", item.Channel);
                p.Add("@Region", item.Region);
                p.Add("@Territory", item.Territory);
                p.Add("@ProductNasdaqSymbol", item.ProductNasdaqSymbol);
                p.Add("@ProductName", item.ProductName);
                p.Add("@AccountTANumber", item.AccountTANumber);
                p.Add("@AccountId", item.AccountId);
                p.Add("@ExternalAccountNumber", item.ExternalAccountNumber);
                p.Add("@HoldingId", item.HoldingId);
                p.Add("@HoldingExternalAccountNumber", item.HoldingExternalAccountNumber);
                p.Add("@HoldingName", item.HoldingName);
                dal.SqlExecute("dbo.prc_BroadridgeSales_Staging_Save", p);
            }
            //Transform from staging to production
            var parms = new DynamicParameters();
            // The transform keys off the quarter-end date (yyyyMMdd).
            parms.Add("@ReportingDate", DateTimeHandlers.GetQuarterEndDateFromYearQuarter(_reportingDate).ToString("yyyyMMdd"));
            // parms.Add("@ReportingDate", _reportingDate);
            dal.SqlExecute("dbo.prc_BroadridgeSales_Transform", parms);
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        throw; // rethrow preserves the original stack trace after logging
    }
}
public void ProcessQueue(CloudBlockBlob blob, string strContainer, CloudQueueMessage receivedMessage, CloudQueue urbanWaterQueue)
{
    // Parses a head-end-system blob (';'-delimited CSV of meter readings),
    // writes each LITER reading as meter + DMA entities, deletes the queue
    // message and records a NewMeteringData event.
    // NOTE(review): "Prcessing" typo below is inside a runtime log string and
    // is left untouched here.
    EventSourceWriter.Log.MessageMethod("Worker Role Prcessing HeadEndSystem file " + receivedMessage.Id.ToString() + " " + blob.Name);
    SQLAzureDataContext currentContext = new SQLAzureDataContext();
    using (TextReader sr = new StringReader(blob.DownloadText()))
    {
        using (var csv = new CsvReader(sr))
        {
            csv.Configuration.Delimiter = ";";
            while (csv.Read())
            {
                // Per-row try/catch: one bad row must not abort the file.
                try
                {
                    MeterReadingEntity sm = new MeterReadingEntity();
                    DMAMeterReadingEntity dmasm = new DMAMeterReadingEntity();
                    // Columns: [0]=meter id, [3]=timestamp, [4]=reading,
                    // [5]=unit — presumably; TODO confirm against the feed spec.
                    if (csv.CurrentRecord[0] != null && csv.CurrentRecord[3] != null && csv.CurrentRecord[4] != null && csv.CurrentRecord[5] != null && string.Compare(csv.CurrentRecord[5], "LITER") == 0)
                    {
                        DateTime creationDateTime = DateTime.ParseExact(csv.CurrentRecord[3], "dd/MM/yyyy HH:mm:ss", null).ToUniversalTime();
                        // DMAMeterTableStorageContext dmamcontext = new DMAMeterTableStorageContext(strContainer + "_dmameter");
                        // DMAMeterEntity dmaMeterEntity = dmamcontext.MeterReadings.FirstOrDefault(x => x.PartitionKey == csv.CurrentRecord[0]);
                        // NOTE(review): DMA is hard-wired to a single named
                        // district; confirm this is intentional.
                        string dmaId = currentContext.DMAs.Where(x => x.Name == "El Toyo – Retamar").FirstOrDefault().Identifier;
                        sm.PartitionKey = csv.CurrentRecord[0];
                        sm.CreatedOn = creationDateTime;
                        sm.RowKey = creationDateTime.Ticks.ToString();
                        sm.Reading = csv.CurrentRecord[4];
                        sm.Encrypted = false;
                        sm.DMA = dmaId;
                        dmasm.PartitionKey = dmaId;
                        dmasm.CreatedOn = creationDateTime;
                        dmasm.RowKey = creationDateTime.Ticks.ToString();
                        dmasm.Reading = csv.CurrentRecord[4];
                        dmasm.Encrypted = false;
                        dmasm.MeterID = csv.CurrentRecord[0];
                    }
                    else
                    {
                        EventSourceWriter.Log.MessageMethod("ERROR: Null values parsed in HeadEndSystem file " + blob.Name);
                    }
                    string strWriteConnectionString = "MKWDNConnectionString";
                    // NOTE(review): when the row fails validation above, sm and
                    // dmasm are still written here with default fields — verify
                    // the sink tolerates (or should skip) such rows.
                    bool blAttempt = WriteMessageMeterDataToDataTable(sm, dmasm, strWriteConnectionString, strContainer);
                    if (!blAttempt)
                    {
                        EventSourceWriter.Log.MessageMethod("ERROR:Processing Entry NOT added to storage " + receivedMessage.Id.ToString());
                        EventSourceWriter.Log.MessageMethod("ERROR:Processing Entry NOT added to storage " + blob.Name + " " + sm.PartitionKey);
                    }
                }
                catch (Exception e)
                {
                    EventSourceWriter.Log.MessageMethod("Exception processing meter reading: " + e.Message);
                }
            }
        }
    }
    EventSourceWriter.Log.MessageMethod("Processing Complete Entry added to storage " + receivedMessage.Id.ToString());
    urbanWaterQueue.DeleteMessage(receivedMessage);
    // Signal downstream consumers that new metering data is available.
    Event newEvent = new Event();
    newEvent.BusDispatched = false;
    newEvent.EventDateTime = DateTime.Now;
    newEvent.EventType = (int)EventTypes.NewMeteringData;
    currentContext.Events.InsertOnSubmit(newEvent);
    currentContext.SubmitChanges();
}
public ExtraColumnCsvDataReader(CsvReader csv, string columnName, string columnValue) : base(csv) { this.columnName = columnName; this.columnValue = columnValue; this.csv = csv; }
public int?RunImport(string text, DateTime date, int?fundid, bool fromFile) { using (var csv = new CsvReader(new StringReader(text))) return(BatchProcessKindred(csv, date, fundid)); }
// Entry worker: loads Azure service-tag IP ranges from JSON, parses a firewall
// log CSV, and classifies every blocked IP as Azure / non-Azure, reporting
// progress and a summary to the console.
private async static Task DoWork(Options options)
{
    _options = options;
    // Remember the console colour so it can be restored after coloured output.
    var foregroundColor = Console.ForegroundColor;
    try
    {
        // Deserialize the Azure service-tags file (Json.NET stream API).
        AzureIpRange ipAddresses;
        using (StreamReader file = File.OpenText(options.AzureServiceTagsJson))
        {
            JsonSerializer serializer = new JsonSerializer();
            ipAddresses = (AzureIpRange)serializer.Deserialize(file, typeof(AzureIpRange));
        }
        // Read the whole firewall log into memory; delimiter is chosen from the
        // command-line options (tab / comma / default semicolon).
        List <FirewallLogEntry> _firewallLogs;
        using (var reader = new StreamReader(options.LogFile))
        using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
        {
            if (_options.UseTabDelimiter)
            {
                csv.Configuration.Delimiter = "\t";
            }
            else if (_options.UseCommaDelimiter)
            {
                csv.Configuration.Delimiter = ",";
            }
            else
            {
                csv.Configuration.Delimiter = ";";
            }
            csv.Configuration.IgnoreBlankLines = true;
            csv.Configuration.TrimOptions = TrimOptions.Trim;
            _firewallLogs = csv.GetRecords <FirewallLogEntry>().ToList();
        }
        var _logParser = new LogParser();
        int _hitCounter = 0;
        var tasks = new List <Task>();
        using (var progressBar = new ProgressBar(_firewallLogs.Count, "Analyzing logs"))
        {
            foreach (var firewallLogEntry in _firewallLogs)
            {
                if (options.Verbose)
                {
                    // Verbose mode processes entries sequentially so output stays ordered.
                    _hitCounter += firewallLogEntry.TimesBlocked;
                    await ProcessFirewallLogEntry(ipAddresses, _logParser, _hitCounter, firewallLogEntry);
                    progressBar.Tick();
                }
                else
                {
                    // Non-verbose mode fans the entries out as concurrent tasks.
                    _hitCounter += firewallLogEntry.TimesBlocked;
                    tasks.Add(ProcessFirewallLogEntry(ipAddresses, _logParser, _hitCounter, firewallLogEntry));
                }
            }
            // Drain the task list one completion at a time so the progress bar
            // can tick per finished entry. NOTE(review): in verbose mode the list
            // is empty and this loop is skipped, which is fine (Any() is false).
            while (tasks.Any(t => t.IsCompleted == false))
            {
                var task = await Task.WhenAny(tasks);
                tasks.Remove(task);
                progressBar.Tick();
            }
        }
        // Observes any remaining tasks (usually a no-op after the drain loop).
        await Task.WhenAll(tasks);
        Console.ForegroundColor = foregroundColor;
        Console.WriteLine($"Total Hit Count: {_logParser.TotalHitCount} | Azure: {_logParser.AzureHitCount} | Non-Azure: {_logParser.NonAzureHitCount}");
        Console.WriteLine($"CSV Parser Hit Count: {_hitCounter}");
        // Per-Azure-range breakdown, then the aggregated non-Azure bucket.
        foreach (var item in _logParser.GetAzureIps())
        {
            Console.WriteLine("");
            Console.WriteLine($"{item.Value.Tag} | {item.Value.Region} | {item.Value.IpRange} | {item.Value.AzureService} | Total Hits: {item.Value.TotalHitCount}");
            foreach (var ip in item.Value.Matches)
            {
                Console.WriteLine($"   {ip.Key} | {ip.Value.HitCount}");
            }
        }
        var nonAzureIp = _logParser.GetNonAzureIps();
        Console.WriteLine("");
        Console.WriteLine($"{nonAzureIp.Tag} | Total Hits: {nonAzureIp.TotalHitCount}");
        foreach (var ip in nonAzureIp.Matches)
        {
            Console.WriteLine($"   {ip.Key} | {ip.Value.HitCount}");
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): errors are printed but swallowed, so the process exits
        // with success even on failure — confirm whether a non-zero exit is wanted.
        Console.Write(ex);
    }
}
/// <summary>
/// Loads the "Archivo Stock BF" deposit CSV into dbo.ProcesoGestionTasas.
/// Clears previously loaded rows, streams the CSV through CsvHelper into a
/// staging DataTable, bulk-inserts in chunks (at 1,000,000 and 1,800,000 rows
/// plus a final flush), re-enables the UI and registers the load as a proceso.
/// Runs on the bw1 BackgroundWorker; progress is reported as a percentage of
/// an assumed 2,400,000-row file.
/// </summary>
private void ProcesoCarga()
{
    objclsfrmBases.MensajeProceso = "Procesando carga de datos...";
    int porcentaje = 0;
    int intTotal = 2400000;   // expected total rows; only used for the progress estimate
    decimal j = 0;            // row counter (decimal so the percentage division keeps fractions)
    try
    {
        // Remove any previously loaded data before re-importing.
        m_procesogestionDP.EliminarProcesoGestionTasas(conexion);

        DataTable dtDP = CrearTablaDP();

        using (var sr = new StreamReader(objclsfrmBases.pathNombreArchivosDP, System.Text.Encoding.Default, false))
        {
            var reader = new CsvReader(sr);
            IEnumerable<EntidadProyMDIBFGestionTasasDP.ProcesoGestionDP> records =
                reader.GetRecords<EntidadProyMDIBFGestionTasasDP.ProcesoGestionDP>();

            // Lock the UI while the (long) import runs.
            HabilitarControles(false);

            dtDP.Rows.Clear();
            foreach (EntidadProyMDIBFGestionTasasDP.ProcesoGestionDP record in records)
            {
                dtDP.Rows.Add(CrearFilaDP(dtDP, record));

                // Flush intermediate chunks so the DataTable never has to hold
                // the entire multi-million-row file in memory at once.
                if (j == 1000000 || j == 1800000)
                {
                    EjecutarBulkCopy(dtDP);
                    dtDP.Rows.Clear();
                }
                j++;
                porcentaje = Convert.ToInt32((j / intTotal) * 100);
                bw1.ReportProgress(porcentaje);
            }
        }

        // Final flush of whatever remains after the last chunk boundary.
        EjecutarBulkCopy(dtDP);

        HabilitarControles(true);
        bw1.ReportProgress(100);

        // Register the completed load as a proceso.
        EntidadProyMDIBFGestionTasasDP.proceso _proceso = new EntidadProyMDIBFGestionTasasDP.proceso();
        _proceso.responsableId = 1;
        _proceso.nombrearchivo = Path.GetFileName(objclsfrmBases.pathNombreArchivosDP);
        _proceso.tipo = "Archivo Stock BF";
        _intRetorno = m_proceso.InsertarProceso(conexion, _proceso);

        MessageBox.Show("Archivo(s) procesados con éxito.", Application.ProductName, MessageBoxButtons.OK, MessageBoxIcon.Information);
    }
    catch (Exception ex)
    {
        MessageBox.Show("Ocurrió un error en el proceso: " + ex.Message, Application.ProductName, MessageBoxButtons.OK, MessageBoxIcon.Information);
        // Always restore the UI, even after a failure.
        HabilitarControles(true);
        bw1.ReportProgress(100);
    }
}

/// <summary>Builds the staging DataTable schema matching dbo.ProcesoGestionTasas.</summary>
private static DataTable CrearTablaDP()
{
    DataTable dtDP = new DataTable();
    dtDP.Columns.Add("codcategoria", typeof(string));
    dtDP.Columns.Add("categoria", typeof(string));
    dtDP.Columns.Add("codsubcategoria", typeof(string));
    dtDP.Columns.Add("subcategoria", typeof(string));
    dtDP.Columns.Add("cuenta", typeof(string));
    dtDP.Columns.Add("tipodocumento", typeof(string));
    dtDP.Columns.Add("numerodocumento", typeof(string));
    dtDP.Columns.Add("nombre", typeof(string));
    dtDP.Columns.Add("ejecutivo", typeof(string));
    dtDP.Columns.Add("estado", typeof(string));
    dtDP.Columns.Add("descestado", typeof(string));
    dtDP.Columns.Add("fecproceso", typeof(string));
    dtDP.Columns.Add("mesproceso", typeof(string));
    dtDP.Columns.Add("fecapertura", typeof(string));
    dtDP.Columns.Add("mesapertura", typeof(string));
    dtDP.Columns.Add("fecrenovacion", typeof(string));
    dtDP.Columns.Add("mesrenovacion", typeof(string));
    dtDP.Columns.Add("fecvencimiento", typeof(string));
    dtDP.Columns.Add("mesvencimiento", typeof(string));
    dtDP.Columns.Add("msaldo", typeof(string));
    dtDP.Columns.Add("tasa", typeof(string));
    dtDP.Columns.Add("numeradortasa", typeof(double));
    dtDP.Columns.Add("tipomoneda", typeof(string));
    dtDP.Columns.Add("plazo", typeof(string));
    dtDP.Columns.Add("plazoactual", typeof(string));
    dtDP.Columns.Add("numeradorplzpact", typeof(string));
    dtDP.Columns.Add("numeradorplzact", typeof(string));
    dtDP.Columns.Add("saldodisponible", typeof(string));
    dtDP.Columns.Add("rucempleador", typeof(string));
    dtDP.Columns.Add("codtienda", typeof(string));
    dtDP.Columns.Add("desctienda", typeof(string));
    dtDP.Columns.Add("montoconvenio", typeof(string));
    dtDP.Columns.Add("diacargo", typeof(string));
    dtDP.Columns.Add("fecinicioconv", typeof(string));
    dtDP.Columns.Add("cuentacmr", typeof(string));
    dtDP.Columns.Add("tarjetacmr", typeof(string));
    dtDP.Columns.Add("codigoejecutivo", typeof(string));
    dtDP.Columns.Add("tipocambio", typeof(decimal));
    return dtDP;
}

/// <summary>Maps one CSV record onto a new staging row (trimming/normalising text fields).</summary>
private DataRow CrearFilaDP(DataTable dtDP, EntidadProyMDIBFGestionTasasDP.ProcesoGestionDP record)
{
    DataRow drDP = dtDP.NewRow();
    drDP["codcategoria"] = record.codcategoria.Trim();
    drDP["categoria"] = record.categoria.Trim();
    drDP["codsubcategoria"] = record.codsubcategoria.Trim();
    // Normalise accented characters the downstream table cannot store.
    drDP["subcategoria"] = record.subcategoria.Replace('ó', 'o').Replace('á', 'a').Replace('?', 'o');
    drDP["cuenta"] = record.cuenta.Trim();
    drDP["tipodocumento"] = record.tipodocumento.Trim();
    // Left-pad the document number to 8 characters with zeros.
    // NOTE(review): numbers shorter than 5 characters leave the column unset (DBNull) — confirm intended.
    if (record.numerodocumento.Length > 7)
    {
        drDP["numerodocumento"] = record.numerodocumento.Trim();
    }
    if (record.numerodocumento.Length == 7)
    {
        drDP["numerodocumento"] = "0" + record.numerodocumento.Trim();
    }
    if (record.numerodocumento.Length == 6)
    {
        drDP["numerodocumento"] = "00" + record.numerodocumento.Trim();
    }
    if (record.numerodocumento.Length == 5)
    {
        drDP["numerodocumento"] = "000" + record.numerodocumento.Trim();
    }
    // Commas would break downstream CSV-style consumers of the name field.
    drDP["nombre"] = record.nombre.Replace(',', ' ');
    drDP["ejecutivo"] = record.ejecutivo.Trim();
    drDP["estado"] = record.estado.Trim();
    drDP["descestado"] = record.descestado.Trim();
    drDP["fecproceso"] = record.fecproceso.Trim();
    drDP["mesproceso"] = record.mesproceso.Trim();
    drDP["fecapertura"] = record.fecapertura.Trim();
    drDP["mesapertura"] = record.mesapertura.Trim();
    drDP["fecrenovacion"] = record.fecrenovacion.Trim();
    drDP["mesrenovacion"] = record.mesrenovacion.Trim();
    drDP["fecvencimiento"] = record.fecvencimiento.Trim();
    drDP["mesvencimiento"] = record.mesvencimiento.Trim();
    drDP["msaldo"] = record.msaldo.Trim();
    drDP["tasa"] = record.tasa.Trim();
    drDP["numeradortasa"] = record.numeradortasa.Trim();
    drDP["tipomoneda"] = record.tipomoneda.Trim();
    drDP["plazo"] = record.plazo.Trim();
    drDP["plazoactual"] = record.plazoactual.Trim();
    drDP["numeradorplzpact"] = record.numeradorplzpact.Trim();
    drDP["numeradorplzact"] = record.numeradorplzact.Trim();
    drDP["saldodisponible"] = record.saldodisponible.Trim();
    drDP["rucempleador"] = record.rucempleador.Trim();
    drDP["codtienda"] = record.codtienda.Trim();
    drDP["desctienda"] = record.desctienda.Trim();
    drDP["montoconvenio"] = record.montoconvenio.Trim();
    drDP["diacargo"] = record.diacargo.Trim();
    drDP["fecinicioconv"] = record.fecinicioconv.Trim();
    drDP["cuentacmr"] = record.cuentacmr.Trim();
    drDP["tarjetacmr"] = record.tarjetacmr.Trim();
    drDP["codigoejecutivo"] = record.codigoejecutivo.Trim();
    // Exchange rate entered in the UI, applied uniformly to every row.
    drDP["tipocambio"] = objclsfrmBases.TipoCambio;
    return drDP;
}

/// <summary>
/// Bulk-inserts the staging table into dbo.ProcesoGestionTasas, mapping every
/// column by its own name. Previously this mapping block was duplicated
/// verbatim for the intermediate and final flushes.
/// </summary>
private void EjecutarBulkCopy(DataTable dtDP)
{
    using (var conexionBulkCopy = new SqlConnection(conexion))
    {
        conexionBulkCopy.Open();
        // NOTE(review): the opened connection above is never handed to SqlBulkCopy;
        // the bulk copy opens its own connection from the same string. Kept as in
        // the original to preserve behavior exactly.
        using (SqlBulkCopy bulkCopy = new SqlBulkCopy(conexion))
        {
            bulkCopy.BulkCopyTimeout = int.MaxValue;
            bulkCopy.DestinationTableName = "dbo.ProcesoGestionTasas";
            // Identity column mapping for every staging column.
            foreach (DataColumn columna in dtDP.Columns)
            {
                bulkCopy.ColumnMappings.Add(columna.ColumnName, columna.ColumnName);
            }
            bulkCopy.WriteToServer(dtDP);
        }
    }
}

/// <summary>Toggles the form buttons/inputs that must stay locked while the import runs.</summary>
private void HabilitarControles(bool habilitar)
{
    objclsfrmBases.BotonMenuMantenimientos = habilitar;
    objclsfrmBases.BotonMenuProcesos = habilitar;
    objclsfrmBases.BotonProcesar = habilitar;
    objclsfrmBases.BotonLimpiar = habilitar;
    objclsfrmBases.BotonSalir = habilitar;
    objclsfrmBases.BotonAgregarDP = habilitar;
    objclsfrmBases.TextoTipoCambio = habilitar;
}
public bool Load(string file_path) { using (CsvReader csv = new CsvReader(new StreamReader(file_path), true)) { csv.MissingFieldAction = MissingFieldAction.ReplaceByEmpty; string[] headers = csv.GetFieldHeaders(); List <int> fieldIdxList = new List <int>(); try { fieldIdxList.Add(csv.GetFieldIndex(FieldName_CsvEffectHurtConfig.Field_Name_id)); fieldIdxList.Add(csv.GetFieldIndex(FieldName_CsvEffectHurtConfig.Field_Name_value)); fieldIdxList.Add(csv.GetFieldIndex(FieldName_CsvEffectHurtConfig.Field_Name_filter_id)); } catch (Exception e) { errMsg = e.Message; return(false); } csv.ReadNextRecord(); while (csv.ReadNextRecord()) { Dictionary <string, string> kvPairs = new Dictionary <string, string>(); foreach (int fieldIdx in fieldIdxList) { kvPairs[headers[fieldIdx]] = csv[fieldIdx]; } if (string.IsNullOrWhiteSpace(kvPairs[headers[0]])) { continue; } CsvEffectHurtConfig cfg = new CsvEffectHurtConfig(); if (!cfg.Init(kvPairs, cfg_check_fun)) { errMsg = string.Format("CsvEffectHurtConfig Init Fail, row {0}", csv.CurrentRecordIndex); return(false); } cfg_vec.Add(cfg); } } foreach (var cfg in cfg_vec) { { if (id_to_key.ContainsKey(cfg.id)) { errMsg = string.Format("CsvEffectHurtConfigSet repeated key id = {0}", cfg.id); return(false); } id_to_key[cfg.id] = cfg; } } if (null != cfg_set_check_fun) { if (!cfg_set_check_fun(this)) { errMsg = "CsvEffectHurtConfigSet cfg_set_check_fun fail"; return(false); } } return(true); }
public void MoveAndDeleteAsync(string text) { AsyncManager.OutstandingOperations.Increment(); var host = Util.Host; ThreadPool.QueueUserWorkItem(e => { var sb = new StringBuilder(); sb.Append("<h2>done</h2>\n<p><a href='/'>home</a></p>\n"); using (var csv = new CsvReader(new StringReader(text), false, '\t')) { while (csv.ReadNextRecord()) { if (csv.FieldCount != 2) { sb.AppendFormat("expected two ids, row {0}<br/>\n", csv[0]); continue; } var fromid = csv[0].ToInt(); var toid = csv[1].ToInt(); var db = DbUtil.Create(host); var p = db.LoadPersonById(fromid); if (p == null) { sb.AppendFormat("fromid {0} not found<br/>\n", fromid); db.Dispose(); continue; } var tp = db.LoadPersonById(toid); if (tp == null) { sb.AppendFormat("toid {0} not found<br/>\n", toid); db.Dispose(); continue; } try { p.MovePersonStuff(db, toid); db.SubmitChanges(); } catch (Exception ex) { sb.AppendFormat("error on move ({0}, {1}): {2}<br/>\n", fromid, toid, ex.Message); db.Dispose(); continue; } try { db.PurgePerson(fromid); sb.AppendFormat("moved ({0}, {1}) successful<br/>\n", fromid, toid); } catch (Exception ex) { sb.AppendFormat("error on delete ({0}): {1}<br/>\n", fromid, ex.Message); } finally { db.Dispose(); } } } AsyncManager.Parameters["results"] = sb.ToString(); AsyncManager.OutstandingOperations.Decrement(); }); }
// Imports the Broadridge assets CSV at _path: truncates the staging table,
// saves every record through the per-row staging proc, then runs the
// staging-to-production transform for the configured reporting quarter.
// Exceptions are logged to the console and rethrown.
public void ProcessInboundAssetsFile()
{
    try
    {
        using (var reader = new StreamReader(_path))
        using (var csv = new CsvReader(reader))
        {
            //Truncate inbound sales staging file
            var dal = new DapperDatabaseAccess <BroadridgeAssetsInbound>(_connectionString);
            dal.SqlExecute("dbo.prc_BroadridgeAssets_Staging_Truncate", null);
            //Ingest sales into staging table
            List <BroadridgeAssetsInbound> inboundAssets = csv.GetRecords <BroadridgeAssetsInbound>().ToList();
            // DbHandlers.BulkInsertToDatabase(ListDataTable.ListToDataTable(inboundAssets), _connectionString, 1000, _metaData.stagingTable);
            // One stored-procedure call per record (the bulk path above is disabled).
            foreach (var item in inboundAssets)
            {
                var p = new DynamicParameters();
                p.Add("@System", item.System);
                p.Add("@FirmName", item.FirmName);
                p.Add("@FirmId", item.FirmId);
                p.Add("@FirmCRDNumber", item.FirmCRDNumber);
                p.Add("@HoldingId", item.HoldingId);
                p.Add("@HoldingExternalAccountNumber", item.HoldingExternalAccountNumber);
                p.Add("@HoldingName", item.HoldingName);
                p.Add("@HoldingStartDate", item.HoldingStartDate);
                p.Add("@HoldingCreateDate", item.HoldingCreateDate);
                p.Add("@MostRecentMonthAssetBalance", item.MostRecentMonthAssetBalance);
                p.Add("@Month1AgoAssetBalance", item.Month1AgoAssetBalance);
                p.Add("@Month2AgoAssetBalance", item.Month2AgoAssetBalance);
                p.Add("@Month3AgoAssetBalance", item.Month3AgoAssetBalance);
                p.Add("@ProductName", item.ProductName);
                p.Add("@ProductType", item.ProductType);
                p.Add("@Channel", item.Channel);
                p.Add("@Region", item.Region);
                p.Add("@Territory", item.Territory);
                p.Add("@PersonCRDNumber", item.PersonCRDNumber);
                p.Add("@PersonFirstName", item.PersonFirstName);
                p.Add("@PersonLastName", item.PersonLastName);
                p.Add("@PersonId", item.PersonId);
                p.Add("@OfficeAddressLine1", item.OfficeAddressLine1);
                p.Add("@OfficeAddressLine2", item.OfficeAddressLine2);
                p.Add("@OfficeCity", item.OfficeCity);
                p.Add("@OfficeRegionRefCode", item.OfficeRegionRefCode);
                p.Add("@OfficePostalCode", item.OfficePostalCode);
                p.Add("@PersonBrokerTeamFlag", item.PersonBrokerTeamFlag);
                p.Add("@Month4AgoAssetBalance", item.Month4AgoAssetBalance);
                p.Add("@Month5AgoAssetBalance", item.Month5AgoAssetBalance);
                p.Add("@Month6AgoAssetBalance", item.Month6AgoAssetBalance);
                p.Add("@Month7AgoAssetBalance", item.Month7AgoAssetBalance);
                p.Add("@Month8AgoAssetBalance", item.Month8AgoAssetBalance);
                p.Add("@Month9AgoAssetBalance", item.Month9AgoAssetBalance);
                p.Add("@Month10AgoAssetBalance", item.Month10AgoAssetBalance);
                p.Add("@Month11AgoAssetBalance", item.Month11AgoAssetBalance);
                p.Add("@Month12AgoAssetBalance", item.Month12AgoAssetBalance);
                p.Add("@HoldingAddressLine1", item.HoldingAddressLine1);
                p.Add("@AccountTANumber", item.AccountTANumber);
                p.Add("@ExternalAccountNumber", item.ExternalAccountNumber);
                p.Add("@AccountId", item.AccountId);
                dal.SqlExecute("dbo.prc_BroadridgeAssets_Staging_Save", p);
            }
            //Transform from staging to production
            var parms = new DynamicParameters();
            parms.Add("@ReportingDate", DateTimeHandlers.GetQuarterEndDateFromYearQuarter(_reportingDate).ToString("yyyyMMdd"));
            dal.SqlExecute("dbo.prc_BroadridgeAssets_Transform", parms);
        }
    }
    catch (Exception e)
    {
        // Log for diagnostics, then let the caller decide how to handle the failure.
        Console.WriteLine(e);
        throw;
    }
}
// Imports a state's quarterly tourism CSV: reads the state name and quarter
// from the report header, then walks the body collecting regions ("... (TR)")
// and their areas, finishing each region on its "Total ... (TR)" row.
private void createButton_Click(object sender, EventArgs e)
{
    State s = new State();
    if (File.Exists(fileBox.Text))
    {
        using (CsvReader csv = new CsvReader(new StreamReader(fileBox.Text), true))
        {
            //extract field count and headers from csv
            int fieldCount = csv.FieldCount;
            string[] headers = csv.GetFieldHeaders();
            bool area = false;
            #region Header
            // The report header lives in the first record's first field and is
            // itself comma-packed, so it is split again by hand.
            csv.ReadNextRecord();
            headers = csv[0].Split(',');
            s.Name = headers[2].Trim();
            //quarter based off of headers[3] in format Jun, 20XX
            // NOTE(review): 20 is a sentinel meaning "month not recognised" — confirm.
            int quarter = 20;
            headers = headers[3].Split(' ');
            switch (headers[1])
            {
                case "Mar": quarter = 1; break;
                case "Jun": quarter = 2; break;
                case "Sep": quarter = 3; break;
                case "Dec": quarter = 4; break;
            }
            quarters.Remove(quarter);
            #endregion
            // Skip the three report sub-header rows before the data body.
            for (int i = 0; i < 3; i++)
            {
                csv.ReadNextRecord();
            }
            Region r = null;
            while (csv.ReadNextRecord())
            {
                #region Region Name
                int room; int bed; int employee;
                double octOcc; double novOcc; double decOcc;
                double octStay; double novStay; double decStay;
                if (csv[0].Contains("(TR)"))
                {
                    if (csv[0].Contains("Total"))
                    {
                        // "Total ... (TR)" row closes the current region.
                        area = false;
                        //Region Data
                        // NOTE(review): if a "Total" row appears before any region row,
                        // r is still null here and this throws — confirm input guarantees.
                        string n = csv[0].Trim();
                        // Numeric cells carry thousands separators; blanks become 0.
                        string rString = csv[2].Trim().Replace(",", "");
                        if (rString.Equals("") != true) { room = int.Parse(rString); } else { room = 0; }
                        string bString = csv[3].Trim().Replace(",", "");
                        if (bString.Equals("") != true) { bed = int.Parse(bString); } else { bed = 0; }
                        string eString = csv[4].Trim().Replace(",", "");
                        if (eString.Equals("") != true) { employee = int.Parse(eString); } else { employee = 0; }
                        // The try/catch guards the csv[...] access on short rows;
                        // TryParse itself already yields 0 on failure.
                        try { // CultureInfo.InvariantCulture
                            Double.TryParse(csv[9].Trim(), out octOcc); } catch (Exception ex) { octOcc = 0.0; }
                        try { Double.TryParse(csv[10].Trim(), out novOcc); } catch (Exception ex) { novOcc = 0.0; }
                        try { Double.TryParse(csv[11].Trim(), out decOcc); } catch (Exception ex) { decOcc = 0.0; }
                        try { Double.TryParse(csv[24].Trim(), out octStay); } catch (Exception ex) { octStay = 0.0; }
                        try { Double.TryParse(csv[25].Trim(), out novStay); } catch (Exception ex) { novStay = 0.0; }
                        try { Double.TryParse(csv[26].Trim(), out decStay); } catch (Exception ex) { decStay = 0.0; }
                        r.Rooms = room;
                        r.Beds = bed;
                        r.Employees = employee;
                        // Months 9-11 (Oct-Dec) of the occupancy/stay arrays.
                        r.Occupancy[9] = octOcc;
                        r.Occupancy[10] = novOcc;
                        r.Occupancy[11] = decOcc;
                        r.Stay[9] = octStay;
                        r.Stay[10] = novStay;
                        r.Stay[11] = decStay;
                        Regions.Add(r);
                    }
                    else
                    {
                        // A plain "... (TR)" row opens a new region; following rows are its areas.
                        string n = csv[0].Replace(" (TR)", "");
                        r = new Region(n);
                        area = true;
                    }
                    #endregion
                }
                else if (area)
                {
                    #region RegionArea
                    string n = csv[0].Trim();
                    //
                    string rString = csv[2].Trim().Replace(",", "");
                    if (rString.Equals("") != true) { room = int.Parse(rString); } else { room = 0; }
                    string bString = csv[3].Trim().Replace(",", "");
                    if (bString.Equals("") != true) { bed = int.Parse(bString); } else { bed = 0; }
                    string eString = csv[4].Trim().Replace(",", "");
                    if (eString.Equals("") != true) { employee = int.Parse(eString); } else { employee = 0; }
                    try { // CultureInfo.InvariantCulture
                        Double.TryParse(csv[9].Trim(), out octOcc); } catch (Exception ex) { octOcc = 0.0; }
                    try { Double.TryParse(csv[10].Trim(), out novOcc); } catch (Exception ex) { novOcc = 0.0; }
                    try { Double.TryParse(csv[11].Trim(), out decOcc); } catch (Exception ex) { decOcc = 0.0; }
                    try { Double.TryParse(csv[24].Trim(), out octStay); } catch (Exception ex) { octStay = 0.0; }
                    try { Double.TryParse(csv[25].Trim(), out novStay); } catch (Exception ex) { novStay = 0.0; }
                    try { Double.TryParse(csv[26].Trim(), out decStay); } catch (Exception ex) { decStay = 0.0; }
                    r.addArea(new Area(n, quarter, room, bed, employee, octOcc, novOcc, decOcc, octStay, novStay, decStay));
                }
                #endregion
            }
            // Switch the UI from "create" to "update" mode after the first import.
            createButton.Visible = false;
            updateButton.Visible = true;
        }
        s.Regions = Regions;
        country.addState(s);
    }
}
// Demo: deserializes Pupil objects from a nonstandard, German-labelled CSV by
// mapping columns to properties through wildcard header aliases, letting
// CsvAnalyzer detect the file's delimiter/options before reading.
public static void TestDeserializingClassesFromCsv()
{
    const string csvFileName = "Objects.csv";
    // Create a nonstandard CSV-File
    // (semicolon-delimited, German headers, trailing separators, a short row).
    File.WriteAllText(csvFileName, new StringBuilder()
        .AppendLine(
            "Unterrichtstag;Unterrichtsbeginn;Vollständiger Name;Unterrichtsfach;")
        .AppendLine(
            "Wednesday;14:30;Susi Meyer;Piano")
        .AppendLine(
            "Thursday;15:15;Carl Czerny;Piano;")
        .AppendLine(
            ";;Frederic Chopin")
        .ToString());
    // Initialize a CsvRecordWrapper which retrieves the data from
    // the CSV-Columns and converts it to the right data type.
    // Aliases with wildcards can be used to match the column-headers
    // of the CSV file.
    var wrapper = new CsvRecordWrapper();
    // Reuse a converter for more than one property:
    ICsvTypeConverter stringConverter =
        CsvConverterFactory.CreateConverter(CsvTypeCode.String, nullable: true);
    wrapper.AddProperty
    (
        new CsvProperty("Name",
                        new string[] { "*name" },
                        stringConverter)
    );
    wrapper.AddProperty
    (
        new CsvProperty("Subject",
                        new string[] { "*subject", "*fach" },
                        stringConverter)
    );
    wrapper.AddProperty
    (
        new CsvProperty("LessonDay",
                        new string[] { "*day", "*tag" },
                        CsvConverterFactory
                            .CreateEnumConverter <DayOfWeek>(nullable: true))
    );
    wrapper.AddProperty
    (
        new CsvProperty("LessonBegin",
                        new string[] { "*begin?" },
                        CsvConverterFactory
                            .CreateConverter(CsvTypeCode.TimeSpan, nullable: true))
    );
    // Analyze the CSV file to determine the right parameters
    // for proper reading:
    var analyzer = new CsvAnalyzer();
    analyzer.Analyze(csvFileName);
    // Read the CSV file using the analyzer's detected header/option/delimiter settings:
    using var reader = new CsvReader(csvFileName, analyzer.HasHeaderRow, analyzer.Options, analyzer.FieldSeparator);
    var pupilsList = new List <Pupil>();
    foreach (CsvRecord record in reader.Read())
    {
        wrapper.Record = record;
        // Using a dynamic variable enables you to assign
        // the properties without having to explicitely cast them
        // to the target data type:
        dynamic dynWrapper = wrapper;
        pupilsList.Add(new Pupil
        {
            Name = dynWrapper.Name,
            LessonBegin = dynWrapper.LessonBegin,
            LessonDay = dynWrapper.LessonDay,
            Subject = dynWrapper.Subject
        });
    }
    // Write the results to Console:
    foreach (Pupil pupil in pupilsList)
    {
        Console.WriteLine(pupil);
        Console.WriteLine();
    }
}
List <lwg_Dealer> ParseCSVFileToDealerList(string fullPath) { List <lwg_Dealer> dealerList = new List <lwg_Dealer>(); CsvReader csv = new CsvReader(new StreamReader(fullPath), true); csv.MissingFieldAction = MissingFieldAction.ReplaceByNull; int fieldCount = csv.FieldCount; string[] headers = csv.GetFieldHeaders(); while (csv.ReadNextRecord()) { lwg_Dealer dealer = new lwg_Dealer(); // Dealer ID if (csv[0] != null) { dealer.DealerID = csv[0]; } else { LWGLog.WriteLog("import dealer", "null of dealerId"); continue; } // Dealer Name if (csv[1] != null) { dealer.Name = csv[1]; } else { LWGLog.WriteLog("import dealer", string.Format("{0}: empty name", csv[0])); continue; } // Address Line 1 if (csv[2] != null) { dealer.AddressLine1 = csv[2]; } // Address Line 2 if (csv[3] != null) { dealer.AddressLine2 = csv[3]; } // City if (csv[4] != null) { dealer.City = csv[4]; } // State if (csv[5] != null) { dealer.State = csv[5]; } // Zip if (csv[6] != null) { dealer.Zip = csv[6]; } // Phone if (csv[7] != null) { dealer.Phone = csv[7]; } // Fax if (csv[8] != null) { dealer.Fax = csv[8]; } // WebAddress if (csv[9] != null) { dealer.WebAddress = csv[9]; } // Contact if (csv[10] != null) { dealer.Contact = csv[10]; } // NewIssue if (csv[11] != null) { dealer.NewIssue = csv[11]; } dealerList.Add(dealer); } return(dealerList); }
/// <summary>
/// Appends the newest quarter's data from the CSV in fileBox to an already
/// imported state: reads state name and quarter from the report header, then
/// walks the body updating each region on its "Total ... (TR)" row and each
/// region's areas in between. Shows an error if the state was never imported.
/// Fix: the region-total update previously passed novOcc/decOcc (occupancy)
/// where the stay values belong; it now passes novStay/decStay, matching the
/// order used everywhere else in this form.
/// </summary>
public void addNewMonths()
{
    int quarter;
    using (CsvReader csv = new CsvReader(new StreamReader(fileBox.Text), true))
    {
        //extract field count and headers from csv
        string[] headers = csv.GetFieldHeaders();
        // The report header lives in the first record's first field and is
        // itself comma-packed, so it is split again by hand.
        csv.ReadNextRecord();
        headers = csv[0].Split(',');
        string name = headers[2].Trim();
        if (selectState(name))
        {
            //quarter based off of headers[3] in format Jun, 20XX
            // 20 is a sentinel meaning "month not recognised".
            quarter = 20;
            headers = headers[3].Split(' ');
            switch (headers[1])
            {
                case "Mar": quarter = 1; break;
                case "Jun": quarter = 2; break;
                case "Sep": quarter = 3; break;
                case "Dec": quarter = 4; break;
            }
            quarters.Remove(quarter);
            bool area = false;
            // Skip the three report sub-header rows before the data body.
            for (int i = 0; i < 3; i++)
            {
                csv.ReadNextRecord();
            }
            while (csv.ReadNextRecord())
            {
                int room; int bed; int employee;
                double octOcc; double novOcc; double decOcc;
                double octStay; double novStay; double decStay;
                if (csv[0].Contains("(TR)"))
                {
                    if (csv[0].Contains("Total"))
                    {
                        // "Total ... (TR)" row closes the currently selected region.
                        area = false;
                        // Numeric cells carry thousands separators; blanks become 0.
                        string rString = csv[2].Trim().Replace(",", "");
                        if (rString.Equals("") != true) { room = int.Parse(rString); } else { room = 0; }
                        string bString = csv[3].Trim().Replace(",", "");
                        if (bString.Equals("") != true) { bed = int.Parse(bString); } else { bed = 0; }
                        string eString = csv[4].Trim().Replace(",", "");
                        if (eString.Equals("") != true) { employee = int.Parse(eString); } else { employee = 0; }
                        // The try/catch guards the csv[...] access on short rows;
                        // TryParse itself already yields 0 on failure.
                        try { // CultureInfo.InvariantCulture
                            Double.TryParse(csv[9].Trim(), out octOcc); } catch (Exception ex) { octOcc = 0.0; }
                        try { Double.TryParse(csv[10].Trim(), out novOcc); } catch (Exception ex) { novOcc = 0.0; }
                        try { Double.TryParse(csv[11].Trim(), out decOcc); } catch (Exception ex) { decOcc = 0.0; }
                        try { Double.TryParse(csv[24].Trim(), out octStay); } catch (Exception ex) { octStay = 0.0; }
                        try { Double.TryParse(csv[25].Trim(), out novStay); } catch (Exception ex) { novStay = 0.0; }
                        try { Double.TryParse(csv[26].Trim(), out decStay); } catch (Exception ex) { decStay = 0.0; }
                        // Fixed: the last two arguments were novOcc/decOcc (copy-paste
                        // from the occupancy block); the stay values belong here.
                        selectedRegion.addNewestQuarter(quarter, room, bed, employee, octOcc, novOcc, decOcc, octStay, novStay, decStay);
                    }
                    else
                    {
                        // A plain "... (TR)" row selects the region to update next.
                        string n = csv[0].Replace(" (TR)", "");
                        selectRegion(n);
                        area = true;
                    }
                }
                else if (area)
                {
                    string n = csv[0].Trim();
                    //
                    string rString = csv[2].Trim().Replace(",", "");
                    if (rString.Equals("") != true) { room = int.Parse(rString); } else { room = 0; }
                    string bString = csv[3].Trim().Replace(",", "");
                    if (bString.Equals("") != true) { bed = int.Parse(bString); } else { bed = 0; }
                    string eString = csv[4].Trim().Replace(",", "");
                    if (eString.Equals("") != true) { employee = int.Parse(eString); } else { employee = 0; }
                    try { // CultureInfo.InvariantCulture
                        Double.TryParse(csv[9].Trim(), out octOcc); } catch (Exception ex) { octOcc = 0.0; }
                    try { Double.TryParse(csv[10].Trim(), out novOcc); } catch (Exception ex) { novOcc = 0.0; }
                    try { Double.TryParse(csv[11].Trim(), out decOcc); } catch (Exception ex) { decOcc = 0.0; }
                    try { Double.TryParse(csv[24].Trim(), out octStay); } catch (Exception ex) { octStay = 0.0; }
                    try { Double.TryParse(csv[25].Trim(), out novStay); } catch (Exception ex) { novStay = 0.0; }
                    try { Double.TryParse(csv[26].Trim(), out decStay); } catch (Exception ex) { decStay = 0.0; }
                    // NOTE(review): bed and room are passed in the opposite order from
                    // addNewestQuarter above — verify addNewestQuarters' signature.
                    selectedRegion.addNewestQuarters(n, quarter, bed, room, employee, octOcc, novOcc, decOcc, octStay, novStay, decStay);
                }
            }
        }
        else
        {
            MessageBox.Show("To Convert Data for a New State, please select the option from the Import Menu", "Error adding data", MessageBoxButtons.OK);
        }
    }
}
private static int?BatchProcessOakbrookChurch(CsvReader csv, DateTime date, int?fundid) { var db = DbUtil.Db; var cols = csv.GetFieldHeaders(); BundleHeader bh = null; var qf = from f in db.ContributionFunds where f.FundStatusId == 1 orderby f.FundId select f.FundId; while (csv.ReadNextRecord()) { if (csv[16] == "Credit") { if (bh != null) { BatchImportContributions.FinishBundle(bh); } bh = BatchImportContributions.GetBundleHeader(date, DateTime.Now); continue; } if (bh == null) { bh = BatchImportContributions.GetBundleHeader(date, DateTime.Now); } var bd = new BundleDetail { CreatedBy = db.UserId, CreatedDate = DateTime.Now, }; bd.Contribution = new Contribution { CreatedBy = db.UserId, CreatedDate = DateTime.Now, ContributionDate = date, FundId = fundid ?? qf.First(), ContributionStatusId = 0, ContributionTypeId = ContributionTypeCode.CheckCash, }; string ck, rt, ac; rt = csv[11]; ac = csv[13]; ck = csv[14]; bd.Contribution.ContributionAmount = csv[15].GetAmount(); bd.Contribution.CheckNo = ck; var eac = Util.Encrypt(rt + "|" + ac); var q = from kc in db.CardIdentifiers where kc.Id == eac select kc.PeopleId; var pid = q.SingleOrDefault(); if (pid != null) { bd.Contribution.PeopleId = pid; } bd.Contribution.BankAccount = eac; bh.BundleDetails.Add(bd); } BatchImportContributions.FinishBundle(bh); return(bh.BundleHeaderId); }
/// <summary>
/// Captures the reader's current CSV record as an immutable snapshot so this
/// record stays valid after the reader advances to the next row.
/// </summary>
/// <param name="reader">Reader positioned on the record to copy.</param>
/// <param name="cultureInfo">Culture used later when interpreting field values.</param>
public CSVGridDataRecord(CsvReader reader, System.Globalization.CultureInfo cultureInfo)
{
    _importCultureInfo = cultureInfo;

    // Copy the current record's fields into our own buffer.
    var snapshot = new string[reader.FieldCount];
    reader.CopyCurrentRecordTo(snapshot);
    _data = snapshot;
}
/// <summary>
/// Wipes all dealer-related tables, then re-imports dealers from the given
/// CSV stream (first row = headers). Rows with an empty "Dealer Name" are
/// skipped. Throws on missing/duplicate URL or unknown featured video.
/// </summary>
/// <param name="fStream">CSV content with a header row.</param>
/// <returns>True if at least one row was persisted by SaveChanges.</returns>
public bool ImportDealers( Stream fStream )
{
    // Full replace import: clear link tables first, then the dealers themselves.
    DeleteAllObjects( "[DealerVideoLinks]" );
    DeleteAllObjects( "[DealerPageLinks]" );
    DeleteAllObjects( "[DealerFeaturedProducts]" );
    DeleteAllObjects( "[Dealers]" );

    var csvReader = new CsvReader( new StreamReader( fStream ), true );
    while( csvReader.ReadNextRecord() )
    {
        // Skip rows without a dealer name.
        if( !csvReader["Dealer Name"].Any() )
        {
            continue;
        }
        var dealer = new Dealer();

        // Map each known column (case-insensitive) onto the dealer entity.
        foreach( var header in csvReader.GetFieldHeaders() )
        {
            string val = csvReader[header].Trim();
            switch( header.ToLower() )
            {
                case "dealer name":
                    dealer.Name = val;
                    break;
                case "url":
                    // URL is mandatory and must be unique across dealers.
                    dealer.URL = val ?? "";
                    if( !dealer.URL.Any() )
                    {
                        throw new Exception( string.Format( "URL is required for {0}.", dealer.Name ) );
                    }
                    if( database.Dealers.Any( d => d.URL == dealer.URL && d.DealerID != dealer.DealerID ) )
                    {
                        throw new Exception( string.Format( "URL for {0} already exists.", dealer.Name ) );
                    }
                    break;
                case "main content":
                    dealer.MainContent = val ?? "";
                    break;
                case "featured video":
                {
                    // Featured video must already exist in VideoLinks (matched by display name).
                    var fVideo = database.VideoLinks.FirstOrDefault( v => v.Display == val );
                    if( fVideo != null )
                    {
                        dealer.FeaturedVideo = fVideo;
                    }
                    else
                    {
                        throw new Exception( string.Format( "Featured video cannot be found for {0}", dealer.Name ) );
                    }
                }
                break;
                case "product list":
                    // Comma-separated series names; unknown names are silently ignored.
                    if( (val ?? "").Any() )
                    {
                        var values = val.Split( ',' );
                        foreach( var indVal in values.Select( s => s.Trim() ) )
                        {
                            var p = database.Serieses.FirstOrDefault( i => i.Name == indVal );
                            if( p != null )
                            {
                                // De-duplicate by series name.
                                if( !dealer.DealerFeaturedProducts.Any( dfp => dfp.SeriesName == p.Name ) )
                                {
                                    dealer.DealerFeaturedProducts.Add( new DealerFeaturedProduct() { SeriesName = p.Name } );
                                }
                            }
                        }
                    }
                    else
                    {
                        dealer.DealerFeaturedProducts.Clear();
                    }
                    break;
                case "video list":
                    // Comma-separated video display names; unknown names are silently ignored.
                    if( (val ?? "").Any() )
                    {
                        var values = val.Split( ',' );
                        foreach( var indVal in values.Select( s => s.Trim() ) )
                        {
                            var v = database.VideoLinks.FirstOrDefault( i => i.Display == indVal );
                            if( v != null )
                            {
                                if( !dealer.VideoLinks.Any( dvl => dvl.VideoID == v.VideoID ) )
                                {
                                    dealer.VideoLinks.Add( v );
                                }
                            }
                        }
                    }
                    else
                    {
                        dealer.VideoLinks.Clear();
                    }
                    break;
                case "page list":
                    // Comma-separated page display names; unknown names are silently ignored.
                    if( (val ?? "").Any() )
                    {
                        var values = val.Split( ',' );
                        foreach( var indVal in values.Select( s => s.Trim() ) )
                        {
                            var p = database.PageLinks.FirstOrDefault( i => i.Display == indVal );
                            if( p != null )
                            {
                                if( !dealer.PageLinks.Any( dpl => dpl.PageID == p.PageID ) )
                                {
                                    dealer.PageLinks.Add( p );
                                }
                            }
                        }
                    }
                    else
                    {
                        dealer.PageLinks.Clear();
                    }
                    break;
                case "product headline":
                    dealer.ProductsHeadline = val;
                    break;
                case "page headline":
                    dealer.PagesHeadline = val;
                    break;
                case "video headline":
                    dealer.VideosHeadline = val;
                    break;
            }
        }
        database.Dealers.Add( dealer );
    }
    return database.SaveChanges() > 0;
}
// EmployeesFromCsv: Given the raw text of the PSA CSV extract (as
// obtained, for instance, from the GetCsv method), transform it into an
// array of nicely-formatted Employee JSON objects. Note that these
// Employees are NOT saved or otherwise processed by default.
// Returns a tuple of (successfully parsed Employees, diagnostic strings
// for records that were flagged as bad data or threw during parsing).
public async Task <Tuple <List <Employee>, List <string> > > EmployeesFromCsv(
    Stream csvTextStream, Encoding csvEncoding)
{
    // By default the content will not be read if it is not form or JSON
    // type so we need to use a stream reader to read the request body.
    // CsvReader expects a StreamReader anyways so we will use that.
    using (StreamReader reader = new StreamReader(csvTextStream, csvEncoding))
    using (CsvReader csv = new CsvReader(reader, CultureInfo.InvariantCulture))
    {
        // Use the ClassMap to map the headers in the Csv to the fields
        // of the Employee model.
        // TODO: Note there is a bug with CsvHelper when trimming fields
        // on an async read, hence the line commented out below. Until
        // this issue is fixed in a future release, we have had to add a
        // TrimAllStrings() extension, which is called below.
        // https://github.com/JoshClose/CsvHelper/issues/1400
        csv.Configuration.TrimOptions = CsvHelper.Configuration.TrimOptions.InsideQuotes;
        // csv.Configuration.TypeConverterCache
        //     .RemoveConverter<DateTime>();
        // csv.Configuration.TypeConverterCache
        //     .AddConverter<DateTime>(new CustomDateTimeConverter());
        csv.Configuration.RegisterClassMap <PsaCsvMap>();

        var goodRecords = new List <Employee>();
        var badRecords = new List <string>();

        // BadDataFound fires while csv.Read()/GetRecord processes a row;
        // latch a flag so the offending record is excluded from goodRecords.
        var isRecordBad = false;
        var line = 1;
        csv.Configuration.BadDataFound = context =>
        {
            isRecordBad = true;
            badRecords.Add(context.RawRecord);
        };

        while (csv.Read())
        {
            try
            {
                var record = csv.GetRecord <Employee>();
                if (!isRecordBad)
                {
                    // TODO: Remove this line (and possibly the whole
                    // implementation) if the bug with CsvHelper
                    // TrimOptions is resolved.
                    record.TrimAllStrings();
                    goodRecords.Add(record);
                }
            }
            catch (Exception e)
            {
                // Parsing failures are reported with their line number.
                var ExceptionText = $"Line {line}: Exception: {e}";
                badRecords.Add(ExceptionText);
            }
            // Reset per-record state before reading the next row.
            isRecordBad = false;
            line++;
        }

        return(Tuple.Create(goodRecords, badRecords));
    }
}
// Materializes the reader's current row as a dynamic object.
// Not yet implemented: callers must not invoke this.
dynamic ReadRow(CsvReader reader) => throw new NotImplementedException();
/// <summary>
/// Opens the CSV file at <paramref name="path"/> eagerly; the reader is
/// held for the lifetime of the enumerable.
/// </summary>
/// <param name="path">Path of the CSV file to enumerate.</param>
public CsvEnumerable(string path)
{
    streamReader = new StreamReader(path);
    try
    {
        csvReader = new CsvReader(streamReader, CultureInfo.InvariantCulture);
    }
    catch
    {
        // BUGFIX: don't leak the open file handle if CsvReader construction fails.
        streamReader.Dispose();
        throw;
    }
}
/// <summary>
/// Pushes one parsed flat-file line into <paramref name="dt"/> as a DataRow.
/// Blank lines are skipped; short lines may be stitched back together via
/// DealWithTooFewCellsOnCurrentLine; overlong lines are flagged as bad data.
/// </summary>
/// <returns>1 if a row was added to the table, 0 if the line was skipped or bad.</returns>
public int PushCurrentLine(CsvReader reader, FlatFileLine lineToPush, DataTable dt, IDataLoadEventListener listener, FlatFileEventHandlers eventHandlers)
{
    //skip the blank lines
    if (lineToPush.Cells.Length == 0 || lineToPush.Cells.All(h => h.IsBasicallyNull()))
    {
        return(0);
    }

    int headerCount = _headers.CountNotNull;

    //if the number of not empty headers doesn't match the headers in the data table
    if (dt.Columns.Count != headerCount)
    {
        // Warn once per file only; the flag latches after the first mismatch.
        if (!_haveComplainedAboutColumnMismatch)
        {
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Flat file '" + _fileToLoad.File.Name + "' line number '" + reader.Context.RawRow + "' had " + headerCount + " columns while the destination DataTable had " + dt.Columns.Count + " columns. This message apperas only once per file"));
            _haveComplainedAboutColumnMismatch = true;
        }
    }

    Dictionary <string, object> rowValues = new Dictionary <string, object>();

    // Too few cells: attempt record reassembly; give up on this line if it fails.
    if (lineToPush.Cells.Length < headerCount)
    {
        if (!DealWithTooFewCellsOnCurrentLine(reader, lineToPush, listener, eventHandlers))
        {
            return(0);
        }
    }

    bool haveIncremented_bufferOverrunsWhereColumnValueWasBlank = false;

    for (int i = 0; i < lineToPush.Cells.Length; i++)
    {
        //about to do a buffer overrun (more cells on the line than headers)
        if (i >= _headers.Length)
        {
            if (lineToPush[i].IsBasicallyNull())
            {
                // Count the overrun once per line, but only for blank overflow values.
                if (!haveIncremented_bufferOverrunsWhereColumnValueWasBlank)
                {
                    _bufferOverrunsWhereColumnValueWasBlank++;
                    haveIncremented_bufferOverrunsWhereColumnValueWasBlank = true;
                }
                continue; //do not bother buffer overruning with null whitespace stuff
            }
            else
            {
                // Non-blank overflow value: this line is genuinely malformed.
                string errorMessage = string.Format("Column mismatch on line {0} of file '{1}', it has too many columns (expected {2} columns but line had {3})", reader.Context.RawRow, dt.TableName, _headers.Length, lineToPush.Cells.Length);

                if (_bufferOverrunsWhereColumnValueWasBlank > 0)
                {
                    errorMessage += " ( " + _bufferOverrunsWhereColumnValueWasBlank + " Previously lines also suffered from buffer overruns but the overrunning values were empty so we had ignored them up until now)";
                }

                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, errorMessage));
                eventHandlers.BadDataFound(lineToPush);
                break;
            }
        }

        //if we are ignoring this header
        if (_headers.IgnoreColumnsList.Contains(_headers[i]))
        {
            continue;
        }

        //its an empty header, dont bother populating it
        if (_headers[i].IsBasicallyNull())
        {
            // A value under a nameless header is unrecoverable — fail the load.
            if (!lineToPush[i].IsBasicallyNull())
            {
                throw new FileLoadException("The header at index " + i + " in flat file '" + dt.TableName + "' had no name but there was a value in the data column (on Line number " + reader.Context.RawRow + ")");
            }
            else
            {
                continue;
            }
        }

        //sometimes flat files have ,NULL,NULL,"bob" in instead of ,,"bob"
        if (lineToPush[i].IsBasicallyNull())
        {
            rowValues.Add(_headers[i], DBNull.Value);
        }
        else
        {
            object hackedValue = _hackValuesFunc(lineToPush[i]);

            if (hackedValue is string)
            {
                hackedValue = ((string)hackedValue).Trim();
            }

            try
            {
                // Parse the string into the destination column's .NET type when a
                // type decider is registered for that type.
                if (hackedValue is string s && typeDeciderFactory.Dictionary.ContainsKey(dt.Columns[_headers[i]].DataType))
                {
                    hackedValue = typeDeciderFactory.Dictionary[dt.Columns[_headers[i]].DataType].Parse(s);
                }

                rowValues.Add(_headers[i], hackedValue);
            }
            catch (Exception e)
            {
                throw new FileLoadException("Error reading file '" + dt.TableName + "'. Problem loading value " + lineToPush[i] + " into data table (on Line number " + reader.Context.RawRow + ") the header we were trying to populate was " + _headers[i] + " and was of datatype " + dt.Columns[_headers[i]].DataType, e);
            }
        }
    }

    // Only materialise the DataRow if this raw line was not flagged bad.
    if (!BadLines.Contains(reader.Context.RawRow))
    {
        DataRow currentRow = dt.Rows.Add();

        foreach (KeyValuePair <string, object> kvp in rowValues)
        {
            currentRow[kvp.Key] = kvp.Value;
        }

        return(1);
    }

    return(0);
}
/// <summary>
/// Wraps an already-open CSV reader together with its header row for enumeration.
/// </summary>
/// <param name="reader">Reader to pull records from.</param>
/// <param name="headers">Column headers associated with the reader.</param>
public CsvEnumerator(CsvReader reader, string[] headers)
{
    _headers = headers;
    _reader = reader;
}
/// <summary>
/// Merges two row-sorted CSV temp files into a new sorted temp file (one
/// merge step of an external merge sort) and returns the new file's path.
/// The new file is registered in tempFiles for later cleanup.
/// </summary>
/// <param name="filePathA">First sorted input file.</param>
/// <param name="filePathB">Second sorted input file.</param>
/// <returns>Path of the merged output file.</returns>
private String sortPair(String filePathA, String filePathB)
{
    String fileNameC = String.Format("row_sort_{0}_{1}.txt", tempFileKey, ++fileNumber);
    String filePathC = Path.Combine(tempDirectory, fileNameC);
    tempFiles.Add(filePathC); // master list of files to be cleaned up

    // IDIOM: replaced the manual null-checked try/finally dispose chain with
    // nested using statements; readers/writer are still disposed before their
    // underlying streams, and partially-opened resources are still released.
    using (FileStream streamA = new FileStream(filePathA, FileMode.Open, FileAccess.Read))
    using (FileStream streamB = new FileStream(filePathB, FileMode.Open, FileAccess.Read))
    using (FileStream streamC = new FileStream(filePathC, FileMode.CreateNew, FileAccess.Write))
    using (CsvReader readerA = new CsvReader(streamA, Encoding.UTF8))
    using (CsvReader readerB = new CsvReader(streamB, Encoding.UTF8))
    using (CsvWriter writer = new CsvWriter(streamC, Encoding.UTF8))
    {
        List <String> rowA = null;
        List <String> rowB = null;

        // Standard two-way merge: keep one pending row per input and always
        // emit the smaller of the two heads.
        while (!readerA.EndOfStream || !readerB.EndOfStream)
        {
            if (!readerA.EndOfStream && rowA == null)
            {
                rowA = readerA.readRow();
            }
            if (!readerB.EndOfStream && rowB == null)
            {
                rowB = readerB.readRow();
            }
            if (rowA != null && rowB != null)
            {
                if (comparer.Compare(rowA, rowB) <= 0)
                {
                    writer.writeRow(rowA);
                    rowA = null;
                }
                else
                {
                    writer.writeRow(rowB);
                    rowB = null;
                }
            }
            else if (rowA != null)
            {
                writer.writeRow(rowA);
                rowA = null;
            }
            else if (rowB != null)
            {
                writer.writeRow(rowB);
                rowB = null;
            }
        }

        // Clears out any remaining lines still pending after both streams ended.
        if (rowA != null)
        {
            writer.writeRow(rowA);
        }
        if (rowB != null)
        {
            writer.writeRow(rowB);
        }
    }

    return(filePathC);
}
/// <summary>
/// Called when a line has fewer cells than there are headers. Either flags
/// the line as bad (when newline-repair is disabled) or attempts to rebuild
/// a single full-width record by reading and appending subsequent short lines.
/// </summary>
/// <returns>
/// True only if a full-width record was reassembled into
/// <paramref name="lineToPush"/>; false means the line (and any peeked
/// lines) were reported as bad data.
/// </returns>
private bool DealWithTooFewCellsOnCurrentLine(CsvReader reader, FlatFileLine lineToPush, IDataLoadEventListener listener, FlatFileEventHandlers eventHandlers)
{
    if (!_attemptToResolveNewlinesInRecords)
    {
        //we read too little cell count but we don't want to solve the problem
        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Too few columns on line " + reader.Context.RawRow + " of file '" + _fileToLoad + "', it has too many columns (expected " + _headers.Length + " columns but line had " + lineToPush.Cells.Length + ")." + (_bufferOverrunsWhereColumnValueWasBlank > 0 ? "( " + _bufferOverrunsWhereColumnValueWasBlank + " Previously lines also suffered from buffer overruns but the overrunning values were empty so we had ignored them up until now)" : "")));
        eventHandlers.BadDataFound(lineToPush);

        //didn't bother trying to fix the problem
        return(false);
    }

    //We want to try to fix the problem by reading more data

    //Create a composite row
    List <string> newCells = new List <string>(lineToPush.Cells);

    //track what we are Reading incase it doesn't work
    var allPeekedLines = new List <FlatFileLine>();

    do
    {
        FlatFileLine peekedLine;

        //try adding the next row
        if (reader.Read())
        {
            peekedLine = new FlatFileLine(reader.Context);

            //peeked line was 'valid' on it's own
            if (peekedLine.Cells.Length >= _headers.Length)
            {
                //queue it for reprocessing
                PeekedRecord = peekedLine;

                //and mark everything else as bad
                AllBad(lineToPush, allPeekedLines, eventHandlers);
                return(false);
            }

            //peeked line was invalid (too short) so we can add it onto ourselves
            allPeekedLines.Add(peekedLine);
        }
        else
        {
            //Ran out of space in the file without fixing the problem so it's all bad
            AllBad(lineToPush, allPeekedLines, eventHandlers);

            //couldn't fix the problem
            return(false);
        }

        //add the peeked line to the current cells
        //add the first record as an extension of the last cell in current row
        // (the short line was presumably caused by a newline inside a quoted cell)
        if (peekedLine.Cells.Length != 0)
        {
            newCells[newCells.Count - 1] += Environment.NewLine + peekedLine.Cells[0];
        }
        else
        {
            newCells[newCells.Count - 1] += Environment.NewLine; //the next line was completely blank! just add a new line
        }

        //add any further cells on after that
        newCells.AddRange(peekedLine.Cells.Skip(1));
    } while (newCells.Count() < _headers.Length);

    //if we read too much or reached the end of the file
    if (newCells.Count() > _headers.Length)
    {
        AllBadExceptLastSoRequeueThatOne(lineToPush, allPeekedLines, eventHandlers);
        return(false);
    }

    // Sanity check: the loop exits only when the count reached or passed
    // the header count, and the overshoot case returned above.
    if (newCells.Count() != _headers.Length)
    {
        throw new Exception("We didn't over read or reach end of file, how did we get here?");
    }

    //we managed to create a full row
    lineToPush.Cells = newCells.ToArray();

    //problem was fixed
    return(true);
}
/// <summary>
/// Runs the GBP/USD trading simulation: loads embedded price history and the
/// DARL ruleset, posts them to the darl.ai simulator, and writes the results
/// to results.csv in the working directory.
/// </summary>
public static async Task ProcessSequence()
{
    var initial_balance = "10000";
    var assembly = Assembly.GetExecutingAssembly();

    //get the data and ruleset from the exe
    var ddata = new DaslData();
    List<TradingRecord> records;
    // BUGFIX: dispose the CSV reader (and its stream) instead of leaking it.
    using (var csv = new CsvReader(new StreamReader(assembly.GetManifestResourceStream("DaslTradingExample.GBP_USD.csv"))))
    {
        records = csv.GetRecords <TradingRecord>().ToList();
    }
    using (var codeReader = new StreamReader(assembly.GetManifestResourceStream("DaslTradingExample.trading_simulation.darl")))
    {
        ddata.code = codeReader.ReadToEnd();
    }

    //convert the csv records to a DaslSet.
    ddata.history = new DaslSet();
    ddata.history.sampleTime = new TimeSpan(1, 0, 0, 0); // 1 day
    ddata.history.events = new List <DaslState>();

    // Hoist the loop-invariant "last record" lookup out of the loop.
    var lastRecord = records.Count > 0 ? records[records.Count - 1] : null;
    foreach (var r in records)
    {
        if (ReferenceEquals(r, lastRecord)) //records are in reverse order, set the initial value of the balance and add the first price
        {
            ddata.history.events.Add(new DaslState
            {
                timeStamp = DateTime.Parse(r.Date),
                values = new List <DarlVar>
                {
                    new DarlVar { name = "price", Value = r.Price, dataType = DarlVar.DataType.numeric },
                    new DarlVar { name = "balance", dataType = DarlVar.DataType.numeric, Value = initial_balance }
                }
            });
        }
        else // add the day's price
        {
            ddata.history.events.Add(new DaslState
            {
                timeStamp = DateTime.Parse(r.Date),
                values = new List <DarlVar>
                {
                    new DarlVar { name = "price", Value = r.Price, dataType = DarlVar.DataType.numeric }
                }
            });
        }
    }

    //send the data off to the simulator
    var valueString = JsonConvert.SerializeObject(ddata);
    // NOTE(review): a per-call HttpClient is acceptable for this one-shot
    // example; reuse a shared instance if this is ever called repeatedly.
    using (var client = new HttpClient())
    {
        var response = await client.PostAsync("https://darl.ai/api/Linter/DaslSimulate", new StringContent(valueString, Encoding.UTF8, "application/json"));
        var resp = await response.Content.ReadAsStringAsync();
        var returnedData = JsonConvert.DeserializeObject <DaslSet>(resp);

        //now write out the results as a csv file
        var outlist = new List <OutputRecord>();
        foreach (var d in returnedData.events)
        {
            var sterling = d.values.Where(a => a.name == "sterling").First();
            var ave3 = d.values.Where(a => a.name == "tradingrules.average3").First();
            var ave9 = d.values.Where(a => a.name == "tradingrules.average9").First();
            var price = d.values.Where(a => a.name == "price").First();
            var newbalance = d.values.Where(a => a.name == "newbalance").First();
            outlist.Add(new OutputRecord
            {
                price = price.values[0],
                date = d.timeStamp,
                sterling = sterling.unknown ? 0.0 : sterling.values[0],
                ave3 = ave3.unknown ? 0.0 : ave3.values[0],
                ave9 = ave9.unknown ? 0.0 : ave9.values[0],
                newbalance = newbalance.unknown ? 0.0 : newbalance.values[0],
                trade = d.values.Where(a => a.name == "trade").First().Value,
                transact = d.values.Where(a => a.name == "tradingsim.transact").First().Value
            });
        }

        // BUGFIX: the CsvWriter/StreamWriter were never flushed or disposed,
        // so results.csv could be left empty or truncated. `using` flushes
        // and closes the file on exit.
        using (var csvout = new CsvWriter(new StreamWriter("results.csv")))
        {
            csvout.WriteRecords(outlist);
        }
    }
}
/// <summary>
/// Reads DeleteCustomerPaymentProfile test cases from a CSV file, executes
/// each delete request against the Authorize.NET sandbox, and appends a
/// Pass/Fail row per case to Outputfile.csv.
/// </summary>
/// <param name="ApiLoginID">Sandbox API login ID.</param>
/// <param name="ApiTransactionKey">Sandbox API transaction key.</param>
public static void DeleteCustomerPaymentProfileExec(String ApiLoginID, String ApiTransactionKey)
{
    using (CsvReader csv = new CsvReader(new StreamReader(new FileStream(@"../../../CSV_DATA/DeleteCustomerPaymentProfile.csv", FileMode.Open)), true))
    {
        Console.WriteLine("DeleteCustomerPaymentProfile Sample");
        // flag doubles as "header row written yet?" (0 = not yet) and as a
        // running counter used to build the DCPP_00x output row ids.
        int flag = 0;
        int fieldCount = csv.FieldCount;
        string[] headers = csv.GetFieldHeaders();
        //Append Data
        var item1 = DataAppend.ReadPrevData();
        using (CsvFileWriter writer = new CsvFileWriter(new FileStream(@"../../../CSV_DATA/Outputfile.csv", FileMode.Open)))
        {
            while (csv.ReadNextRecord())
            {
                // Point the SDK at the sandbox with the supplied credentials.
                ApiOperationBase <ANetApiRequest, ANetApiResponse> .RunEnvironment = AuthorizeNET.Environment.SANDBOX;
                ApiOperationBase <ANetApiRequest, ANetApiResponse> .MerchantAuthentication = new merchantAuthenticationType()
                {
                    name = ApiLoginID,
                    ItemElementName = ItemChoiceType.transactionKey,
                    Item = ApiTransactionKey,
                };

                string customerProfileId = null;
                string customerPaymentProfileId = null;
                string TestCaseId = null;

                // Pull the fields we need from the current record by header name.
                for (int i = 0; i < fieldCount; i++)
                {
                    switch (headers[i])
                    {
                        case "TestCaseId": TestCaseId = csv[i]; break;
                        case "customerProfileId": customerProfileId = csv[i]; break;
                        case "customerPaymentProfileId": customerPaymentProfileId = csv[i]; break;
                        default: break;
                    }
                }

                CsvRow row = new CsvRow();
                try
                {
                    // First record: write the output header row and re-append
                    // the previously captured results.
                    if (flag == 0)
                    {
                        row.Add("TestCaseId");
                        row.Add("APIName");
                        row.Add("Status");
                        row.Add("TimeStamp");
                        writer.WriteRow(row);
                        flag = flag + 1;
                        //Append Data
                        foreach (var item in item1)
                        {
                            writer.WriteRow(item);
                        }
                    }

                    //please update the subscriptionId according to your sandbox credentials
                    var request = new deleteCustomerPaymentProfileRequest
                    {
                        customerProfileId = customerProfileId,
                        customerPaymentProfileId = customerPaymentProfileId
                    };

                    //Prepare Request
                    var controller = new deleteCustomerPaymentProfileController(request);
                    controller.Execute();

                    //Send Request to EndPoint
                    deleteCustomerPaymentProfileResponse response = controller.GetApiResponse();
                    if (response != null && response.messages.resultCode == messageTypeEnum.Ok)
                    {
                        /*****************************/
                        try
                        {
                            //Assert.AreEqual(response.Id, customerProfileId);
                            Console.WriteLine("Assertion Succeed! Valid CustomerPaymentProfile deleted.");
                            CsvRow row1 = new CsvRow();
                            row1.Add("DCPP_00" + flag.ToString());
                            row1.Add("DeleteCustomerPaymentProfile");
                            row1.Add("Pass");
                            row1.Add(DateTime.Now.ToString("yyyy/MM/dd" + "::" + "HH:mm:ss:fff"));
                            writer.WriteRow(row1);
                            // Console.WriteLine("Success " + TestcaseID + " CustomerID : " + response.Id);
                            flag = flag + 1;
                        }
                        catch
                        {
                            CsvRow row1 = new CsvRow();
                            row1.Add("DCPP_00" + flag.ToString());
                            row1.Add("DeleteCustomerPaymentProfile");
                            row1.Add("Fail");
                            row1.Add(DateTime.Now.ToString("yyyy/MM/dd" + "::" + "HH:mm:ss:fff"));
                            writer.WriteRow(row1);
                            Console.WriteLine("Assertion Failed! Invalid CustomerPaymentProfile deleted.");
                            flag = flag + 1;
                        }
                        /*******************/
                        //if (response != null && response.messages.message != null)
                        //{
                        //    Console.WriteLine("Success, ResultCode : " + response.messages.resultCode.ToString());
                        //}
                    }
                    else
                    {
                        // Null or non-Ok response: record a failure row.
                        CsvRow row1 = new CsvRow();
                        row1.Add("DCPP_00" + flag.ToString());
                        row1.Add("DeleteCustomerPaymentProfile");
                        row1.Add("Fail");
                        row1.Add(DateTime.Now.ToString("yyyy/MM/dd" + "::" + "HH:mm:ss:fff"));
                        writer.WriteRow(row1);
                    }
                }
                //else if (response != null)
                //{
                //    Console.WriteLine("Error: " + response.messages.message[0].code + " " + response.messages.message[0].text);
                //}
                catch (Exception e)
                {
                    // Transport/SDK failure: record a failure row with the message.
                    CsvRow row2 = new CsvRow();
                    row2.Add("DCPP_00" + flag.ToString());
                    row2.Add("DeleteCustomerPaymentProfile");
                    row2.Add("Fail");
                    row2.Add(DateTime.Now.ToString("yyyy/MM/dd" + "::" + "HH:mm:ss:fff"));
                    writer.WriteRow(row2);
                    flag = flag + 1;
                    Console.WriteLine(TestCaseId + " Error Message " + e.Message);
                }
                //return response;
            }
        }
    }
}
/// <summary>
/// Read all CSV files in the path provided.
/// If they conform to the prefix "LP" or "TO", load data into the data model.
/// Return appropriate success / fail result.
/// </summary>
/// <param name="path">Directory to scan for *.csv files (top level only).</param>
/// <returns>A FileLoadResult describing the outcome.</returns>
public FileLoadResult LoadFileData(string path)
{
    lpFiles = new List <LPFile>();
    touFiles = new List <TOUFile>();

    string[] filesInFolder;
    try
    {
        filesInFolder = Directory.GetFiles(path, "*.csv", SearchOption.TopDirectoryOnly);
    }
    catch (DirectoryNotFoundException)
    {
        return(FileLoadResult.InvalidPath);
    }
    // (removed a redundant `catch (Exception) { throw; }` — rethrowing
    // unchanged is the default behavior anyway)

    if (filesInFolder.Length == 0)
    {
        return(FileLoadResult.NoFilesFound);
    }

    foreach (var file in filesInFolder)
    {
        FileInfo fi = new FileInfo(file);
        TextReader tr;
        try
        {
            tr = File.OpenText(file);
        }
        catch (IOException)
        {
            return(FileLoadResult.FileUsedByAnotherProgram);
        }

        // BUGFIX: the reader was previously only closed on the success path,
        // so a parse failure leaked the file handle; `using` guarantees disposal.
        using (tr)
        {
            var csv = new CsvReader(tr);
            csv.Configuration.RegisterClassMap <CSVMapperLP>();
            csv.Configuration.RegisterClassMap <CSVMapperTOU>();

            // IDIOM: StartsWith with an explicit ordinal comparison replaces
            // Substring(0, 2).ToUpper(), avoiding culture-sensitive casing and
            // any out-of-range risk on short names.
            if (fi.Name.StartsWith("LP", StringComparison.OrdinalIgnoreCase))
            {
                lpFiles.Add(new LPFile
                {
                    FileName = fi.Name,
                    LPDataList = csv.GetRecords <LPData>().OrderBy(x => x.DataVal).ToList()
                });
            }
            else if (fi.Name.StartsWith("TO", StringComparison.OrdinalIgnoreCase))
            {
                touFiles.Add(new TOUFile
                {
                    FileName = fi.Name,
                    TOUDataList = csv.GetRecords <TOUData>().OrderBy(x => x.Energy).ToList()
                });
            }
        }
    }
    return(FileLoadResult.Success);
}
/// <summary>
/// Initializes a new instance using the given reader.
/// </summary>
/// <param name="reader">The reader this manager will pull fields from.</param>
public ExpressionManager(CsvReader reader) => this.reader = reader;