} // end Main method

/// <summary>
/// Loads the CSV dataset file into DatasetItem records.
/// </summary>
/// <param name="csvFile">Path of the CSV file to read.</param>
/// <returns>The parsed items, or null when parsing failed with aggregated errors.</returns>
public static IEnumerable<DatasetItem> ReadFromCsv(string csvFile)
{
    IEnumerable<DatasetItem> datasetItemList = null;
    try
    {
        //Read From Csv
        var csvFileDescription = new CsvFileDescription
        {
            SeparatorChar = ',',              // Specify the separator character.
            FirstLineHasColumnNames = false,  // data starts on the first line
            FileCultureName = "en-US",        // default is the current culture
            EnforceCsvColumnAttribute = true  // only [CsvColumn] members are mapped
        };
        var csvContext = new CsvContext();
        datasetItemList = csvContext.Read<DatasetItem>(csvFile, csvFileDescription);
    }
    catch (AggregatedException ae)
    {
        // BUG FIX: LINQtoCSV stores the inner exceptions under the key
        // "InnerExceptionsList" (plural). The old key "InnerExceptionList"
        // returned null, so the foreach below threw NullReferenceException.
        var innerExceptionsList = ae.Data["InnerExceptionsList"] as List<Exception>;
        if (innerExceptionsList != null)
        {
            foreach (Exception e in innerExceptionsList)
            {
                Console.WriteLine(e.Message);
            }
        }
    }
    return datasetItemList;
}
// Verifies that CsvWriterService output with custom attribute converters matches
// the approved file for a set of operations carrying dynamic attribute columns.
public async Task WritesWithCustomAttributeConvertersAsync()
{
    var writerService = new CsvWriterService();
    // Three dynamic attribute columns: Attribute1..Attribute3.
    var attributes = new List<CustomAttribute>();
    for (var i = 0; i < 3; i++)
    {
        attributes.Add(new CustomAttribute { Value = $"Attribute{i + 1}" });
    }
    // Five operations; each gets five attribute values (Attribute1..Attribute5 —
    // note only three columns were registered above; presumably the extras are
    // ignored by the map — TODO confirm).
    var operations = new List<Operation>();
    for (var i = 0; i < 5; i++)
    {
        var operation = new Operation { Id = i + 1, Name = $"Operation {i + 1}", Enabled = true };
        for (var j = 0; j < 5; j++)
        {
            operation.Attributes[$"Attribute{j + 1}"] = $"Value {j + 1}";
        }
        operations.Add(operation);
    }
    var temporaryFileContext = new TemporaryFilesContext($"{nameof(CsvWriterServiceFacts)}_{nameof(WritesWithCustomAttributeConvertersAsync)}");
    var fileName = temporaryFileContext.GetFile("operations.csv");
    // Class map initialized with the dynamic attribute column names.
    var classMap = new OperationMap();
    classMap.Initialize(attributes.Select(x => x.Value));
    var csvContext = new CsvContext<Operation> { ClassMap = classMap };
    // Write reference output directly with CsvWriter first...
    using (var stream = File.Create(fileName))
    {
        using (var textWriter = new StreamWriter(stream))
        {
            var csvWriter = new CsvWriter(textWriter);
            csvWriter.Configuration.RegisterClassMap(classMap);
            csvWriter.WriteRecords(operations);
        }
    }
    // ...then write via the service under test to the same file and verify.
    // NOTE(review): this overwrites the CsvWriter output above — confirm the
    // double write to the same file is intentional.
    await writerService.WriteRecordsAsync(operations, fileName, csvContext);
    Approvals.VerifyFile(fileName);
}
// Lets the user pick a CSV file, reads its header row, and maps known columns
// to their ordinal positions before re-binding the grid.
private void browseButton_Click(object sender, EventArgs e)
{
    OpenFileDialog ofd = new OpenFileDialog();
    ofd.Filter = "CSV Files|*.csv";
    if (DialogResult.OK == ofd.ShowDialog())
    {
        pathTextBox.Text = ofd.FileName;
        // Only the first line (column titles) is needed.
        string firstLine = null;
        using (StreamReader sr = new StreamReader(ofd.FileName))
        {
            firstLine = sr.ReadLine();
        }
        string[] titles = CsvContext.GetFields(firstLine, ",");
        RefreshColumns();
        for (int i = 0; i < titles.Length; i++)
        {
            if (currentConfig.ContainsColumn(titles[i]))
            {
                // NOTE(review): the membership check is by title but the update is
                // positional (Columns[i]) — confirm the configured columns are
                // guaranteed to be in file order, otherwise the wrong column's
                // ordinal is set.
                currentConfig.Columns[i].Ordinal = i;
            }
        }
        mainDataGridView.DataSource = currentConfig.Columns;
        this.Refresh();
    }
}
/// <summary>
/// Imports ship-to addresses from the CSV file at <paramref name="path"/>.
/// </summary>
/// <param name="path">Path of the CSV file to import.</param>
/// <returns>The imported rows, or null when an unexpected error was shown to the user.</returns>
/// <exception cref="FileNotFoundException">Rethrown when the file is missing.</exception>
/// <exception cref="FieldAccessException">Rethrown when a field cannot be accessed.</exception>
public IEnumerable<ShipToAddressImport> Import(string path)
{
    try
    {
        var inputFileDescription = new CsvFileDescription
        {
            // cool - I can specify my own separator!
            SeparatorChar = ',',
            FirstLineHasColumnNames = false,
            QuoteAllFields = true,
            EnforceCsvColumnAttribute = true
        };
        CsvContext cc = new CsvContext();
        var importEntities = cc.Read<ShipToAddressImport>(path, inputFileDescription);
        return importEntities;
    }
    catch (FileNotFoundException)
    {
        // BUG FIX: "throw ex;" reset the stack trace; "throw;" preserves it.
        throw;
    }
    catch (FieldAccessException)
    {
        throw;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Importer Error", MessageBoxButton.OK, MessageBoxImage.Error);
        return null;
    }
}
/// <summary>
/// Computes the combined delivery + packaging charge for <paramref name="count"/>
/// units of the given item type; returns -1 for a non-positive count.
/// </summary>
public dynamic GetPackagingDeliveryCharge(string item, int count)
{
    if (count <= 0)
    {
        return -1;
    }
    var description = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true
    };
    var context = new CsvContext();
    // Per-unit charge: sum delivery + packaging over all rows whose item type
    // matches case-insensitively (ignoring surrounding whitespace).
    int perUnitCharge = 0;
    var matchingFees = context.Read<Item>(@"./Items.csv", description)
        .Where(row => row.ItemType.Trim().ToUpper() == item.ToUpper())
        .Select(row => row.Delivery + row.Packaging)
        .ToList();
    foreach (int fee in matchingFees)
    {
        perUnitCharge += fee;
    }
    return perUnitCharge * count;
}
// Imports drive-test rate data from an uploaded file (.txt = tab-delimited
// LogRecord rows, anything else = comma-delimited HugelandRecord rows),
// merges the statistics and feeds them into the rate chart.
public ActionResult RateAnalyze(RateStatChart chart)
{
    using (HttpFileImporter importer = new HttpFileImporter(Request.Files["fileUpload"]))
    {
        if (!importer.Success)
        {
            TempData["error"] = "请选择合适的路测数据导入!";
            ViewBag.Title = "导入路测数据";
        }
        else
        {
            TempData["Path"] = importer.FilePath;
            string extension = Path.GetExtension(importer.FileName);
            if (extension != null)
            {
                string fileExt = extension.ToLower();
                // Hugeland records are normalized and filtered to rows with a
                // positive PDSCH RB rate before merging.
                List<BasicRateStat> rateStatList = (fileExt == ".txt")
                    ? CsvContext.Read<LogRecord>(
                        importer.Reader, CsvFileDescription.TabDescription).ToList().MergeStat().Merge()
                    : CsvContext.Read<HugelandRecord>(
                        importer.Reader, CsvFileDescription.CommaDescription).Select(
                        x => x.Normalize()).ToList().MergeStat().Where(
                        x => x.PdschRbRate > 0).Select(x => (BasicRateStat)x).ToList();
                chart.Import(rateStatList);
            }
            ViewBag.Title = "路测速率指标分析";
            TempData["success"] = "导入路测数据:" + importer.FileName + "成功!";
        }
    }
    TempData["StatLength"] = chart.StatList.Count;
    return(View("RateImport"));
}
/// <summary>
/// Imports a VoLTE drive-test CSV file: derives the table name from the file
/// name, filters records to the Foshan area, merges them and persists the result.
/// </summary>
/// <param name="path">Full path of the GB2312-encoded CSV file.</param>
/// <returns>A summary message with the imported row count.</returns>
/// <exception cref="Exception">Thrown when no usable records are found.</exception>
public async Task<string> ImportDtVolteFile(string path)
{
    var fields = path.Replace(".csv", "").GetSplittedFields('\\');
    var tableName = fields[fields.Length - 1].DtFileNameEncode();
    List<FileRecordVolteCsv> infos;
    // FIX: use a using block so the reader is also released when parsing throws
    // (the old reader.Close() was skipped on exceptions, leaking the handle).
    using (var reader = new StreamReader(path, Encoding.GetEncoding("GB2312")))
    {
        infos = CsvContext.Read<FileRecordVolteCsv>(reader, CsvFileDescription.CommaDescription).ToList();
    }
    var filterInfos = infos.GetFoshanGeoPoints().ToList();
    if (!filterInfos.Any())
    {
        throw new Exception("无数据或格式错误!");
    }
    _dtFileInfoRepository.UpdateCsvFileInfo(tableName, filterInfos[0].StatTime, "Volte");
    var stats = filterInfos.MergeRecords();
    if (!stats.Any())
    {
        throw new Exception("无数据或格式错误!");
    }
    _rasterTestInfoRepository.UpdateRasterInfo(stats, tableName, "Volte");
    var count = await _fileRecordService.InsertFileRecordVoltes(stats, tableName);
    return "完成VoLTE路测文件导入:" + path + "(" + tableName + ")" + count + "条";
}
// Parses the Hugeland example CSV into coverage stats and checks both the raw
// list and the merged chart values (RSRP/SINR per merged point).
public void TestCoverageStatList_Hugeland()
{
    HugelandDescriptionInitialize();
    testInput = HugelandRecordExample;
    // Each CSV row becomes one CoverageStat via Import.
    coverageStatList = CsvContext.ReadString<HugelandRecord>(testInput, fileDescription_namesUs).Select(x =>
    {
        CoverageStat stat = new CoverageStat();
        stat.Import(x);
        return(stat);
    }).ToList();
    Assert.AreEqual(coverageStatList.Count, 19);
    Assert.AreEqual(coverageStatList[0].Longtitute, 113.13548);
    Assert.AreEqual(coverageStatList[0].Lattitute, 23.07062);
    Assert.AreEqual(coverageStatList[0].Rsrp, -93);
    Assert.AreEqual(coverageStatList[0].Sinr, 3.4);
    // Importing into the chart merges the 19 raw stats down to 9 points.
    CoverageStatChart chart = new CoverageStatChart();
    chart.Import(coverageStatList);
    Assert.AreEqual(chart.StatList.Count, 9);
    Assert.AreEqual(chart.StatList[0].Longtitute, 113.13548);
    Assert.AreEqual(chart.StatList[0].Lattitute, 23.07062);
    Assert.AreEqual(chart.StatList[0].Rsrp, -93);
    Assert.AreEqual(chart.StatList[1].Rsrp, -93.2, 1E-6);
    Assert.AreEqual(chart.StatList[2].Rsrp, -93.15);
    Assert.AreEqual(chart.StatList[3].Rsrp, -92.6);
    Assert.AreEqual(chart.StatList[4].Rsrp, -94.1);
    Assert.AreEqual(chart.StatList[5].Rsrp, -96.5);
    Assert.AreEqual(chart.StatList[6].Rsrp, -98.5);
    Assert.AreEqual(chart.StatList[7].Rsrp, -98.5);
    Assert.AreEqual(chart.StatList[8].Rsrp, -98.5);
    Assert.AreEqual(chart.StatList[0].Sinr, 3.4);
    Assert.AreEqual(chart.StatList[1].Sinr, 2.8);
    Assert.AreEqual(chart.StatList[2].Sinr, 2.55);
}
/// <summary>
/// Exports the sparepart list to a comma-separated CSV file with a header row.
/// </summary>
public void ExportToCSV()
{
    // Comma-separated output, header row, en-US number/date formatting.
    var outputFileDescription = new CsvFileDescription
    {
        QuoteAllFields = false,
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        FileCultureName = "en-US"
    };
    // Flatten each sparepart usage into an export record; the anonymous-type
    // property names become the CSV column headers.
    var exportSpareparts = View.SparepartListData.Select(sp => new
    {
        Tanggal = sp.SPK.CreateDate.ToString("yyyyMMdd"),
        Nopol = sp.SPK.Vehicle.ActiveLicenseNumber,
        Kode = sp.Sparepart.Code,
        Nama = sp.Sparepart.Name,
        Unit = sp.Sparepart.UnitReference.Name,
        Qty = sp.TotalQuantity,
        SubTotal = sp.TotalPrice,
        Kategori = sp.Category
    });
    new CsvContext().Write(exportSpareparts, View.ExportFileName, outputFileDescription);
}
// Parses the Dingli example CSV into coverage stats and checks both the raw
// list and the merged chart values (RSRP/SINR per merged point).
public void TestCoverageStatList_Dingli()
{
    DescriptionInitialize();
    testInput = DingliRecordExample;
    // Each CSV row becomes one CoverageStat via Import.
    coverageStatList = CsvContext.ReadString<LogRecord>(testInput, fileDescription_namesUs).Select(x =>
    {
        CoverageStat stat = new CoverageStat();
        stat.Import(x);
        return(stat);
    }).ToList();
    Assert.AreEqual(coverageStatList.Count, 74);
    // First raw record has no position fix (longitude sentinel -9999).
    Assert.AreEqual(coverageStatList[0].Longtitute, -9999);
    Assert.AreEqual(coverageStatList[0].Rsrp, -97.31);
    Assert.AreEqual(coverageStatList[0].Sinr, 14.3);
    // Importing into the chart merges the 74 raw stats down to 7 points.
    CoverageStatChart chart = new CoverageStatChart();
    chart.Import(coverageStatList);
    Assert.AreEqual(chart.StatList.Count, 7);
    Assert.AreEqual(chart.StatList[0].Longtitute, 113.0001);
    Assert.AreEqual(chart.StatList[0].Lattitute, 23.0002);
    Assert.AreEqual(chart.StatList[0].Rsrp, -97.31);
    Assert.AreEqual(chart.StatList[1].Rsrp, -97.25);
    Assert.AreEqual(chart.StatList[2].Rsrp, -97.25);
    Assert.AreEqual(chart.StatList[3].Rsrp, -97.25);
    Assert.AreEqual(chart.StatList[4].Rsrp, -97.25);
    Assert.AreEqual(chart.StatList[5].Rsrp, -97.25);
    Assert.AreEqual(chart.StatList[6].Rsrp, -97.25);
    Assert.AreEqual(chart.StatList[0].Sinr, 14.3);
    Assert.AreEqual(chart.StatList[1].Sinr, 13.4);
    Assert.AreEqual(chart.StatList[2].Sinr, 13.4);
    Assert.AreEqual(chart.StatList[3].Sinr, 13.4);
    Assert.AreEqual(chart.StatList[4].Sinr, 13.4);
    Assert.AreEqual(chart.StatList[5].Sinr, 13.4);
    Assert.AreEqual(chart.StatList[6].Sinr, 13.4);
}
} // end Main method

/// <summary>
/// Loads the CSV dataset file into DatasetItem records.
/// </summary>
/// <param name="csvFile">Path of the CSV file to read.</param>
/// <returns>The parsed items, or null when parsing failed with aggregated errors.</returns>
public static IEnumerable<DatasetItem> ReadFromCsv(string csvFile)
{
    IEnumerable<DatasetItem> datasetItemList = null;
    try
    {
        //Read From Csv
        var csvFileDescription = new CsvFileDescription
        {
            SeparatorChar = ',',              // Specify the separator character.
            FirstLineHasColumnNames = false,  // data starts on the first line
            FileCultureName = "en-US",        // default is the current culture
            EnforceCsvColumnAttribute = true  // only [CsvColumn] members are mapped
        };
        var csvContext = new CsvContext();
        datasetItemList = csvContext.Read<DatasetItem>(csvFile, csvFileDescription);
    }
    catch (AggregatedException ae)
    {
        // BUG FIX: LINQtoCSV stores the inner exceptions under the key
        // "InnerExceptionsList" (plural). The old key "InnerExceptionList"
        // returned null, so the foreach below threw NullReferenceException.
        var innerExceptionsList = ae.Data["InnerExceptionsList"] as List<Exception>;
        if (innerExceptionsList != null)
        {
            foreach (Exception e in innerExceptionsList)
            {
                Console.WriteLine(e.Message);
            }
        }
    }
    return datasetItemList;
}
/// <summary>
/// Reads ZTE flow CSV rows and keeps only those that carry a QCI8 downlink
/// IP throughput duration value.
/// </summary>
public static IEnumerable<FlowZteCsv> ReadFlowZteCsvs(StreamReader reader)
{
    var rows = CsvContext.Read<FlowZteCsv>(reader, CsvFileDescription.CommaDescription).ToList();
    return rows.Where(row => !string.IsNullOrEmpty(row.Qci8DownlinkIpThroughputDuration));
}
//private void encodingComboBox_SelectedIndexChanged(object sender, EventArgs e)
//{
//    //Config.CodePage = ((MyEncoding)encodingComboBox.SelectedItem).CodePage;
//    Display();
//}
#endregion

// Loads the currently selected source (OLE DB table or delimited text file)
// into a DataTable, rebuilds the column configuration from it, and binds the grid.
private void Display()
{
    if (pathTextBox.Text == string.Empty)
    {
        return;
    }
    DataTable dt = null;
    if (pathTextBox.Text.GetIsOleDb())
    {
        // OLE DB source: a table must be selected first.
        if (tableComboBox.SelectedItem == null)
        {
            return;
        }
        dt = CsvContext.GetDataTable(pathTextBox.Text, tableComboBox.SelectedItem.ToString());
    }
    else
    {
        // Plain text source: use the configured delimiter and code page.
        dt = CsvContext.GetDataTable(pathTextBox.Text, Config.Delimiter, Encoding.GetEncoding(Config.CodePage));
    }
    // Rebuild the column configuration to mirror the freshly loaded table.
    Config.Columns.Clear();
    for (int i = 0; i < dt.Columns.Count; i++)
    {
        Config.Columns.Add(new CsvColumn { Ordinal = i, Name = dt.Columns[i].ColumnName });
    }
    mainDataGridView.DataSource = dt;
    this.Refresh();
}
/// <summary>
/// Serializes <paramref name="values"/> to CSV text using the given file description.
/// </summary>
private string TestWrite<T>(IEnumerable<T> values, CsvFileDescription fileDescription) where T : class
{
    var buffer = new StringWriter();
    CsvContext.Write(values, buffer, fileDescription);
    return buffer.ToString();
}
/// <summary>
/// Imports parishes from a CSV file, adding only those not already present in
/// the database (matched on name + city + state), then saves the changes.
/// </summary>
public static Boolean importParishes(String filename)
{
    var inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true
    };
    var cc = new CsvContext();
    var hdb = new HousingContext();
    // Snapshot of the parishes already stored, used for duplicate detection.
    var existing = hdb.parishes.ToList();
    foreach (ParishCSV parish in cc.Read<ParishCSV>(filename, inputFileDescription))
    {
        bool alreadyKnown = existing.Any(x => x.name == parish.name && x.city == parish.city && x.state == parish.state);
        if (!alreadyKnown)
        {
            hdb.parishes.Add(new Parish
            {
                name = parish.name,
                city = parish.city,
                state = parish.state,
                femaleChaperones = parish.femaleChaperones,
                maleChaperones = parish.maleChaperones,
                femaleStudents = parish.femaleStudents,
                maleStudents = parish.maleStudents
            });
        }
    }
    hdb.SaveChanges();
    return true;
}
// Reads a manager report CSV whose file name encodes metadata:
// "<ManagerLastName>_..._<date>.csv"; every row is stamped with that manager
// name and report date.
public List<ManagerReport> getReportList(string path)
{
    //string[] str = System.IO.Directory.GetFiles(path, "*.csv");
    CsvFileDescription inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = false,
        EnforceCsvColumnAttribute = true,
    };
    CsvContext cc = new CsvContext();
    List<ManagerReport> rep = cc.Read<ManagerReport>(path, inputFileDescription).ToList<ManagerReport>();
    string managerName = Path.GetFileName(path).Split('_').First();
    string datestr = Path.GetFileName(path).Split('_').Last();
    // assumes the trailing segment begins with ddMMyyyy (e.g. "01022023.csv"):
    // chars 0-1 = day, 2-3 = month, 4-7 = year — TODO confirm file-name format
    DateTime date = new DateTime(Int32.Parse(datestr.Substring(4, 4)), Int32.Parse(datestr.Substring(2, 2)), Int32.Parse(datestr.Substring(0, 2)));
    // Stamp every row with the manager and date taken from the file name.
    foreach (ManagerReport repos in rep)
    {
        repos.ManagerLastName = managerName;
        repos.ReportDate = date;
    }
    return(rep);
}
/// <summary>
/// Exports the vehicle list to a comma-separated CSV file with a header row.
/// </summary>
public void ExportToCSV()
{
    // Comma-separated output, header row, en-US number/date formatting.
    var outputFileDescription = new CsvFileDescription
    {
        QuoteAllFields = false,
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        FileCultureName = "en-US"
    };
    // Flatten each vehicle into an export record; the anonymous-type property
    // names become the CSV column headers.
    var exportVehicles = View.VehicleListData.Select(ve => new
    {
        Nopol = ve.ActiveLicenseNumber,
        Customer = ve.Customer.CompanyName,
        Kelompok = ve.VehicleGroup.Name,
        Merek = ve.Brand.Name,
        Tipe = ve.Type.Name,
        TahunPembelian = ve.YearOfPurchase
    });
    new CsvContext().Write(exportVehicles, View.ExportFileName, outputFileDescription);
}
/// <summary>
/// Exports the user-role list to a comma-separated CSV file with a header row.
/// </summary>
public void ExportToCSV()
{
    // Comma-separated output, header row, en-US number/date formatting.
    var outputFileDescription = new CsvFileDescription
    {
        QuoteAllFields = false,
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        FileCultureName = "en-US"
    };
    // Flatten each user/role pair into an export record; the anonymous-type
    // property names become the CSV column headers.
    var exportRoles = View.UserRoleListData.Select(ro => new
    {
        NamaDepan = ro.User.FirstName,
        NamaBelakang = ro.User.LastName,
        Username = ro.User.UserName,
        Role = ro.Role.Name,
        StatusActive = ro.User.IsActive ? "Aktif" : "Tidak Aktif"
    });
    new CsvContext().Write(exportRoles, View.ExportFileName, outputFileDescription);
}
// Parses a known 2G drive-test CSV fixture, merges the records and checks the
// merged statistics (timestamps, radio measurements, raster numbers) plus the
// generated INSERT SQL for the first merged row.
public void Test_Merge_2G()
{
    var testDirectory = AppDomain.CurrentDomain.BaseDirectory;
    var csvFilesDirectory = Path.Combine(testDirectory, "CsvFiles");
    var path = Path.Combine(csvFilesDirectory, "CDMA_20140114_禅城区_城区_短呼_姚华海_高基街_2.csv");
    // NOTE(review): the reader is never disposed — acceptable in a test, but a
    // using block would be cleaner.
    var reader = new StreamReader(path);
    var infos = CsvContext.Read<FileRecord2GCsv>(reader, CsvFileDescription.CommaDescription).ToList();
    // Only records with a position fix take part in the merge.
    var filterInfos = infos.Where(x => x.Longtitute != null && x.Lattitute != null).ToList();
    var stats = filterInfos.MergeRecords();
    Assert.AreEqual(stats.Count, 121);
    Assert.AreEqual(stats[0].TestTimeString, "2014-1-14 14:39:39.999");
    Assert.AreEqual(stats[0].GenerateInsertSql("CDMA_0601-110248锦龙医药dt_2_All"), "INSERT INTO [CDMA_0601-110248锦龙医药dt_2_All] ( [rasterNum],[testTime],[lon],[lat],[refPN],[EcIo],[rxAGC],[txAGC],[txPower],[txGain]) VALUES(4649,'2014-1-14 14:39:39.999',113.108658666667,23.0380863333333,282,-7.04,-71.58,NULL,NULL,NULL)");
    Assert.AreEqual(stats[0].RxAgc ?? 0, -71.58, 1e-6);
    Assert.AreEqual(stats[0].Ecio ?? 0, -7.04, 1e-6);
    Assert.AreEqual(stats[0].TxAgc, null);
    Assert.AreEqual(stats[0].Pn, 282);
    Assert.AreEqual(stats[0].RasterNum, 4649);
    Assert.AreEqual(stats[1].TestTimeString, "2014-1-14 14:39:42.559");
    Assert.AreEqual(stats[1].RxAgc ?? 0, -69.91, 1e-6);
    Assert.AreEqual(stats[1].Ecio ?? 0, -7.12, 1e-6);
    // NOTE(review): the next two asserts repeat stats[0] checks from above —
    // presumably stats[1] was intended; confirm before changing.
    Assert.AreEqual(stats[0].TxAgc, null);
    Assert.AreEqual(stats[0].Pn, 282);
    Assert.AreEqual(stats[2].TestTimeString, "2014-1-14 14:39:45.119");
    Assert.AreEqual(stats[3].TestTimeString, "2014-1-14 14:39:47.680");
}
/// <summary>
/// Exports the guest-book list to a comma-separated CSV file with a header row.
/// </summary>
public void ExportToCSV()
{
    // Comma-separated output, header row, en-US number/date formatting.
    var outputFileDescription = new CsvFileDescription
    {
        QuoteAllFields = false,
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        FileCultureName = "en-US"
    };
    // Flatten each guest-book entry into an export record; the anonymous-type
    // property names become the CSV column headers.
    var exportGuestBooks = View.GuestBookListData.Select(gb => new
    {
        Nopol = gb.Vehicle.ActiveLicenseNumber,
        Customer = gb.Vehicle.Customer.CompanyName,
        Merek = gb.Vehicle.Brand.Name,
        Tipe = gb.Vehicle.Type.Name,
        WaktuKedatangan = gb.ArrivalTime,
        Keterangan = gb.Description
    });
    new CsvContext().Write(exportGuestBooks, View.ExportFileName, outputFileDescription);
}
/// <summary>
/// Exports the customer list to a comma-separated CSV file with a header row.
/// </summary>
public void ExportToCSV()
{
    // Comma-separated output, header row, en-US number/date formatting.
    var outputFileDescription = new CsvFileDescription
    {
        QuoteAllFields = false,
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        FileCultureName = "en-US"
    };
    // Flatten each customer into an export record; the anonymous-type property
    // names become the CSV column headers.
    var exportCustomers = View.CustomerListData.Select(cs => new
    {
        Kode = cs.Code,
        Perusahaan = cs.CompanyName,
        Alamat = cs.Address,
        Kota = cs.City.Name,
        Telepon = cs.PhoneNumber,
        NamaKontak = cs.ContactPerson
    });
    new CsvContext().Write(exportCustomers, View.ExportFileName, outputFileDescription);
}
// Downloads the entries list and opens it as an HTTP CSV stream of YoutubeEntry rows.
static void Main(string[] args)
{
    Wget.Download(EntriesListFilePath.AbsoluteUri, string.Empty);
    CsvContext cc = new CsvContext();
    //var WebRequest.Create(EntriesListFilePath) as HttpWebRequest;
    var fileStreamReader = new StreamReader(WebRequest.CreateHttp(EntriesListFilePath).GetResponse().GetResponseStream());
    IEnumerable<YoutubeEntry> entries = cc.Read<YoutubeEntry>(fileStreamReader, YoutubeEntry.FileDescription);
    // NOTE(review): 'videos' is a lazy query that is never enumerated, so the
    // CSV is not actually consumed by this projection — confirm whether the
    // commented-out loop below should be re-enabled or this variable removed.
    var videos = from e in entries select new { e.PageLink, e.MediaToDownload };
    // Data is now available via variable products.
    //var productsByName =
    //    from p in products
    //    orderby p.Name
    //    select new { p.Name, p.LaunchDate, p.Price, p.Description };
    // or ...
    //foreach (YoutubeEntry item in entries) { Debug.WriteLine(item.PageLink.AbsoluteUri); }
}
// Looks up 'input' in a two-column CSV (key,value) located next to the entry
// assembly (file name taken from actionProcess.Where); returns the mapped
// value, or string.Empty when not found or on any error.
public string Transform(ModelMap mapToProcess, ModelProcess actionProcess, string input)
{
    string result = string.Empty;
    try
    {
        CsvFileDescription outputFileDescription = new CsvFileDescription
        {
            SeparatorChar = ',', // comma delimited
            FirstLineHasColumnNames = false, // no column names in first record
            FileCultureName = "en-GB" // British formats (the original comment said Netherlands)
        };
        CsvContext csvContext = new CsvContext();
        string fileLocation = Path.Combine(
            new FileInfo(System.Reflection.Assembly.GetEntryAssembly().Location).Directory.FullName,
            actionProcess.Where);
        var theList = csvContext.Read<Data.DataRow>(fileLocation, outputFileDescription);
        // Column 0 = key, column 1 = value. ToDictionary throws on duplicate keys
        // (caught below and logged).
        var thePairs = theList.ToDictionary(
            d=>d[0].Value.ToString(),
            d =>d[1].Value.ToString());
        // NOTE(review): 'input' cannot be null past ContainsKey (it would have
        // thrown), so the "No Code Found" branch is unreachable — confirm intent.
        if (thePairs.ContainsKey(input))
            result = input != null ? thePairs[input] : "No Code Found";
    }
    catch (Exception ex)
    {
        LoggerSingleton.Instance.LogMessage(ex);
    }
    return result;
}
/// <summary>
/// Imports a 2G drive-test CSV file. Tries the standard layout first; when no
/// row carries an Ec/Io value, re-parses the file with the Dingli layout.
/// </summary>
/// <param name="path">Full path of the GB2312-encoded CSV file.</param>
/// <returns>A summary message with the imported row count.</returns>
/// <exception cref="Exception">Thrown when the file is not valid 2G data or yields no records.</exception>
public async Task<string> ImportDt2GFile(string path)
{
    var fields = path.Replace(".csv", "").GetSplittedFields('\\');
    var tableName = fields[fields.Length - 1].DtFileNameEncode();
    List<FileRecord2GCsv> infos;
    // FIX: using block disposes the reader on all paths (old code leaked it on exceptions).
    using (var reader = new StreamReader(path, Encoding.GetEncoding("GB2312")))
    {
        infos = CsvContext.Read<FileRecord2GCsv>(reader, CsvFileDescription.CommaDescription).ToList();
        if (infos.FirstOrDefault(x => x.EcIo != null) == null)
        {
            // BUG FIX: the first Read consumed the stream, so the second Read on
            // the same reader saw no rows. Rewind and drop buffered data before
            // re-parsing with the Dingli layout.
            reader.BaseStream.Seek(0, SeekOrigin.Begin);
            reader.DiscardBufferedData();
            var dingliInfos = CsvContext.Read<FileRecord2GDingli>(reader, CsvFileDescription.CommaDescription).ToList();
            if (dingliInfos.FirstOrDefault(x => x.EcIo != null) == null)
            {
                throw new Exception("不是有效的2G数据文件!");
            }
            infos = dingliInfos.MapTo<List<FileRecord2GCsv>>();
        }
    }
    // Keep only records inside the Foshan area.
    var filterInfos = infos.GetFoshanGeoPoints().ToList();
    if (!filterInfos.Any())
    {
        throw new Exception("无数据或格式错误!");
    }
    _dtFileInfoRepository.UpdateCsvFileInfo(tableName, filterInfos[0].StatTime, "2G");
    var stats = filterInfos.MergeRecords();
    _rasterTestInfoRepository.UpdateRasterInfo(stats, tableName, "2G");
    var count = await _fileRecordService.InsertFileRecord2Gs(stats, tableName);
    return "完成2G路测文件导入:" + path + "(" + tableName + ")" + count + "条";
}
// Adding an entity to a repo backed by a not-yet-existing csv file must create
// the file and persist the entity's value.
public void AddToCsvRepoWorksWhenFileDoesNotExist(MockPoco input)
{
    // Arrange
    var outputFile = new FileInfo(Path.ChangeExtension(Path.GetRandomFileName(), "csv"));
    CsvRepo<MockPoco> repository = null;
    try
    {
        var csvContext = new CsvContext<MockPoco>(outputFile)
        {
            Culture = CultureInfo.InvariantCulture
        };
        repository = new CsvRepo<MockPoco>(csvContext);

        // Act
        repository.Add(input);
        csvContext.SaveChanges();
        var received = File.ReadAllText(outputFile.FullName);

        // Assert
        Assert.Contains(input.Value, received);
    }
    finally
    {
        // Clean up the repository and the temporary file in all cases.
        repository?.Dispose();
        outputFile.Delete();
    }
}
public string GetUiDataCsv(SprintProgressVm sprintProgressVm)//TODO convert to property on SprintProgressVm.cs
{
    const char separatorChar = ',';
    var iteration = sprintProgressVm.UiDataObject.IterationDetails;
    // First line: iteration name, start date, end date.
    var csv = $"{iteration.Iteration}{separatorChar}{iteration.Start}{separatorChar}{iteration.End}{Environment.NewLine}";
    var csvFileDescription = new CsvFileDescription
    {
        SeparatorChar = separatorChar,
        UseOutputFormatForParsingCsvValue = true
    };
    // Serialize the work-item rows through LINQtoCSV into an in-memory buffer
    // and append them to the header line.
    using (var memoryStream = new MemoryStream())
    {
        using (TextWriter textWriter = new StreamWriter(memoryStream))
        {
            new CsvContext().Write(sprintProgressVm.UiDataObject.WorkItemData.ToList(), textWriter, csvFileDescription);
            textWriter.Flush();
            memoryStream.Position = 0;
            csv += Encoding.ASCII.GetString(memoryStream.ToArray());
        }
    }
    return csv;
}
/// <summary>
/// Loads the staff list CSV, validates every row, and inserts all rows only
/// when the whole file is valid.
/// </summary>
/// <param name="path">Path of the CSV file to load.</param>
/// <returns>(true, "") on success; (false, message) for the first invalid row.</returns>
public Tuple<bool, string> cargarListaFuncionario(string path)
{
    CsvFileDescription inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',            // value separator used in the file
        FirstLineHasColumnNames = true, // first row holds field titles, not data
        IgnoreUnknownColumns = true     // skip unexpected columns instead of failing
    };
    CsvContext cc = new CsvContext();
    // Each CSV row is materialized into a ListaFuncionario model.
    IEnumerable<ListaFuncionario> datos = cc.Read<ListaFuncionario>(path, inputFileDescription);
    List<ListaFuncionario> lista = datos.ToList();
    // Validate every row; abort on the first failure.
    ValidadorListaFuncionarios val = new ValidadorListaFuncionarios();
    int filaActual = 0;
    foreach (ListaFuncionario f in lista)
    {
        ++filaActual;
        // FIX: validate once and reuse the result — the old code called
        // Validar twice per row (once for the flag, again for the message).
        var resultado = val.Validar(f, filaActual);
        if (!resultado.Item1)
        {
            return Tuple.Create(false, resultado.Item2);
        }
    }
    // All rows valid: insert them.
    foreach (ListaFuncionario f in lista)
    {
        insertarListaFuncionario(f);
    }
    return Tuple.Create(true, "");
}
/// <summary>
/// Loads parts and stock boards from a positional CSV file. Rows whose item
/// type starts with '#' are treated as comments and skipped.
/// </summary>
internal static void FromCSV(string path, out PartList parts, out BoardList boards)
{
    var inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = false,
        EnforceCsvColumnAttribute = true,
        UseFieldIndexForReadingData = true
    };
    var cc = new CsvContext();
    var records = new List<CSVRecord>(cc.Read<CSVRecord>(path, inputFileDescription));
    parts = new PartList();
    boards = new BoardList();
    foreach (var record in records)
    {
        if (record.ItemType.StartsWith("#"))
        {
            continue; // comment row
        }
        var length = double.Parse(record.Length);
        var width = double.Parse(record.Width);
        if (record.ItemType.ToLower() == "board")
        {
            boards.Append(new BoardNode(record.PartID, length, width));
        }
        else
        {
            parts.Append(new PartNode(record.PartID, length, width));
        }
    }
}
/// <summary>
/// Loads parts and stock boards from a CutList Plus CSV export. The header row
/// (PartNumber == "Part #") is skipped; dimensions may carry a "mm" suffix.
/// </summary>
public static void FromCutlistPlusCSV(string filePath, out PartList parts, out BoardList boards)
{
    var inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = false,
        EnforceCsvColumnAttribute = true,
        UseFieldIndexForReadingData = true
    };
    var cc = new CsvContext();
    var records = new List<CutListPlusCSVRecord>(cc.Read<CutListPlusCSVRecord>(filePath, inputFileDescription));
    parts = new PartList();
    boards = new BoardList();
    foreach (var record in records)
    {
        if (record.PartNumber == "Part #")
        {
            continue; // header row
        }
        var length = double.Parse(record.Length.Replace("mm", ""));
        var width = double.Parse(record.Width.Replace("mm", ""));
        if (record.MaterialName == "Stock")
        {
            boards.Append(new BoardNode(record.PartName, length, width));
        }
        else
        {
            parts.Append(new PartNode(record.PartName, length, width));
        }
    }
}
/// <summary>
/// Imports a 4G Dingli drive-test CSV file, skipping files already recorded in
/// the database; filters records to the Foshan area, merges and persists them.
/// </summary>
/// <param name="path">Full path of the GB2312-encoded CSV file.</param>
/// <returns>A status message describing the import result.</returns>
public string ImportDt4GDingli(string path)
{
    bool fileExisted;
    var tableName = _fileRecordRepository.GetFileNameExisted(path, out fileExisted);
    if (fileExisted)
    {
        return "数据文件已存在于数据库中。请确认是否正确。";
    }
    List<FileRecord4GDingli> infos;
    // BUG FIX: the StreamReader was never closed; dispose it deterministically.
    using (var reader = new StreamReader(path, Encoding.GetEncoding("GB2312")))
    {
        infos = CsvContext.Read<FileRecord4GDingli>(reader, CsvFileDescription.CommaDescription).ToList();
    }
    var filterInfos = infos.GetFoshanGeoPoints().ToList();
    if (!filterInfos.Any())
    {
        return "无数据或格式错误!";
    }
    _dtFileInfoRepository.UpdateCsvFileInfo(tableName, filterInfos[0].StatTime);
    var stats = filterInfos.MergeRecords();
    _rasterTestInfoRepository.UpdateRasterInfo(stats, tableName, "4G");
    var count = _fileRecordRepository.InsertFileRecord4Gs(stats, tableName);
    return "完成4G路测文件导入:" + path + "(" + tableName + ")" + count + "条";
}
// CsvHelper: automapping a type that references an already-registered map type
// must create a fresh reference map rather than reusing the registered SimpleMap.
public void AutoMapWithExistingMapTest()
{
    var context = new CsvContext(new CsvConfiguration(CultureInfo.InvariantCulture));
    var existingMap = new SimpleMap();
    context.Maps.Add(existingMap);
    // Anonymous type with a Simple-typed member → one reference map, no member maps.
    var data = new
    {
        Simple = new Simple
        {
            Id = 1,
            Name = "one"
        }
    };
    var map = context.AutoMap(data.GetType());
    Assert.IsNotNull(map);
    Assert.AreEqual(0, map.MemberMaps.Count);
    Assert.AreEqual(1, map.ReferenceMaps.Count);
    // Since Simple is a reference on the anonymous object, the type won't
    // be re-used. Types which are created from automapping aren't added
    // to the list of registered maps either.
    Assert.IsNotInstanceOfType(map.ReferenceMaps[0].Data.Mapping, typeof(SimpleMap));
}
// Persisting an entity and then fetching it through the repository must return
// an equal entity.
public void GetGets(MockPoco input)
{
    // Arrange
    var outputFile = new FileInfo(Path.ChangeExtension(Path.GetRandomFileName(), "csv"));
    CsvRepo<MockPoco> repository = null;
    try
    {
        var csvContext = new CsvContext<MockPoco>(outputFile)
        {
            Culture = CultureInfo.InvariantCulture
        };
        repository = new CsvRepo<MockPoco>(csvContext);
        csvContext.Entities.Add(input);
        csvContext.SaveChanges();

        // Act
        var received = repository.Get(input);

        // Assert
        Assert.Equal(input, received);
    }
    finally
    {
        // Clean up the repository and the temporary file in all cases.
        repository?.Dispose();
        outputFile.Delete();
    }
}
// Prepares the CSV reader for the given data source, recording validation
// errors instead of throwing when the file is missing or reader creation fails.
private void Initialize(string source)
{
    if (!_fileService.Exists(source))
    {
        AddValidationError($"File '{source}' not found");
        return;
    }
    try
    {
        var csvContext = new CsvContext<object>
        {
            Culture = Culture
        };
        // Replace any previously created reader before building a new one.
        _reader?.Dispose();
        _reader = _csvReaderService.CreateReader(source, csvContext);
    }
    catch (Exception ex)
    {
        Log.Error(ex, $"Failed to initialize reader for data source '{Source}'");
        // Leave no half-initialized reader behind on failure.
        _reader?.Dispose();
        _reader = null;
        AddValidationError($"Failed to initialize reader: '{ex.Message}'");
    }
}
// Adding a company's quotes through the repository must write one CSV line per
// quote plus header/trailing lines.
public void AddStock(Company company)
{
    // Arrange
    var outputFile = new FileInfo(Path.ChangeExtension(Path.GetRandomFileName(), "csv"));
    CsvRepo<StockQuote> repository = null;
    try
    {
        CsvContext<StockQuote> csvContext = new StockCsvContext(outputFile)
        {
            Culture = CultureInfo.InvariantCulture
        };
        repository = new CsvRepo<StockQuote>(csvContext);

        // Act
        repository.AddRange(company.Quotes);
        csvContext.SaveChanges();
        var received = File.ReadAllText(outputFile.FullName);

        // Assert: quotes + 2 extra lines (header and trailing newline split).
        var actual = received.Split(Environment.NewLine).Length;
        var expected = company.Quotes.Count + 2;
        Assert.Equal(expected, actual);
    }
    finally
    {
        // Clean up the repository and the temporary file in all cases.
        repository?.Dispose();
        outputFile.Delete();
    }
}
// Loads the demo company CSV into _list sorted by company name, updating the
// Status text as it goes; aggregated CSV errors are flattened into Status.
private void LoadData()
{
    try
    {
        //data courtesy of https://data.gov.in/catalog/company-master-data
        Status = "Loading CSV...";
        var desc = new CsvFileDescription
        {
            SeparatorChar = ',',
            IgnoreUnknownColumns = true
        };
        _list = new CsvContext()
            .Read<CompanyInfo>("demodata.csv", desc)
            .OrderBy(i => i.CompanyName)
            .ToArray();
        Status = $"Loaded {_list.Length:N0} entries";
        SelectedEntry = _list.Skip(1000).First();
    }
    catch (AggregatedException ex)
    {
        // Collect every inner parse error into a single status message.
        var sb = new StringBuilder();
        foreach (var inner in ex.m_InnerExceptionsList)
        {
            sb.AppendLine($"  {inner.Message}");
        }
        Status = $"Error loading data. {sb}";
    }
    catch (Exception ex)
    {
        Status = $"Error loading data. {ex.Message}";
    }
}
// Imports inventory-issue documents from each distinct CSV path in 'files'.
// Runs on a worker thread; any failure shows a message box, aborts the whole
// import and returns null.
public async Task<IEnumerable<ImportInvetoryIssueToSalesman>> ImportAsync(string[] files)
{
    return await Task.Factory.StartNew(() =>
    {
        var distinct = files.Distinct();
        var docs = new List<ImportInvetoryIssueToSalesman>();
        var inputFileDescription = new CsvFileDescription
        {
            // cool - I can specify my own separator!
            SeparatorChar = ',',
            FirstLineHasColumnNames = false,
            QuoteAllFields = true,
            EnforceCsvColumnAttribute = true
        };
        foreach (var path in distinct)
        {
            try
            {
                // NOTE(review): returning null here (and in the catch blocks)
                // discards documents already read from earlier files — confirm
                // the all-or-nothing behavior is intended.
                if (!File.Exists(path)) return null;
                var doc = new CsvContext().Read<ImportInvetoryIssueToSalesman>(
                    path, inputFileDescription);
                // NOTE(review): LINQtoCSV reads lazily, so Any() + AddRange
                // enumerates (re-parses) the file twice — TODO confirm.
                if (doc.Any()) docs.AddRange(doc);
            }
            catch (FileNotFoundException ex)
            {
                MessageBox.Show("File not found on specified path:\n" + path);
                return null;
            }
            catch (FieldAccessException ex)
            {
                MessageBox.Show(
                    "File cannot be accessed,is it in use by another application?",
                    "Importer Error", MessageBoxButton.OK, MessageBoxImage.Stop);
                return null;
            }
            catch (Exception ex)
            {
                MessageBox.Show("Unknown Error:Details\n" + ex.Message, "Importer Error", MessageBoxButton.OK, MessageBoxImage.Error);
                return null;
            }
        }
        Messenger.Default.Send(DateTime.Now.ToString("hh:mm:ss") + string.Format("Loading files done....."));
        return docs.AsEnumerable();
    });
}
/// <summary>
/// Writes the time series to a one-column CSV file using the shared CsvDesc settings.
/// </summary>
public void SaveToCsv(string filename)
{
    // Each data-table row becomes one record holding its first column as a double.
    var records = TimeSeries.AsEnumerable()
        .Select(row => new OneColumnRecord { Val = double.Parse(row[0].ToString()) });
    new CsvContext().Write(records, filename, CsvDesc);
}
/// <summary>
/// Reads a comma-separated keywords CSV (args[0]) and writes one JSON object
/// per distinct ad group — mapping the group to its keyword names — to args[1].
/// </summary>
static void Main(string[] args)
{
    if (args.Length > 1)
    {
        if (!File.Exists(args[0]))
        {
            Console.WriteLine("Cannot locate the file " + args[0]);
            return;
        }
        // We will assume for now that they will always pass in a , separated list. not a | or whatever else.
        CsvFileDescription inputFileDescript = new CsvFileDescription
        {
            SeparatorChar = ',',
            FirstLineHasColumnNames = true // yep and we want ColumnNames too
        };
        CsvContext cc = new CsvContext();
        // FIX: materialize once — LINQtoCSV reads lazily, so without ToList the
        // CSV file would be re-parsed on every enumeration in the loop below.
        List<Keywords> keysRead = cc.Read<Keywords>(args[0], inputFileDescript).ToList();
        // Distinct ad groups, ordered alphabetically.
        var GroupQuery = (from x in keysRead select x.AdGroup)
            .Distinct()
            .OrderBy(x => x);
        // FIX: using guarantees the output file is flushed and closed even if
        // writing throws part-way through.
        using (var output = File.CreateText(args[1]))
        {
            // One JSON object per group: { "<group>": [ "<name>", ... ] }
            foreach (var group in GroupQuery)
            {
                JObject kjs = new JObject(
                    new JProperty(@group,
                        new JArray(
                            from x in keysRead
                            where x.AdGroup == @group
                            select new JValue(x.Name))));
                output.WriteLine(kjs.ToString());
            }
        }
    }
    else
    {
        Console.WriteLine("No file exists in the current path.\n Usage: Keyword_CSVParse.exe input_filename output_filename");
        return;
    }
    Console.WriteLine("Done!");
}
// Demonstrates LINQtoCSV error handling: aggregated row-level parse errors
// (capped at 50), duplicate-field-index configuration errors, and a catch-all.
public static void ReadFileWithExceptionHandling()
{
    try
    {
        CsvContext cc = new CsvContext();
        CsvFileDescription inputFileDescription = new CsvFileDescription
        {
            MaximumNbrExceptions = 50 // limit number of aggregated exceptions to 50
        };
        IEnumerable<Product> products = cc.Read<Product>("../../TestFiles/products.csv", inputFileDescription);
        // NOT SHOWN IN EXAMPLE IN ARTICLE
        foreach (var item in products)
        {
            Console.WriteLine(item);
        }
        // Do data processing
        // ...........
    }
    catch(AggregatedException ae)
    {
        // Process all exceptions generated while processing the file
        List<Exception> innerExceptionsList = (List<Exception>)ae.Data["InnerExceptionsList"];
        foreach (Exception e in innerExceptionsList)
        {
            ShowErrorMessage(e.Message);
        }
    }
    catch(DuplicateFieldIndexException dfie)
    {
        // name of the class used with the Read method - in this case "Product"
        string typeName = Convert.ToString(dfie.Data["TypeName"]);
        // Names of the two fields or properties that have the same FieldIndex
        string fieldName = Convert.ToString(dfie.Data["FieldName"]);
        string fieldName2 = Convert.ToString(dfie.Data["FieldName2"]);
        // Actual FieldIndex that the two fields have in common
        int commonFieldIndex = Convert.ToInt32(dfie.Data["Index"]);
        // Do some processing with this information
        // .........
        // Inform user of error situation
        ShowErrorMessage(dfie.Message);
    }
    catch(Exception e)
    {
        ShowErrorMessage(e.Message);
    }
}
public void Init()
{
    // Bootstraps logging, then loads Amazon credentials from the test CSV
    // and builds the factory when a credentials row is present.
    NetcoLogger.LoggerFactory = new ConsoleLoggerFactory();
    const string credentialsFilePath = @"..\..\Files\AmazonCredentials.csv";

    var csvContext = new CsvContext();
    var fileDescription = new CsvFileDescription
    {
        FirstLineHasColumnNames = true,
        IgnoreUnknownColumns = true
    };

    // Only the first data row is used.
    this.Config = csvContext.Read<TestConfig>(credentialsFilePath, fileDescription).FirstOrDefault();

    if (this.Config != null)
        this.AmazonFactory = new AmazonFactory(this.Config.AccessKeyId, this.Config.SecretAccessKeyId);
}
public void Init()
{
    // Bootstraps logging, then loads BigCommerce credentials from the test
    // CSV and builds the config when a credentials row is present.
    NetcoLogger.LoggerFactory = new ConsoleLoggerFactory();
    const string credentialsFilePath = @"..\..\Files\BigCommerceCredentials.csv";

    var csvContext = new CsvContext();
    var fileDescription = new CsvFileDescription
    {
        FirstLineHasColumnNames = true,
        IgnoreUnknownColumns = true
    };

    // Only the first data row is used.
    var testConfig = csvContext.Read<TestConfig>(credentialsFilePath, fileDescription).FirstOrDefault();

    if (testConfig != null)
        this.Config = new BigCommerceConfig(testConfig.ShopName, testConfig.UserName, testConfig.ApiKey);
}
public CharInitReader(string filePath)
{
    // Remembers the file to read and prepares the CSV reader configuration:
    // comma-separated, header row present, code page 932 (Shift-JIS).
    this.filePath = filePath;

    context = new CsvContext();
    description = new CsvFileDescription
    {
        FirstLineHasColumnNames = true,
        SeparatorChar = ',',
        TextEncoding = Encoding.GetEncoding(932)
    };
}
public void Import(string stream)
{
    // Imports monster rows from a CSV file, upserting against the existing
    // monsters and uniqueness tiers, then persists the whole batch.
    var inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true
    };
    var cc = new CsvContext();
    var monsterImports = cc.Read<MonsterImport>(stream, inputFileDescription);

    // Index existing records by name so each CSV row is an update-or-insert.
    var monsterLevels = _monsterLevelService.GetMonsterLevel().ToDictionary(k => k.Name, v => v);
    var monsters = _monsterService.GetMonsters().ToDictionary(k => k.Name, v => v);

    var monsterList = new List<Monster>();
    foreach (var monsterImport in monsterImports)
    {
        var name = monsterImport.Unit;

        // TryGetValue avoids the ContainsKey + indexer double lookup.
        Monster monster;
        if (!monsters.TryGetValue(name, out monster))
        {
            monster = new Monster();
            monster.Name = name;
        }

        monster.Attack = monsterImport.Attack;
        monster.Defence = monsterImport.Defense;
        monster.Price = monsterImport.Price;
        monster.Upkeep = monsterImport.Upkeep;
        monster.Element = EnumUtil.ParseEnum<Element>(monsterImport.Element);

        // Resolve (or create) the uniqueness tier for this row.
        MonsterLevel monsterLevel;
        if (!monsterLevels.TryGetValue(monsterImport.Uniqueness, out monsterLevel))
        {
            monsterLevel = new MonsterLevel
            {
                Active = true,
                Name = monsterImport.Uniqueness,
                // Count property, not LINQ Count(): same value, no enumeration.
                Tier = monsterLevels.Count + 1
            };
            monsterLevels.Add(monsterImport.Uniqueness, monsterLevel);
        }

        monster.MonsterLevel = monsterLevel;
        monsterList.Add(monster);
    }

    _monsterService.SaveMonsters(monsterList);
}
public static void WriteResults(string resultsCsvFilePath, List<ResultCsv> results)
{
    // Writes the result rows to a comma-separated file with a header row;
    // en-AU culture controls number/date formatting in the output.
    var fileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        FileCultureName = "en-AU"
    };

    new CsvContext().Write(results, resultsCsvFilePath, fileDescription);
}
public FgFileReader(string filePath)
{
    // Remembers the file to read and prepares the CSV reader configuration:
    // tab-separated, code page 932 (Shift-JIS), attribute-mapped columns.
    this.filePath = filePath;

    context = new CsvContext();
    description = new CsvFileDescription
    {
        SeparatorChar = '\t',
        // TODO: some files have a trailing \t at the end of the header line,
        // so setting this to true crashes the reader.
        FirstLineHasColumnNames = false,
        EnforceCsvColumnAttribute = true,
        TextEncoding = Encoding.GetEncoding(932)
    };
}
public override List<Expense> Parse(string content)
{
    // Parses Chase credit-card CSV text into expenses, skipping the
    // "Payment Thank You" rows and flipping the sign of each amount.
    var csvContext = new CsvContext();
    var items = csvContext.Read<ChaseCreditItem>(content.ToReader());

    return items
        .Where(r => !r.Description.Contains("Payment Thank You"))
        .Select(r => new Expense()
        {
            Amount = -r.Amount,
            Description = r.Description,
            TransactionDate = r.TransactionDate,
            PaymentMethod = PaymentMethod
        })
        .ToList();
}
public override List<Expense> Parse(string content)
{
    // Parses Capital One CSV text into expenses, skipping "ONLINE PYMT"
    // rows. Debits are used as-is; otherwise the credit is negated, and a
    // row with neither becomes zero.
    var csvContext = new CsvContext();
    var items = csvContext.Read<CapitalOneItem>(content.ToReader());

    return items
        .Where(r => !r.Description.Contains("ONLINE PYMT"))
        .Select(r => new Expense()
        {
            Amount = r.Debit ?? -r.Credit ?? 0,
            Description = r.Description,
            TransactionDate = r.TransactionDate,
            PaymentMethod = PaymentMethod
        })
        .ToList();
}
public void Init()
{
    // Routes library logging through a verbose Serilog console logger, then
    // loads ShipStation credentials from the first row of the test CSV.
    const string credentialsFilePath = @"..\..\Files\ShipStationCredentials.csv";

    Log.Logger = new LoggerConfiguration()
        .Destructure.ToMaximumDepth(100)
        .MinimumLevel.Verbose()
        .WriteTo.Console()
        .CreateLogger();
    NetcoLogger.LoggerFactory = new SerilogLoggerFactory(Log.Logger);

    var csvContext = new CsvContext();
    var fileDescription = new CsvFileDescription { FirstLineHasColumnNames = true };
    var testConfig = csvContext.Read<TestConfig>(credentialsFilePath, fileDescription).FirstOrDefault();

    if (testConfig != null)
        this._credentials = new ShipStationCredentials(testConfig.ApiKey, testConfig.ApiSecret);
}
public string ProcessCSVs(string csv, string paymentMethod)
{
    // Imports bank-transaction rows from raw CSV text: queues new expenses
    // (skipping ones already in the DB) and new categorization hints, then
    // saves. Returns a summary message, or null when the input is empty.
    if (!csv.HasContent()) return null;
    int skipCount = 0;      // expenses that already existed in the DB
    int badHintCount = 0;   // hints rejected as ambiguous or duplicated
    using (ShowcaseDB db = new ShowcaseDB())
    {
        CsvContext cc = new CsvContext();
        var csvFlags = new CsvFileDescription() { FirstLineHasColumnNames = false, EnforceCsvColumnAttribute = true };
        // ToList() so the CSV is parsed once and iterated twice below.
        var csvItems = cc.Read<AddTransactionItem>(csv.ToReader(), csvFlags).ToList();
        //add expense items
        foreach (var itm in csvItems)
        {
            // A row counts as a duplicate only when date, description,
            // amount AND payment method all match an existing expense.
            if (!db.Expenses.Any(e => e.TransactionDate == itm.TransactionDate && e.Description == itm.Description && e.Amount == itm.Amount && e.PaymentMethod == paymentMethod))
                _expToAdd.Add(new Expense() { TransactionDate = itm.TransactionDate, Description = itm.Description, Amount = itm.Amount, Category = itm.Category, PaymentMethod = paymentMethod });
            else
                skipCount++;
        }
        //add hints
        foreach (var itm in csvItems)
        {
            string kw = itm.NewKeyword;
            if (kw.HasContent())
            {
                // Reject the hint when the keyword already maps to a different
                // category, or is already a known or pending hint.
                // NOTE(review): the duplicate checks use the raw keyword while
                // the stored hint is Trim().ToLower()'d - confirm this
                // asymmetry is intended.
                if (db.Expenses.Any(e => e.Description.Contains(kw) && e.Category != itm.Category) || db.Hints.Any(h => h.Keyword == kw) || _hintToAdd.Any(h => h.Keyword == kw))
                    badHintCount++;
                else
                    _hintToAdd.Add(new Hint() { Keyword = itm.NewKeyword.Trim().ToLower(), Category = itm.Category });
            }
        }
        Save();
    }
    return string.Format("Duplicate expenses: {0}\r\nAmbiguous hints: {1}\r\n", skipCount, badHintCount);
}
/// <summary>
/// Creates a CSV string representation of the Eway credit card charge file.
/// </summary>
/// <param name="runNumber">Billing run whose invoices are exported.</param>
/// <returns>The CSV content, or an empty string when an error occurs.</returns>
public string ExportCreditCardChargeFile(int runNumber)
{
    try
    {
        var invoices = _invoiceRepository.FindInvoicesByRunNumber(runNumber);
        var ewayExport = new List<EwayLineItem>();

        foreach (var invoice in invoices)
        {
            var rootAccount = _accountRepository.GetAccount(invoice.InvoiceRootAccountId);

            // Only credit-card accounts belong in the Eway charge file.
            if (rootAccount.BillingMethod != BillingMethod.CreditCard)
                continue;

            var rootContact = _contactRepository.GetContactEntity(rootAccount.ContactId);

            // 13/05/2012 - Moved eway line item creation to factory method.
            ewayExport.Add(EwayLineItem.Create(invoice, rootContact));
        }

        CsvFileDescription fileDescription = new CsvFileDescription
        {
            SeparatorChar = ',',
            FirstLineHasColumnNames = false
        };
        CsvContext context = new CsvContext();

        // Write the CSV into memory, then read the buffer back as a string.
        // Everything is wrapped in 'using' so the writer and reader are
        // disposed even if Write throws (the original leaked them both on
        // exceptions and only disposed the reader on the happy path).
        using (var csvStream = new MemoryStream())
        using (var writer = new StreamWriter(csvStream))
        {
            context.Write(ewayExport, writer, fileDescription);
            writer.Flush();
            csvStream.Position = 0;

            using (var reader = new StreamReader(csvStream))
            {
                return reader.ReadToEnd();
            }
        }
    }
    catch (Exception ex)
    {
        // Best effort: log and fall through to an empty result rather than
        // propagate the failure to the caller.
        LoggingUtility.LogException(ex);
    }
    return string.Empty;
}
public override List<Expense> Parse(string content)
{
    // Parses Citi credit-card CSV text into expenses, skipping
    // "ONLINE PAYMENT" rows. Debits are used as-is; otherwise the credit
    // (or zero) is negated.
    var description = new CsvFileDescription()
    {
        FirstLineHasColumnNames = true,
        EnforceCsvColumnAttribute = true
    };
    var items = new CsvContext().Read<CitiCreditItem>(content.ToReader(), description).ToList();

    return items
        .Where(r => !r.Description.Contains("ONLINE PAYMENT"))
        .Select(r => new Expense()
        {
            Amount = r.Debit ?? -(r.Credit ?? 0),
            Description = r.Description,
            TransactionDate = r.Date,
            PaymentMethod = PaymentMethod
        })
        .ToList();
}
public static void readcards()
{
    // Loads all card rows from hd.csv in the current directory into Allcards.
    CsvFileDescription inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        IgnoreTrailingSeparatorChar = true,
        IgnoreUnknownColumns = true
    };
    CsvContext cc = new CsvContext();

    // BUG FIX: Path.Combine ignores the first argument when the second one
    // is rooted, so the original "\\hd.csv" resolved to the drive root and
    // silently discarded Directory.GetCurrentDirectory(). A relative file
    // name makes the two paths actually combine.
    string csvPath = Path.Combine(Directory.GetCurrentDirectory(), "hd.csv");
    IEnumerable<Card> allthecards = cc.Read<Card>(csvPath, inputFileDescription);

    foreach (Card c in allthecards)
    {
        Allcards.Add(c);
    }
}
public AdditonalSmartsSettingsProvider()
{
    // Reads the settings CSV once up front, then watches the file so that
    // external edits trigger OnChanged and are picked up automatically.
    _csvFileDescription = new CsvFileDescription { SeparatorChar = ',', FirstLineHasColumnNames = true, EnforceCsvColumnAttribute = true };
    _csvContext = new CsvContext();
    _writequeued = false;
    _storage = new Storage();
    // Full path of the settings file inside the documents storage area.
    _filename = _storage.CombineDocumentsFullPath(SettingsFileName);
    Read(); // do initial read
    // Watch only this one file for LastWrite changes; the handler is wired
    // up before events are enabled so no change can slip through unhandled.
    _watcher = new FileSystemWatcher {Path=Path.GetDirectoryName(_filename), Filter = Path.GetFileName(_filename), NotifyFilter = NotifyFilters.LastWrite};
    _watcher.Changed += OnChanged;
    _watcher.EnableRaisingEvents = true;
}
public void ReadFromCsv(string filename)
{
    // Rebuilds the TimeSeries table from the CSV file: one data column,
    // one row per record.
    TimeSeries = new DataTable();
    var context = new CsvContext();

    foreach (var record in context.Read<OneColumnRecord>(filename, CsvDesc))
    {
        // Lazily create the single column when the first record arrives.
        if (TimeSeries.Columns.Count < 1)
        {
            TimeSeries.Columns.Add();
        }

        var row = TimeSeries.NewRow();
        row[0] = record.Val;
        TimeSeries.Rows.Add(row);
    }
}
public void Run()
{
    // Exports all scraped articles and comments from the database to CSV,
    // flattening each entity through AutoMapper first.
    Configure();

    using (var session = NHibernateHelper.OpenSession())
    {
        var csvContext = new CsvContext();

        var flatArticles = session.Query<Article>().Select(a => Mapper.Map<FlatArticle>(a));
        csvContext.Write(flatArticles, @"F:\PortalScrapeService\Data\articles.csv");

        var flatComments = session.Query<Comment>().Select(c => Mapper.Map<FlatComment>(c));
        csvContext.Write(flatComments, @"F:\PortalScrapeService\Data\comments.csv");
    }
}
public override List<Expense> Parse(string content)
{
    // Collapse any run of consecutive commas down to a single comma
    // (" ,,,,,,, " becomes " , ") before handing the text to the parser.
    while (content.Contains(",,"))
    {
        content = content.Replace(",,", ",");
    }

    // Parses AmEx/Costco CSV text into expenses, skipping payment rows;
    // a missing amount becomes zero.
    var description = new CsvFileDescription()
    {
        FirstLineHasColumnNames = false,
        EnforceCsvColumnAttribute = true
    };
    var items = new CsvContext().Read<AmExCostcoItem>(content.ToReader(), description);

    return items
        .Where(r => !r.Description.Contains("ONLINE PAYMENT - THANK YOU"))
        .Select(r => new Expense()
        {
            Amount = r.Amount ?? 0,
            Description = r.Description,
            TransactionDate = r.TransactionDate,
            PaymentMethod = PaymentMethod
        })
        .ToList();
}
public async Task<IEnumerable<ProductImport>> Import(string path)
{
    // Reads a tab-delimited product file on a background task. Returns the
    // parsed rows, or null after showing an error dialog on failure.
    return await Task.Factory.StartNew(() =>
    {
        IEnumerable<ProductImport> productImports;
        try
        {
            var inputFileDescription = new CsvFileDescription
            {
                // cool - I can specify my own separator!
                SeparatorChar = '\t', // tab delimited
                FirstLineHasColumnNames = false,
                QuoteAllFields = true,
                EnforceCsvColumnAttribute = true
            };
            CsvContext cc = new CsvContext();

            // BUG FIX: Read() is deferred, so file errors only surfaced when
            // the caller enumerated the result - OUTSIDE this try/catch,
            // bypassing all three handlers below. ToList() forces the read
            // here so the handlers can actually fire.
            productImports = cc.Read<ProductImport>(path, inputFileDescription).ToList();
        }
        catch (FileNotFoundException)
        {
            MessageBox.Show("File not found on specified path:\n" + path);
            return null;
        }
        catch (FieldAccessException)
        {
            MessageBox.Show("File cannot be accessed,is it in use by another application?", "Importer Error", MessageBoxButton.OK, MessageBoxImage.Stop);
            return null;
        }
        catch (Exception ex)
        {
            MessageBox.Show("Unknown Error:Details\n" + ex.Message, "Importer Error", MessageBoxButton.OK, MessageBoxImage.Error);
            return null;
        }
        return productImports;
    });
}
// A helper that loads the first row of config.csv (next to the executable)
// into ConfigCsv. Logs and rethrows on any failure so the caller can react.
private static void ReadCSV()
{
    try
    {
        CsvFileDescription inputFileDescription = new CsvFileDescription
        {
            SeparatorChar = ',',
            FirstLineHasColumnNames = true
        };
        CsvContext cc = new CsvContext();

        // Path.Combine instead of string concatenation: BaseDirectory already
        // ends in a separator, so "dir\" + "\\config.csv" produced a doubled
        // separator (and was not portable across platforms).
        string configPath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "config.csv");
        ConfigCsv = cc.Read<CsvModel>(configPath, inputFileDescription).FirstOrDefault<CsvModel>();
    }
    catch (Exception exc)
    {
        Logger.LogExceptions("Exception - Reading config.CSV file", exc);
        // Config is required; rethrow so startup fails loudly.
        throw;
    }
}
public StockPredictionsFileReader(string fileName)
{
    // Loads the prediction CSV once and splits it into parallel column
    // arrays (dates, actuals, predictions, network fitness).
    _fileName = fileName;

    CsvContext cc = new CsvContext();
    CsvFileDescription inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        FileCultureName = "en" // numbers/dates in the file use English formatting
    };

    // ToList() materializes the deferred read so the file is parsed exactly
    // once, not re-read by each Select below.
    var fileContent = cc.Read<StockPrediction>(_fileName, inputFileDescription).ToList();
    _dates = fileContent.Select(x => x.Date).ToArray();
    _accual = fileContent.Select(x => x.Accual).ToArray();
    _predictied = fileContent.Select(x => x.Predicted).ToArray();
    _networkFitness = fileContent.Select(x => x.NetworkFintess).ToArray();

    // List.Count property instead of LINQ Count(): same value, no enumeration.
    _cnt = fileContent.Count;
}
public ActionResult Index(HttpPostedFileBase file)
{
    // Renders the lat/long rows parsed from an uploaded CSV file, or an
    // empty list when no file was uploaded / the model state is invalid.
    if (file == null)
        ModelState.AddModelError(string.Empty, "No file was uploaded");

    // The model error above guarantees file != null inside this branch.
    if (ModelState.IsValid)
    {
        var ctx = new CsvContext();

        // BUG FIX: the StreamReader was never disposed, leaking the upload
        // stream handle. ToList() forces the deferred CSV read to complete
        // before the reader is disposed.
        using (var reader = new StreamReader(file.InputStream))
        {
            var latlongs = ctx.Read<LatLong>(reader).ToList();
            return View(latlongs);
        }
    }
    else
    {
        return View(new List<LatLong>());
    }
}