/// ///////////////////////////////////////////////////////////////////////
/// Write
///
/// <summary>
/// Writes the given records to a CSV file, replacing any existing content.
/// </summary>
/// <typeparam name="T">Type of the records being written.</typeparam>
/// <param name="values">Records to write, one CSV row per record.</param>
/// <param name="fileName">Path of the output file.</param>
/// <param name="fileDescription">Describes the output format (separator, encoding, quoting).</param>
public void Write <T>(IEnumerable <T> values, string fileName, CsvFileDescription fileDescription)
{
    // append: false, so an existing file is truncated before writing.
    using (StreamWriter writer = new StreamWriter(fileName, false, fileDescription.TextEncoding))
    {
        WriteData(values, fileName, writer, fileDescription);
    }
}
/// <summary>
/// Reads the CSV data from the given stream and returns it as a DataTable,
/// optionally sorted.
/// </summary>
/// <param name="stream">The stream to read the CSV data from.</param>
/// <param name="fileDescription">Describes the layout of the CSV data (separator, culture, etc.).</param>
/// <param name="sort">Optional sort expression applied to the result table; null/empty means no sort.</param>
/// <returns>A DataTable holding the rows read from the stream.</returns>
/// <exception cref="BadFirstLineHasColumnNamesValueException">
/// Thrown when SkipFirstLine and FirstLineHasColumnNames are both set - the two options conflict.
/// </exception>
public System.Data.DataTable AsDataTable(StreamReader stream, CsvFileDescription fileDescription, string sort = null)
{
    bool useFirstRow = false;

    //Throw error if we state we use columnnames AND we have skip first line
    if (fileDescription.SkipFirstLine && fileDescription.FirstLineHasColumnNames)
    {
        throw new BadFirstLineHasColumnNamesValueException();
    }

    //If we have first line for column headers, we need to disable this for the
    //processing as we need to read that row to build our column names
    if (fileDescription.FirstLineHasColumnNames)
    {
        useFirstRow = true;

        //Disable this - otherwise the first row would be skipped; we need to
        //read it so we can create datacolumns using its values.
        //NOTE(review): this mutates the caller's fileDescription and is never
        //restored - confirm callers do not reuse the description afterwards.
        fileDescription.FirstLineHasColumnNames = false;
    }

    IEnumerable <DataSetDataRowItem> ie = ReadData <DataSetDataRowItem>(null, stream, fileDescription);

    DataSet results = CreateDataSet(ie, useFirstRow, fileDescription.FileCultureInfo);

    DataTable resultsTable = results.Tables[0];

    // Apply the optional sort expression to the first (and only used) table.
    if (!string.IsNullOrEmpty(sort))
    {
        resultsTable = Sort(resultsTable, sort);
    }

    return(resultsTable);
}
/// <summary>
/// Writes the given records to a CSV file, replacing any existing content.
/// </summary>
/// <typeparam name="T">Type of the records being written.</typeparam>
/// <param name="values">Records to write, one CSV row per record.</param>
/// <param name="fileName">Path of the output file.</param>
/// <param name="fileDescription">Describes the output format (separator, encoding, quoting).</param>
public static void Write <T>(IEnumerable <T> values, string fileName, CsvFileDescription fileDescription)
{
    // FileMode.Create truncates an existing file. The previous
    // FileMode.OpenOrCreate did not truncate, so when the new content was
    // shorter than the old file, stale bytes were left at the end,
    // producing a corrupt CSV. This also matches the instance Write
    // overload, which uses StreamWriter(fileName, append: false, ...).
    using (var sw = new StreamWriter(File.Open(fileName, FileMode.Create), fileDescription.TextEncoding))
    {
        WriteData(values, fileName, sw, fileDescription);
    }
}
/// ///////////////////////////////////////////////////////////////////////
/// FieldMapper
///
/// <summary>
/// Constructor. All work is delegated to the FieldMapper base constructor.
/// </summary>
/// <param name="fileDescription">Describes the layout of the CSV file being read.</param>
/// <param name="fileName">Name of the file being read; passed to the base class.</param>
/// <param name="writingFile">True when the mapper is used for writing rather than reading.</param>
public FieldMapper_Reading(
    CsvFileDescription fileDescription,
    string fileName,
    bool writingFile)
    : base(fileDescription, fileName, writingFile)
{
}
/// <summary>
/// Serializes the given records as CSV rows onto the given TextWriter.
/// Emits a header row first when the file description requests column names.
/// </summary>
/// <typeparam name="T">Type of the records being written.</typeparam>
/// <param name="values">Records to write, one CSV row per record.</param>
/// <param name="fileName">Name of the output file (may be null for raw streams).</param>
/// <param name="stream">The CSV output is written to this writer.</param>
/// <param name="fileDescription">Describes the output format.</param>
private void WriteData <T>(
    IEnumerable <T> values,
    string fileName,
    TextWriter stream,
    CsvFileDescription fileDescription)
{
    FieldMapper <T> mapper = new FieldMapper <T>(fileDescription, fileName, true);
    CsvStream csvOut = new CsvStream(null, stream, fileDescription.SeparatorChar, fileDescription.IgnoreTrailingSeparatorChar);

    // Reused for every row; the mapper overwrites its contents each time.
    List <string> fields = new List <string>();

    // Header row carrying the field names, if requested.
    if (fileDescription.FirstLineHasColumnNames)
    {
        mapper.WriteNames(fields);
        csvOut.WriteRow(fields, fileDescription.QuoteAllFields);
    }

    // One CSV row per record.
    foreach (T record in values)
    {
        mapper.WriteObject(record, fields);
        csvOut.WriteRow(fields, fileDescription.QuoteAllFields);
    }
}
/// <summary>
/// Reads ship-to address records from the CSV file at the given path.
/// File-not-found and field-access errors propagate to the caller; any other
/// error is shown in a message box and null is returned.
/// </summary>
/// <param name="path">Path of the CSV file to import.</param>
/// <returns>The imported records (deferred enumerable), or null on unexpected error.</returns>
public IEnumerable<ShipToAddressImport> Import(string path)
{
    try
    {
        var inputFileDescription = new CsvFileDescription
        {
            // cool - I can specify my own separator!
            SeparatorChar = ',',
            FirstLineHasColumnNames = false,
            QuoteAllFields = true,
            EnforceCsvColumnAttribute = true
        };

        CsvContext cc = new CsvContext();
        var importEntities = cc.Read<ShipToAddressImport>(path, inputFileDescription);
        return importEntities;
    }
    catch (FileNotFoundException)
    {
        // "throw;" preserves the original stack trace; the previous
        // "throw ex;" reset it, hiding where the error actually happened.
        throw;
    }
    catch (FieldAccessException)
    {
        throw;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Importer Error", MessageBoxButton.OK, MessageBoxImage.Error);
        return null;
    }
}
/// <summary>
/// Verifies that compact "MMddyy"-style date values are parsed correctly when
/// UseOutputFormatForParsingCsvValue is enabled, using en-US culture.
/// </summary>
public void GoodFileUsingOutputFormatForParsingDatesCharUSEnglish()
{
    // Arrange
    CsvFileDescription description = new CsvFileDescription
    {
        SeparatorChar = ';',
        FirstLineHasColumnNames = false,
        UseOutputFormatForParsingCsvValue = true,
        EnforceCsvColumnAttribute = true, // default is false
        FileCultureName = "en-US" // default is the current culture
    };

    string testInput =
        "AAAAAAAA;052308" + Environment.NewLine +
        "BBBBBBBB;051212" + Environment.NewLine +
        "CCCCCCCC;122308";

    ProductDataParsingOutputFormat[] expected =
    {
        new ProductDataParsingOutputFormat { name = "AAAAAAAA", startDate = new DateTime(2008, 5, 23) },
        new ProductDataParsingOutputFormat { name = "BBBBBBBB", startDate = new DateTime(2012, 5, 12) },
        new ProductDataParsingOutputFormat { name = "CCCCCCCC", startDate = new DateTime(2008, 12, 23) }
    };

    // Act and Assert
    AssertRead(testInput, description, expected);
}
/// <summary>
/// Imports parish records from the given CSV file into the housing database,
/// skipping parishes that already exist (matched on name, city and state).
/// </summary>
/// <param name="filename">Path of the CSV file; its first line must contain column names.</param>
/// <returns>Always true.</returns>
public static Boolean importParishes(String filename)
{
    CsvFileDescription inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true
    };
    CsvContext cc = new CsvContext();

    // Dispose the EF context deterministically.
    using (HousingContext hdb = new HousingContext())
    {
        IEnumerable<ParishCSV> csvparishes = cc.Read<ParishCSV>(filename, inputFileDescription);

        // Snapshot of existing parishes, used for duplicate detection.
        List<Parish> parishes = hdb.parishes.ToList();

        foreach (ParishCSV parish in csvparishes)
        {
            // Any(predicate) instead of Where(...).Any(): same result, one pass.
            if (!parishes.Any(x => x.name == parish.name && x.city == parish.city && x.state == parish.state))
            {
                hdb.parishes.Add(new Parish
                {
                    name = parish.name,
                    city = parish.city,
                    state = parish.state,
                    femaleChaperones = parish.femaleChaperones,
                    maleChaperones = parish.maleChaperones,
                    femaleStudents = parish.femaleStudents,
                    maleStudents = parish.maleStudents
                });
            }
        }

        hdb.SaveChanges();
    }
    return true;
}
/// <summary>
/// Writes the given records as CSV onto the given TextWriter.
/// Convenience overload for writing to a stream rather than a named file.
/// </summary>
/// <typeparam name="T">Type of the records being written.</typeparam>
/// <param name="values">Records to write, one CSV row per record.</param>
/// <param name="stream">The CSV output is written to this writer.</param>
/// <param name="fileDescription">Describes the output format (separator, quoting, etc.).</param>
public void Write <T>(
    IEnumerable <T> values,
    TextWriter stream,
    CsvFileDescription fileDescription)
{
    // There is no file name for a raw stream, so pass null.
    WriteData <T>(values, null, stream, fileDescription);
}
/// <summary>
/// Verifies that UseFieldIndexForReadingData maps columns by index while
/// skipping unmapped columns (IgnoreMissingColumns), using en-US culture.
/// </summary>
public void GoodFileCommaDelimitedUseFieldIndexForReadingDataCharUSEnglish()
{
    // Arrange
    CsvFileDescription description = new CsvFileDescription
    {
        SeparatorChar = ',',
        IgnoreMissingColumns = true,
        UseFieldIndexForReadingData = true,
        FirstLineHasColumnNames = false,
        EnforceCsvColumnAttribute = true, // default is false
        FileCultureName = "en-US" // default is the current culture
    };

    // The "__" column has no mapped field and must be ignored.
    string testInput =
        "AAAAAAAA,__,34.184,05/23/08" + Environment.NewLine +
        "BBBBBBBB,__,10.311,05/12/12" + Environment.NewLine +
        "CCCCCCCC,__,12.000,12/23/08";

    ProductDataSpecificFieldIndex[] expected =
    {
        new ProductDataSpecificFieldIndex { name = "AAAAAAAA", weight = 34.184, startDate = new DateTime(2008, 5, 23) },
        new ProductDataSpecificFieldIndex { name = "BBBBBBBB", weight = 10.311, startDate = new DateTime(2012, 5, 12) },
        new ProductDataSpecificFieldIndex { name = "CCCCCCCC", weight = 12.000, startDate = new DateTime(2008, 12, 23) }
    };

    // Act and Assert
    AssertRead(testInput, description, expected);
}
/// <summary>
/// Round-trip write test: writes three ProductData records with Dutch (nl-Nl)
/// culture and column names in the first line, and compares the produced CSV
/// text against the expected output.
/// </summary>
public void GoodFileCommaDelimitedNamesInFirstLineNLnl()
{
    // Arrange
    List<ProductData> dataRows_Test = new List<ProductData>();
    // Only a subset of properties is set per record; the rest keep their defaults.
    dataRows_Test.Add(new ProductData { retailPrice = 4.59M, name = "Wooden toy", startDate = DateTime.Parse("1/2/2008"), nbrAvailable = 67 });
    dataRows_Test.Add(new ProductData { onsale = true, weight = 4.03, shopsAvailable = "Ashfield", description = "" });
    dataRows_Test.Add(new ProductData { name = "Metal box", launchTime = DateTime.Parse("5/11/2009 4:50"), description = "Great\nproduct" });

    CsvFileDescription fileDescription_namesNl2 = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        EnforceCsvColumnAttribute = false,
        TextEncoding = Encoding.Unicode,
        FileCultureName = "nl-Nl" // default is the current culture
    };

    // Expected CSV output, formatted with Dutch number/date/currency conventions.
    string expected = @"name,startDate,launchTime,weight,shopsAvailable,code,price,onsale,description,nbrAvailable,unusedField Wooden toy,1-2-2008,01 jan 00:00:00,""000,000"",,0,""€ 4,59"",False,,67, ,1-1-0001,01 jan 00:00:00,""004,030"",Ashfield,0,""€ 0,00"",True,"""",0, Metal box,1-1-0001,05 nov 04:50:00,""000,000"",,0,""€ 0,00"",False,""Great product"",0, ";

    // Act and Assert
    AssertWrite(dataRows_Test, fileDescription_namesNl2, expected);
}
/// <summary>
/// Looks up the given input value in a two-column CSV mapping file
/// (key in column 0, value in column 1) and returns the mapped value.
/// Returns an empty string when the key is not present or an error occurs
/// (the error is logged).
/// </summary>
/// <param name="mapToProcess">Model map being processed (not used by the lookup itself).</param>
/// <param name="actionProcess">Supplies the CSV file location via its Where property.</param>
/// <param name="input">The key to look up; null is logged as an error and yields string.Empty.</param>
/// <returns>The mapped value, or string.Empty when not found / on error.</returns>
public string Transform(ModelMap mapToProcess, ModelProcess actionProcess, string input)
{
    string result = string.Empty;
    try
    {
        CsvFileDescription outputFileDescription = new CsvFileDescription
        {
            SeparatorChar = ',',             // comma delimited (comment previously claimed tab)
            FirstLineHasColumnNames = false, // no column names in first record
            FileCultureName = "en-GB"        // parse values using en-GB formats
        };
        CsvContext csvContext = new CsvContext();

        // The CSV mapping file lives next to the entry assembly, at the
        // relative location given by actionProcess.Where.
        string fileLocation = Path.Combine(
            new FileInfo(System.Reflection.Assembly.GetEntryAssembly().Location).Directory.FullName,
            actionProcess.Where);

        var theList = csvContext.Read<Data.DataRow>(fileLocation, outputFileDescription);
        var thePairs = theList.ToDictionary(
            d => d[0].Value.ToString(),
            d => d[1].Value.ToString());

        // TryGetValue avoids the previous ContainsKey + indexer double lookup;
        // a null input still throws ArgumentNullException here, which is
        // caught and logged below, exactly as before.
        string mapped;
        if (thePairs.TryGetValue(input, out mapped))
            result = mapped;
    }
    catch (Exception ex)
    {
        LoggerSingleton.Instance.LogMessage(ex);
    }
    return result;
}
/// <summary>
/// Verifies reading fixed-width data (NoSeparatorChar = true) with
/// UseOutputFormatForParsingCsvValue enabled, using en-US culture.
/// </summary>
public void GoodFileNoSeparatorCharUseOutputFormatForParsingUSEnglish()
{
    // Arrange
    CsvFileDescription fileDescription_namesUs = new CsvFileDescription
    {
        NoSeparatorChar = true,
        UseOutputFormatForParsingCsvValue = true,
        FirstLineHasColumnNames = false,
        EnforceCsvColumnAttribute = true, // default is false
        FileCultureName = "en-US" // default is the current culture
    };

    // NOTE(review): this is a verbatim string, so "\n" is two literal
    // characters (backslash + n), NOT a newline - confirm that is what the
    // fixed-width parser is expected to consume here.
    string testInput = @"AAAAAAAA34.18405/23/08\n BBBBBBBB10.31105/12/12\n CCCCCCCC12.00012/23/08";

    var expected = new[]
    {
        new ProductDataCharLength()
        {
            name = "AAAAAAAA",
            weight = 34.184,
            startDate = new DateTime(2008, 5, 23),
        },
        new ProductDataCharLength
        {
            name = "BBBBBBBB",
            weight = 10.311,
            startDate = new DateTime(2012, 5, 12),
        },
        new ProductDataCharLength
        {
            name = "CCCCCCCC",
            weight = 12.000,
            startDate = new DateTime(2008, 12, 23),
        }
    };

    // Act and Assert
    AssertRead(testInput, fileDescription_namesUs, expected);
}
}//end Main method

/// <summary>
/// Loading the csv dataset file into DatasetItems.
/// </summary>
/// <param name="csvFile">Path of the CSV dataset file.</param>
/// <returns>
/// The parsed items (deferred enumerable), or null when reading failed with
/// an AggregatedException (the individual errors are written to the console).
/// </returns>
public static IEnumerable<DatasetItem> ReadFromCsv(string csvFile)
{
    IEnumerable<DatasetItem> datasetItemList = null;
    try
    {
        //Read From Csv
        var csvFileDescription = new CsvFileDescription
        {
            SeparatorChar = ',', //Specify the separator character.
            FirstLineHasColumnNames = false,
            FileCultureName = "en-US", // default is the current culture
            EnforceCsvColumnAttribute = true
        };
        var csvContext = new CsvContext();

        // NOTE(review): Read is deferred, so parse errors may surface when the
        // result is enumerated (outside this try/catch) - confirm callers cope.
        datasetItemList = csvContext.Read<DatasetItem>(csvFile, csvFileDescription);
    }
    catch (AggregatedException ae)
    {
        // LINQtoCSV stores the inner exceptions under the key
        // "InnerExceptionsList"; the previous key "InnerExceptionList"
        // (missing the 's') returned null and crashed the foreach below.
        List<Exception> innerExceptionsList = (List<Exception>)ae.Data["InnerExceptionsList"];
        foreach (Exception e in innerExceptionsList)
        {
            Console.WriteLine(e.Message);
        }
    }
    return datasetItemList;
}
/// <summary>
/// Reads inventory-issue-to-salesman records from each of the given CSV files.
/// </summary>
/// <param name="files">Paths of the CSV files to import; duplicates are ignored.</param>
/// <returns>
/// All imported records, or null when any file is missing or unreadable
/// (a message box describes the problem).
/// </returns>
public async Task<IEnumerable<ImportInvetoryIssueToSalesman>> ImportAsync(string[] files)
{
    // Task.Run is the idiomatic way to offload file work from the UI thread.
    return await Task.Run(() =>
    {
        var distinct = files.Distinct();
        var docs = new List<ImportInvetoryIssueToSalesman>();
        var inputFileDescription = new CsvFileDescription
        {
            // cool - I can specify my own separator!
            SeparatorChar = ',',
            FirstLineHasColumnNames = false,
            QuoteAllFields = true,
            EnforceCsvColumnAttribute = true
        };

        foreach (var path in distinct)
        {
            try
            {
                if (!File.Exists(path)) return null;

                // Materialize once: the deferred enumerable previously caused
                // the file to be parsed twice (once for Any, once for AddRange).
                var doc = new CsvContext().Read<ImportInvetoryIssueToSalesman>(path, inputFileDescription).ToList();
                if (doc.Any())
                    docs.AddRange(doc);
            }
            catch (FileNotFoundException)
            {
                MessageBox.Show("File not found on specified path:\n" + path);
                return null;
            }
            catch (FieldAccessException)
            {
                MessageBox.Show(
                    "File cannot be accessed,is it in use by another application?",
                    "Importer Error", MessageBoxButton.OK, MessageBoxImage.Stop);
                return null;
            }
            catch (Exception ex)
            {
                MessageBox.Show("Unknown Error:Details\n" + ex.Message, "Importer Error",
                    MessageBoxButton.OK, MessageBoxImage.Error);
                return null;
            }
        }

        Messenger.Default.Send(DateTime.Now.ToString("hh:mm:ss") + string.Format("Loading files done....."));
        return docs.AsEnumerable();
    });
}
/// <summary>
/// Reads a comma separated keywords CSV (args[0]), groups the keyword names
/// by ad group, and writes one JSON object per ad group to args[1].
/// </summary>
static void Main(string[] args)
{
    if (args.Length > 1)
    {
        if (!File.Exists(args[0]))
        {
            Console.WriteLine("Cannot locate the file " + args[0]);
            return;
        }

        // We will assume for now that they will always pass in a , separated list. not a | or whatever else.
        CsvFileDescription inputFileDescript = new CsvFileDescription
        {
            SeparatorChar = ',',
            FirstLineHasColumnNames = true // yep and we want ColumnNames too
        };

        CsvContext cc = new CsvContext();

        // Materialize the deferred Read enumerable once; previously the CSV
        // file was re-parsed on every enumeration - i.e. once per ad group
        // in the loop below.
        List<Keywords> keysRead = cc.Read<Keywords>(args[0], inputFileDescript).ToList();

        // order the data how we want it, nix the dupes
        var GroupQuery = (from x in keysRead select x.AdGroup)
            .Distinct()
            .OrderBy(x => x);

        // "using" guarantees the writer is flushed and closed even if an
        // exception is thrown while writing.
        using (StreamWriter output = File.CreateText(args[1]))
        {
            // Iterate through the ordered query - using the groups we scraped
            // and build an object for each group - then write it to the file
            // in proper JSON format
            foreach (var group in GroupQuery)
            {
                JObject kjs = new JObject(
                    new JProperty(@group,
                        new JArray(
                            from x in keysRead
                            where x.AdGroup == @group
                            select new JValue(x.Name))));

                // Save the file.
                output.WriteLine(kjs.ToString());
            }
        }
    }
    else
    {
        Console.WriteLine("No file exists in the current path.\n Usage: Keyword_CSVParse.exe input_filename output_filename");
        return;
    }
    Console.WriteLine("Done!");
}
/// ///////////////////////////////////////////////////////////////////////
/// Read
///
/// <summary>
/// Reads comma separated values from a file and exposes them as an
/// IEnumerable&lt;T&gt; that can be used for LINQ queries.
///
/// Reading is deferred (yield return): the file is only opened and parsed
/// when the returned sequence is actually enumerated, and it is closed
/// after the last line has been processed - which may not happen for a while.
/// </summary>
/// <typeparam name="T">The records in the returned sequence are of this type.</typeparam>
/// <param name="fileName">The data will be read from this file.</param>
/// <param name="fileDescription">
/// Additional information on how the input file is to be interpreted,
/// such as the culture of the input dates.
/// </param>
/// <returns>Values read from the file.</returns>
public IEnumerable <T> Read <T>(string fileName, CsvFileDescription fileDescription) where T : class, new()
{
    // Deferred: ReadData only executes when the result is enumerated.
    return ReadData <T>(fileName, null, fileDescription);
}
/// <summary>
/// Example: reads products.csv and demonstrates handling of the exception
/// types LINQtoCSV can throw - AggregatedException,
/// DuplicateFieldIndexException, and everything else.
/// </summary>
public static void ReadFileWithExceptionHandling()
{
    try
    {
        CsvContext cc = new CsvContext();

        CsvFileDescription inputFileDescription = new CsvFileDescription
        {
            MaximumNbrExceptions = 50 // limit number of aggregated exceptions to 50
        };

        IEnumerable<Product> products =
            cc.Read<Product>("../../TestFiles/products.csv", inputFileDescription);

        // NOT SHOWN IN EXAMPLE IN ARTICLE
        foreach (var item in products)
        {
            Console.WriteLine(item);
        }

        // Do data processing
        // ...........
    }
    catch (AggregatedException ae)
    {
        // Process all exceptions generated while processing the file
        List<Exception> innerExceptionsList = (List<Exception>)ae.Data["InnerExceptionsList"];
        foreach (Exception e in innerExceptionsList)
        {
            ShowErrorMessage(e.Message);
        }
    }
    catch (DuplicateFieldIndexException dfie)
    {
        // name of the class used with the Read method - in this case "Product"
        string typeName = Convert.ToString(dfie.Data["TypeName"]);

        // Names of the two fields or properties that share the same FieldIndex
        string fieldName = Convert.ToString(dfie.Data["FieldName"]);
        string fieldName2 = Convert.ToString(dfie.Data["FieldName2"]);

        // Actual FieldIndex that the two fields have in common
        int commonFieldIndex = Convert.ToInt32(dfie.Data["Index"]);

        // Do some processing with this information
        // .........

        // Inform user of error situation
        ShowErrorMessage(dfie.Message);
    }
    catch (Exception e)
    {
        ShowErrorMessage(e.Message);
    }
}
/// <summary>
/// Sets up a CsvContext and a file description for comma separated input
/// whose first line contains column names, encoded in code page 932.
/// </summary>
/// <param name="filePath">Path of the CSV file to read.</param>
public CharInitReader(string filePath)
{
    this.filePath = filePath;
    context = new CsvContext();
    description = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        // Code page 932 = Shift-JIS. NOTE(review): on .NET Core/.NET 5+ this
        // requires registering CodePagesEncodingProvider first - confirm the
        // target runtime.
        TextEncoding = Encoding.GetEncoding(932)
    };
}
/// <summary>
/// Imports monsters from a CSV file (first line = column names) and saves them.
/// Existing monsters (matched by name) are updated in place; unknown
/// uniqueness values create new MonsterLevel entries on the fly.
/// </summary>
/// <param name="stream">Path of the CSV file to import.</param>
public void Import(string stream)
{
    var inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true
    };

    var cc = new CsvContext();
    var monsterImports = cc.Read<MonsterImport>(stream, inputFileDescription);

    // Index existing levels and monsters by name for O(1) lookups.
    var monsterLevels = _monsterLevelService.GetMonsterLevel().ToDictionary(k => k.Name, v => v);
    var monsters = _monsterService.GetMonsters().ToDictionary(k => k.Name, v => v);

    var monsterList = new List<Monster>();
    foreach (var monsterImport in monsterImports)
    {
        var name = monsterImport.Unit;

        // TryGetValue avoids the previous ContainsKey + indexer double lookup.
        Monster monster;
        if (!monsters.TryGetValue(name, out monster))
        {
            monster = new Monster();
            monster.Name = name;
        }

        monster.Attack = monsterImport.Attack;
        monster.Defence = monsterImport.Defense;
        monster.Price = monsterImport.Price;
        monster.Upkeep = monsterImport.Upkeep;
        monster.Element = EnumUtil.ParseEnum<Element>(monsterImport.Element);

        MonsterLevel monsterLevel;
        if (!monsterLevels.TryGetValue(monsterImport.Uniqueness, out monsterLevel))
        {
            monsterLevel = new MonsterLevel
            {
                Active = true,
                Name = monsterImport.Uniqueness,
                // Count property (not the Count() extension) on a Dictionary.
                Tier = monsterLevels.Count + 1
            };
            monsterLevels.Add(monsterImport.Uniqueness, monsterLevel);
        }

        monster.MonsterLevel = monsterLevel;
        monsterList.Add(monster);
    }

    _monsterService.SaveMonsters(monsterList);
}
/// <summary>
/// Sets up a CsvContext and a file description for tab delimited input
/// without column names, encoded in code page 932 (Shift-JIS).
/// </summary>
/// <param name="filePath">Path of the file to read.</param>
public FgFileReader(string filePath)
{
    this.filePath = filePath;
    context = new CsvContext();
    description = new CsvFileDescription
    {
        SeparatorChar = '\t',
        // TODO: some files have a trailing \t at the end of the header line,
        // so setting this to true makes the reader crash.
        FirstLineHasColumnNames = false,
        EnforceCsvColumnAttribute = true,
        TextEncoding = Encoding.GetEncoding(932)
    };
}
/// <summary>
/// Writes the given result rows to a CSV file with a header row,
/// using en-AU formatting.
/// </summary>
/// <param name="resultsCsvFilePath">Path of the CSV file to create.</param>
/// <param name="results">Result rows to write.</param>
public static void WriteResults(string resultsCsvFilePath, List<ResultCsv> results)
{
    var description = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        FileCultureName = "en-AU"
    };

    new CsvContext().Write(results, resultsCsvFilePath, description);
}
/// <summary>
/// Parses a CSV of transactions, queues new (non-duplicate) expenses and new
/// category hints for insertion, saves, and returns a summary of what was skipped.
/// </summary>
/// <param name="csv">Raw CSV text; the first line is data (no column names).</param>
/// <param name="paymentMethod">Payment method recorded on every imported expense.</param>
/// <returns>
/// A summary string with the number of duplicate expenses and ambiguous hints,
/// or null when the csv text has no content.
/// </returns>
public string ProcessCSVs(string csv, string paymentMethod)
{
    if (!csv.HasContent()) return null;

    int skipCount = 0;     // expenses skipped because they already exist
    int badHintCount = 0;  // hints skipped because they are ambiguous or duplicated

    using (ShowcaseDB db = new ShowcaseDB())
    {
        CsvContext cc = new CsvContext();
        var csvFlags = new CsvFileDescription() { FirstLineHasColumnNames = false, EnforceCsvColumnAttribute = true };
        // ToList: the rows are enumerated twice below (expenses, then hints).
        var csvItems = cc.Read<AddTransactionItem>(csv.ToReader(), csvFlags).ToList();

        //add expense items
        foreach (var itm in csvItems)
        {
            // A row is a duplicate when date, description, amount and payment
            // method all match an existing expense.
            if (!db.Expenses.Any(e => e.TransactionDate == itm.TransactionDate && e.Description == itm.Description && e.Amount == itm.Amount && e.PaymentMethod == paymentMethod))
                _expToAdd.Add(new Expense() { TransactionDate = itm.TransactionDate, Description = itm.Description, Amount = itm.Amount, Category = itm.Category, PaymentMethod = paymentMethod });
            else
                skipCount++;
        }

        //add hints
        foreach (var itm in csvItems)
        {
            string kw = itm.NewKeyword;
            if (kw.HasContent())
            {
                // Reject a keyword that contradicts existing categorized
                // expenses, or that already exists as a hint (stored or pending).
                if (db.Expenses.Any(e => e.Description.Contains(kw) && e.Category != itm.Category) || db.Hints.Any(h => h.Keyword == kw) || _hintToAdd.Any(h => h.Keyword == kw))
                    badHintCount++;
                else
                    _hintToAdd.Add(new Hint() { Keyword = itm.NewKeyword.Trim().ToLower(), Category = itm.Category });
            }
        }

        Save();
    }

    return string.Format("Duplicate expenses: {0}\r\nAmbiguous hints: {1}\r\n", skipCount, badHintCount);
}
/// <summary>
/// Creates a CSV string representation of the Eway credit card charge file
/// for all credit-card-billed invoices in the given run.
/// </summary>
/// <param name="runNumber">Run whose invoices are exported.</param>
/// <returns>The CSV text, or string.Empty when an error occurred (logged).</returns>
public string ExportCreditCardChargeFile(int runNumber)
{
    try
    {
        var invoices = _invoiceRepository.FindInvoicesByRunNumber(runNumber);

        var ewayExport = new List<EwayLineItem>();
        foreach (var invoice in invoices)
        {
            var rootAccount = _accountRepository.GetAccount(invoice.InvoiceRootAccountId);

            // Only credit-card accounts go into the Eway charge file.
            if (rootAccount.BillingMethod != BillingMethod.CreditCard)
                continue;

            var rootContact = _contactRepository.GetContactEntity(rootAccount.ContactId);

            // 13/05/2012 - Moved eway line item creation to factory method.
            ewayExport.Add(EwayLineItem.Create(invoice, rootContact));
        }

        CsvFileDescription fileDescription = new CsvFileDescription
        {
            SeparatorChar = ',',
            FirstLineHasColumnNames = false
        };
        CsvContext context = new CsvContext();

        using (Stream csvStream = new MemoryStream())
        {
            // The writer is flushed (not disposed) so the stream stays open
            // for the reader below; the usings dispose everything on exit,
            // including on the exception path (the reader was previously
            // leaked when ReadToEnd threw).
            var sw = new StreamWriter(csvStream);
            context.Write(ewayExport, sw, fileDescription);
            sw.Flush();

            csvStream.Position = 0;
            using (var sr = new StreamReader(csvStream))
            {
                return sr.ReadToEnd();
            }
        }
    }
    catch (Exception ex)
    {
        LoggingUtility.LogException(ex);
    }
    return string.Empty;
}
/// <summary>
/// Reads all cards from hd.csv in the current directory into the Allcards list.
/// </summary>
public static void readcards()
{
    CsvFileDescription inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        IgnoreTrailingSeparatorChar = true,
        IgnoreUnknownColumns = true
    };
    CsvContext cc = new CsvContext();

    // Path.Combine discards the first argument when the second is rooted, so
    // the previous "\\hd.csv" resolved to the drive root rather than the
    // current directory. A relative name makes the paths actually combine.
    IEnumerable<Card> allthecards = cc.Read<Card>(
        Path.Combine(Directory.GetCurrentDirectory(), "hd.csv"), inputFileDescription);

    foreach (Card c in allthecards)
    {
        Allcards.Add(c);
    }
}
/// <summary>
/// Verifies reading a comma delimited file with column names in the first
/// line, using en-US culture. Exercises quoted fields containing commas,
/// embedded newlines, doubled quotes, hex codes, currency and exponents.
/// </summary>
public void GoodFileCommaDelimitedNamesInFirstLineUSEnglish()
{
    // Arrange
    CsvFileDescription fileDescription_namesUs = new CsvFileDescription
    {
        SeparatorChar = ',', // default is ','
        FirstLineHasColumnNames = true,
        EnforceCsvColumnAttribute = false, // default is false
        FileCultureName = "en-US" // default is the current culture
    };

    // First line carries the column names; fields are matched by name.
    string testInput = @"name, weight, startDate, launchTime, nbrAvailable,onsale,shopsAvailable, code, price, description moonbuggy, 34.184, 5/23/08, 5-May-2009 4:11 pm, 1205, true, ""Paris, New York"", 1F, $540.12, newly launched product ""mouse trap"",45E-5, 1/2/1985, ""7 August 1988, 0:00 am"", ""4,030"", FALSE, ""This field has a newline"", 100, ""$78,300"", ""This field has quotes(""""), and two newlines and a quoted """"string"""""" dog house, ""45,230,990"",29 Feb 2004, , -56, True,"""", FF10, ""12,008""";

    var expected = new []
    {
        new ProductData
        {
            name = "moonbuggy", weight = 34.184, startDate = new DateTime(2008, 5, 23),
            launchTime = new DateTime(2009, 5, 5, 16, 11, 0), nbrAvailable = 1205,
            onsale = true, shopsAvailable = "Paris, New York", hexProductCode = 31,
            retailPrice = 540.12M, description = "newly launched product"
        },
        new ProductData
        {
            name = "mouse trap", weight = 45E-5, startDate = new DateTime(1985, 1, 2),
            launchTime = new DateTime(1988, 8, 7, 0, 0, 0), nbrAvailable = 4030,
            onsale = false, shopsAvailable = @"This field has a newline",
            hexProductCode = 256, retailPrice = 78300M,
            description = @"This field has quotes(""), and two newlines and a quoted ""string"""
        },
        new ProductData
        {
            // Missing launchTime and description: default DateTime / null.
            name = "dog house", weight = 45230990, startDate = new DateTime(2004, 2, 29),
            launchTime = default(DateTime), nbrAvailable = -56, onsale = true,
            shopsAvailable = "", hexProductCode = 65296, retailPrice = 12008M,
            description = null
        }
    };

    // Act and Assert
    AssertRead(testInput, fileDescription_namesUs, expected);
}
/// <summary>
/// Loads the additional-smarts settings CSV from the documents folder and
/// watches the file so external edits are picked up automatically.
/// </summary>
public AdditonalSmartsSettingsProvider()
{
    _csvFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        EnforceCsvColumnAttribute = true
    };
    _csvContext = new CsvContext();
    _writequeued = false;
    _storage = new Storage();
    _filename = _storage.CombineDocumentsFullPath(SettingsFileName);

    Read(); // do initial read

    // Watch only this one file for writes, and react via OnChanged.
    _watcher = new FileSystemWatcher { Path = Path.GetDirectoryName(_filename), Filter = Path.GetFileName(_filename), NotifyFilter = NotifyFilters.LastWrite };
    _watcher.Changed += OnChanged;
    _watcher.EnableRaisingEvents = true;
}
/// <summary>
/// Reads product import records from a tab delimited CSV file.
/// </summary>
/// <param name="path">Path of the file to import.</param>
/// <returns>
/// The imported records, or null when the file is missing or unreadable
/// (a message box describes the problem).
/// </returns>
public async Task<IEnumerable<ProductImport>> Import(string path)
{
    // Task.Run is the idiomatic way to offload file work from the UI thread.
    return await Task.Run(() =>
    {
        IEnumerable<ProductImport> ProductImports;
        try
        {
            var inputFileDescription = new CsvFileDescription
            {
                // cool - I can specify my own separator!
                SeparatorChar = '\t', //tab delimited
                FirstLineHasColumnNames = false,
                QuoteAllFields = true,
                EnforceCsvColumnAttribute = true
            };
            CsvContext cc = new CsvContext();

            // Materialize here so parse errors surface inside this try/catch;
            // the deferred enumerable would otherwise fail later at the caller.
            ProductImports = cc.Read<ProductImport>(path, inputFileDescription).ToList();
        }
        catch (FileNotFoundException)
        {
            MessageBox.Show("File not found on specified path:\n" + path);
            return null;
        }
        catch (FieldAccessException)
        {
            MessageBox.Show("File cannot be accessed,is it in use by another application?",
                "Importer Error", MessageBoxButton.OK, MessageBoxImage.Stop);
            return null;
        }
        catch (Exception ex)
        {
            MessageBox.Show("Unknown Error:Details\n" + ex.Message, "Importer Error",
                MessageBoxButton.OK, MessageBoxImage.Error);
            return null;
        }
        return ProductImports;
    });
}
/// <summary>
/// Exports all pending sales to a CSV file, marks them as exported, and
/// clears the pending list. Progress and errors are reported via Messenger.
/// </summary>
private void Export()
{
    if (_salesPendingExport.Any())
    {
        var orders = _salesPendingExport.Select(OrderExportHelper.MapSalesExport).ToList();
        var orderToCsv = new List<ExportSaleItem>();
        foreach (var order in orders)
        {
            orderToCsv.AddRange(order);
        }

        try
        {
            // Format the count into the message before sending; previously the
            // count was passed as Messenger's second (token) argument, so the
            // "{0}" placeholder was never filled in and the message was sent
            // with an unexpected token.
            Messenger.Default.Send(DateTime.Now.ToString("hh:mm:ss") +
                string.Format(" Attempting to export {0} sales", orders.Count));

            DumpExportFilesAsync(orderToCsv.ToCsv(), OrderExportHelper.GetExportFileName("AllSales"));

            _salesPendingExport.OrderBy(p => p.GenericOrderReference).Distinct().ToList().ForEach(
                OrderExportHelper.MarkAsExported);

            Messenger.Default.Send(DateTime.Now.ToString("hh:mm:ss") +
                string.Format(" Export compeleted for={0} sales", orders.Count));

            _salesPendingExport.Clear();
        }
        catch (Exception ex)
        {
            Messenger.Default.Send(DateTime.Now.ToString("hh:mm:ss") +
                "Error occured while exporting..See error logs for details");
            FileUtility.LogError(ex.Message);
        }
    }
    else
    {
        Messenger.Default.Send(DateTime.Now.ToString("hh:mm:ss") + " No sales exported");
    }
}
/// <summary>
/// Serializes the given contact rows to CSV in memory and returns them as a
/// downloadable "ContactList.csv" file result.
/// </summary>
/// <param name="items">Contact rows to export.</param>
/// <returns>A text/csv FileResult named ContactList.csv.</returns>
public FileResult CreateCsv(List<ContactCsvLine> items)
{
    var description = new CsvFileDescription { EnforceCsvColumnAttribute = true };
    var context = new CsvContext();

    using (MemoryStream buffer = new MemoryStream())
    {
        // Dispose the writer before reading the buffer so all output is flushed.
        using (TextWriter writer = new StreamWriter(buffer))
        {
            context.Write(items, writer, description);
        }
        return File(buffer.ToArray(), "text/csv", "ContactList.csv");
    }
}
/// <summary>
/// Loads stock predictions from a CSV file (header row, "en" culture) and
/// splits the columns into parallel arrays.
/// </summary>
/// <param name="fileName">Path of the predictions CSV file.</param>
public StockPredictionsFileReader(string fileName)
{
    _fileName = fileName;

    CsvContext cc = new CsvContext();
    CsvFileDescription inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        FileCultureName = "en"
    };

    // Materialize once; each Select below re-enumerates the list cheaply.
    var filecontent = cc.Read<StockPrediction>(_fileName, inputFileDescription).ToList();

    _dates = filecontent.Select(x => x.Date).ToArray();
    _accual = filecontent.Select(x => x.Accual).ToArray();
    _predictied = filecontent.Select(x => x.Predicted).ToArray();
    _networkFitness = filecontent.Select(x => x.NetworkFintess).ToArray();

    // Count property on List<T>, not the Count() extension method.
    _cnt = filecontent.Count;
}
// a helper to read the CSV
/// <summary>
/// Reads the first row of config.csv (located next to the executable) into
/// ConfigCsv. Logs and rethrows on failure.
/// </summary>
private static void ReadCSV()
{
    try
    {
        CsvFileDescription inputFileDescription = new CsvFileDescription
        {
            SeparatorChar = ',',
            FirstLineHasColumnNames = true
        };
        CsvContext cc = new CsvContext();

        // Path.Combine instead of string concatenation: handles the trailing
        // separator of BaseDirectory correctly and is platform independent.
        string configPath = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "config.csv");
        ConfigCsv = cc.Read<CsvModel>(configPath, inputFileDescription).FirstOrDefault<CsvModel>();
    }
    catch (Exception exc)
    {
        Logger.LogExceptions("Exception - Reading config.CSV file", exc);
        throw;
    }
}
/// <summary>
/// Reads Comsec transactions from the given CSV file and converts the
/// "Contract" rows into Transaction objects.
/// </summary>
/// <param name="csvFilePath">Path of the Comsec CSV export (header row, en-AU formats).</param>
/// <returns>The converted transactions.</returns>
public static List<Transaction> GetTransactions(string csvFilePath)
{
    var description = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        FileCultureName = "en-AU",
        IgnoreTrailingSeparatorChar = true
    };

    var context = new CsvContext();
    IEnumerable<ComsecTransactionCsv> rows = context.Read<ComsecTransactionCsv>(csvFilePath, description).ToList();

    // Only "Contract" rows represent actual trades.
    return rows
        .Where(t => t.TransactionType == "Contract")
        .Select(t => ToTransaction(t))
        .ToList();
}
/// ///////////////////////////////////////////////////////////////////////
/// FieldMapper
///
/// <summary>
/// Constructor. Validates the file description and analyzes type T to build
/// the name-to-field mapping used to translate between CSV columns and the
/// fields/properties of T.
/// </summary>
/// <param name="fileDescription">Describes the layout of the CSV file.</param>
/// <param name="fileName">Name of the file; stored for use in messages.</param>
/// <param name="writingFile">True when the mapper is used for writing.</param>
public FieldMapper(CsvFileDescription fileDescription, string fileName, bool writingFile)
{
    // Without column names in the first line, fields can only be matched via
    // their CsvColumn attribute - so that attribute must be enforced.
    if ((!fileDescription.FirstLineHasColumnNames) &&
        (!fileDescription.EnforceCsvColumnAttribute))
    {
        throw new CsvColumnAttributeRequiredException();
    }

    // ---------

    m_fileDescription = fileDescription;
    m_fileName = fileName;

    // NOTE(review): another variant of this constructor honors
    // fileDescription.IgnoreCaseOnColumnNames with a case-insensitive
    // dictionary; this one is always case-sensitive - confirm which is current.
    m_NameToInfo = new Dictionary <string, TypeFieldInfo>();

    AnalyzeType(
        typeof(T),
        !fileDescription.FirstLineHasColumnNames,
        writingFile && !fileDescription.FirstLineHasColumnNames);
}
/// ///////////////////////////////////////////////////////////////////////
/// FieldMapper
///
/// <summary>
/// Constructor. Validates the file description and analyzes type T to build
/// the name-to-field mapping used to translate between CSV columns and the
/// fields/properties of T.
/// </summary>
/// <param name="fileDescription">Describes the layout of the CSV file.</param>
/// <param name="fileName">Name of the file; stored for use in messages.</param>
/// <param name="writingFile">True when the mapper is used for writing.</param>
public FieldMapper(CsvFileDescription fileDescription, string fileName, bool writingFile)
{
    // Without column names in the first line, fields can only be matched via
    // their CsvColumn attribute - so that attribute must be enforced.
    if ((!fileDescription.FirstLineHasColumnNames) &&
        (!fileDescription.EnforceCsvColumnAttribute))
    {
        throw new CsvColumnAttributeRequiredException();
    }

    // ---------

    m_fileDescription = fileDescription;
    m_fileName = fileName;

    //if case insensitive set then declare dictionary with ignorecase
    m_NameToInfo = m_fileDescription.IgnoreCaseOnColumnNames
        ? new Dictionary <string, TypeFieldInfo>(StringComparer.InvariantCultureIgnoreCase)
        : new Dictionary <string, TypeFieldInfo>();

    AnalyzeType(
        typeof(T),
        !fileDescription.FirstLineHasColumnNames,
        writingFile && !fileDescription.FirstLineHasColumnNames);
}
/// <summary>
/// Verifies reading a tab delimited file without column names, using Dutch
/// (nl-NL) culture for numbers, dates and currency. Quoted fields contain
/// embedded tabs, newlines and doubled quotes.
/// </summary>
public void GoodFileTabDelimitedNoNamesInFirstLineNLnl()
{
    // Arrange
    CsvFileDescription fileDescription_nonamesNl = new CsvFileDescription
    {
        SeparatorChar = '\t', // tab character
        FirstLineHasColumnNames = false,
        EnforceCsvColumnAttribute = true,
        FileCultureName = "nl-NL" // default is the current culture
    };

    // Three records; fields are matched by position (CsvColumn FieldIndex).
    string testInput = "moonbuggy\t 23/5/08\t 5-Mei-2009 16:11 pm\t 34.184\t \"Paris, New York\"\t 1F\t €540,12\t true\t newly launched product\r\n\"mouse trap\"\t 2/1/1985\t \"7 Augustus 1988\t 0:00\"\t45E-5\t \"This field has\r\na newline\"\t 100\t \"€78.300\"\t FALSE\t \"This field has quotes(\"\"), and\r\ntwo newlines\r\nand a quoted \"\"string\"\"\"\r\ndog house\t29 Feb 2004\t \t \"45.230.990\"\t\"\"\t FF10\t \"12.008\"\t True";

    var expected = new[]
    {
        new ProductData
        {
            // Dutch decimal comma: "34.184" parses as 34184 here.
            name = "moonbuggy", weight = 34184, startDate = new DateTime(2008, 5, 23),
            launchTime = new DateTime(2009, 5, 5, 16, 11, 0), nbrAvailable = 0,
            onsale = true, shopsAvailable = "Paris, New York", hexProductCode = 31,
            retailPrice = 540.12M, description = "newly launched product"
        },
        new ProductData
        {
            name = "mouse trap", weight = 45E-5, startDate = new DateTime(1985, 1, 2),
            launchTime = new DateTime(1988, 8, 7, 0, 0, 0), nbrAvailable = 0,
            onsale = false, shopsAvailable = @"This field has a newline",
            hexProductCode = 256, retailPrice = 78300M,
            description = @"This field has quotes(""), and two newlines and a quoted ""string"""
        },
        new ProductData
        {
            // Missing launchTime and description: default DateTime / null.
            name = "dog house", weight = 45230990, startDate = new DateTime(2004, 2, 29),
            launchTime = default(DateTime), nbrAvailable = 0, onsale = true,
            shopsAvailable = "", hexProductCode = 65296, retailPrice = 12008M,
            description = null
        }
    };

    // Act and Assert
    AssertRead(testInput, fileDescription_nonamesNl, expected);
}
/// <summary>
/// Reads the CSV file and returns its contents as a DataSet.
/// </summary>
/// <param name="fileName">Name of the file.</param>
/// <param name="fileDescription">Describes the layout of the CSV file (separator, culture, etc.).</param>
/// <returns>A DataSet whose tables hold the rows read from the file.</returns>
/// <exception cref="BadFirstLineHasColumnNamesValueException">
/// Thrown when SkipFirstLine and FirstLineHasColumnNames are both set - the two options conflict.
/// </exception>
public System.Data.DataSet AsDataSet(string fileName, CsvFileDescription fileDescription)
{
    bool useFirstRow = false;

    //Throw error if we state we use columnnames AND we have skip first line
    if (fileDescription.SkipFirstLine && fileDescription.FirstLineHasColumnNames)
    {
        throw new BadFirstLineHasColumnNamesValueException();
    }

    //If we have first line for column headers, we need to disable this for the
    //processing as we need to read that row to build our column names
    if (fileDescription.FirstLineHasColumnNames)
    {
        useFirstRow = true;

        //Disable this - otherwise the first row would be skipped; we need to
        //read it so we can create datacolumns using its values.
        //NOTE(review): this mutates the caller's fileDescription and is never
        //restored - confirm callers do not reuse the description afterwards.
        fileDescription.FirstLineHasColumnNames = false;
    }

    IEnumerable <DataSetDataRowItem> ie = ReadData <DataSetDataRowItem>(fileName, null, fileDescription);

    return(CreateDataSet(ie, useFirstRow, fileDescription.FileCultureInfo));
}
/// <summary>
/// Imports room adjacency records from the given CSV file and links each
/// matching room to its adjacent room (same floor) in the housing database.
/// </summary>
/// <param name="filename">Path of the CSV file; its first line must contain column names.</param>
/// <returns>Always true.</returns>
public static Boolean importAdjacentRooms(String filename)
{
    CsvFileDescription inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true
    };
    CsvContext cc = new CsvContext();
    HousingContext hdb = new HousingContext();

    // Materialize the CSV rows once.
    List<AdjacentRoomsCSV> csvadjacentRooms = cc.Read<AdjacentRoomsCSV>(filename, inputFileDescription).ToList();

    List<Room> rooms = (from room in hdb.rooms select room).ToList();

    foreach (AdjacentRoomsCSV csvRoom in csvadjacentRooms)
    {
        // Find the room(s) the CSV record describes (campus/building/floor/number)...
        foreach (Room r in rooms.Where(x =>
            x.floor.level == csvRoom.FloorNumber &&
            x.roomNumber == csvRoom.RoomNumber &&
            x.floor.building.name == csvRoom.BuildingName &&
            x.floor.building.campus.campusName == csvRoom.CampusName))
        {
            // ...then find the adjacent room with the given number on the same floor.
            List<Room> adjacentRooms = (
                from room in hdb.rooms
                where csvRoom.RoomNumber == r.roomNumber &&
                      csvRoom.AdjacentRoomNumber == room.roomNumber &&
                      csvRoom.FloorNumber == r.floor.level &&
                      room.FloorID == r.FloorID
                select room).ToList();

            // ToList never returns null, so only the count check is needed.
            if (adjacentRooms.Count > 0)
            {
                r.adjacentRooms.Add(adjacentRooms.First());
            }
        }
    }

    hdb.SaveChanges();
    return true;
}
/// <summary>
/// Reads master-import records from a CSV file on a thread-pool thread.
/// </summary>
/// <param name="filePath">Path of the CSV file to read; may be null or empty.</param>
/// <returns>
/// The parsed records; an empty list when <paramref name="filePath"/> is null/empty;
/// null when an unexpected error occurred (after showing it to the user).
/// </returns>
/// <exception cref="FileNotFoundException">Propagated to the caller unchanged.</exception>
/// <exception cref="FieldAccessException">Propagated to the caller unchanged.</exception>
public Task<IEnumerable<MasterImportEntity>> ReadFromCsVFileAsync(string filePath)
{
    return Task.Run(() =>
    {
        if (string.IsNullOrEmpty(filePath))
            return new List<MasterImportEntity>();

        try
        {
            var inputFileDescription = new CsvFileDescription
            {
                // cool - I can specify my own separator!
                SeparatorChar = ',',
                FirstLineHasColumnNames = false,
                EnforceCsvColumnAttribute = false
            };

            var imports = new CsvContext().Read<MasterImportEntity>(filePath, inputFileDescription);
            return imports;
        }
        // These two exception types must NOT fall into the general handler below
        // (which would swallow them behind a MessageBox), so they are caught and
        // rethrown. Fix: use `throw;` — the original `throw ex;` reset the stack trace.
        catch (FileNotFoundException)
        {
            throw;
        }
        catch (FieldAccessException)
        {
            throw;
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message, "Importer Error", MessageBoxButton.OK, MessageBoxImage.Error);
            return null;
        }
    });
}
/// <summary>
/// Get the card list with a specified csvpath
/// </summary>
/// <param name="csvPath">Location on the HDD of the card data csv</param>
/// <returns>All cards parsed from the CSV file.</returns>
private static List<Card> GetCardList(string csvPath)
{
    CsvFileDescription inputFileDescription = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        IgnoreTrailingSeparatorChar = true,
        IgnoreUnknownColumns = true
    };

    CsvContext cc = new CsvContext();

    // cc.Read returns a lazy sequence; materialize it here so the file is read once.
    // Fix: the original declared a second, never-used list ("cardList") alongside
    // the one it actually populated and returned ("cardlist").
    List<Card> cardlist = new List<Card>();
    foreach (Card c in cc.Read<Card>(csvPath, inputFileDescription))
    {
        cardlist.Add(c);
    }

    return cardlist;
}
/// ///////////////////////////////////////////////////////////////////////
/// ReadData
/// <summary>
/// Core iterator: reads CSV rows from a file or a seekable stream and yields
/// one T per data row. Because this is an iterator, it runs again from the top
/// every time the returned IEnumerable&lt;T&gt; is enumerated.
/// </summary>
/// <typeparam name="T">
/// Target type for each row. If T implements IDataRow, raw rows are returned
/// without field mapping.
/// </typeparam>
/// <param name="fileName">
/// Name of the file associated with the stream.
///
/// If this is not null, a file is opened with this name.
/// If this is null, the method attempts to read from the passed in stream.
/// </param>
/// <param name="stream">
/// All data is read from this stream, unless fileName is not null.
///
/// This is a StreamReader rather than a TextReader,
/// because we need to be able to seek back to the start of the
/// stream, and you can't do that with a TextReader (or StringReader).
/// </param>
/// <param name="fileDescription">Separator, encoding and other CSV options.</param>
/// <returns>Lazily evaluated sequence of mapped objects (or raw rows).</returns>
/// <exception cref="BadStreamException">
/// Thrown when fileName is null and the stream is null or not seekable.
/// </exception>
private IEnumerable<T> ReadData<T>(
    string fileName,
    StreamReader stream,
    CsvFileDescription fileDescription) where T : class, new()
{
    // If T implements IDataRow, then we're reading raw data rows
    bool readingRawDataRows = typeof(IDataRow).GetTypeInfo().IsAssignableFrom(typeof(T).GetTypeInfo());

    // The constructor for FieldMapper_Reading will throw an exception if there is something
    // wrong with type T. So invoke that constructor before you open the file, because if there
    // is an exception, the file will not be closed.
    //
    // If T implements IDataRow, there is no need for a FieldMapper, because in that case we're returning
    // raw data rows.
    FieldMapper_Reading<T> fm = null;

    if (!readingRawDataRows)
    {
        fm = new FieldMapper_Reading<T>(fileDescription, fileName, false);
    }

    // -------
    // Each time the IEnumerable<T> that is returned from this method is
    // accessed in a foreach, ReadData is called again (not the original Read overload!)
    //
    // So, open the file here, or rewind the stream.

    bool readingFile = !string.IsNullOrEmpty(fileName);

    if (readingFile)
    {
        stream = new StreamReader(
            File.Open(fileName, FileMode.Open),
            fileDescription.TextEncoding,
            fileDescription.DetectEncodingFromByteOrderMarks);
    }
    else
    {
        // Rewind the stream so repeated enumerations see the data from the start.
        if ((stream == null) || (!stream.BaseStream.CanSeek))
        {
            throw new BadStreamException();
        }

        stream.BaseStream.Seek(0, SeekOrigin.Begin);
    }

    // ----------

    CsvStream cs = new CsvStream(stream, null, fileDescription.SeparatorChar, fileDescription.IgnoreTrailingSeparatorChar);

    // If we're reading raw data rows, instantiate a T so we return objects
    // of the type specified by the caller.
    // Otherwise, instantiate a DataRow, which also implements IDataRow.
    // Note: the same row instance is reused for every ReadRow call.
    IDataRow row = null;

    if (readingRawDataRows)
    {
        row = new T() as IDataRow;
    }
    else
    {
        row = new DataRow();
    }

    // Collects per-row mapping exceptions so the caller gets them all at once;
    // AddException throws once MaximumNbrExceptions is reached.
    AggregatedException ae = new AggregatedException(typeof(T).ToString(), fileName, fileDescription.MaximumNbrExceptions);

    try
    {
        List<int> charLengths = null;

        if (!readingRawDataRows)
        {
            charLengths = fm.GetCharLengths();
        }

        bool firstRow = true;

        while (cs.ReadRow(row, charLengths))
        {
            // Skip empty lines.
            // Important. If there is a newline at the end of the last data line, the code
            // thinks there is an empty line after that last data line.
            if ((row.Count == 1) &&
                ((row[0].Value == null) ||
                 (string.IsNullOrEmpty(row[0].Value.Trim()))))
            {
                continue;
            }

            // The first row may carry column names rather than data; feed it to the
            // field mapper instead of yielding it.
            if (firstRow && fileDescription.FirstLineHasColumnNames)
            {
                if (!readingRawDataRows)
                {
                    fm.ReadNames(row);
                }
            }
            else
            {
                T obj = default(T);

                try
                {
                    if (readingRawDataRows)
                    {
                        obj = row as T;
                    }
                    else
                    {
                        obj = fm.ReadObject(row, ae);
                    }
                }
                catch (AggregatedException ae2)
                {
                    // Seeing that the AggregatedException was thrown, maximum number of exceptions
                    // must have been reached, so rethrow.
                    // Catch here, so you don't add an AggregatedException to an AggregatedException
                    throw ae2;
                }
                catch (Exception e)
                {
                    // Store the exception in the AggregatedException ae.
                    // That way, if a file has many errors leading to exceptions,
                    // you get them all in one go, packaged in a single aggregated exception.
                    ae.AddException(e);
                }

                // NOTE(review): when an exception was caught above, obj is still
                // default(T) (null) and is yielded anyway — callers appear to rely on
                // the aggregated throw in the finally block; confirm before changing.
                yield return(obj);
            }

            firstRow = false;
        }
    }
    finally
    {
        // Only dispose streams we opened ourselves; a caller-supplied stream
        // stays open so it can be enumerated again.
        if (readingFile)
        {
            stream.Dispose();
        }

        // If any exceptions were raised while reading the data from the file,
        // they will have been stored in the AggregatedException ae.
        // In that case, time to throw ae.
        ae.ThrowIfExceptionsStored();
    }
}
/// <summary>
/// Reads objects of type T from the supplied stream using the given file description.
/// </summary>
/// <typeparam name="T">Type each CSV row is mapped onto; must have a parameterless constructor.</typeparam>
/// <param name="stream">Seekable stream holding the CSV data.</param>
/// <param name="fileDescription">Describes separator, encoding and other CSV options.</param>
/// <returns>
/// A lazily evaluated sequence; each enumeration rewinds the stream and reads it again.
/// </returns>
public IEnumerable<T> Read<T>(StreamReader stream, CsvFileDescription fileDescription)
    where T : class, new()
{
    // Passing null as the file name tells ReadData to use the stream directly.
    return ReadData<T>(null, stream, fileDescription);
}