/// <summary>
/// Imports ware catalog items from an uploaded delimited file.
/// Existing wares (matched by Sku) get their Name updated; unknown
/// Skus are saved as new Ware entities. Always redirects back to Import.
/// </summary>
/// <param name="file">The uploaded file posted by the browser.</param>
public ActionResult Import(HttpPostedFileBase file)
{
    if (file.ContentLength > 0)
    {
        using (var sr = new StreamReader(file.InputStream))
        {
            var engine = new DelimitedFileEngine<WareCatalogItem>(Encoding.UTF8);
            var items = engine.ReadStream(sr);

            // Load all wares once and index by Sku to avoid a query per item.
            var existingWares = Session.QueryOver<Ware>().List();
            var wareMap = existingWares.ToDictionary(x => x.Sku);

            foreach (var item in items)
            {
                // TryGetValue avoids the double dictionary lookup of
                // ContainsKey followed by the indexer.
                Ware ware;
                if (wareMap.TryGetValue(item.Sku, out ware))
                {
                    ware.Name = item.Name;
                }
                else
                {
                    Session.Save(new Ware { Sku = item.Sku, Name = item.Name });
                }
            }
        }
    }
    return RedirectToAction("Import");
}
//-> Name:Ignore Mode Error handling
//-> Description:Read the file dropping bad records
//-> File:RunEngine.cs
/// <summary>
/// Demonstrates the IgnoreAndContinue error mode: records that fail
/// to parse are silently dropped and never reach the error manager.
/// </summary>
public override void Run()
{
    var engine = new DelimitedFileEngine<Customer>();

    // Switch error mode on
    engine.ErrorManager.ErrorMode = ErrorMode.IgnoreAndContinue;

    // This fails with not in enumeration error
    Customer[] customers = engine.ReadFile("Input.txt");

    // This wont display anything, we have dropped it
    foreach (var error in engine.ErrorManager.Errors)
    {
        Console.WriteLine();
        Console.WriteLine("Error on Line number: {0}", error.LineNumber);
        Console.WriteLine("Record causing the problem: {0}", error.RecordString);
        Console.WriteLine("Complete exception information: {0}", error.ExceptionInfo.ToString());
    }

    // This will display only 3 of the four records
    foreach (var customer in customers)
    {
        Console.WriteLine("Customer name {0} is a {1}", customer.ContactName, customer.ContactTitle);
    }
}
private void RunTests(string fileName, Type type, string delimiter)
{
    // Read the file with the supplied record type and delimiter,
    // then verify the expected record count came back.
    var engine = new DelimitedFileEngine(type);
    engine.Options.Delimiter = delimiter;

    object[] records = Common.ReadTest(engine, fileName);
    Assert.AreEqual(ExpectedRecords, records.Length);
}
//-> /File
//-> File:RunEngine.cs
/// <summary>
/// Read the input records; field conversion (including enums)
/// happens automatically during the read.
/// </summary>
public override void Run()
{
    var engine = new DelimitedFileEngine<Customer>();
    Customer[] customers = engine.ReadFile("Input.txt");

    foreach (var customer in customers)
    {
        Console.WriteLine("Customer name {0} is a {1}", customer.ContactName, customer.ContactTitle);
    }
}
/// <summary>
/// Configure the engine: trim the third field and remove the dummy
/// field. The commented lines below are experiments left by the
/// original author and are intentionally not executed.
/// </summary>
public override void Run()
{
    var engine = new DelimitedFileEngine<CustomersVerticalBar>();
    var customers = CreateCustomers();

    engine.Options.Fields[2].TrimMode = TrimMode.Both;
    engine.Options.RemoveField("DummyField");
    //engine.Options.Fields[3].IsOptional
    //engine.ReadString(customers)
}
/// <summary>
/// Creates the appropriate FileHelpers engine for a record type based
/// on whether it carries a DelimitedRecord or FixedLengthRecord attribute.
/// </summary>
/// <param name="recordType">The record type to inspect.</param>
/// <returns>A delimited or fixed-length engine for the type.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when the type carries neither attribute.
/// </exception>
public static FileHelperEngine GetEngineForType(Type recordType)
{
    // Fetch the attribute list once instead of once per check.
    var attributes = recordType.GetCustomAttributes(false);

    if (attributes.Count(a => a.GetType() == typeof(DelimitedRecordAttribute)) == 1)
        return new DelimitedFileEngine(recordType);

    if (attributes.Count(a => a.GetType() == typeof(FixedLengthRecordAttribute)) == 1)
        return new FixedFileEngine(recordType);

    throw new InvalidOperationException(String.Format("Record type {0} is not a Filehelpers class", recordType));
}
/// <summary>
/// Take the sample data selected in the grid and render it into the
/// three output text boxes using three different delimiters.
/// </summary>
private void cmdRun_Click(object sender, EventArgs e)
{
    var customers = (CustomersVerticalBar[]) grid1.SelectedObject;

    var engine = new DelimitedFileEngine(typeof (CustomersVerticalBar));

    // Default delimiter first, then semicolon, then tab.
    txtOut1.Text = engine.WriteString(customers);

    engine.Options.Delimiter = ";";
    txtOut2.Text = engine.WriteString(customers);

    engine.Options.Delimiter = "\t";
    txtOut3.Text = engine.WriteString(customers);
}
//-> Name:ErrorMode saving Errors
//-> Description:Read the file saving bad records
//-> File:RunEngine.cs
/// <summary>
/// Read a file containing a bad record with SaveAndContinue, then
/// persist any captured errors to disk and reload them for display.
/// </summary>
public override void Run()
{
    var engine = new DelimitedFileEngine<Customer>();

    // Switch error mode on
    engine.ErrorManager.ErrorMode = ErrorMode.SaveAndContinue;

    // This fails with not in enumeration error
    Customer[] customers = engine.ReadFile("Input.txt");

    if (engine.ErrorManager.HasErrors)
    {
        engine.ErrorManager.SaveErrors("errors.out");
    }

    LoadErrors();
}
//-> File:RunEngine.cs
/// <summary>
/// Write the same customer data three times, switching the delimiter
/// between writes to show how the option is changed.
/// </summary>
public override void Run()
{
    var engine = new DelimitedFileEngine<CustomersVerticalBar>();
    var customers = CreateCustomers();

    // Vertical bar is the record type's default delimiter.
    engine.WriteFile("Out_Vertical.txt", customers);

    // Semicolon.
    engine.Options.Delimiter = ";";
    engine.WriteFile("Out_SemiColon.txt", customers);

    // Tab.
    engine.Options.Delimiter = "\t";
    engine.WriteFile("Out_Tab.txt", customers);
}
//-> /File
public override void Run()
{
    //-> File:Example.txt
    var engine = new DelimitedFileEngine<CustomersVerticalBar>();

    engine.Options.Fields[2].TrimMode = TrimMode.Both;
    engine.Options.RemoveField("DummyField");

    // City (the final field) may be absent from a record.
    var lastField = engine.Options.Fields.Count - 1;
    engine.Options.Fields[lastField].IsOptional = true;

    engine.ReadFile("Input.txt");
    //-> /File
}
//-> Name:ErrorMode Error handling
//-> Description:Read the file rejecting bad records
//-> File:RunEngine.cs
/// <summary>
/// Read a file containing a bad record using SaveAndContinue: the bad
/// record is captured in the error manager and excluded from the results.
/// </summary>
/// <remarks>
/// In the standard mode you can catch the exceptions when something fails.
/// </remarks>
public override void Run()
{
    var engine = new DelimitedFileEngine<Customer>();

    // Switch error mode on
    engine.ErrorManager.ErrorMode = ErrorMode.SaveAndContinue;

    // Only record that fails will not be present
    Customer[] customers = engine.ReadFile("Input.txt");

    // This will display error from line 2 of the file.
    foreach (var error in engine.ErrorManager.Errors)
    {
        Console.WriteLine();
        Console.WriteLine("Error on Line number: {0}", error.LineNumber);
        Console.WriteLine("Record causing the problem: {0}", error.RecordString);
        Console.WriteLine("Complete exception information: {0}", error.ExceptionInfo.ToString());
    }
}
//-> /File
public override void Run()
{
    //-> File:Example.cs
    try
    {
        // This fails with not in enumeration error
        var engine = new DelimitedFileEngine<Customer>();
        var customers = engine.ReadFile("Input.txt");
    }
    catch (Exception ex)
    {
        // Dump the full exception, stack trace included.
        Console.WriteLine(ex.ToString());
    }
    //-> /File
}
/// <summary>
/// Seeds a file with one record followed by a run of line endings,
/// appends a second record via the engine, and asserts the file then
/// contains exactly two records (i.e. the blank lines were handled).
/// </summary>
/// <param name="testdata">The record to seed and append.</param>
/// <param name="engine">Engine used for the append.</param>
/// <param name="filename">Path of the scratch file.</param>
/// <param name="twoRecords">Expected full file content after the append.</param>
/// <param name="LineEnds">Run of platform line endings to seed after the first record.</param>
/// <param name="testname">Label used in the assertion message.</param>
private static void ProcessAppend(SimpleData testdata, DelimitedFileEngine<SimpleData> engine, String filename, String twoRecords, string LineEnds, string testname)
{
    using (var fs = new StreamWriter(filename))
    {
        fs.Write(testdata.Record);
        fs.Write(LineEnds); // lots of blank lines to trim
        // Explicit Close() removed: the using block disposes (and closes) the writer.
    }

    engine.AppendToFile(filename, testdata);

    using (var input = new StreamReader(filename))
    {
        String result = input.ReadToEnd();
        result.AssertEqualTo<String>(twoRecords, testname + ": Expected two records only on output");
    }
}
//-> Name:Simple Error handling
//-> Description:Read the file or reject the whole file
//-> File:RunEngine.cs
/// <summary>
/// Default error handling: the first bad record aborts the read, so a
/// try/catch around the whole read rejects the entire file.
/// </summary>
/// <remarks>
/// In the standard mode you can catch the exceptions when something fails.
/// </remarks>
public override void Run()
{
    try
    {
        var engine = new DelimitedFileEngine<Customer>();

        // This fails with not in enumeration error
        Customer[] customers = engine.ReadFile("Input.txt");

        // this will not happen because of the exception
        foreach (var customer in customers)
        {
            Console.WriteLine("Customer name {0} is a {1}", customer.ContactName, customer.ContactTitle);
        }
    }
    catch (Exception ex)
    {
        // Console.WriteLine(ex.ToString()); // with stack trace
        this.Console.WriteLine(ex.Message);
    }
}
public void TestDosFile()
{
    // Append after trailing line-ending runs in each platform style and
    // expect exactly two records every time.
    var testdata = new SimpleData { Record = "one record only" };
    var engine = new DelimitedFileEngine<SimpleData>();

    using (var filename = new TempFileFactory())
    {
        String twoRecords =
            testdata.Record + StringHelper.NewLine +
            testdata.Record + StringHelper.NewLine;

        ProcessAppend(testdata, engine, filename, twoRecords, "\r\n\r\n", "Dos");
        ProcessAppend(testdata, engine, filename, twoRecords, "\n\n\n", "Unix");
        ProcessAppend(testdata, engine, filename, twoRecords, "\r\r\r", "Macintosh");

        // Clean up the loose file stream from the testing
        GC.Collect();
    }
}
public void ArrayWriteMaxError5()
{
    // Writing five array elements into a model limited to four must
    // raise an InvalidOperationException.
    var record = new ArrayModel2To4 { Id = 1, Name = "name1", Weighting = new float[] { 10.2f, 1, 2, 3, 4 } };

    Assert.Throws<InvalidOperationException>(() =>
    {
        var engine = new DelimitedFileEngine<ArrayModel2To4>();
        engine.WriteString(new[] { record });
    });
}
public void ArrayWriteFloatFieldsNullable()
{
    // An empty nullable-float array writes as a trailing empty field
    // and round-trips back to a zero-length array.
    var engine = new DelimitedFileEngine<ArrayModelNullable>();
    var dataToExport = new List<ArrayModelNullable>
    {
        new ArrayModelNullable { Id = 1, Name = "name1", Weighting = new float?[] { } }
    };

    var res = engine.WriteString(dataToExport);
    Assert.AreEqual("1,name1," + Environment.NewLine, res);

    var roundTripped = engine.ReadString(res);
    roundTripped.Length.AssertEqualTo(1);
    roundTripped[0].Weighting.Length.AssertEqualTo(0);
}
public void ArrayWriteFloatFields3()
{
    // Three float elements serialize as three comma-separated fields.
    var engine = new DelimitedFileEngine<ArrayModel1>();
    var dataToExport = new List<ArrayModel1>
    {
        new ArrayModel1 { Id = 1, Name = "name1", Weighting = new float[] { 10.2f, 30.5f, 11f } }
    };

    var res = engine.WriteString(dataToExport);
    Assert.AreEqual("1,name1,10.2,30.5,11" + Environment.NewLine, res);
}
public void ArrayReadFieldsNullAndNullable()
{
    // The empty middle field becomes a null element in the nullable array.
    var engine = new DelimitedFileEngine<ArrayModelNullable>();
    var records = engine.ReadString("1,name1,10.2,,30.5");

    records.Length.AssertEqualTo(1);
    records[0].Weighting.Length.AssertEqualTo(3);
    records[0].Weighting[1].AssertEqualTo(null);
}
/// <summary>
/// Reads the shared custom-metric register file under an exclusive file
/// lock, loads the registered metrics into Register (keyed by text key),
/// then truncates the file so the same metrics are not synced twice.
/// </summary>
public void AddRegisteredCustomMetrics()
{
    if (File.Exists(AgentContext.CustomMetricRegisterFilePath))
    {
        // Acquire lock: retry up to 10 times, one second apart, since
        // another process may currently hold the file open.
        FileStream csvFile = null;
        for (var i = 0; i < 10; i++)
        {
            try
            {
                csvFile = File.Open(AgentContext.CustomMetricRegisterFilePath, FileMode.Open, FileAccess.ReadWrite, FileShare.None);
                break;
            }
            catch
            {
                Thread.Sleep(1000);
            }
        }
        if (csvFile == null)
        {
            Log.Warn(string.Format("Could not acquire lock on {0}; Skipping registered custom metrics", AgentContext.CustomMetricRegisterFilePath));
            return;
        }

        // try/finally guarantees the handle (and therefore the lock) is
        // released even if parsing or truncation throws; the original
        // leaked the handle on those paths.
        try
        {
            // Read current metrics
            var engine = new DelimitedFileEngine(typeof(RegisterHelper));
            try
            {
                var registeredMetrics = engine.ReadStream(new StreamReader(csvFile)) as RegisterHelper[];
                Register = registeredMetrics.GroupBy(r => r.Textkey).ToDictionary(g => g.Key, g => g.First().Description);
            }
            catch
            {
                Log.Warn(string.Format("There is a problem with the format of {0}; Skipping registered custom metrics", AgentContext.CustomMetricRegisterFilePath));
                return;
            }

            // Remove all synced metrics
            csvFile.SetLength(0);
        }
        finally
        {
            // Release lock
            csvFile.Close();
        }
    }
}
/// <summary>
/// Writes the current data to a delimited file with the standard header
/// and reports whether the file exists afterwards.
/// </summary>
/// <param name="fileName">Destination path for the export.</param>
public bool WriteFile(string fileName)
{
    var fileEngine = new DelimitedFileEngine<MedSeekData>();
    fileEngine.HeaderText = FileHeader;

    fileEngine.WriteFile(fileName, data);

    return System.IO.File.Exists(fileName);
}
/// <summary>Read a delimited data file to a DataTable</summary>
/// <param name="fileName">The name of the delimited file to read, including absolute or relative path as appropriate</param>
/// <param name="delimiter">Delimiter character to be used</param>
/// <param name="hasHeader">Is there a header row containing column names? If not they will be generated automatically</param>
/// <param name="maxRows">Only read this many rows (useful for testing with larger files); a negative value reads everything</param>
/// <returns>A DataTable populated from the file (all columns are strings)</returns>
/// <exception cref="System.ArgumentException">Thrown if the delimited file name is not supplied</exception>
/// <exception cref="System.Exception">Thrown if the delimited file does not exist or cannot be read</exception>
public static DataTable Delimited2DT(string fileName, char delimiter, bool hasHeader, int maxRows)
{
    // Fail fast if no file name was passed in.
    // (Was `fileName == String.Empty`, which let null slip through.)
    if (String.IsNullOrEmpty(fileName))
        throw new ArgumentException("file name required", "fileName");

    // Set up the delimited file reader
    DelimitedFileEngine<DelimitedRow> engine = new DelimitedFileEngine<DelimitedRow>();
    engine.Options.Delimiter = delimiter.ToString();
    engine.Options.IgnoreEmptyLines = true;
    //engine.Options.IgnoreCommentedLines = true;

    DataTable dt = new DataTable();
    long recordCount = 0;
    DelimitedRow[] rows;

    if (!System.IO.File.Exists(fileName))
    {
        throw new Exception(string.Format("Problem finding file {0}?", fileName));
    }

    try
    {
        rows = engine.ReadFile(fileName, maxRows);
        //TODO: Need to allow reading a file that is already open elsewhere
    }
    catch (System.Exception ex)
    {
        // NOTE: the original message literal contained a raw line break,
        // which is not legal in a non-verbatim string; it is now one line.
        throw new Exception(string.Format("Problem reading file {0}? Check the file exists and can be accessed, and is not currently open in another application", fileName), ex);
    }

    foreach (DelimitedRow row in rows)
    {
        if (recordCount == 0)
        {
            if (hasHeader)
            {
                // First row contains column names; generate a default name
                // for blank or duplicate headers.
                foreach (String s in row.fieldValues)
                {
                    if (s != null && s.Trim().Length > 0 && !dt.Columns.Contains(s))
                        dt.Columns.Add(getValidColumnName(s), typeof(String));
                    else
                        dt.Columns.Add(getNextColumnName(dt), typeof(String));
                }
            }
            else
            {
                // First row is data: create default column names per field,
                // then keep the row itself.
                //TODO: there's a CHANCE that first row may not have as many values as there are columns
                foreach (String s in row.fieldValues)
                {
                    dt.Columns.Add(getNextColumnName(dt), typeof(String));
                }
                dt.Rows.Add(row.fieldValues);
            }
        }
        else // not first row
        {
            // Pad/truncate to the DataTable's column count so ragged rows
            // still load; this may hide rows mis-parsed on embedded commas.
            String[] fieldValues = new String[dt.Columns.Count];
            for (int i = 0; i < dt.Columns.Count; i++)
            {
                if (i < row.fieldValues.Length)
                    fieldValues[i] = row.fieldValues[i];
            }

            // Strip leading and trailing quotes if present
            // (CSV engine should be doing this but doesn't seem to).
            // The Length > 1 guard prevents a lone `"` field from producing
            // a negative Substring length (crashed in the original).
            for (int i = 0; i < fieldValues.Length; i++)
            {
                if (fieldValues[i] != null && fieldValues[i].Length > 1
                    && fieldValues[i].StartsWith("\"") && fieldValues[i].EndsWith("\""))
                {
                    fieldValues[i] = fieldValues[i].Substring(1, fieldValues[i].Length - 2);
                }
            }

            dt.Rows.Add(fieldValues);
        }

        recordCount++;
        if (maxRows >= 0 && recordCount >= maxRows)
            break;
    }

    return dt;
}
/// <summary>
/// Reads the shared custom-metric report file under an exclusive file
/// lock, keeps at most one reported value per metric per minute, and
/// returns the survivors grouped by metric text key. The file is
/// truncated after a successful read so values are not reported twice.
/// Returns an empty dictionary when the file is missing, locked, or
/// malformed.
/// </summary>
public Dictionary<string, List<ReportHelper>> GetMetricValues()
{
    if (File.Exists(AgentContext.CustomMetricReportFilePath))
    {
        // Acquire lock: retry up to 10 times, sleeping 1s between attempts,
        // since another process may currently hold the file open.
        FileStream csvFile = null;
        for (var i = 0; i < 10; i++)
        {
            try
            {
                csvFile = File.Open(AgentContext.CustomMetricReportFilePath, FileMode.Open, FileAccess.ReadWrite, FileShare.None);
                break;
            }
            catch
            {
                System.Threading.Thread.Sleep(1000);
            }
        }
        if (csvFile == null)
        {
            Log.Warn(string.Format("Could not acquire lock on {0}; Skipping reported custom metrics", AgentContext.CustomMetricReportFilePath));
            return new Dictionary<string, List<ReportHelper>>();
        }
        // Read current metrics
        var uniqueValues = new Dictionary<string, ReportHelper>();
        var engine = new DelimitedFileEngine(typeof(ReportHelper));
        try
        {
            var reportMetrics = engine.ReadStream(new StreamReader(csvFile)) as ReportHelper[];
            foreach (var item in reportMetrics)
            {
                try
                {
                    // Parsing validates the row; rows with a bad timestamp or
                    // non-numeric value are skipped via the inner catch.
                    var timestamp = DateTime.ParseExact(item.Timestamp, "yyyy-MM-dd HH:mm:ss", null);
                    var value = Double.Parse(item.Value); // parsed only for validation
                    // Key is "textkey:minute", so a later row overwrites an
                    // earlier one within the same minute.
                    uniqueValues[string.Format("{0}:{1}", item.Textkey, timestamp.ToString("yyyy-MM-ddHH:mm"))] = item;
                }
                catch
                {
                    continue;
                }
            }
            var uniqueMetricList = uniqueValues.Values.ToList();
            // Chronological order via string sort on the fixed-format timestamp.
            uniqueMetricList = uniqueMetricList.OrderBy(s => s.Timestamp).ToList();
            // Group the surviving values by metric text key.
            var customValues = new Dictionary<string, List<ReportHelper>>();
            foreach (var metric in uniqueMetricList)
            {
                if (!customValues.ContainsKey(metric.Textkey))
                {
                    customValues[metric.Textkey] = new List<ReportHelper> { metric };
                }
                else
                {
                    customValues[metric.Textkey].Add(metric);
                }
            }
            // Remove all synced metrics
            csvFile.SetLength(0);
            // Release lock
            csvFile.Close();
            return customValues;
        }
        catch
        {
            Log.Warn(string.Format("There is a problem with the format of {0}; Skipping reported custom metrics", AgentContext.CustomMetricReportFilePath));
            csvFile.Close();
            return new Dictionary<string, List<ReportHelper>>();
        }
    }
    else
    {
        return new Dictionary<string, List<ReportHelper>>();
    }
}
/// <summary>
/// Exports the final race results to three CSV files under Export/:
/// one for the race summary, one for per-player results (sorted by
/// finishing order), and one for per-lap times.
/// </summary>
/// <param name="raceStat">Map of player id to raceStats for this race.</param>
/// <param name="datFile">Base name used to build the exported file names.</param>
/// <param name="currInfoRace">Summary information about the race itself.</param>
public static void csvAllResult(System.Collections.Hashtable raceStat, string datFile, infoRace currInfoRace)
{
    // Collect the players and sort them by final result.
    System.Collections.ArrayList sorted = new System.Collections.ArrayList();
    System.Collections.IDictionaryEnumerator tmpRaceStat = raceStat.GetEnumerator();
    while (tmpRaceStat.MoveNext()) // for each player
    {
        sorted.Add((raceStats)tmpRaceStat.Value);
    }
    raceStats.modeSort = (int)sortRaceStats.SORT_RESULT;
    sorted.Sort();

    if (!Directory.Exists("Export"))
        System.IO.Directory.CreateDirectory("Export");

    // Race summary file.
    var raceEngine = new DelimitedFileEngine<infoRace>();
    raceEngine.HeaderText = "datFile,currentTrackName,maxSplit,weather,wind,raceLaps,sraceLaps,qualMins,HName,currLap,isToc";
    currInfoRace.datFile = datFile;
    var raceResults = new List<infoRace> { currInfoRace };
    raceEngine.WriteFile("Export/" + datFile + "_race.csv", raceResults);

    // Per-player results file, in finishing order.
    var engine = new DelimitedFileEngine<raceStats>();
    engine.HeaderText = "datFile,UCID,PLID,userName,nickName,Plate,bestSplit1,lapBestSplit1,bestSplit2,lapBestSplit2,bestSplit3,lapBestSplit3,bestLastSplit,lapBestLastSplit,cumuledTime,bestSpeed,lapBestSpeed,numStop,cumuledStime,resultNum,finalPos,finished,finPLID,totalTime,bestLap,lapBestLap,CName,penalty,gridPos,lapsLead,tmpTime,firstTime,avgTime,curBestSplit,curWrSplit,curLapBestSplit,lapStability,curSplit1,curSplit2,curSplit3,yellowFlags,inYellow,blueFlags,inBlue,sFlags,numPen,lastSplit,CurrIdxSplit";
    List<raceStats> results = new List<raceStats>();
    foreach (raceStats r in sorted)
    {
        r.datFile = datFile;
        results.Add(r);
    }
    engine.WriteFile("Export/" + datFile + "_results_race.csv", results);

    // Per-lap results file: tag each lap with its owner and 1-based lap number.
    List<Lap> lapResults = new List<Lap>();
    foreach (raceStats r in sorted)
    {
        int lapNumber = 1;
        foreach (Lap lap in r.lap)
        {
            lap.datFile = datFile;
            lap.UCID = r.UCID;
            lap.PLID = r.PLID;
            lap.lap = lapNumber++;
            lapResults.Add(lap); // redundant (Lap) cast removed
        }
    }
    var engine2 = new DelimitedFileEngine<Lap>();
    engine2.HeaderText = "datFile,UCID,PLID,lap,split1,split2,split3,lapTime,cumuledTime";
    engine2.WriteFile("Export/" + datFile + "_results_race_laps.csv", lapResults);

    // NOTE(review): a large block of commented-out legacy text-formatting
    // code (gap/position string building) was removed here; recover it
    // from source control if that formatter is ever needed again.
}
public void BadRecordType1()
{
    // CustomersFixed is presumably a fixed-length record type, so the
    // delimited engine should reject it during construction
    // (expected-exception attribute not visible in this chunk — confirm).
    var engine = new DelimitedFileEngine(typeof(CustomersFixed));
}
public void ArrayWriteMinErrorNull()
{
    // A null array falls below the model's minimum element count, so
    // the write must raise an InvalidOperationException.
    var record = new ArrayModel2To4 { Id = 1, Name = "name1", Weighting = null };

    Assert.Throws<InvalidOperationException>(() =>
    {
        var engine = new DelimitedFileEngine<ArrayModel2To4>();
        engine.WriteString(new[] { record });
    });
}
/// <summary>
/// Extension method: parses the delimited quote file at the given path
/// into quote records.
/// </summary>
/// <param name="filePath">Path to the quote import file.</param>
public static IEnumerable<IQuote> ReadFile(this string filePath)
{
    return new DelimitedFileEngine<QuoteImport>().ReadFile(filePath);
}
public void BadRecordType2()
{
    // A null record type is presumably expected to make the constructor
    // throw (expected-exception attribute not visible in this chunk — confirm).
    var engine = new DelimitedFileEngine(null);
}
/// <summary>
/// Exports all closed tasks as a Windows-1252 CSV download, then marks
/// the exported tasks as posted.
/// </summary>
public ActionResult ExportToFile()
{
    var closedTasks = Query(new ExportClosedTasksQuery());

    // Flatten each task into the flat DTO the file engine writes; prices
    // use the en-US decimal point and descriptions are collapsed onto a
    // single line.
    var taskDtos = closedTasks.Select(x => new TaskDto
    {
        ProjectSku = x.ProjectSku,
        ClientSku = x.ClientSku,
        Timestamp = x.Timestamp,
        Sku = x.Sku,
        Quantity = x.Quantity,
        Price = x.Price.ToString("0.00", CultureInfo.GetCultureInfo("en-US")),
        Description = x.Description.Replace('\n', ' ').Replace("\r", "")
    });

    var encoding = Encoding.GetEncoding(1252);
    var engine = new DelimitedFileEngine<TaskDto>(encoding);
    var data = encoding.GetBytes(engine.WriteString(taskDtos));

    // Flip everything we just exported from Closed to Posted.
    Session.CreateSQLQuery("update Task set Status = :posted where Status = :closed")
        .SetEnum("posted", TaskStatus.Posted)
        .SetEnum("closed", TaskStatus.Closed)
        .ExecuteUpdate();

    return File(data, "application/vnd.ms-excel", string.Format("export-{0:yyyyMMddHHmmss}.csv", DateTime.Now));
}