/// <summary>Verifies the assigned encoding round-trips through the engine, then runs the shared test core.</summary>
private static void RunTests(Encoding enc, params string[] pathElements)
{
    var barEngine = new FileHelperEngine<CustomersVerticalBar>
    {
        Encoding = enc
    };

    Assert.AreEqual(enc, barEngine.Encoding);
    CoreRunTest(barEngine, pathElements);
}
/// <summary>Writes two records, appends a third, and verifies all three read back in order.</summary>
public void AppendOneToFile()
{
    engine = new FileHelperEngine(typeof (SampleType));

    var written = new SampleType[2];
    written[0] = new SampleType { Field1 = DateTime.Now.AddDays(1).Date, Field2 = "je", Field3 = 0 };
    written[1] = new SampleType { Field1 = DateTime.Now.Date, Field2 = "ho", Field3 = 2 };
    engine.WriteFile(@"test.txt", written);

    var appended = new SampleType { Field1 = DateTime.Now.Date, Field2 = "h2", Field3 = 2 };
    engine.AppendToFile(@"test.txt", appended);

    var readBack = (SampleType[]) engine.ReadFile(@"test.txt");

    Assert.AreEqual(3, readBack.Length);
    Assert.AreEqual(written[0].Field1, readBack[0].Field1);
    Assert.AreEqual(written[1].Field1, readBack[1].Field1);
    Assert.AreEqual(DateTime.Now.Date, readBack[2].Field1);
}
/// <summary>
/// Downloads the MSWS daily report and parses it directly from the response stream.
/// NOTE(review): the method name says 20060709 / 28 records, but the code requests
/// 2006-07-20 and expects 32 records — confirm which is intended.
/// </summary>
public void GetMSWSReportsFromURL_AsStream_20060709_28Records()
{
    DateTime date = new DateTime(2006, 7, 20);
    string url = string.Format(MSWSDataUrl_Format, date.ToString(MSWSDataURL_DateFormat));
    FileHelperEngine engine = new FileHelperEngine(typeof(MSWSDailyReportRecord));

    MSWSDailyReportRecord[] res;
    HttpWebRequest webReq = (HttpWebRequest)WebRequest.Create(url);

    // using-blocks replace the manual finally cleanup; the original also contained a
    // no-op `catch { throw; }` and a pointless `webReq = null` — both removed.
    using (HttpWebResponse webResp = (HttpWebResponse)webReq.GetResponse())
    using (StreamReader reader = new StreamReader(webResp.GetResponseStream(), Encoding.GetEncoding("utf-8")))
    {
        res = (MSWSDailyReportRecord[]) engine.ReadStream(reader);
    }

    // Expected value goes first (the original had the arguments swapped, which
    // produces a misleading failure message).
    Assert.AreEqual(32, res.Length);
}
/// <summary>Writes two records with a negative max-records argument, reads them back, and cleans up.</summary>
public void WriteFile2()
{
    var engine = new FileHelperEngine<SampleType>();

    var records = new[]
    {
        new SampleType { Field1 = DateTime.Now.AddDays(1), Field2 = "je", Field3 = 0 },
        new SampleType { Field1 = DateTime.Now, Field2 = "ho", Field3 = 2 },
    };

    // -10 max records: presumably treated as "no limit" by the engine — behavior kept as-is.
    engine.WriteFile(@"miprueba.txt", records, -10);
    records = (SampleType[]) engine.ReadFile(@"miprueba.txt");

    if (File.Exists(@"miprueba.txt"))
        File.Delete(@"miprueba.txt");

    Assert.AreEqual(2, records.Length);
}
/// <summary>Imports sim areas from a FileHelpers file, inserting only names not already in the database.</summary>
static void Import(string simAreasPath, DbConnection connection)
{
    var context = new SimAreasContext(connection, false, new DropCreateDatabaseAlways<SimAreasContext>());
    var engine = new FileHelperEngine(typeof(SimAreas));
    var rows = (SimAreas[])engine.ReadFile(simAreasPath);

    foreach (var row in rows)
    {
        var existing = context.SimAreas.FirstOrDefault(s => s.SimAreaName == row.SimAreaName);
        if (existing != null)
            continue; // already imported

        context.SimAreas.Add(new SimArea
        {
            SimAreaName = row.SimAreaName,
            Latitude = row.Latitude,
            Longitude = row.Longitude,
            Height = row.Height,
            GeoidSeparation = row.GeoidSeparation,
            OpsLimitFile = row.OpsLimitFile,
            SimLimitFile = row.SimLimitFile,
        });
        context.SaveChanges();
    }
}
//-> /File
//-> Now just create some records and write them with the Engine
public override void Run()
{
    //-> File:Example.cs
    var engine = new FileHelperEngine<Customer>();

    var customers = new List<Customer>
    {
        new Customer { CustId = 1, Name = "Antonio Moreno Taquería", AddedDate = new DateTime(2009, 05, 01) },
        new Customer { CustId = 2, Name = "Berglunds snabbköp", AddedDate = new DateTime(2009, 05, 02) },
    };

    engine.WriteFile("Output.Txt", customers);
    //-> /File

    // Echo the same records to the console.
    Console.WriteLine(engine.WriteString(customers));
}
/// <summary>Empty GUID fields in a runtime-built record class should map to Guid.Empty.</summary>
public void RunTimeEmptyGuidProperties()
{
    var classBuilder = new DelimitedClassBuilder("EntityWithGuid", "\t");
    classBuilder.AddField("Name", typeof(string));
    classBuilder.AddField("Id", typeof(Guid));
    classBuilder.LastField.FieldNullValue = Guid.Empty;

    var engine = new FileHelperEngine(classBuilder.CreateRecordClass())
    {
        Options = { IgnoreFirstLines = 1 }
    };

    const string inputValue = @"Name Id first second ";

    var records = engine.ReadString(inputValue);
    records.Length.AssertEqualTo(2);

    dynamic row = records[0];
    ((Guid)row.Id).AssertEqualTo(Guid.Empty);
    ((string)row.Name).AssertEqualTo("first");

    row = records[1];
    ((Guid)row.Id).AssertEqualTo(Guid.Empty);
    ((string)row.Name).AssertEqualTo("second");
}
/// <summary>
/// Writes status data to <paramref name="file"/> via FileHelpers.
/// NOTE(review): <paramref name="reports"/> is never read and <c>items</c> stays empty,
/// so only an empty stream is written — the report-to-item conversion is still a TODO.
/// </summary>
public void ExportStatus(TextWriter file, IList<StatusReport> reports)
{
    var engine = new FileHelperEngine<StatusCsvItem>();
    var items = new List<StatusCsvItem>();
    // convert reports to items
    engine.WriteStream(file, items);
}
/// <summary>An inherited record type exposes all three fields and parses Good\Test1.txt correctly.</summary>
public void Inherited1()
{
    var engine = new FileHelperEngine<SampleInheritType>();

    Assert.AreEqual(3, engine.Options.FieldCount);
    Assert.AreEqual("Field1", engine.Options.FieldsNames[0]);
    Assert.AreEqual("Field2", engine.Options.FieldsNames[1]);
    Assert.AreEqual("Field3", engine.Options.FieldsNames[2]);

    SampleInheritType[] rows = TestCommon.ReadTest<SampleInheritType>(engine, "Good", "Test1.txt");

    Assert.AreEqual(4, rows.Length);
    Assert.AreEqual(4, engine.TotalRecords);
    Assert.AreEqual(0, engine.ErrorManager.ErrorCount);

    Assert.AreEqual(new DateTime(1314, 12, 11), rows[0].Field1);
    Assert.AreEqual("901", rows[0].Field2);
    Assert.AreEqual(234, rows[0].Field3);

    Assert.AreEqual(new DateTime(1314, 11, 10), rows[1].Field1);
    Assert.AreEqual("012", rows[1].Field2);
    Assert.AreEqual(345, rows[1].Field3);
}
/// <summary>
/// Reads "{fileName}.csv" from blob storage and returns its rows as a list of
/// TemperatureSensor records; any failure is surfaced as a 400 with the message.
/// </summary>
public IHttpActionResult PullSensorDataFromCsvFileAsList(string fileName)
{
    try
    {
        FileHelperEngine engine = new FileHelperEngine(typeof(TemperatureSensor));
        engine.HeaderText = "DeviceId,IsActive,SensorId,Temperature,LogDate,LogHour,LogMinute,LogSecond";

        // NOTE(review): sync-over-async (.Result) blocks a request thread; consider
        // making this action async Task<IHttpActionResult> in a follow-up.
        var result = StorageHelper.ReadDataFromBlob(string.Format("{0}.csv", fileName)).Result;

        // ToList() never returns null, so the original null check and its
        // BadRequest("Result was invalid") branch were unreachable dead code.
        var sensorsDataList = ((TemperatureSensor[])engine.ReadString(result)).ToList();
        return Ok(sensorsDataList);
    }
    catch (Exception ex)
    {
        return BadRequest(ex.Message);
    }
}
/// <summary>Writes a single record to a StringWriter and checks output length and date prefix.</summary>
public void WriteStream()
{
    engine = new FileHelperEngine(typeof (SampleType));

    var rows = new SampleType[2];
    rows[0] = new SampleType { Field1 = DateTime.Now.AddDays(1), Field2 = "je", Field3 = 0 };
    rows[1] = new SampleType { Field1 = DateTime.Now, Field2 = "ho", Field3 = 2 };

    var sb = new StringBuilder();
    // Max records = 1, so only the first row is emitted.
    engine.WriteStream(new StringWriter(sb), rows, 1);

    // 14 data characters plus the trailing newline pair.
    Assert.AreEqual(14 + 2, sb.ToString().Length);
    Assert.AreEqual(sb.ToString(0, 8), DateTime.Now.AddDays(1).ToString("ddMMyyyy"));
}
/// <summary>
/// Loads start/end pairs from the file; when tag matching is enabled, only rows whose
/// comment contains the required tag are kept.
/// </summary>
public StartAndEndPair[] Load()
{
    var engine = new FileHelperEngine<CommentedStartAndEndPair>();
    var pairs = new List<StartAndEndPair>();

    foreach (var row in engine.ReadFile(_fileName))
    {
        // Keep the row unless tag matching is on and the comment lacks the tag.
        if (!_matchRequiredTag || row.Comment.Contains(_requiredTag))
            pairs.Add(new StartAndEndPair(row.Start, row.End));
    }

    return pairs.ToArray();
}
/// <summary>
/// Process the delimited file twice:
/// once with the simple engine, once with Async.
/// </summary>
/// <remarks>
/// This is a simple sample of using the Normal engine and the Async engine.
/// </remarks>
public void ReadWrite()
{
    var engine = new FileHelperEngine<Orders>();

    // Synchronous read, then write the same records back out.
    Orders[] orders = engine.ReadFile(@"C:\TestIn.txt");
    engine.WriteFile(@"C:\TestOut.txt", orders);

    foreach (Orders order in orders)
    {
        Console.WriteLine("Order Info:");
        Console.WriteLine(order.CustomerID + " - " + order.OrderDate.ToString("dd/MM/yy"));
    }

    // Record-at-a-time read of the same file with the async engine.
    var asyncEngine = new FileHelperAsyncEngine<Orders>();
    asyncEngine.BeginReadFile(@"C:\TestIn.txt");
    while (asyncEngine.ReadNext() != null)
    {
        Orders current = asyncEngine.LastRecord;
        // your code here
        Console.WriteLine(current.CustomerID);
    }
}
/// <summary>Checks recovered record counts for exact, short, long, and variable-length fixed inputs.</summary>
public void MoreChars()
{
    var engine = new FileHelperEngine<CustomerMore>
    {
        ErrorMode = ErrorMode.IgnoreAndContinue
    };

    var rows = FileTest.Good.CustomersFixedExact.ReadWithEngine(engine);
    rows.Length.AssertEqualTo(91);

    rows = FileTest.Good.CustomersFixedLessChars10Records.ReadWithEngine(engine);
    rows.Length.AssertEqualTo(81);

    rows = FileTest.Good.CustomersFixedMoreChars5Records.ReadWithEngine(engine);
    rows.Length.AssertEqualTo(91);

    rows = FileTest.Good.CustomersFixedMoreVariable12Records.ReadWithEngine(engine);
    rows.Length.AssertEqualTo(85);
}
/// <summary>
/// Scrapes school test results from the first page of each configured PDF URL,
/// then prints the combined data as JSON and CSV.
/// </summary>
static void Main(string[] args)
{
    List<SchoolTestResult> result = new List<SchoolTestResult>();
    string[] urls = ConfigurationManager.AppSettings["urls"].Split(',');
    Console.WriteLine("Reading " + urls.Length + " files");

    foreach (string url in urls)
    {
        Uri uri = new Uri(url);
        PdfReader reader = new PdfReader(uri);
        try
        {
            String text = PdfTextExtractor.GetTextFromPage(reader, 1);
            result.AddRange(new AssessmentReader().parse(text));
        }
        catch (Exception e)
        {
            Console.WriteLine("Error Reading " + url);
            Console.WriteLine(e);
        }
        finally
        {
            // Release PDF resources — the original leaked every reader.
            reader.Close();
        }
    }

    string jsonOutput = JsonConvert.SerializeObject(result, Formatting.Indented);
    string csvOutput = new FileHelperEngine<SchoolTestResult>().WriteString(result);

    Console.WriteLine(jsonOutput);
    Console.WriteLine(csvOutput);
    Console.ReadLine();
}
/// <summary>
/// Run the engine into an array and show data on a grid.
/// </summary>
private void cmdRun_Click(object sender, EventArgs e)
{
    var engine = new FileHelperEngine<CustomersVerticalBar>();
    grid1.SelectedObject = (CustomersVerticalBar[]) engine.ReadString(txtData.Text);
}
/// <summary>Optional-field delimited file (variant 0) parses the expected record count.</summary>
public void Delimited0()
{
    engine = new FileHelperEngine(typeof (OptionalDelimited1));
    var rows = (OptionalDelimited1[]) Common.ReadTest(engine, @"Good\OptionalDelimited0.txt");
    Assert.AreEqual(ExpectedRecords, rows.Length);
}
/// <summary>A bad value routed through the custom price converter must raise ConvertException.</summary>
public void ExceptionsTestsPriceConverterTest()
{
    var engine = new FileHelperEngine<CustomConvType>();

    Assert.Throws<ConvertException>(() => engine.ReadString(testTo));
}
/// <summary>Builds a delimited record class at runtime and reads Good\Test2.txt into a DataTable.</summary>
public void FullClassBuilding()
{
    var builder = new DelimitedClassBuilder("Customers", ",")
    {
        IgnoreFirstLines = 1,
        IgnoreEmptyLines = true,
    };

    builder.AddField("Field1", typeof(DateTime));
    builder.LastField.TrimMode = TrimMode.Both;
    builder.LastField.QuoteMode = QuoteMode.AlwaysQuoted;
    builder.LastField.FieldNullValue = DateTime.Today;

    builder.AddField("Field2", typeof(string));
    builder.LastField.FieldQuoted = true;
    builder.LastField.QuoteChar = '"';

    builder.AddField("Field3", typeof(int));

    mEngine = new FileHelperEngine(builder.CreateRecordClass());
    DataTable table = mEngine.ReadFileAsDT(TestCommon.GetPath("Good", "Test2.txt"));

    Assert.AreEqual(4, table.Rows.Count);
    Assert.AreEqual(4, mEngine.TotalRecords);
    Assert.AreEqual(0, mEngine.ErrorManager.ErrorCount);

    Assert.AreEqual("Field1", table.Columns[0].ColumnName);
    Assert.AreEqual("Field2", table.Columns[1].ColumnName);
    Assert.AreEqual("Field3", table.Columns[2].ColumnName);

    Assert.AreEqual("Hola", table.Rows[0][1]);
    Assert.AreEqual(DateTime.Today, table.Rows[2][0]);
}
/// <summary>A runtime-built record class yields DataTable columns with the declared names and types.</summary>
public void TestingNameAndTypes()
{
    var builder = new DelimitedClassBuilder("Customers", ",")
    {
        IgnoreFirstLines = 1,
        IgnoreEmptyLines = true,
    };

    builder.AddField("Field1", typeof (DateTime));
    builder.LastField.TrimMode = TrimMode.Both;
    builder.LastField.QuoteMode = QuoteMode.AlwaysQuoted;
    builder.LastField.FieldNullValue = DateTime.Today;

    builder.AddField("Field2", typeof (string));
    builder.LastField.FieldQuoted = true;
    builder.LastField.QuoteChar = '"';

    builder.AddField("Field3", typeof (int));

    mEngine = new FileHelperEngine(builder.CreateRecordClass());
    DataTable table = mEngine.ReadFileAsDT(TestCommon.GetPath("Good", "Test2.txt"));

    Assert.AreEqual("Field1", table.Columns[0].ColumnName);
    Assert.AreEqual(typeof (DateTime), table.Columns[0].DataType);
    Assert.AreEqual("Field2", table.Columns[1].ColumnName);
    Assert.AreEqual(typeof (string), table.Columns[1].DataType);
    Assert.AreEqual("Field3", table.Columns[2].ColumnName);
    Assert.AreEqual(typeof (int), table.Columns[2].DataType);
}
/// <summary>Before/after write events must each fire once per record written.</summary>
public void WriteEvents()
{
    before = 0;
    after = 0;

    engine = new FileHelperEngine<SampleType>();
    engine.BeforeWriteRecord += engine_BeforeWriteRecord;
    engine.AfterWriteRecord += engine_AfterWriteRecord;

    var rows = new[]
    {
        new SampleType { Field1 = DateTime.Now.AddDays(1), Field2 = "je", Field3 = 0 },
        new SampleType { Field1 = DateTime.Now, Field2 = "ho", Field3 = 2 },
    };

    engine.WriteString(rows);

    Assert.AreEqual(2, engine.TotalRecords);
    Assert.AreEqual(2, before);
    Assert.AreEqual(2, after);
}
/// <summary>Generic-engine variant: optional-field delimited file parses the expected record count.</summary>
public void Delimited0()
{
    var engine = new FileHelperEngine<OptionalDelimited1>();
    var rows = TestCommon.ReadTest<OptionalDelimited1>(engine, "Good", "OptionalDelimited0.txt");
    Assert.AreEqual(ExpectedRecords, rows.Length);
}
/// <summary>Writes a small file with a percent-progress handler attached, then removes the file.</summary>
public void WriteFileNotifyPercent()
{
    actualPerc = 0;
    actualAdd = 50;

    var engine = new FileHelperEngine<SampleType>();
    engine.Progress += ProgressChangePercent;

    var rows = new[]
    {
        new SampleType { Field1 = DateTime.Now.AddDays(1), Field2 = "je", Field3 = 0 },
        new SampleType { Field1 = DateTime.Now, Field2 = "ho", Field3 = 2 },
    };

    engine.WriteFile("prog1.txt", rows);

    if (File.Exists("prog1.txt"))
        File.Delete("prog1.txt");
}
/// <summary>
/// Open file and read into array, display unsorted.
/// </summary>
private void frmSort_Load(object sender, System.EventArgs e)
{
    var engine = new FileHelperEngine(typeof (CustomersVerticalBar));
    mRecords = (CustomersVerticalBar[]) engine.ReadString(SampleData);
    grid1.SelectedObject = mRecords;
}
/// <summary>Benchmarks FileHelpers stream reading against FlatFile's fixed-length engine.</summary>
public void ReadOperationShouldBeQuick()
{
    Benchmark.This("FileHelperEngine.ReadStream", () =>
    {
        var helpersEngine = new FileHelperEngine<FixedSampleRecord>();
        using (var reader = new StringReader(FixedFileSample))
        {
            helpersEngine.ReadStream(reader).Should().HaveCount(19);
        }
    })
    .Against.This("FlatFileEngine.Read", () =>
    {
        var layout = new FixedSampleRecordLayout();
        using (var input = new MemoryStream(Encoding.UTF8.GetBytes(FixedFileSample)))
        {
            var flatFile = new FixedLengthFileEngineFactory().GetEngine(layout);
            flatFile.Read<FixedSampleRecord>(input).ToArray().Should().HaveCount(19);
        }
    })
    .WithWarmup(1000)
    .For(10000)
    .Iterations()
    .PrintComparison();
}
//-> /File
//-> Finally you must to instantiate a FileHelperEngine and write the file
public override void Run()
{
    //-> File:Example.cs
    var engine = new FileHelperEngine<Orders>();

    var orders = new List<Orders>
    {
        new Orders { OrderID = 1, CustomerID = "AIRG", Freight = 82.43M, OrderDate = new DateTime(2009, 05, 01) },
        new Orders { OrderID = 2, CustomerID = "JSYV", Freight = 12.22M, OrderDate = new DateTime(2009, 05, 02) },
    };

    engine.WriteFile("Output.Txt", orders);
    //-> /File

    // Echo the same records to the console.
    Console.WriteLine(engine.WriteString(orders));
}
/// <summary>Benchmarks FileHelpers stream writing against FlatFile's fixed-length engine.</summary>
public void WriteOperationShouldBeQuick()
{
    var sampleRecords = GetRecords();

    Benchmark.This("FileHelperEngine.WriteStream", () =>
    {
        var helpersEngine = new FileHelperEngine<FixedSampleRecord>();
        using (var buffer = new MemoryStream())
        using (var writer = new StreamWriter(buffer))
        {
            helpersEngine.WriteStream(writer, sampleRecords);
        }
    })
    .Against.This("FlatFileEngine.Write", () =>
    {
        var layout = new FixedSampleRecordLayout();
        using (var buffer = new MemoryStream())
        {
            var flatFile = new FixedLengthFileEngineFactory().GetEngine(layout);
            flatFile.Write(buffer, sampleRecords);
        }
    })
    .WithWarmup(1000)
    .For(10000)
    .Iterations()
    .PrintComparison();
}
/// <summary>Round-trips order records through Excel storage and compares them field by field.</summary>
public void OrdersWrite()
{
    var engine = new FileHelperEngine(typeof(OrdersExcelType));
    var expected = (OrdersExcelType[]) Common.ReadTest(engine, @"Good\OrdersWithOutDates.txt");

    var provider = new ExcelStorage(typeof (OrdersExcelType))
    {
        StartRow = 1,
        StartColumn = 1,
        FileName = @"c:\tempex.xls",
        OverrideFile = true,
    };

    provider.InsertRecords(expected);
    var actual = (OrdersExcelType[]) provider.ExtractRecords();

    if (File.Exists(@"c:\tempex.xls"))
        File.Delete(@"c:\tempex.xls");

    Assert.AreEqual(expected.Length, actual.Length);
    for (int i = 0; i < actual.Length; i++)
    {
        Assert.AreEqual(expected[i].CustomerID, actual[i].CustomerID);
        Assert.AreEqual(expected[i].EmployeeID, actual[i].EmployeeID);
        Assert.AreEqual(expected[i].Freight, actual[i].Freight);
        Assert.AreEqual(expected[i].OrderID, actual[i].OrderID);
        Assert.AreEqual(expected[i].ShipVia, actual[i].ShipVia);
    }
}
/// <summary>Exact-length layout: record counts for exact, short, long, and variable inputs.</summary>
public void ExactLength()
{
    var engine = new FileHelperEngine<CustomerExact>
    {
        ErrorMode = ErrorMode.IgnoreAndContinue
    };

    var rows = FileTest.Good.CustomersFixedExact.ReadWithEngine(engine);
    Check.That(rows.Length).IsEqualTo(91);

    rows = FileTest.Good.CustomersFixedLessChars10Records.ReadWithEngine(engine);
    Check.That(rows.Length).IsEqualTo(81);

    rows = FileTest.Good.CustomersFixedMoreChars5Records.ReadWithEngine(engine);
    Check.That(rows.Length).IsEqualTo(86);

    rows = FileTest.Good.CustomersFixedMoreVariable12Records.ReadWithEngine(engine);
    Check.That(rows.Length).IsEqualTo(79);
}
/// <summary>
/// Repeatedly builds an engine and reads Good\Test1.txt, interleaving reflection
/// lookups (GetField) — presumably a regression check for reflection caching; confirm intent.
/// </summary>
public void ReadFile()
{
    typeof(SampleType).GetField("Field2");

    for (int iteration = 0; iteration < 10; iteration++)
    {
        typeof(SampleType).GetField("Field2");
        var engine = new FileHelperEngine<SampleType>();
        typeof(SampleType).GetField("Field2");

        Assert.AreEqual("Field1", engine.Options.FieldsNames[0]);
        Assert.AreEqual("Field2", engine.Options.FieldsNames[1]);
        Assert.AreEqual("Field3", engine.Options.FieldsNames[2]);

        SampleType[] rows = TestCommon.ReadTest<SampleType>(engine, "Good", "Test1.txt");

        Assert.AreEqual(4, rows.Length);
        Assert.AreEqual(4, engine.TotalRecords);
        Assert.AreEqual(0, engine.ErrorManager.ErrorCount);

        Assert.AreEqual(new DateTime(1314, 12, 11), rows[0].Field1);
        Assert.AreEqual("901", rows[0].Field2);
        Assert.AreEqual(234, rows[0].Field3);

        Assert.AreEqual(new DateTime(1314, 11, 10), rows[1].Field1);
        Assert.AreEqual("012", rows[1].Field2);
        Assert.AreEqual(345, rows[1].Field3);
    }
}
/// <summary>Footer text written to a file is picked up again by a subsequent read.</summary>
public void DiscardWriteRead()
{
    engine = new FileHelperEngine(typeof(DiscardLastType1));
    var original = (DiscardLastType1[])Common.ReadTest(engine, @"Good\DiscardLast1.txt");

    engine.FooterText = "This is a new Footer....\r\n";
    engine.WriteFile("tempo.txt", original);

    // Clobber the footer, then read the file back: the read restores the written footer
    // (asserted below).
    engine.FooterText = "none none";
    var reread = (DiscardLastType1[])engine.ReadFile(@"tempo.txt");

    Assert.AreEqual(original.Length, reread.Length);
    Assert.AreEqual("This is a new Footer....\r\n", engine.FooterText);

    if (File.Exists("tempo.txt"))
    {
        File.Delete("tempo.txt");
    }

    Assert.AreEqual(4, original.Length);
    Assert.AreEqual(new DateTime(1314, 12, 11), original[0].Field1);
}
/// <summary>
/// Loads indicator records from the file and maps each to a suspicious item by type.
/// </summary>
/// <exception cref="ArgumentException">Thrown for any indicator type other than hash/domain.</exception>
public IList <ISuspeciousItem> Load()
{
    var suspeciousItems = new List <ISuspeciousItem>();
    var engine = new FileHelperEngine <IndicatorENTT>();
    var records = engine.ReadFile(_filePath);

    foreach (var record in records)
    {
        // ToLowerInvariant: culture-independent matching. The original ToLower() uses the
        // current culture and misclassifies e.g. "HASH" under the Turkish culture,
        // where 'I' lowercases to dotless 'ı'.
        switch (record.IndicatorType.ToLowerInvariant())
        {
            case "hash":
                suspeciousItems.Add(new HashItem(record.Indicator.Trim()));
                break;

            case "domain":
                suspeciousItems.Add(new DomainItem(record.Indicator.Trim()));
                break;

            default:
                throw new ArgumentException("Indicator type is invalid");
        }
    }

    return suspeciousItems;
}
/// <summary>Excel-style CSV layout variant 3 parses the shared ExcelCsv2.txt sample without errors.</summary>
public void ReadExcelCsv3()
{
    var engine = new FileHelperEngine<ExcelCsv3Type>();

    TestCommon.ReadTest<ExcelCsv3Type>(engine, "Good", "ExcelCsv2.txt");
}
/// <summary>
/// Loads assets from Data\data.csv and creates them remotely: build the entity,
/// assign its repository, save it, set its lifecycle status, then queue a fetch job
/// to pull the file content.
/// </summary>
/// <returns>True when every asset was created; false on the first validation failure.</returns>
static async Task <bool> CreateAssetsFromCSV()
{
    // use the FileHelpers library to load data from CSV
    var engine = new FileHelperEngine <Asset>();
    var filePath = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), @"Data\data.csv");
    var assets = engine.ReadFile(filePath);

    // import all the things...
    var contentRepositories = await GetDefinitionItems("M.Content.Repository");
    var enUs = CultureInfo.GetCultureInfo("en-US");

    foreach (var asset in assets)
    {
        // creates a new asset (in memory. will not persist until explicitly saved.)
        var newEntity = await _client.EntityFactory.CreateAsync(Constants.Asset.DefinitionName);
        newEntity.SetPropertyValue("FileName", asset.File);
        newEntity.SetPropertyValue("Title", asset.Title);
        newEntity.SetPropertyValue("Description", enUs, asset.Description);

        // assign the asset to a repo
        var contentRepositoryToAssetRelation = newEntity.GetRelation("ContentRepositoryToAsset", RelationRole.Child) as IChildToManyParentsRelation;
        contentRepositoryToAssetRelation.Add((long)contentRepositories.Single(s => s.Identifier == asset.ContentRepositoryToAsset).Id);

        try
        {
            // persist the asset information
            var entityId = await _client.Entities.SaveAsync(newEntity);
            Console.WriteLine(String.Format("Created - Name: {0}, EntityId: {1}", asset.Title, entityId));

            // set the lifecycle status. (it seems that assets MUST exist prior to this action.)
            switch (asset.FinalLifeCycleStatusToAsset)
            {
                case "M.Final.LifeCycle.Status.UnderReview":
                    await _client.Assets.FinalLifeCycleManager.SubmitAsync(entityId);
                    break;

                case "M.Final.LifeCycle.Status.Approved":
                    await _client.Assets.FinalLifeCycleManager.DirectPublishAsync(entityId);
                    break;
            }

            // fetch jobs still need to be created in order to pull the asset content... otherwise, they will just
            // sit there, empty and alone.
            var fetchJobRequest = new WebFetchJobRequest("Fetch file for entity.", entityId);
            fetchJobRequest.Urls.Add(new Uri(asset.File, UriKind.Absolute));
            var jobId = await _client.Jobs.CreateFetchJobAsync(fetchJobRequest);
            Console.WriteLine(String.Format("Created Fetch Job - EntityId: {0} JobId: {1}", entityId, jobId));
        }
        catch (ValidationException e)
        {
            // sad face...
            // NOTE(review): the return inside this loop logs only the FIRST failure and
            // aborts the whole import mid-run — confirm reporting all failures isn't intended.
            foreach (var failure in e.Failures)
            {
                Console.WriteLine(String.Format("Failure - Source: {0}, Message: {1}", failure.Source, failure.Message));
                return(false);
            }
        }
    }

    return(true);
}
/// <summary>
/// Loads the resume file via FileHelpers.
/// NOTE(review): the parsed records (<c>r</c>) are discarded — presumably an override
/// or later revision consumes them; confirm this base implementation is intentionally a stub.
/// </summary>
protected virtual void BuildQueries()
{
    var Engine = new FileHelperEngine <T>();
    var r = Engine.ReadFile(ResumeName);
}
/// <summary>Reading QuoteMode1.txt with the QuoteMode2 layout must raise BadUsageException.</summary>
public void ReadOptionalWrite()
{
    engine = new FileHelperEngine(typeof(QuoteMode2));

    Assert.Throws<BadUsageException>(() => Common.ReadTest(engine, @"Good\QuoteMode1.txt"));
}
/// <summary>
/// Reads a malformed InNewLine file. No assertion appears here — the failure
/// expectation is presumably declared by an attribute on this method outside this
/// view (e.g. an expected-exception annotation); confirm.
/// </summary>
public void InNewLine4Bad()
{
    engine = new FileHelperEngine(typeof(InNewLineType2));
    engine.ReadFile(TestCommon.TestPath(@"Bad\InNewLine4.txt"));
}
/// <summary>
/// Tutorial walk-through: string escaping, reading a CSV roster both manually and with
/// FileHelpers, XML-serializing/deserializing the list, and appending a new entry.
/// </summary>
static void Main(string[] args)
{
    //Remember a backslash is a special escape character. If you want a quote in the
    //middle of a line you have to "escape" it. Uncomment each line and see which one passes
    //string illegalQuoteString = "Don't "Quote" me on that!";
    string properlyEscapedQuoteString = "Don't \"Quote\" me on that!";

    //The same way the Dollar sign can be used for string interpolation, the @ symbol
    //can be used to automatically escape characters between the quotes:
    string myExampleFilePath = "C:\\Some\\Path\\To\\A\\File";
    //but it's easier to use the @
    string myBetterExampleFilePath = @"C:\Some\Path\To\A\File";
    //both are correct but I think the second one is easier to read

    Console.WriteLine("*** Creating the list by paring the file ourselves ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    //A generic list List<RosterItem> — gives us Add(), Remove(), Find() and FindAll() for
    //free. The <> angle brackets are a SIGNIFICANT pattern in C#: you fill in the blank
    //with a TYPE.
    List <RosterItem> myRoster = new List <RosterItem>();

    //Read the CSV straight into our list. No full path needed: "Copy Always" places
    //Roster.csv in the same debug directory as our EXE.
    myRoster = ReadFileToRosterList("Roster.csv");

    //loop through the list and write each player's name and position
    foreach (RosterItem item in myRoster)
    {
        //the $ prefix enables string interpolation: variable names inside braces are
        //replaced with their values
        Console.WriteLine($"Player: {item.FirstName} {item.LastName}, Position: {item.Position}");
        //add some drama: a quarter-second wait between lines
        System.Threading.Thread.Sleep(250);
    }

    Console.WriteLine("");
    Console.WriteLine("*** Now let's do it using the file helpers library ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    //Convert the CSV file to an object list using FileHelpers (downloaded via nuget)
    var engine = new FileHelperEngine <RosterItem>();
    //we don't want to read the header of the file
    engine.Options.IgnoreFirstLines = 1;
    //Now we do it an easier way: 4 lines of code plus a [DelimitedRecord(",")]
    //attribute on the class
    var result = engine.ReadFile("Roster.csv");

    //print the info to screen again, this time with a for loop instead of foreach
    //NOTE(review): this loop iterates myRoster, not the FileHelpers `result` just read —
    //confirm whether printing `result` was intended.
    for (int i = 0; i < myRoster.Count; i++)
    {
        //assign the element to a local — cleaner than repeating myRoster[i].FirstName etc.
        RosterItem item = myRoster[i];
        Console.WriteLine($"Player: {item.FirstName} {item.LastName}, Position: {item.Position}");
        //add some drama: a quarter-second wait between lines
        System.Threading.Thread.Sleep(250);
    }
    //prefer a for loop over foreach when you need the index or a counter

    //Serialization turns an object list into text (usually XML or JSON);
    //deserialization turns that text back into the object list.
    //This code requires the XML.Serialization library.
    Console.WriteLine("");
    Console.WriteLine("*** Now let's serialize our list ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    // Create a new XmlSerializer instance with the type of the list
    XmlSerializer SerializerObj = new XmlSerializer(typeof(List <RosterItem>));

    // Create a new filestream so we can write the serialized object to a file
    using (TextWriter WriteFileStream = new StreamWriter("roster.xml"))
    {
        //serialize our list<>
        SerializerObj.Serialize(WriteFileStream, myRoster);
    }

    //open the xml file with notepad
    Console.WriteLine("Opening our serialized data in notepad...");
    System.Threading.Thread.Sleep(500);
    Process.Start("notepad.exe", "roster.xml");

    Console.WriteLine("");
    Console.WriteLine("*** Now let's deserialize our list ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    //Deserialize the XML file: open a read-only stream over it
    using (FileStream ReadFileStream = new FileStream("roster.xml", FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        //the parentheses "cast" the returned object back to our list type
        List <RosterItem> reLoadedRoster = (List <RosterItem>)SerializerObj.Deserialize(ReadFileStream);
        //how many items are in our reloaded list
        Console.WriteLine($"We reloaded {reLoadedRoster.Count} items from our xml file.");
    }

    Console.WriteLine("");
    Console.WriteLine("*** Now let's add a new item to the list ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    //New entries must be valid data; with console text input there are no UI controls
    //(dropdowns, textboxes) to help validate.
    //Next unique ID: the file's items are numbered sequentially, so count + 1 works here
    //(admittedly a bit of a cheat — there are many other ways to do this).
    int NextId = myRoster.Count + 1;

    //get a new roster item (factored into its own method)
    RosterItem newRosterItem = GetRosterItem(NextId);

    //Add the new item to the list
    myRoster.Add(newRosterItem);

    Console.WriteLine($"There are now {myRoster.Count} items in our list.");
    Console.WriteLine($"You just added {newRosterItem.FirstName} {newRosterItem.LastName} to the roster.");
    //Console.WriteLine(myString);
    Console.ReadLine();
}
/// <summary>Reads the test file located by the given path elements using the supplied engine.</summary>
public static object[] ReadTest(FileHelperEngine engine, params string[] pathElements)
{
    var fullPath = GetPath(pathElements);
    return engine.ReadFile(fullPath);
}
/// <summary>
/// Locates the weather-events CSV next to the executing assembly (extracting it from
/// the bundled zip when missing) and parses it into DatasetWeatherEvent records.
/// The DEBUG / !DEBUG #if pairs only switch which folders are searched.
/// </summary>
/// <exception cref="System.IO.FileNotFoundException">Current directory or zip cannot be found.</exception>
/// <exception cref="ArgumentException">CSV still missing after extraction (likely zip corruption).</exception>
public IEnumerable <DatasetWeatherEvent> ParseWeatherEvents()
{
    var currentDirectory = System.IO.Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);

    // Nullity check, although this is known not to be null
    if (string.IsNullOrWhiteSpace(currentDirectory))
    {
        throw new System.IO.FileNotFoundException("Current Directory is null");
    }

    // Check if csv exists
#if DEBUG
    var weatherReportCsvFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationDebug, WeatherCsv));
#endif
#if !DEBUG
    var weatherReportCsvFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationRelease, WeatherCsv));
#endif
    var csvExists = System.IO.File.Exists(weatherReportCsvFullLocation);

    // if csv is missing, extract it from the zip
    if (!csvExists)
    {
        // Check if zip exists
#if DEBUG
        var weatherReportZipLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, WeatherZipDebug));
#endif
#if !DEBUG
        var weatherReportZipLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, WeatherZipRelease));
#endif
        var zipExists = System.IO.File.Exists(weatherReportZipLocation);
        if (!zipExists)
        {
            throw new System.IO.FileNotFoundException("Zip is missing from the folder FIWAREHub.Datasets");
        }

#if DEBUG
        var extractLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationDebug));
#endif
#if !DEBUG
        var extractLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationRelease));
#endif
        System.IO.Compression.ZipFile.ExtractToDirectory(weatherReportZipLocation, extractLocation);

        csvExists = System.IO.File.Exists(weatherReportCsvFullLocation);
        if (!csvExists)
        {
            throw new ArgumentException("CSV Not found even after extraction attempt. Possible zip corruption. Please try downloading the datasets again.");
        }
    }

    var engine = new FileHelperEngine <DatasetWeatherEvent>();
    var weatherEvents = engine
        .ReadFile(weatherReportCsvFullLocation)
        .ToList();

    // NOTE(review): severityScale is computed but never used — presumably leftover
    // exploration code; confirm before removing.
    var severityScale = weatherEvents.Select(we => we.Severity).DistinctBy(s => s).ToList();

    return(weatherEvents);
}
/// <summary>
/// Reads the ENADE 2014 microdata CSV, aggregates grades and income-band counts by
/// state, course and income, then writes the aggregates as CSV files and
/// line-delimited JSON files consumed by the visualization front end.
/// NOTE(review): all input/output paths are hard-coded to a developer machine —
/// consider moving them to args or configuration.
/// </summary>
static void Main(string[] args)
{
    #region Codes to names dictionaries
    // IBGE state codes -> state abbreviations.
    Dictionary<string, string> states = new Dictionary<string, string>
    {
        { "11", "RO" }, { "12", "AC" }, { "13", "AM" }, { "14", "RR" }, { "15", "PA" },
        { "16", "AP" }, { "17", "TO" }, { "21", "MA" }, { "22", "PI" }, { "23", "CE" },
        { "24", "RN" }, { "25", "PB" }, { "26", "PE" }, { "27", "AL" }, { "28", "SE" },
        { "29", "BA" }, { "31", "MG" }, { "32", "ES" }, { "33", "RJ" }, { "35", "SP" },
        { "41", "PR" }, { "42", "SC" }, { "43", "RS" }, { "50", "MS" }, { "51", "MT" },
        { "52", "GO" }, { "53", "DF" }
    };
    // ENADE course codes -> course names.
    Dictionary<string, string> courses = new Dictionary<string, string>
    {
        { "21", "ARQUITETURA E URBANISMO" },
        { "72", "TECNOLOGIA EM ANÁLISE E DESENVOLVIMENTO DE SISTEMAS" },
        { "73", "TECNOLOGIA EM AUTOMAÇÃO INDUSTRIAL" },
        { "76", "TECNOLOGIA EM GESTÃO DA PRODUÇÃO INDUSTRIAL" },
        { "79", "TECNOLOGIA EM REDES DE COMPUTADORES" },
        { "701", "MATEMÁTICA(BACHARELADO)" },
        { "702", "MATEMÁTICA(LICENCIATURA)" },
        { "903", "LETRAS - PORTUGUÊS(BACHARELADO)" },
        { "904", "LETRAS - PORTUGUÊS(LICENCIATURA)" },
        { "905", "LETRAS - PORTUGUÊS E INGLÊS(LICENCIATURA)" },
        { "906", "LETRAS - PORTUGUÊS E ESPANHOL(LICENCIATURA)" },
        { "1401", "FÍSICA(BACHARELADO)" },
        { "1402", "FÍSICA(LICENCIATURA)" },
        { "1501", "QUÍMICA(BACHARELADO)" },
        { "1502", "QUÍMICA(LICENCIATURA)" },
        { "1601", "CIÊNCIAS BIOLÓGICAS(BACHARELADO)" },
        { "1602", "CIÊNCIAS BIOLÓGICAS(LICENCIATURA)" },
        { "2001", "PEDAGOGIA(LICENCIATURA)" },
        { "2401", "HISTÓRIA(BACHARELADO)" },
        { "2402", "HISTÓRIA(LICENCIATURA)" },
        { "2501", "ARTES VISUAIS(LICENCIATURA)" },
        { "3001", "GEOGRAFIA(BACHARELADO)" },
        { "3002", "GEOGRAFIA(LICENCIATURA)" },
        { "3201", "FILOSOFIA(BACHARELADO)" },
        { "3202", "FILOSOFIA(LICENCIATURA)" },
        { "3502", "EDUCAÇÃO FÍSICA(LICENCIATURA)" },
        { "4004", "CIÊNCIA DA COMPUTAÇÃO(BACHARELADO)" },
        { "4005", "CIÊNCIA DA COMPUTAÇÃO(LICENCIATURA)" },
        { "4006", "SISTEMAS DE INFORMAÇÃO" },
        { "4301", "MÚSICA(LICENCIATURA)" },
        { "5401", "CIÊNCIAS SOCIAIS(BACHARELADO)" },
        { "5402", "CIÊNCIAS SOCIAIS(LICENCIATURA)" },
        { "5710", "ENGENHARIA CIVIL" },
        { "5806", "ENGENHARIA ELÉTRICA" },
        { "5809", "ENGENHARIA DE COMPUTAÇÃO" },
        { "5814", "ENGENHARIA DE CONTROLE E AUTOMAÇÃO" },
        { "5902", "ENGENHARIA MECÂNICA" },
        { "6008", "ENGENHARIA QUÍMICA" },
        { "6009", "ENGENHARIA DE ALIMENTOS" },
        { "6208", "ENGENHARIA DE PRODUÇÃO" },
        { "6306", "ENGENHARIA" },
        { "6307", "ENGENHARIA AMBIENTAL" },
        { "6405", "ENGENHARIA FLORESTAL" }
    };
    // Income band letters -> human-readable descriptions ("" = not informed).
    Dictionary<string, string> incomes = new Dictionary<string, string>
    {
        { "a", "Até 1,5 salário mínimo" },
        { "b", "De 1,5 a 3 salários mínimos" },
        { "c", "De 3 a 4,5 salários mínimos" },
        { "d", "De 4,5 a 6 salários mínimos" },
        { "e", "De 6 a 10 salários mínimos" },
        { "f", "De 10 a 30 salários mínimos" },
        { "g", "Acima de 30 salários mínimos" },
        { "", "Não Informado" }
    };
    #endregion

    // Aggregation buckets, keyed by the raw codes from the CSV.
    Dictionary<string, List<double>> stateGrades = new Dictionary<string, List<double>>();
    Dictionary<string, Dictionary<string, List<double>>> courseIncomeGrades = new Dictionary<string, Dictionary<string, List<double>>>();
    Dictionary<string, Dictionary<string, Dictionary<string, List<double>>>> courseStateIncomeGrades = new Dictionary<string, Dictionary<string, Dictionary<string, List<double>>>>();
    Dictionary<string, int> countIncomes = new Dictionary<string, int>();
    Dictionary<string, Dictionary<string, Dictionary<string, int>>> courseStateIncomeCount = new Dictionary<string, Dictionary<string, Dictionary<string, int>>>();
    Dictionary<string, Dictionary<string, int>> stateIncomeCount = new Dictionary<string, Dictionary<string, int>>();
    Dictionary<string, Dictionary<string, int>> courseIncomeCount = new Dictionary<string, Dictionary<string, int>>();
    Dictionary<string, Dictionary<string, List<double>>> stateCourseGrades = new Dictionary<string, Dictionary<string, List<double>>>();

    FileHelperEngine<EnadeObject> readEngine = new FileHelperEngine<EnadeObject>();
    int count = 1;
    Console.WriteLine("Reading file and coverting to object");
    var records = readEngine.ReadFile(@"C:\Users\leobr\Dropbox\UFF\Visualização de dados\microdados_enade_2014\2.DADOS\microdados_enade_2014_editado.csv");

    // Single pass over the records: every aggregate is fed by the same shared helpers
    // (the original repeated the ContainsKey/Add pattern inline for each dictionary).
    foreach (EnadeObject currentRecord in records)
    {
        Console.WriteLine("Getting record " + count + " from " + records.Length);
        AddGrade(stateGrades, currentRecord.codigoUF, currentRecord.notaGeral);
        AddGrade2(courseIncomeGrades, currentRecord.codigoCurso, currentRecord.faixaDeRenda, currentRecord.notaGeral);
        AddGrade3(courseStateIncomeGrades, currentRecord.codigoCurso, currentRecord.codigoUF, currentRecord.faixaDeRenda, currentRecord.notaGeral);
        Increment(countIncomes, currentRecord.faixaDeRenda);
        Increment3(courseStateIncomeCount, currentRecord.codigoCurso, currentRecord.codigoUF, currentRecord.faixaDeRenda);
        Increment2(stateIncomeCount, currentRecord.codigoUF, currentRecord.faixaDeRenda);
        Increment2(courseIncomeCount, currentRecord.codigoCurso, currentRecord.faixaDeRenda);
        AddGrade2(stateCourseGrades, currentRecord.codigoUF, currentRecord.codigoCurso, currentRecord.notaGeral);
        count++;
    }

    Console.WriteLine("Writing files");

    #region CSV FILES
    List<WriteFileObject> stateGradesRecords = new List<WriteFileObject>();
    List<WriteFileObject> courseIncomeGradesRecords = new List<WriteFileObject>();
    FileHelperEngine<WriteFileObject> writeEngine = new FileHelperEngine<WriteFileObject>();

    // States/grades file
    foreach (string currentState in stateGrades.Keys)
    {
        WriteFileObject record = new WriteFileObject();
        // Changing states codes to names, to write the file
        if (states.ContainsKey(currentState)) { record.state = states[currentState]; }
        else { Console.WriteLine("Estado não encontrado na lista"); continue; }
        // Average grade. NOTE(review): an empty grade list yields NaN here — preserved from original.
        double gradesSum = 0;
        foreach (double currentGrade in stateGrades[currentState]) { gradesSum += currentGrade; }
        record.average = Math.Round((gradesSum / stateGrades[currentState].Count), 2);
        stateGradesRecords.Add(record);
    }
    writeEngine.WriteFile(@"C:\Projetos\UFF\VisEnade\DataWorker\DataWorker\DataWorker\Output\StatesGrades.csv", stateGradesRecords);

    // Course/Income/Grades file
    foreach (string currentCourse in courseIncomeGrades.Keys)
    {
        foreach (string currentIncome in courseIncomeGrades[currentCourse].Keys)
        {
            WriteFileObject record = new WriteFileObject();
            // Changing courses and incomes codes to names, to write the file
            if (courses.ContainsKey(currentCourse)) { record.course = courses[currentCourse]; }
            else { Console.WriteLine("Curso não encontrado na lista"); continue; }
            if (incomes.ContainsKey(currentIncome)) { record.income = incomes[currentIncome]; }
            else { Console.WriteLine("Faixa de renda não encontrada na lista"); continue; }
            double gradesSum = 0;
            foreach (double currentGrade in courseIncomeGrades[currentCourse][currentIncome]) { gradesSum += currentGrade; }
            record.average = Math.Round((gradesSum / courseIncomeGrades[currentCourse][currentIncome].Count), 2);
            courseIncomeGradesRecords.Add(record);
        }
    }
    writeEngine.WriteFile(@"C:\Projetos\UFF\VisEnade\DataWorker\DataWorker\DataWorker\Output\CourseIncomeGrades.csv", courseIncomeGradesRecords);
    #endregion

    #region JSON FILES
    // Each file is one JSON object per line. Writers are wrapped in using blocks so
    // they are closed even if serialization throws (the original leaked on exception).

    // Course/Income average grades
    using (StreamWriter sw = new StreamWriter(@"C:\Projetos\UFF\VisEnade\DataWorker\DataWorker\DataWorker\Output\courseIncomeGrade.json"))
    {
        foreach (string currentCourse in courseIncomeGrades.Keys)
        {
            CourseIncomeObject ciObj = new CourseIncomeObject();
            foreach (string currentIncome in courseIncomeGrades[currentCourse].Keys)
            {
                // Changing courses codes to names, to write the file
                if (courses.ContainsKey(currentCourse)) { ciObj.course = courses[currentCourse]; }
                else { Console.WriteLine("Curso não encontrado na lista"); continue; }
                // Same call in every branch of the original switch — hoisted.
                var avg = CalculateAvgGrade(courseIncomeGrades, currentCourse, currentIncome);
                switch (currentIncome)
                {
                    case "a": ciObj.incomes.a = avg; break;
                    case "b": ciObj.incomes.b = avg; break;
                    case "c": ciObj.incomes.c = avg; break;
                    case "d": ciObj.incomes.d = avg; break;
                    case "e": ciObj.incomes.e = avg; break;
                    case "f": ciObj.incomes.f = avg; break;
                    case "g": ciObj.incomes.g = avg; break;
                    default: ciObj.incomes.naoInformado = avg; break;
                }
            }
            sw.WriteLine(JsonConvert.SerializeObject(ciObj));
            sw.Flush();
        }
    }

    // Course/State/Income average grades
    using (StreamWriter sw = new StreamWriter(@"C:\Projetos\UFF\VisEnade\DataWorker\DataWorker\DataWorker\Output\courseStateIncomeGrade.json"))
    {
        foreach (string currentCourse in courseStateIncomeGrades.Keys)
        {
            foreach (string currentState in courseStateIncomeGrades[currentCourse].Keys)
            {
                CourseStateIncomeObject csiObj = new CourseStateIncomeObject();
                foreach (string currentIncome in courseStateIncomeGrades[currentCourse][currentState].Keys)
                {
                    // Changing courses codes to names, to write the file
                    if (courses.ContainsKey(currentCourse)) { csiObj.course = courses[currentCourse]; }
                    else { Console.WriteLine("Curso não encontrado na lista"); continue; }
                    if (states.ContainsKey(currentState)) { csiObj.state = states[currentState]; }
                    else { Console.WriteLine("Estado não encontrado na lista"); continue; }
                    var avg = CalculateCourseStateAvgGrade(courseStateIncomeGrades, currentCourse, currentState, currentIncome);
                    switch (currentIncome)
                    {
                        case "a": csiObj.incomes.a = avg; break;
                        case "b": csiObj.incomes.b = avg; break;
                        case "c": csiObj.incomes.c = avg; break;
                        case "d": csiObj.incomes.d = avg; break;
                        case "e": csiObj.incomes.e = avg; break;
                        case "f": csiObj.incomes.f = avg; break;
                        case "g": csiObj.incomes.g = avg; break;
                        default: csiObj.incomes.naoInformado = avg; break;
                    }
                }
                sw.WriteLine(JsonConvert.SerializeObject(csiObj));
                sw.Flush();
            }
        }
    }

    // Count Income
    using (StreamWriter sw = new StreamWriter(@"C:\Projetos\UFF\VisEnade\DataWorker\DataWorker\DataWorker\Output\countIncome.json"))
    {
        foreach (string currentIncome in countIncomes.Keys)
        {
            CountIncomeObject countIncomeObj = new CountIncomeObject();
            if (incomes.ContainsKey(currentIncome)) { countIncomeObj.income = incomes[currentIncome]; }
            else { Console.WriteLine("Renda não encontrada na lista"); continue; }
            countIncomeObj.count = countIncomes[currentIncome];
            sw.WriteLine(JsonConvert.SerializeObject(countIncomeObj));
            sw.Flush();
        }
    }

    // Course/State/Income counts
    using (StreamWriter sw = new StreamWriter(@"C:\Projetos\UFF\VisEnade\DataWorker\DataWorker\DataWorker\Output\CourseStateIncomeCount.json"))
    {
        foreach (string currentCourse in courseStateIncomeCount.Keys)
        {
            foreach (string currentState in courseStateIncomeCount[currentCourse].Keys)
            {
                CourseStateIncomeCountObject courseStateIncCountObj = new CourseStateIncomeCountObject();
                foreach (string currentIncome in courseStateIncomeCount[currentCourse][currentState].Keys)
                {
                    // Changing courses codes to names, to write the file
                    if (courses.ContainsKey(currentCourse)) { courseStateIncCountObj.course = courses[currentCourse]; }
                    else { Console.WriteLine("Curso não encontrado na lista"); continue; }
                    if (states.ContainsKey(currentState)) { courseStateIncCountObj.state = states[currentState]; }
                    else { Console.WriteLine("Estado não encontrado na lista"); continue; }
                    int n = courseStateIncomeCount[currentCourse][currentState][currentIncome];
                    switch (currentIncome)
                    {
                        case "a": courseStateIncCountObj.incomes.a = n; break;
                        case "b": courseStateIncCountObj.incomes.b = n; break;
                        case "c": courseStateIncCountObj.incomes.c = n; break;
                        case "d": courseStateIncCountObj.incomes.d = n; break;
                        case "e": courseStateIncCountObj.incomes.e = n; break;
                        case "f": courseStateIncCountObj.incomes.f = n; break;
                        case "g": courseStateIncCountObj.incomes.g = n; break;
                        default: courseStateIncCountObj.incomes.naoInformado = n; break;
                    }
                }
                sw.WriteLine(JsonConvert.SerializeObject(courseStateIncCountObj));
                sw.Flush();
            }
        }
    }

    // State/Income counts
    using (StreamWriter sw = new StreamWriter(@"C:\Projetos\UFF\VisEnade\DataWorker\DataWorker\DataWorker\Output\StateIncomeCount.json"))
    {
        foreach (string currentState in stateIncomeCount.Keys)
        {
            StateIncomeCountObject stateIncomeCountObj = new StateIncomeCountObject();
            foreach (string currentIncome in stateIncomeCount[currentState].Keys)
            {
                if (states.ContainsKey(currentState)) { stateIncomeCountObj.state = states[currentState]; }
                else { Console.WriteLine("Estado não encontrado na lista"); continue; }
                int n = stateIncomeCount[currentState][currentIncome];
                switch (currentIncome)
                {
                    case "a": stateIncomeCountObj.incomes.a = n; break;
                    case "b": stateIncomeCountObj.incomes.b = n; break;
                    case "c": stateIncomeCountObj.incomes.c = n; break;
                    case "d": stateIncomeCountObj.incomes.d = n; break;
                    case "e": stateIncomeCountObj.incomes.e = n; break;
                    case "f": stateIncomeCountObj.incomes.f = n; break;
                    case "g": stateIncomeCountObj.incomes.g = n; break;
                    default: stateIncomeCountObj.incomes.naoInformado = n; break;
                }
            }
            sw.WriteLine(JsonConvert.SerializeObject(stateIncomeCountObj));
            sw.Flush();
        }
    }

    // Course/Income counts
    using (StreamWriter sw = new StreamWriter(@"C:\Projetos\UFF\VisEnade\DataWorker\DataWorker\DataWorker\Output\CourseIncomeCount.json"))
    {
        foreach (string currentCourse in courseIncomeCount.Keys)
        {
            CourseIncomeCountObject courseIncomeCountObj = new CourseIncomeCountObject();
            foreach (string currentIncome in courseIncomeCount[currentCourse].Keys)
            {
                if (courses.ContainsKey(currentCourse)) { courseIncomeCountObj.course = courses[currentCourse]; }
                else { Console.WriteLine("Curso não encontrado na lista"); continue; }
                int n = courseIncomeCount[currentCourse][currentIncome];
                switch (currentIncome)
                {
                    case "a": courseIncomeCountObj.incomes.a = n; break;
                    case "b": courseIncomeCountObj.incomes.b = n; break;
                    case "c": courseIncomeCountObj.incomes.c = n; break;
                    case "d": courseIncomeCountObj.incomes.d = n; break;
                    case "e": courseIncomeCountObj.incomes.e = n; break;
                    case "f": courseIncomeCountObj.incomes.f = n; break;
                    case "g": courseIncomeCountObj.incomes.g = n; break;
                    default: courseIncomeCountObj.incomes.naoInformado = n; break;
                }
            }
            sw.WriteLine(JsonConvert.SerializeObject(courseIncomeCountObj));
            sw.Flush();
        }
    }

    // State/Course average grades
    using (StreamWriter sw = new StreamWriter(@"C:\Projetos\UFF\VisEnade\DataWorker\DataWorker\DataWorker\Output\StateCourseGrade.json"))
    {
        foreach (string currentState in stateCourseGrades.Keys)
        {
            foreach (string currentCourse in stateCourseGrades[currentState].Keys)
            {
                StateCourseGradeObject stateCourseGradeObj = new StateCourseGradeObject();
                if (states.ContainsKey(currentState)) { stateCourseGradeObj.state = states[currentState]; }
                else { Console.WriteLine("Estado não encontrado na lista"); continue; }
                if (courses.ContainsKey(currentCourse)) { stateCourseGradeObj.course = courses[currentCourse]; }
                else { Console.WriteLine("Curso não encontrado na lista"); continue; }
                stateCourseGradeObj.grade = CalculateStateCourseAvgGrade(stateCourseGrades, currentState, currentCourse);
                sw.WriteLine(JsonConvert.SerializeObject(stateCourseGradeObj));
                sw.Flush();
            }
        }
    }
    #endregion

    Console.WriteLine("Arquivos finalizados");
}

// Adds rawGrade/10 to grades[key]; the bucket is created (possibly empty) the first
// time a key is seen, matching the original inline ContainsKey/Add pattern.
private static void AddGrade(Dictionary<string, List<double>> grades, string key, string rawGrade)
{
    List<double> bucket;
    if (!grades.TryGetValue(key, out bucket))
    {
        bucket = new List<double>();
        grades.Add(key, bucket);
    }
    if (!String.IsNullOrWhiteSpace(rawGrade))
    {
        // NOTE(review): Double.Parse uses the current culture — confirm the CSV decimal separator matches.
        bucket.Add(Double.Parse(rawGrade) / 10);
    }
}

// Two-level variant: grades[k1][k2].
private static void AddGrade2(Dictionary<string, Dictionary<string, List<double>>> grades, string k1, string k2, string rawGrade)
{
    Dictionary<string, List<double>> inner;
    if (!grades.TryGetValue(k1, out inner))
    {
        inner = new Dictionary<string, List<double>>();
        grades.Add(k1, inner);
    }
    AddGrade(inner, k2, rawGrade);
}

// Three-level variant: grades[k1][k2][k3].
private static void AddGrade3(Dictionary<string, Dictionary<string, Dictionary<string, List<double>>>> grades, string k1, string k2, string k3, string rawGrade)
{
    Dictionary<string, Dictionary<string, List<double>>> inner;
    if (!grades.TryGetValue(k1, out inner))
    {
        inner = new Dictionary<string, Dictionary<string, List<double>>>();
        grades.Add(k1, inner);
    }
    AddGrade2(inner, k2, k3, rawGrade);
}

// Increments counts[key], starting at 1 on first sight.
private static void Increment(Dictionary<string, int> counts, string key)
{
    if (counts.ContainsKey(key)) { counts[key]++; }
    else { counts.Add(key, 1); }
}

// Two-level variant: counts[k1][k2].
private static void Increment2(Dictionary<string, Dictionary<string, int>> counts, string k1, string k2)
{
    Dictionary<string, int> inner;
    if (!counts.TryGetValue(k1, out inner))
    {
        inner = new Dictionary<string, int>();
        counts.Add(k1, inner);
    }
    Increment(inner, k2);
}

// Three-level variant: counts[k1][k2][k3].
private static void Increment3(Dictionary<string, Dictionary<string, Dictionary<string, int>>> counts, string k1, string k2, string k3)
{
    Dictionary<string, Dictionary<string, int>> inner;
    if (!counts.TryGetValue(k1, out inner))
    {
        inner = new Dictionary<string, int>();
        counts.Add(k1, inner);
    }
    Increment2(inner, k2, k3);
}
private FileHelperEngine CreateEngineAndClearErrors() { var engine = new FileHelperEngine(mRecordInfo);
/// <summary>
/// Parses the bundled accidents dataset and combines each report with its
/// normalized weather mapping. If the CSV has not been extracted yet, it is
/// first extracted from the bundled zip.
/// </summary>
/// <returns>
/// Combined weather/traffic reports for 2017-2018 accidents (with a weather
/// condition) from the two states with the most accidents.
/// </returns>
/// <exception cref="System.IO.FileNotFoundException">
/// The executing directory cannot be resolved, the dataset zip is missing,
/// or the weather mappings json is missing.
/// </exception>
/// <exception cref="ArgumentException">
/// The CSV is still missing after extraction (likely zip corruption).
/// </exception>
public IEnumerable<FiwareCombinedReport> ParseAccidentsDataset()
{
    var currentDirectory = System.IO.Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);

    // Nullity check, although this is known not to be null
    if (string.IsNullOrWhiteSpace(currentDirectory))
    {
        throw new System.IO.FileNotFoundException("Current Directory is null");
    }

    // Debug and release builds keep the dataset and mappings in different folders.
#if DEBUG
    var accidentCsvFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationDebug, AccidentCsv));
    var accidentZipFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, AccidentZipDebug));
    var extractLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationDebug));
    var mappingsFileLocation = MappingsLocationDebug;
#else
    var accidentCsvFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationRelease, AccidentCsv));
    var accidentZipFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, AccidentZipRelease));
    var extractLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationRelease));
    var mappingsFileLocation = MappingsLocationRelease;
#endif

    // If the CSV is missing, extract it from the bundled zip.
    if (!System.IO.File.Exists(accidentCsvFullLocation))
    {
        if (!System.IO.File.Exists(accidentZipFullLocation))
        {
            throw new System.IO.FileNotFoundException("Zip is missing from the folder FIWAREHub.Datasets");
        }

        System.IO.Compression.ZipFile.ExtractToDirectory(accidentZipFullLocation, extractLocation);

        if (!System.IO.File.Exists(accidentCsvFullLocation))
        {
            throw new ArgumentException("CSV Not found even after extraction attempt. Possible zip corruption. Please try downloading the datasets again.");
        }
    }

    // Deserialization of statistic-friendly weather mappings
    var mappingsFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, mappingsFileLocation));
    if (!System.IO.File.Exists(mappingsFullLocation))
    {
        throw new System.IO.FileNotFoundException("Weather mappings json is missing.");
    }
    var fileAsString = System.IO.File.ReadAllText(mappingsFullLocation);
    var weatherMappings = Newtonsoft.Json.JsonConvert.DeserializeObject<WeatherMappings>(fileAsString);

    // File helper engine to deserialize CSV data set
    var engine = new FileHelperEngine<DatasetAccidentReport>();
    var accidents = engine
        .ReadFile(accidentCsvFullLocation)
        .ToList();

    // Years of interest — hoisted so the array is not re-allocated for every element.
    var reportYears = new[] { 2017, 2018 };

    // Two top states with most accidents, years 2017-2018, omitting reports
    // without a weather condition.
    var accidentsOfTwoTopStates = accidents
        .Where(a => reportYears.Contains(a.StartTime.GetValueOrDefault().Year))
        .Where(a => !string.IsNullOrWhiteSpace(a.WeatherCondition))
        .GroupBy(a => a.State)
        .OrderByDescending(g => g.Count())
        .Take(2)
        .SelectMany(g => g.ToList())
        .Select((ar, idx) => new FiwareCombinedReport
        {
            FiwareWeatherReport = new FiwareWeatherReport(ar.WeatherCondition, weatherMappings, ar, idx + 1),
            FiwareTrafficDataReport = new FiwareTrafficReport(ar, idx + 1)
        })
        .ToList();

    return accidentsOfTwoTopStates;
}
// Verifies that a record type combining FieldOptional and FieldOrder attributes
// yields exactly five detected fields in the engine's options.
public void FieldOptionalPlusFieldOrderGood2()
{
    FieldHelperEngineSetup:
    FileHelperEngine<FieldOptionalPlusFieldOrderTypeGood2> sut =
        new FileHelperEngine<FieldOptionalPlusFieldOrderTypeGood2>();

    int detectedFields = sut.Options.FieldCount;

    Check.That(detectedFields).IsEqualTo(5);
}
/// <summary>
/// Loads PC-SAFT pure-compound parameters (pcsaft.dat) and binary interaction
/// parameters (pcsaft_ip.dat) from embedded resources into
/// CompoundParameters and InteractionParameters, keyed by CAS number.
/// First occurrence of a key wins; later duplicates are ignored.
/// </summary>
protected void ReadParameters()
{
    // Pure-compound parameters.
    PCSParam[] pcsaftdatac;
    FileHelperEngine<PCSParam> fh1 = new FileHelperEngine<PCSParam>();
    using (Stream filestr = System.Reflection.Assembly.GetExecutingAssembly().GetManifestResourceStream("DWSIM.Thermodynamics.AdvancedEOS.Data.pcsaft.dat"))
    using (StreamReader t = new StreamReader(filestr))
    {
        pcsaftdatac = fh1.ReadStream(t);
    }

    foreach (PCSParam pcsaftdata in pcsaftdatac)
    {
        // Self-associating compound: build the 2-site association matrices
        // from the kAiBi / epsilon2 scalars.
        if (pcsaftdata.kAiBi != 0d)
        {
            pcsaftdata.associationparams = "2\n[0 " + pcsaftdata.kAiBi + "; " + pcsaftdata.kAiBi + " 0]\n[0 " + pcsaftdata.epsilon2 + "; " + pcsaftdata.epsilon2 + " 0]";
        }
        if (!CompoundParameters.ContainsKey(pcsaftdata.casno))
        {
            CompoundParameters.Add(pcsaftdata.casno, pcsaftdata);
        }
    }

    // Binary interaction parameters. NOTE(review): the original ran this exact
    // loop twice back-to-back (copy-paste); the second pass was a no-op and has
    // been removed. The unused pathsep/resource-name locals were removed too.
    PCSIP[] pripc;
    FileHelperEngine<PCSIP> fh2 = new FileHelperEngine<PCSIP>();
    using (Stream filestr = System.Reflection.Assembly.GetExecutingAssembly().GetManifestResourceStream("DWSIM.Thermodynamics.AdvancedEOS.Data.pcsaft_ip.dat"))
    using (StreamReader t = new StreamReader(filestr))
    {
        pripc = fh2.ReadStream(t);
    }

    foreach (PCSIP ip in pripc)
    {
        if (!InteractionParameters.ContainsKey(ip.casno1))
        {
            InteractionParameters.Add(ip.casno1, new Dictionary<string, PCSIP>());
        }
        if (!InteractionParameters[ip.casno1].ContainsKey(ip.casno2))
        {
            // Clone so the stored parameter set is independent of the parsed array.
            InteractionParameters[ip.casno1].Add(ip.casno2, (PCSIP)ip.Clone());
        }
    }
}
/// <summary>
/// Reads the file at <paramref name="path"/> as UTF-8 and materializes every
/// record of type <typeparamref name="T"/> into a list via FileHelpers.
/// </summary>
/// <param name="path">Path of the file to read.</param>
/// <returns>All records parsed from the file.</returns>
public List<T> Load<T>(string path) where T : class
{
    FileHelperEngine<T> reader = new FileHelperEngine<T>(Encoding.UTF8);
    T[] rows = reader.ReadFile(path);
    return rows.ToList();
}
// Test setup: assigns a fresh engine bound to the SampleType record layout to the
// fixture's engine field. Presumably decorated with a per-test setup attribute
// (e.g. NUnit [SetUp]) — attribute not visible in this chunk, confirm in file.
public void Setup() { engine = new FileHelperEngine <SampleType>(); }
/// <summary>
/// Builds a CSV reader for UrenCSV rows: captures the settings snapshot from the
/// options wrapper and creates a FileHelpers engine bound to the UrenCSV layout.
/// </summary>
/// <param name="applicationSettings">Options wrapper whose current Value is stored.</param>
public UrenCSVReader(IOptions<ApplicationSettings> applicationSettings)
{
    _applicationSettings = applicationSettings.Value;
    _engine = new FileHelperEngine<UrenCSV>();
}
/// <summary>
/// Imports deal data from a CSV file into the object space: resolves or creates
/// stages, accumulates delivery units/items and payment units per record, and
/// finally deletes delivery/payment units that were not referenced by the file.
/// </summary>
/// <param name="os">Object space used to look up and delete persistent objects.</param>
/// <param name="file_name">Path of the CSV file to import (first line is skipped as a header).</param>
public void Import(IObjectSpace os, String file_name) {
    FileHelperEngine <DealDataImport> engine = new FileHelperEngine <DealDataImport>();
    engine.Options.IgnoreFirstLines = 1;
    engine.Options.IgnoreEmptyLines = true;
    // DealDataImport[] deal_data = engine.ReadStream(reader);
    DealDataImport[] deal_data = engine.ReadFile(file_name);
    // Caches of objects touched during this import; also used at the end to
    // decide which existing units to delete.
    IList <fmCOrder> orders = new List <fmCOrder>();
    IList <crmStage> stages = new List <crmStage>();
    IList <crmDeliveryUnit> delivery_units = new List <crmDeliveryUnit>();
    IList <crmDeliveryItem> delivery_items = new List <crmDeliveryItem>();
    IList <crmPaymentUnit> payment_units = new List <crmPaymentUnit>();
    // payment_items is only referenced by the commented-out code below.
    IList <crmPaymentItem> payment_items = new List <crmPaymentItem>();
    IList <csMaterial> materials = os.GetObjects <csMaterial>();
    foreach (DealDataImport record in deal_data) {
        fmCOrder order = null;
        crmStage stage = null;
        crmDeliveryUnit delivery_unit = null;
        crmDeliveryItem delivery_item = null;
        crmPaymentUnit payment_unit = null;
        crmPaymentItem payment_item = null;
        if (String.IsNullOrEmpty(record.StageCode)) {
            throw new ArgumentException("Stage Code is Empty", "StageCode");
        }
        // Stage codes beginning with "Adv" map to the fixed first (advance) stage;
        // anything else is resolved or created under it.
        if (record.StageCode.Substring(0, 3) == "Adv") {
            stage = StageStructure.FirstStage;
        }
        else {
            stage = StageStructure.Stages.FirstOrDefault(x => x.Code == record.StageCode);
            if (stage == null) {
                stage = StageStructure.FirstStage.SubStagesCreate();
                stage.Code = record.StageCode;
            }
            // Initialize stage defaults only the first time this stage is seen
            // during the import.
            if (!stages.Contains(stage)) {
                stage.StageType = Contract.StageType.FINANCE;
                stage.DeliveryMethod = DeliveryMethod.UNITS_SHEDULE;
                stage.PaymentMethod = PaymentMethod.SCHEDULE;
                // stage.DateEnd = stage.DateBegin;
                // stage.DateFinish = stage.DateEnd;
                stages.Add(stage);
            }
        }
        // Delivery data applies only to non-advance records.
        if (record.StageCode.Substring(0, 3) != "Adv") {
            if (String.IsNullOrEmpty(record.OrderCode)) {
                throw new ArgumentException("Order Code is Empty", "OrderCode");
            }
            order = orders.FirstOrDefault(x => x.Code == record.OrderCode);
            if (order == null) {
                order = os.FindObject <fmCOrder>(new BinaryOperator("Code", record.OrderCode, BinaryOperatorType.Equal));
                if (order == null) {
                    throw new ArgumentException("Order unknow", "OrderCode");
                }
                else {
                    orders.Add(order);
                }
                // NOTE(review): stage.Order is assigned only when the order was
                // not already in the local cache — confirm this is intended.
                stage.Order = order;
            }
            if (record.DateContract == null) {
                throw new ArgumentException("Date Contract is Empty", "DateContract");
            }
            // Reuse the delivery unit planned for this contract date, or create one.
            delivery_unit = stage.DeliveryPlan.DeliveryUnits.FirstOrDefault(x => x.DatePlane == record.DateContract);
            if (record.DateContract > stage.DateEnd) {
                stage.DateEnd = (DateTime)record.DateContract;
            }
            if (delivery_unit == null) {
                delivery_unit = stage.DeliveryPlan.DeliveryUnitCreate();
                delivery_unit.DatePlane = (DateTime)record.DateContract;
            }
            if (!delivery_units.Contains(delivery_unit)) {
                delivery_units.Add(delivery_unit);
            }
            delivery_unit.Order = order;
            if (record.Count == null) {
                throw new ArgumentException("Count is Empty", "Count");
            }
            if (record.Price == null) {
                throw new ArgumentException("Price is Empty", "Price");
            }
            if (String.IsNullOrEmpty(record.NomenclatureCode)) {
                throw new ArgumentException("Nomenclature Code is Empty", "NomenclatureCode");
            }
            // Codes containing "*I" or "*E" are skipped here — presumably
            // non-material (internal/external) positions; confirm with the data spec.
            if (!record.NomenclatureCode.Contains("*I") && !record.NomenclatureCode.Contains("*E")) {
                csMaterial material = materials.FirstOrDefault(x => x.CodeTechnical == record.NomenclatureCode);
                if (material == null) {
                    throw new ArgumentException("Nomenclature unknow", "NomenclatureCode");
                }
                delivery_item = delivery_unit.DeliveryItems.FirstOrDefault(x => x.Nomenclature == material);
                if (delivery_item == null) {
                    delivery_item = delivery_unit.DeliveryItemsCreateMaterial();
                    ((crmDeliveryMaterial)delivery_item).Material = material;
                }
                delivery_item.CostCalculateMethod = CostCalculateMethod.CALC_COST;
                delivery_item.NDSCalculateMethod = NDSCalculateMethod.FROM_COST;
                delivery_item.FullCalculateMethod = FullCalculateMethod.CALC_FULL;
                delivery_item.Price = (Decimal)record.Price;
                delivery_item.CountUnit = delivery_item.Nomenclature.BaseUnit;
                // Accumulate the count when the item was already seen in this
                // import; otherwise initialize it.
                if (delivery_items.Contains(delivery_item)) {
                    delivery_item.CountValue += (Decimal)record.Count;
                }
                else {
                    delivery_item.CountValue = (Decimal)record.Count;
                    delivery_items.Add(delivery_item);
                }
            }
        }
        // NOTE(review): this DateContract check duplicates the one inside the
        // non-"Adv" branch above; it is only effective for "Adv" records.
        if (record.DateContract == null) {
            throw new ArgumentException("Date Contract is Empty", "DateContract");
        }
        // Reuse the cashless payment unit planned for this contract date, or create one.
        payment_unit = stage.PaymentPlan.PaymentUnits.FirstOrDefault(x => x.DatePlane == record.DateContract && x is crmPaymentCasheLess);
        if (payment_unit == null) {
            payment_unit = stage.PaymentPlan.PaymentCasheLessCreate();
            payment_unit.DatePlane = (DateTime)record.DateContract;
            if (payment_unit.DatePlane > stage.DateFinish) {
                stage.DateFinish = payment_unit.DatePlane;
            }
        }
        // First record for a unit sets the sum; subsequent records accumulate.
        if (!payment_units.Contains(payment_unit)) {
            ((crmPaymentCasheLess)payment_unit).SummFull = (Decimal)record.SummaPayment;
            payment_units.Add(payment_unit);
        }
        else {
            ((crmPaymentCasheLess)payment_unit).SummFull += (Decimal)record.SummaPayment;
        }
        // payment_item = payment_unit.PaymentItems.FirstOrDefault(x => x.Order == order);
        // if (payment_item == null) {
        // payment_item = payment_unit.PaymentItemsCreateMoney();
        // }
        //if (payment_unit.PaymentItems.Count == 0) {
        // payment_item = payment_unit.PaymentItemsCreateMoney();
        //}
        //else {
        // payment_item = payment_unit.PaymentItems[0];
        //}
        //if (payment_items.Contains(payment_item)) {
        // payment_item.SummFull += (Decimal)record.SummaPayment;
        // payment_item.AccountSumma += (Decimal)record.SummaPayment;
        //}
        //else {
        // payment_item.SummFull = (Decimal)record.SummaPayment;
        // payment_item.AccountSumma = (Decimal)record.SummaPayment;
        // payment_items.Add(payment_item);
        //}
    }
    // Any delivery/payment unit on a touched stage that was not referenced by
    // this import is scheduled for deletion.
    IList <crmDeliveryUnit> del_delivery_units = new List <crmDeliveryUnit>();
    IList <crmPaymentUnit> del_payment_units = new List <crmPaymentUnit>();
    foreach (crmStage stage in stages) {
        foreach (crmDeliveryUnit delivery_unit in stage.DeliveryPlan.DeliveryUnits) {
            if (!delivery_units.Contains(delivery_unit)) {
                del_delivery_units.Add(delivery_unit);
            }
        }
        foreach (crmPaymentUnit payment_unit in stage.PaymentPlan.PaymentUnits) {
            if (!payment_units.Contains(payment_unit)) {
                del_payment_units.Add(payment_unit);
            }
        }
    }
    os.Delete(del_delivery_units);
    os.Delete(del_payment_units);
}
// Async round-trip test using the system default (ANSI) encoding.
public void EncodingAsyncANSI()
{
    RunAsyncTests(new FileHelperEngine<CustomersVerticalBar>(), Encoding.Default, "Good", "EncodingANSI.txt");
}
// Parses the text typed into the textbox with a CustomersVerticalBar engine
// and shows the resulting records in the property grid.
private void cmdRun_Click(object sender, EventArgs e)
{
    var parser = new FileHelperEngine(typeof(CustomersVerticalBar));
    var records = parser.ReadString(txtData.Text);
    grid1.SelectedObject = records;
}
// Async round-trip test using UTF-16 (Unicode) encoding.
public void EncodingAsyncUnicode()
{
    RunAsyncTests(new FileHelperEngine<CustomersVerticalBar>(), Encoding.Unicode, "Good", "EncodingUnicode.txt");
}
/// <summary>
/// Use an engine to read an array of objects from a file in the Data area.
/// </summary>
/// <param name="engine">Engine to read file</param>
/// <param name="pathElements">List of directories and a filename in Data area</param>
/// <returns>objects from file</returns>
public static T[] ReadTest<T>(FileHelperEngine<T> engine, params string[] pathElements) where T : class
{
    var fullPath = GetPath(pathElements);
    return engine.ReadFile(fullPath);
}
/// <summary>
/// Entry point: parses each config file named on the command line, collects
/// every part node carrying a ModuleDataTransmitter module, and writes the
/// combined results to "{Name}.csv".
/// </summary>
static void Main(string[] args)
{
    var filePaths = args?.ToList();
    if (filePaths == null || filePaths.Count == 0)
    {
        Common.PrintUsage(Name);
        return;
    }

    Console.WriteLine($"{Name}:");
    Console.WriteLine();

    var results = new List<ExportRecord>();
    var parser = new Parser();
    foreach (var filePath in filePaths)
    {
        Console.WriteLine($"Path: {filePath}");
        // Open read-only: this tool never writes to its input files.
        using (var stream = File.Open(filePath, FileMode.Open, FileAccess.Read))
        {
            Console.WriteLine($"Length: {stream.Length}");

            var configFile = parser.ParseConfigFile(filePath, stream);
            var nodes = configFile.RootNode.Descendants();
            var partNodes = nodes.Where(x => x.Type == NodeType.Part).ToList();
            Console.WriteLine($"Part nodes: {partNodes.Count}");

            // Materialize once: the original deferred query was enumerated twice
            // (once for Count(), once for the export), re-running the filter.
            var partsWithAntennas = partNodes
                .Where(x => x.Nodes
                    .Any(n => n.Type == NodeType.Module
                        && n.AttributeDefinitions.Any(ad =>
                            ad.Name == "name" && ad.Value == "ModuleDataTransmitter")))
                .ToList();
            Console.WriteLine($"Part nodes with antennas: {partsWithAntennas.Count}");

            results.AddRange(ConvertNodesToExportRecords(filePath, partsWithAntennas));
        }
        Console.WriteLine($"Finished {filePath}");
        Console.WriteLine();
    }

    Console.WriteLine($"Writing {Name}.csv");
    var engine = new FileHelperEngine<ExportRecord>();
    engine.HeaderText = engine.GetFileHeader();
    engine.WriteFile($"{Name}.csv", results);
    Console.WriteLine("Completed.");
    Console.WriteLine();
}
// Records written with optional quoting should read back and validate correctly.
public void ReadOptionalRead()
{
    engine = new FileHelperEngine(typeof(QuoteMode1));
    var records = Common.ReadTest(engine, @"Good\QuoteMode1.txt") as QuoteMode1[];
    ValidateData(records);
}
// Builds an engine through the (type, encoding) constructor overload, verifies
// the encoding was applied, then runs the shared test body against fileName.
private void RunConstructor(string fileName, Encoding enc) { engine = new FileHelperEngine(typeof(CustomersVerticalBar), enc); Assert.AreEqual(enc, engine.Encoding); CoreRunTest(fileName); }
// Passing null to ReadString should trigger the engine's argument validation
// (the expected exception is presumably declared on the test attribute — not
// visible in this chunk).
public void NullString()
{
    var parser = new FileHelperEngine<SampleType>();
    parser.ReadString(null);
}
/// <summary>
/// Writes the given records to <paramref name="path"/> as UTF-8 encoded text.
/// </summary>
/// <param name="path">Destination file path.</param>
/// <param name="records">Records to serialize.</param>
public void Write<T>(string path, IEnumerable<T> records) where T : class
{
    var writer = new FileHelperEngine<T>(Encoding.UTF8);
    writer.WriteFile(path, records);
}
/// <summary>
/// Inserts the parsed CSV data into the DB Prices table.
/// Skips the batch entirely when a row with the same TransactionDate already exists.
/// </summary>
private void InsertCSVDataToDB()
{
    // FileHelper parses each CSV file in ./temp (the downloaded archive itself is skipped).
    var engine = new FileHelperEngine<LandCsv>();
    var files = Directory.GetFiles("./temp");
    List<Prices> prices = new List<Prices>();
    foreach (var file in files)
    {
        if (file.Contains("temp.zip"))
        {
            continue;
        }
        var parseData = engine.ReadFile(file);
        // Skip(2): presumably the first two rows are header rows — TODO confirm
        // against the data source.
        var temp = parseData.Skip(2).Select(data => new Prices
        {
            District = data.TheVillagesAndTownsUrbanDistrict,
            TransactionSign = data.TransactionSign,
            HouseNumberPlate = data.HouseNumberPlate,
            AreaSquareMeter = data.AreaSquareMeter,
            TransactionDate = data.TransactionDate,
            TransactionNumber = data.TransactionNumber,
            TotalFlorNumber = data.TotalFlorNumber,
            BuildingState = data.BuildingState,
            MainUse = data.MainUse,
            MainBuildingMaterials = data.MainBuildingMaterials,
            CompleteDate = data.CompleteDate,
            ShiftingTotalArea = data.BuildingShiftingTotalArea,
            Room = data.Room,
            Hall = data.Hall,
            Health = data.Health,
            Compartmented = data.Compartmented,
            ManageOrganization = data.ManageOrganization,
            TotalPrices = data.TotalPrices,
            UnitPrices = data.UnitPrices,
            BerthCategory = data.BerthCategory,
            BerthAreaSquareMeter = data.BerthAreaSquareMeter,
            BerthTotalPrices = data.BerthTotalPrices,
            Note = data.Note,
            MainBuildingArea = data.MainBuildingArea,
            OutbuildingArea = data.OutbuildingArea,
            BalconyArea = data.BalconyArea,
            Elevator = data.Elevator
        });
        prices.AddRange(temp);
    }

    // Guard: no rows parsed — nothing to insert. The original
    // prices.Take(1).FirstOrDefault().TransactionDate would have thrown a
    // NullReferenceException on an empty list.
    if (prices.Count == 0)
    {
        return;
    }

    using (IUnitOfWork uow = _unitOfWorkManager.Begin())
    {
        uow.BeginTransaction();
        // Check whether this batch was already inserted; if so, roll back and return.
        string TransactionDate = prices[0].TransactionDate;
        Prices data = uow.PricesRepository.Get(new { TransactionDate }).FirstOrDefault();
        if (data != null)
        {
            uow.Rollback();
            return;
        }
        // Insert the new rows within the open transaction.
        uow.PricesRepository.Create(prices, uow.DbTransaction);
        uow.Commit();
    }
}
// Reading an empty string with a complex array-field record type — exercises
// the engine's empty-input path (expected outcome is presumably declared on
// the test attribute, which is outside this chunk).
public void ArrayFieldsComplex()
{
    var parser = new FileHelperEngine<ArrayComplexType>();
    parser.ReadString("");
}