public void AppendOneToFile()
{
    engine = new FileHelperEngine(typeof(SampleType));

    // Two initial records written to a fresh file.
    var written = new SampleType[2];
    written[0] = new SampleType();
    written[0].Field1 = DateTime.Now.AddDays(1).Date;
    written[0].Field2 = "je";
    written[0].Field3 = 0;
    written[1] = new SampleType();
    written[1].Field1 = DateTime.Now.Date;
    written[1].Field2 = "ho";
    written[1].Field3 = 2;
    engine.WriteFile(@"test.txt", written);

    // A single extra record appended afterwards.
    var extra = new SampleType();
    extra.Field1 = DateTime.Now.Date;
    extra.Field2 = "h2";
    extra.Field3 = 2;
    engine.AppendToFile(@"test.txt", extra);

    // The file must now contain the two originals followed by the appended one.
    var readBack = (SampleType[]) engine.ReadFile(@"test.txt");
    Assert.AreEqual(3, readBack.Length);
    Assert.AreEqual(written[0].Field1, readBack[0].Field1);
    Assert.AreEqual(written[1].Field1, readBack[1].Field1);
    Assert.AreEqual(DateTime.Now.Date, readBack[2].Field1);
}
/// <summary>
/// Benchmark: imports a FileHelpers text file into NDatabase, then times a
/// "count EGYPT rows" query done via FileHelpers re-parse vs an NDatabase query.
/// </summary>
static void Main()
{
    Console.WriteLine("1. Clean old data.");
    OdbFactory.Delete("FileOut.ndb");
    OdbFactory.Delete("FileOut.txt");
    TimeSnapshot();

    Console.WriteLine("2. Read input file.");
    var engine = new FileHelperEngine(typeof(ExportData));
    var res = engine.ReadFile("FileIn.txt") as ExportData[];
    TimeSnapshot();

    Console.WriteLine("3. Prepare NDatabase db.");
    using (var odb = OdbFactory.Open("FileOut.ndb"))
    {
        Console.WriteLine("3a. Store items into NDatabase.");
        foreach (var exportData in res)
            odb.Store(exportData);
        TimeSnapshot();

        Console.WriteLine("3b. Create index on NDatabase.");
        odb.IndexManagerFor<ExportData>().AddIndexOn("countryIndex", new[] { "CountryOrArea" });
    }
    TimeSnapshot();

    Console.WriteLine("4. Prepare final input file.");
    // BUGFIX: step 5 below reads FileOut.txt, which step 1 deleted; the write
    // used to be only a comment, so the read would fail. Write it for real.
    engine.WriteFile("FileOut.txt", res);
    TimeSnapshot();

    Console.WriteLine("5. Start counting EGYPT by FileHelpers.");
    var stopwatch = new Stopwatch();
    stopwatch.Start();
    res = engine.ReadFile("FileOut.txt") as ExportData[];
    var count = res.Count(x => x.CountryOrArea.Equals("EGYPT"));
    stopwatch.Stop();
    Console.WriteLine("Egypt items: {0}", count);
    // Typo fixed: "Ellapsed" -> "Elapsed".
    Console.WriteLine("Elapsed: {0} ms", stopwatch.ElapsedMilliseconds);

    // Numbering fixed: this is step 6, not a second step 5.
    Console.WriteLine("6. Start counting EGYPT by NDatabase.");
    stopwatch.Reset();
    stopwatch.Start();
    long count2;
    using (var odb = OdbFactory.Open("FileOut.ndb"))
    {
        count2 = (from data in odb.AsQueryable<ExportData>()
                  where data.CountryOrArea.Equals("EGYPT")
                  select data).Count();
    }
    stopwatch.Stop();
    Console.WriteLine("Egypt items: {0}", count2);
    Console.WriteLine("Elapsed: {0} ms", stopwatch.ElapsedMilliseconds);
    TimeSnapshot();
}
public StartAndEndPair[] Load()
{
    var engine = new FileHelperEngine<CommentedStartAndEndPair>();
    var records = engine.ReadFile(_fileName);

    var result = new List<StartAndEndPair>();
    foreach (var record in records)
    {
        // When tag matching is enabled, keep only records whose comment
        // carries the required tag; otherwise keep every record.
        if (!_matchRequiredTag || record.Comment.Contains(_requiredTag))
            result.Add(new StartAndEndPair(record.Start, record.End));
    }

    return result.ToArray();
}
/// <summary>
/// Parses the CSV index of XBRL instance documents and extracts the tagged
/// text blocks of each referenced XML file into XbrlTextBlocks.csv.
/// </summary>
static void Main(string[] args)
{
    // Creates new FileHelperEngine to parse csv file.
    FileHelperEngine engine = new FileHelperEngine(typeof(XbrlUrl));
    // BUGFIX: direct cast instead of 'as' — a failed cast now throws
    // immediately instead of yielding null and crashing in the foreach.
    XbrlUrl[] xbrlUrls = (XbrlUrl[]) engine.ReadFile("XbrlInstanceDocs.csv");

    Reader reader = new Reader();
    using (FileStream fs = File.Create("XbrlTextBlocks.csv"))
    using (StreamWriter writer = new StreamWriter(fs))
    {
        // Iterate through each xml url, strip out enclosing quotations and
        // read xml file contents for tags, writing contents to the new file.
        foreach (var url in xbrlUrls)
        {
            // fileId scoped here: it is only used inside the loop body.
            string fileId = url.Id.Replace("\"", "").Replace("\\", "");
            var bareUrl = url.XmlUrl.Replace("\"", "");
            reader.ReadXml(bareUrl, fileId, fs, writer);
        }
    }

    Console.WriteLine("DONE");
    Console.ReadLine();
}
public static ConcurrentDictionary<int, ILinie> ReadFile(String filePath)
{
    var engine = new FileHelperEngine<LinieRecord>();
    var linien = new ConcurrentDictionary<int, ILinie>();
    try
    {
        engine.ErrorManager.ErrorMode = ErrorMode.SaveAndContinue;
        engine.Encoding = Encoding.UTF8;

        foreach (LinieRecord record in engine.ReadFile(filePath))
        {
            // Copy the parsed record into the program model.
            ILinie line = new Linie();
            line.Bezeichnung = record.Bezeichnung;
            line.Echtzeit = record.Echtzeit;
            line.Id = record.Id;
            line.Reihenfolge = record.Reihenfolge;
            line.Verkehrsmittel = Linie.VerkehrsmittelConverter(record.Verkehrsmittel);

            // Store the model in the result collection, replacing any
            // previous entry that shares the same id.
            linien.AddOrUpdate(line.Id, line, (key, oldValue) => line);
        }
    }
    catch (Exception ex)
    {
        // The document could not be parsed (missing or already opened).
        throw new VtmParsingException("Beim Versuch die Linien zu parsen ist ein Fehler aufgetreten!", filePath, ex);
    }
    return linien;
}
public void AppendToFile()
{
    var engine = new FileHelperEngine<SampleType>();

    var written = new SampleType[2];
    written[0] = new SampleType();
    written[0].Field1 = DateTime.Now.AddDays(1).Date;
    written[0].Field2 = "je";
    written[0].Field3 = 0;
    written[1] = new SampleType();
    written[1].Field1 = DateTime.Now.Date;
    written[1].Field2 = "ho";
    written[1].Field3 = 2;

    // Write the pair, then append the very same pair again.
    engine.WriteFile(@"test.txt", written);
    engine.AppendToFile(@"test.txt", written);

    // The file must now contain both records twice over, in order.
    var readBack = (SampleType[]) engine.ReadFile(@"test.txt");
    Assert.AreEqual(4, readBack.Length);
    Assert.AreEqual(written[0].Field1, readBack[0].Field1);
    Assert.AreEqual(written[1].Field1, readBack[1].Field1);
    Assert.AreEqual(written[0].Field1, readBack[2].Field1);
    Assert.AreEqual(written[1].Field1, readBack[3].Field1);
}
public void WriteFile2()
{
    var engine = new FileHelperEngine<SampleType>();

    var records = new SampleType[2];
    records[0] = new SampleType();
    records[0].Field1 = DateTime.Now.AddDays(1);
    records[0].Field2 = "je";
    records[0].Field3 = 0;
    records[1] = new SampleType();
    records[1].Field1 = DateTime.Now;
    records[1].Field2 = "ho";
    records[1].Field3 = 2;

    try
    {
        // A negative max-records argument means "write all records".
        engine.WriteFile(@"miprueba.txt", records, -10);
        records = (SampleType[]) engine.ReadFile(@"miprueba.txt");
    }
    finally
    {
        // Clean up even when WriteFile/ReadFile throws, so a failed run
        // does not leave a stale file behind for the next test.
        if (File.Exists(@"miprueba.txt"))
            File.Delete(@"miprueba.txt");
    }

    Assert.AreEqual(2, records.Length);
}
static void ErrorHandling()
{
    Console.WriteLine("Testing error handling...");
    Console.WriteLine();

    // These two lines are all that is needed to use the library.
    var engine = new FileHelperEngine(typeof(Customer));
    engine.ErrorManager.ErrorMode = ErrorMode.SaveAndContinue;

    var customers = (Customer[]) engine.ReadFile(@"..\Data\CustomersWithErrors.txt");

    // Report parse statistics and the first failure, if anything went wrong.
    if (engine.ErrorManager.ErrorCount > 0)
    {
        Console.WriteLine($"Records: {engine.TotalRecords}");
        Console.WriteLine($"Successful: {customers.Length}");
        Console.WriteLine($"With Error: {engine.ErrorManager.ErrorCount}");
        Console.WriteLine($"Error: {engine.ErrorManager.Errors[0].ExceptionInfo.Message}");
    }

    engine.ErrorManager.SaveErrors("errors.txt");
    Console.ReadLine();

    if (File.Exists("errors.txt"))
        File.Delete("errors.txt");
}
/// <summary>
/// Reads sim-area records from the given file and inserts any that are not
/// already present (matched by name) into the database.
/// </summary>
static void Import(string simAreasPath, DbConnection connection)
{
    // FIX: the context holds database resources — dispose it deterministically.
    // (contextOwnsConnection is false, so the caller's connection stays open.)
    using (var simarea = new SimAreasContext(connection, false, new DropCreateDatabaseAlways<SimAreasContext>()))
    {
        var engine = new FileHelperEngine(typeof(SimAreas));
        var entries = (SimAreas[]) engine.ReadFile(simAreasPath);

        foreach (var entry in entries)
        {
            // Skip areas that already exist (matched by name).
            var area = (from s in simarea.SimAreas
                        where s.SimAreaName == entry.SimAreaName
                        select s).FirstOrDefault();
            if (area == null)
            {
                area = new SimArea
                {
                    SimAreaName = entry.SimAreaName,
                    Latitude = entry.Latitude,
                    Longitude = entry.Longitude,
                    Height = entry.Height,
                    GeoidSeparation = entry.GeoidSeparation,
                    OpsLimitFile = entry.OpsLimitFile,
                    SimLimitFile = entry.SimLimitFile,
                };
                simarea.SimAreas.Add(area);
                simarea.SaveChanges();
            }
        }
    }
}
/// <summary>
/// Process the delimited file twice,
/// once with the simple engine, once with Async
/// </summary>
/// <remarks>
/// This is a simple sample of using the Normal engine and the Async engine
/// </remarks>
public void ReadWrite()
{
    var engine = new FileHelperEngine<Orders>();

    // to Read use:
    Orders[] res = engine.ReadFile(@"C:\TestIn.txt");

    // to Write use:
    engine.WriteFile(@"C:\TestOut.txt", res);

    foreach (Orders order in res)
    {
        Console.WriteLine("Order Info:");
        Console.WriteLine(order.CustomerID + " - " + order.OrderDate.ToString("dd/MM/yy"));
    }

    var asyncEngine = new FileHelperAsyncEngine<Orders>();
    asyncEngine.BeginReadFile(@"C:\TestIn.txt");
    try
    {
        Orders ord;
        while (asyncEngine.ReadNext() != null)
        {
            ord = asyncEngine.LastRecord;
            // your code here
            Console.WriteLine(ord.CustomerID);
        }
    }
    finally
    {
        // FIX: close the async engine so the underlying file handle is
        // released even if an exception is thrown mid-read.
        asyncEngine.Close();
    }
}
public static ConcurrentDictionary<int, IHaltestelle> ReadFile(String filePath)
{
    var engine = new FileHelperEngine<HaltestelleRecord>();
    var haltestellen = new ConcurrentDictionary<int, IHaltestelle>();
    try
    {
        engine.ErrorManager.ErrorMode = ErrorMode.SaveAndContinue;
        engine.Encoding = Encoding.UTF8;

        foreach (HaltestelleRecord record in engine.ReadFile(filePath))
        {
            // Copy the parsed record into the program model.
            IHaltestelle stop = new Haltestelle();
            Point location = new Point(record.XKoord, record.YKoord);
            stop.Diva = record.Diva;
            stop.Id = record.Id;
            stop.Location = location;
            stop.Name = record.Name;

            // Store the model in the result collection, replacing any
            // previous entry that shares the same id.
            haltestellen.AddOrUpdate(stop.Id, stop, (key, oldValue) => stop);
        }
    }
    catch (Exception ex)
    {
        // The document could not be parsed (missing or already opened).
        throw new VtmParsingException("Beim Versuch die Haltestellen zu parsen ist ein Fehler aufgetreten!", filePath, ex);
    }
    return haltestellen;
}
public void EncodingAdvanced4()
{
    var engine = new FileHelperEngine(typeof(EncodingRecord));

    var res = (EncodingRecord[]) engine.ReadFile(FileTest.Good.EncodingAdv3.Path);

    // FIX: Assert.AreEqual takes (expected, actual); the original call had
    // them reversed, which garbles the failure message on a mismatch.
    Assert.AreEqual(18, res.Length);
}
public void GetFileHeader()
{
    var engine = new FileHelperEngine<CustomersVerticalBar>();

    // ReadFile is invoked only so the engine processes the file; the parsed
    // records themselves are not needed here (unused local removed).
    engine.ReadFile(FileTest.Good.CustomersVerticalBarDemo.Path);

    Check.That(engine.GetFileHeader())
        .IsEqualTo("CustomerID|CompanyName|ContactName|ContactTitle|Address|City|Country");
}
/// <summary>
/// Re-imports the 2012 CMS RVU fee schedule into RavenDB: purges existing
/// CmsRvu2012a documents, then stores the freshly parsed records in batches.
/// </summary>
static void Main(string[] args)
{
    var engine = new FileHelperEngine<Rvu2012Record>();
    var fsses = engine.ReadFile("Files\\PPRRVU12.csv");

    var store = new DocumentStore { Url = "http://localhost:8080" };
    store.Initialize();

    // Purge any previously imported documents, 1024 at a time.
    using (IDocumentSession session = store.OpenSession())
    {
        session.Advanced.MaxNumberOfRequestsPerSession = 999;
        List<CmsRvu2012a> dels; // FIX: dropped dead initializer — assigned in the loop.
        do
        {
            dels = session
                .Query<CmsRvu2012a>()
                .Customize(x => x.WaitForNonStaleResults())
                .Take(1024)
                .ToList();
            dels.ForEach(session.Delete);
            session.SaveChanges();
        } while (dels.Count > 0);
    }

    // Translate each raw RVU record into a document and store in batches.
    using (IDocumentSession session = store.OpenSession())
    {
        session.Advanced.MaxNumberOfRequestsPerSession = 999;
        var finals = fsses.Select(
            rvu => new CmsRvu2012a
            {
                Hcpcs = rvu.Hcpcs,
                Modifier = string.IsNullOrEmpty(rvu.Modifier) ? null : rvu.Modifier,
                Work = rvu.WorkRvu ?? 0,
                TransitionedFacilityPracticeExpense = rvu.TransitionedFacilityPracticeExpenseRvu ?? 0,
                // FIX: '== "NA" ? true : false' collapsed to the comparison itself.
                TransitionedFacilityPracticeExpenseIndicator = rvu.TransitionedFacilityNaIndicator == "NA",
                TransitionedNonFacilityPracticeExpense = rvu.TransitionedNonFacilityPractiveExpenseRvu ?? 0,
                TransitionedNonFacilityPracticeExpenseIndicator = rvu.TransitionedNonFacilityNaIndicator == "NA",
                Malpractice = rvu.MalpracticeRvu ?? 0
            })
            .ToList();

        // SaveChanges every 1024 stores to keep each round-trip bounded.
        int i = 0;
        foreach (var final in finals)
        {
            session.Store(final);
            i++;
            if (i % 1024 == 0)
                session.SaveChanges();
        }
        session.SaveChanges();
    }
}
public void Sort3()
{
    engine = new FileHelperEngine(typeof(CustomersVerticalBar));
    object[] records = engine.ReadFile(FileTest.Good.Sort1.Path);

    // Sorting these records directly is expected to be rejected.
    Assert.Throws<BadUsageException>(() => CommonEngine.SortRecords(records));
}
public void MultilineFull()
{
    var engine = new FileHelperEngine<FHClient>();

    var records = engine.ReadFile(TestCommon.GetPath("Good", "MultilineFull.txt"));

    // All 16 multiline records must be parsed and counted by the engine.
    Assert.AreEqual(16, records.Length);
    Assert.AreEqual(16, engine.TotalRecords);
}
public void Sort3()
{
    var engine = new FileHelperEngine<CustomersVerticalBar>();
    var records = engine.ReadFile(FileTest.Good.Sort1.Path);

    // Sorting these records directly is expected to be rejected.
    Assert.Throws<BadUsageException>(() => CommonEngine.SortRecords(records));
}
public void DiscardFirst4()
{
    var engine = new FileHelperEngine<DiscardType2>();

    var records = engine.ReadFile(FileTest.Good.DiscardFirst2.Path);

    Assert.AreEqual(4, records.Length);
    Assert.AreEqual(new DateTime(1314, 12, 11), records[0].Field1);
    // The discarded leading line must be captured as the header text.
    Assert.AreEqual(mExpectedLongHeaderText, engine.HeaderText);
}
public void MultilineFull()
{
    engine = new FileHelperEngine(typeof(FHClient));

    object[] records = engine.ReadFile(TestCommon.GetPath("Good", "MultilineFull.txt"));

    // All 16 multiline records must be parsed and counted by the engine.
    Assert.AreEqual(16, records.Length);
    Assert.AreEqual(16, engine.TotalRecords);
}
//-> Name:Before Read Event Handling
//-> Description:Show how to implement read before event
//-> File:RunEngine.cs
/// <summary>
/// reads report.inp and skips all the records that are not detail records using a simple criteria
/// </summary>
public override void Run()
{
    var engine = new FileHelperEngine<OrdersFixed>();

    // Hook the filter in before parsing starts.
    engine.BeforeReadRecord += BeforeEvent;

    foreach (var value in engine.ReadFile("report.inp"))
        Console.WriteLine("Customer: {0} Freight: {1}", value.CustomerID, value.Freight);
}
//-> Name:After Write Event Handling
//-> Description:Show how to implement write after event
//-> File:RunEngine.cs
/// <summary>
/// Run a record through engine using the write event to filter out unwanted details
/// </summary>
public override void Run()
{
    var engine = new FileHelperEngine<OrdersFixed>();
    var records = engine.ReadFile("Input.txt");

    // Attach the filter, then write: the event vetoes unwanted records.
    engine.AfterWriteRecord += AfterWriteEvent;
    engine.WriteFile("output.txt", records);
}
public void Sort5()
{
    var engine = new FileHelperEngine<CustomersSort>();

    CustomersSort[] records = engine.ReadFile(FileTest.Good.Sort1.Path);
    Assert.AreEqual(8, records.Length);

    // Sorting by a field that does not exist on the record type must throw.
    Assert.Throws<BadUsageException>(
        () => CommonEngine.SortRecordsByField(records, "CompanyNameNoExistHere"));
}
public void DiscardFirst2()
{
    var engine = new FileHelperEngine<DiscardType1>();

    var records = engine.ReadFile(FileTest.Good.DiscardFirst1.Path);

    // TotalRecords must agree with the returned array after discarding.
    Assert.AreEqual(engine.TotalRecords, records.Length);
    Assert.AreEqual(4, records.Length);
    Assert.AreEqual(new DateTime(1314, 12, 11), records[0].Field1);
}
public void ReadMaxRecords05()
{
    var engine = new FileHelperEngine<SampleType>();

    // Asking for at most zero records must yield an empty, error-free read.
    var records = (SampleType[]) engine.ReadFile(FileTest.Good.Test1.Path, 0);

    Assert.AreEqual(0, records.Length);
    Assert.AreEqual(0, engine.TotalRecords);
    Assert.AreEqual(0, engine.ErrorManager.ErrorCount);
}
//-> File:Example.cs
/// <summary>
/// Execute the engine and get some results
/// </summary>
public override void Run()
{
    var engine = new FileHelperEngine<Orders>();

    foreach (var record in engine.ReadFile("Input.txt"))
    {
        // Dump the fields of each parsed order.
        this.Console.WriteLine(record.CustomerID);
        this.Console.WriteLine(record.OrderDate.ToString("dd/MM/yyyy"));
        this.Console.WriteLine(record.Freight);
    }
}
public void ReadFileMaxRecords06()
{
    var engine = new FileHelperEngine<SampleType>();

    // Limiting the read to one record must parse exactly one, cleanly.
    SampleType[] records = engine.ReadFile(FileTest.Good.Test1.Path, 1);

    Assert.AreEqual(1, records.Length);
    Assert.AreEqual(1, engine.TotalRecords);
    Assert.AreEqual(0, engine.ErrorManager.ErrorCount);
}
public override IEnumerable<Row> Execute(IEnumerable<Row> rows)
{
    // Parse users.txt with the configured record type and stream each
    // parsed record out as a Row.
    var engine = new FileHelperEngine(_tblClass);
    foreach (object record in engine.ReadFile("users.txt"))
        yield return Row.FromObject(record);
}
public void CsvToFixedLengthCommonAsync()
{
    CommonEngine.TransformFileAsync(Common.TestPath("Good\\Transform1.txt"),
        typeof(FromClass),
        Common.TestPath("Good\\transformout.txt"),
        typeof(ToClass));

    FileHelperEngine engine = new FileHelperEngine(typeof(ToClass));
    ToClass[] res;
    try
    {
        res = (ToClass[]) engine.ReadFile(Common.TestPath("Good\\transformout.txt"));
    }
    finally
    {
        // FIX: delete the generated file even if reading it back throws,
        // so a broken run cannot poison the next one.
        if (File.Exists(Common.TestPath("Good\\transformout.txt")))
            File.Delete(Common.TestPath("Good\\transformout.txt"));
    }

    Assert.AreEqual(6, res.Length);
}
//-> /File
//-> Done !! You actually parse the file with:
public override void Run()
{
    //-> File:RunEngine.cs
    var engine = new FileHelperEngine<PriceRecord>();

    foreach (var product in engine.ReadFile("Input.txt"))
        Console.WriteLine("Product {0} price {1}", product.ProductId, product.PriceList);
    //-> /File
}
public void ReadFileMaxRecords03()
{
    var engine = new FileHelperEngine(typeof(SampleType));

    // A negative limit reads everything: all four records, with no errors.
    var records = (SampleType[]) engine.ReadFile(FileTest.Good.Test1.Path, -1);

    Assert.AreEqual(4, records.Length);
    Assert.AreEqual(4, engine.TotalRecords);
    Assert.AreEqual(0, engine.ErrorManager.ErrorCount);
}
static void Main(string[] args)
{
    // Tutorial program: parse a roster CSV by hand and with FileHelpers,
    // serialize/deserialize the list as XML, then append a new entry.

    // A backslash is a special escape character, so a literal quote inside
    // a string must itself be escaped. The commented line below would not
    // compile because its inner quotes are unescaped:
    //string illegalQuoteString = "Don't "Quote" me on that!";
    string properlyEscapedQuoteString = "Don't \"Quote\" me on that!";

    // Just as the dollar sign enables string interpolation, the @ symbol
    // makes a verbatim string where backslashes need no escaping.
    string myExampleFilePath = "C:\\Some\\Path\\To\\A\\File";
    // Both are correct, but the @ form is easier to read:
    string myBetterExampleFilePath = @"C:\Some\Path\To\A\File";

    Console.WriteLine("*** Creating the list by paring the file ourselves ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    // A generic List<RosterItem> gives us Add(), Remove(), Find() and
    // FindAll() for free; the <> angle brackets take the element type —
    // a pattern you will see throughout C# libraries.
    List<RosterItem> myRoster = new List<RosterItem>();

    // Read the CSV straight into the list. The file is set to "Copy Always",
    // so it sits next to the EXE and no full path is needed.
    myRoster = ReadFileToRosterList("Roster.csv");

    // Print each roster item using string interpolation ($"...").
    foreach (RosterItem item in myRoster)
    {
        Console.WriteLine($"Player: {item.FirstName} {item.LastName}, Position: {item.Position}");
        // Quarter-second pause between lines for a little drama.
        System.Threading.Thread.Sleep(250);
    }

    Console.WriteLine("");
    Console.WriteLine("*** Now let's do it using the file helpers library ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    // Convert the CSV to an object list using FileHelpers (from NuGet).
    // The record class only needs a [DelimitedRecord(",")] attribute —
    // about 4 lines of code in total.
    var engine = new FileHelperEngine<RosterItem>();
    // We don't want to read the header line of the file.
    engine.Options.IgnoreFirstLines = 1;
    var result = engine.ReadFile("Roster.csv");
    // NOTE(review): the loop below iterates myRoster (the hand-parsed list),
    // not 'result' from FileHelpers — presumably intentional for the demo,
    // but 'result' is otherwise unused; confirm.

    // Same printout, this time with a for loop instead of foreach — use a
    // for loop when you need the index or a counter while looping.
    for (int i = 0; i < myRoster.Count; i++)
    {
        // Assign the element to a local for readability; you could of
        // course just use myRoster[i].FirstName directly.
        RosterItem item = myRoster[i];
        Console.WriteLine($"Player: {item.FirstName} {item.LastName}, Position: {item.Position}");
        System.Threading.Thread.Sleep(250);
    }

    // Serialization turns an object or object list into text (usually XML
    // or JSON); deserialization turns that text back into objects.
    // This code requires the XML.Serialization library.
    Console.WriteLine("");
    Console.WriteLine("*** Now let's serialize our list ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    // Create a new XmlSerializer instance for List<RosterItem>.
    XmlSerializer SerializerObj = new XmlSerializer(typeof(List<RosterItem>));

    // Create a filestream so we can write the serialized list to a file.
    using (TextWriter WriteFileStream = new StreamWriter("roster.xml"))
    {
        SerializerObj.Serialize(WriteFileStream, myRoster);
    }

    // Open the xml file with notepad.
    Console.WriteLine("Opening our serialized data in notepad...");
    System.Threading.Thread.Sleep(500);
    Process.Start("notepad.exe", "roster.xml");

    Console.WriteLine("");
    Console.WriteLine("*** Now let's deserialize our list ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    // Deserialize the XML file: open a read-only stream and cast the
    // deserialized object back to List<RosterItem> (the parentheses cast).
    using (FileStream ReadFileStream = new FileStream("roster.xml", FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        List<RosterItem> reLoadedRoster = (List<RosterItem>)SerializerObj.Deserialize(ReadFileStream);
        Console.WriteLine($"We reloaded {reLoadedRoster.Count} items from our xml file.");
    }

    Console.WriteLine("");
    Console.WriteLine("*** Now let's add a new item to the list ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    // New entries must carry valid data; in a console app there are no
    // input controls, so validation is harder than in a GUI.
    // New entries also need a unique ID. The file's items are numbered
    // sequentially, so Count + 1 yields the next ID — a known shortcut
    // (there are many other ways to do this).
    int NextId = myRoster.Count + 1;

    // Prompt for the new roster item (broken out into its own method).
    RosterItem newRosterItem = GetRosterItem(NextId);

    // Add the new item to the list.
    myRoster.Add(newRosterItem);

    Console.WriteLine($"There are now {myRoster.Count} items in our list.");
    Console.WriteLine($"You just added {newRosterItem.FirstName} {newRosterItem.LastName} to the roster.");
    //Console.WriteLine(myString);
    Console.ReadLine();
}
public void InNewLine4Bad()
{
    engine = new FileHelperEngine(typeof(InNewLineType2));

    // A malformed in-new-line layout in the input must be rejected.
    Assert.Throws<BadUsageException>(
        () => engine.ReadFile(Common.TestPath(@"Bad\InNewLine4.txt")));
}
/// <summary>
/// Reads the ENADE 2014 micro-data file, aggregates grades and student counts
/// by state, course and income bracket, then writes the aggregates out as CSV
/// (via FileHelpers) and as line-delimited JSON (via Json.NET).
/// </summary>
static void Main(string[] args)
{
    #region Codes to names dictionaries
    // IBGE state codes -> state abbreviations.
    Dictionary<string, string> states = new Dictionary<string, string>
    {
        { "11", "RO" }, { "12", "AC" }, { "13", "AM" }, { "14", "RR" }, { "15", "PA" },
        { "16", "AP" }, { "17", "TO" }, { "21", "MA" }, { "22", "PI" }, { "23", "CE" },
        { "24", "RN" }, { "25", "PB" }, { "26", "PE" }, { "27", "AL" }, { "28", "SE" },
        { "29", "BA" }, { "31", "MG" }, { "32", "ES" }, { "33", "RJ" }, { "35", "SP" },
        { "41", "PR" }, { "42", "SC" }, { "43", "RS" }, { "50", "MS" }, { "51", "MT" },
        { "52", "GO" }, { "53", "DF" }
    };
    // ENADE course codes -> course names.
    Dictionary<string, string> courses = new Dictionary<string, string>
    {
        { "21", "ARQUITETURA E URBANISMO" },
        { "72", "TECNOLOGIA EM ANÁLISE E DESENVOLVIMENTO DE SISTEMAS" },
        { "73", "TECNOLOGIA EM AUTOMAÇÃO INDUSTRIAL" },
        { "76", "TECNOLOGIA EM GESTÃO DA PRODUÇÃO INDUSTRIAL" },
        { "79", "TECNOLOGIA EM REDES DE COMPUTADORES" },
        { "701", "MATEMÁTICA(BACHARELADO)" },
        { "702", "MATEMÁTICA(LICENCIATURA)" },
        { "903", "LETRAS - PORTUGUÊS(BACHARELADO)" },
        { "904", "LETRAS - PORTUGUÊS(LICENCIATURA)" },
        { "905", "LETRAS - PORTUGUÊS E INGLÊS(LICENCIATURA)" },
        { "906", "LETRAS - PORTUGUÊS E ESPANHOL(LICENCIATURA)" },
        { "1401", "FÍSICA(BACHARELADO)" },
        { "1402", "FÍSICA(LICENCIATURA)" },
        { "1501", "QUÍMICA(BACHARELADO)" },
        { "1502", "QUÍMICA(LICENCIATURA)" },
        { "1601", "CIÊNCIAS BIOLÓGICAS(BACHARELADO)" },
        { "1602", "CIÊNCIAS BIOLÓGICAS(LICENCIATURA)" },
        { "2001", "PEDAGOGIA(LICENCIATURA)" },
        { "2401", "HISTÓRIA(BACHARELADO)" },
        { "2402", "HISTÓRIA(LICENCIATURA)" },
        { "2501", "ARTES VISUAIS(LICENCIATURA)" },
        { "3001", "GEOGRAFIA(BACHARELADO)" },
        { "3002", "GEOGRAFIA(LICENCIATURA)" },
        { "3201", "FILOSOFIA(BACHARELADO)" },
        { "3202", "FILOSOFIA(LICENCIATURA)" },
        { "3502", "EDUCAÇÃO FÍSICA(LICENCIATURA)" },
        { "4004", "CIÊNCIA DA COMPUTAÇÃO(BACHARELADO)" },
        { "4005", "CIÊNCIA DA COMPUTAÇÃO(LICENCIATURA)" },
        { "4006", "SISTEMAS DE INFORMAÇÃO" },
        { "4301", "MÚSICA(LICENCIATURA)" },
        { "5401", "CIÊNCIAS SOCIAIS(BACHARELADO)" },
        { "5402", "CIÊNCIAS SOCIAIS(LICENCIATURA)" },
        { "5710", "ENGENHARIA CIVIL" },
        { "5806", "ENGENHARIA ELÉTRICA" },
        { "5809", "ENGENHARIA DE COMPUTAÇÃO" },
        { "5814", "ENGENHARIA DE CONTROLE E AUTOMAÇÃO" },
        { "5902", "ENGENHARIA MECÂNICA" },
        { "6008", "ENGENHARIA QUÍMICA" },
        { "6009", "ENGENHARIA DE ALIMENTOS" },
        { "6208", "ENGENHARIA DE PRODUÇÃO" },
        { "6306", "ENGENHARIA" },
        { "6307", "ENGENHARIA AMBIENTAL" },
        { "6405", "ENGENHARIA FLORESTAL" }
    };
    // Income bracket codes -> descriptions ("" = not informed).
    Dictionary<string, string> incomes = new Dictionary<string, string>
    {
        { "a", "Até 1,5 salário mínimo" },
        { "b", "De 1,5 a 3 salários mínimos" },
        { "c", "De 3 a 4,5 salários mínimos" },
        { "d", "De 4,5 a 6 salários mínimos" },
        { "e", "De 6 a 10 salários mínimos" },
        { "f", "De 10 a 30 salários mínimos" },
        { "g", "Acima de 30 salários mínimos" },
        { "", "Não Informado" }
    };
    #endregion

    // Aggregation buckets, all keyed by the raw codes read from the input file.
    Dictionary<string, List<double>> stateGrades = new Dictionary<string, List<double>>();
    Dictionary<string, Dictionary<string, List<double>>> courseIncomeGrades = new Dictionary<string, Dictionary<string, List<double>>>();
    Dictionary<string, Dictionary<string, Dictionary<string, List<double>>>> courseStateIncomeGrades = new Dictionary<string, Dictionary<string, Dictionary<string, List<double>>>>();
    Dictionary<string, int> countIncomes = new Dictionary<string, int>();
    Dictionary<string, Dictionary<string, Dictionary<string, int>>> courseStateIncomeCount = new Dictionary<string, Dictionary<string, Dictionary<string, int>>>();
    Dictionary<string, Dictionary<string, int>> stateIncomeCount = new Dictionary<string, Dictionary<string, int>>();
    Dictionary<string, Dictionary<string, int>> courseIncomeCount = new Dictionary<string, Dictionary<string, int>>();
    Dictionary<string, Dictionary<string, List<double>>> stateCourseGrades = new Dictionary<string, Dictionary<string, List<double>>>();

    // --- Local helpers; these replace the original copy/pasted ContainsKey chains ---

    // Gets dict[key], creating and storing a fresh value when the key is new.
    TValue GetOrAdd<TValue>(Dictionary<string, TValue> dict, string key) where TValue : new()
    {
        TValue value;
        if (!dict.TryGetValue(key, out value))
        {
            value = new TValue();
            dict.Add(key, value);
        }
        return value;
    }

    // Ensures the bucket exists and appends the grade when one is present.
    // (An absent grade still creates the empty bucket, matching the original.)
    void AddGrade(Dictionary<string, List<double>> dict, string key, double? grade)
    {
        List<double> grades = GetOrAdd(dict, key);
        if (grade.HasValue)
        {
            grades.Add(grade.Value);
        }
    }

    // Increments a counter, starting at 1 for unseen keys.
    void Increment(Dictionary<string, int> dict, string key)
    {
        int current;
        dict.TryGetValue(key, out current);
        dict[key] = current + 1;
    }

    // Parses the raw grade ("nota geral") onto a 0-10 scale; null when blank.
    // NOTE(review): Double.Parse is culture-sensitive, exactly as the original —
    // confirm the input file's decimal separator matches the machine culture.
    double? ParseGrade(string nota)
    {
        return String.IsNullOrWhiteSpace(nota) ? (double?)null : Double.Parse(nota) / 10;
    }

    // Average of a bucket rounded to 2 decimals (NaN for an empty bucket,
    // which matches the original 0/0 division).
    double AverageGrade(List<double> grades)
    {
        double sum = 0;
        foreach (double grade in grades)
        {
            sum += grade;
        }
        return Math.Round(sum / grades.Count, 2);
    }

    int count = 1;
    Console.WriteLine("Reading file and coverting to object");
    FileHelperEngine<EnadeObject> readEngine = new FileHelperEngine<EnadeObject>();
    var records = readEngine.ReadFile("C:\\Users\\leobr\\Dropbox\\UFF\\Visualização de dados\\microdados_enade_2014\\2.DADOS\\microdados_enade_2014_editado.csv");

    foreach (EnadeObject currentRecord in records)
    {
        Console.WriteLine("Getting record " + count + " from " + records.Length);
        double? grade = ParseGrade(currentRecord.notaGeral);

        // Grade buckets.
        AddGrade(stateGrades, currentRecord.codigoUF, grade);
        AddGrade(GetOrAdd(courseIncomeGrades, currentRecord.codigoCurso), currentRecord.faixaDeRenda, grade);
        AddGrade(GetOrAdd(GetOrAdd(courseStateIncomeGrades, currentRecord.codigoCurso), currentRecord.codigoUF), currentRecord.faixaDeRenda, grade);
        AddGrade(GetOrAdd(stateCourseGrades, currentRecord.codigoUF), currentRecord.codigoCurso, grade);

        // Student counters per income bracket.
        Increment(countIncomes, currentRecord.faixaDeRenda);
        Increment(GetOrAdd(GetOrAdd(courseStateIncomeCount, currentRecord.codigoCurso), currentRecord.codigoUF), currentRecord.faixaDeRenda);
        Increment(GetOrAdd(stateIncomeCount, currentRecord.codigoUF), currentRecord.faixaDeRenda);
        Increment(GetOrAdd(courseIncomeCount, currentRecord.codigoCurso), currentRecord.faixaDeRenda);

        count++;
    }

    Console.WriteLine("Writing files");
    const string outputDir = "C:\\Projetos\\UFF\\VisEnade\\DataWorker\\DataWorker\\DataWorker\\Output\\";

    #region CSV FILES
    FileHelperEngine<WriteFileObject> writeEngine = new FileHelperEngine<WriteFileObject>();

    // States/grades file: one row per state with its average grade.
    List<WriteFileObject> stateGradesRecords = new List<WriteFileObject>();
    foreach (string currentState in stateGrades.Keys)
    {
        if (!states.ContainsKey(currentState))
        {
            Console.WriteLine("Estado não encontrado na lista");
            continue;
        }
        WriteFileObject record = new WriteFileObject();
        record.state = states[currentState];
        record.average = AverageGrade(stateGrades[currentState]);
        stateGradesRecords.Add(record);
    }
    writeEngine.WriteFile(outputDir + "StatesGrades.csv", stateGradesRecords);

    // Course/income/grades file: one row per (course, income bracket) pair.
    List<WriteFileObject> courseIncomeGradesRecords = new List<WriteFileObject>();
    foreach (string currentCourse in courseIncomeGrades.Keys)
    {
        foreach (string currentIncome in courseIncomeGrades[currentCourse].Keys)
        {
            if (!courses.ContainsKey(currentCourse))
            {
                Console.WriteLine("Curso não encontrado na lista");
                continue;
            }
            if (!incomes.ContainsKey(currentIncome))
            {
                Console.WriteLine("Faixa de renda não encontrada na lista");
                continue;
            }
            WriteFileObject record = new WriteFileObject();
            record.course = courses[currentCourse];
            record.income = incomes[currentIncome];
            record.average = AverageGrade(courseIncomeGrades[currentCourse][currentIncome]);
            courseIncomeGradesRecords.Add(record);
        }
    }
    writeEngine.WriteFile(outputDir + "CourseIncomeGrades.csv", courseIncomeGradesRecords);
    #endregion

    #region JSON FILES
    // Each JSON file holds one serialized object per line. Writers are now in
    // 'using' blocks so they are disposed even when serialization throws.

    // Course -> average grade per income bracket.
    using (StreamWriter sw = new StreamWriter(outputDir + "courseIncomeGrade.json"))
    {
        foreach (string currentCourse in courseIncomeGrades.Keys)
        {
            CourseIncomeObject ciObj = new CourseIncomeObject();
            foreach (string currentIncome in courseIncomeGrades[currentCourse].Keys)
            {
                if (courses.ContainsKey(currentCourse))
                {
                    ciObj.course = courses[currentCourse];
                }
                else
                {
                    Console.WriteLine("Curso não encontrado na lista");
                    continue;
                }
                var average = CalculateAvgGrade(courseIncomeGrades, currentCourse, currentIncome);
                switch (currentIncome)
                {
                    case "a": ciObj.incomes.a = average; break;
                    case "b": ciObj.incomes.b = average; break;
                    case "c": ciObj.incomes.c = average; break;
                    case "d": ciObj.incomes.d = average; break;
                    case "e": ciObj.incomes.e = average; break;
                    case "f": ciObj.incomes.f = average; break;
                    case "g": ciObj.incomes.g = average; break;
                    default: ciObj.incomes.naoInformado = average; break;
                }
            }
            // Written even when the course code was unknown (matches the original flow).
            sw.WriteLine(JsonConvert.SerializeObject(ciObj));
            sw.Flush();
        }
    }

    // Course/state -> average grade per income bracket.
    using (StreamWriter sw2 = new StreamWriter(outputDir + "courseStateIncomeGrade.json"))
    {
        foreach (string currentCourse in courseStateIncomeGrades.Keys)
        {
            foreach (string currentState in courseStateIncomeGrades[currentCourse].Keys)
            {
                CourseStateIncomeObject csiObj = new CourseStateIncomeObject();
                foreach (string currentIncome in courseStateIncomeGrades[currentCourse][currentState].Keys)
                {
                    if (courses.ContainsKey(currentCourse))
                    {
                        csiObj.course = courses[currentCourse];
                    }
                    else
                    {
                        Console.WriteLine("Curso não encontrado na lista");
                        continue;
                    }
                    if (states.ContainsKey(currentState))
                    {
                        csiObj.state = states[currentState];
                    }
                    else
                    {
                        Console.WriteLine("Estado não encontrado na lista");
                        continue;
                    }
                    var average = CalculateCourseStateAvgGrade(courseStateIncomeGrades, currentCourse, currentState, currentIncome);
                    switch (currentIncome)
                    {
                        case "a": csiObj.incomes.a = average; break;
                        case "b": csiObj.incomes.b = average; break;
                        case "c": csiObj.incomes.c = average; break;
                        case "d": csiObj.incomes.d = average; break;
                        case "e": csiObj.incomes.e = average; break;
                        case "f": csiObj.incomes.f = average; break;
                        case "g": csiObj.incomes.g = average; break;
                        default: csiObj.incomes.naoInformado = average; break;
                    }
                }
                sw2.WriteLine(JsonConvert.SerializeObject(csiObj));
                sw2.Flush();
            }
        }
    }

    // Overall number of students per income bracket.
    using (StreamWriter sw3 = new StreamWriter(outputDir + "countIncome.json"))
    {
        foreach (string currentIncome in countIncomes.Keys)
        {
            if (!incomes.ContainsKey(currentIncome))
            {
                Console.WriteLine("Renda não encontrada na lista");
                continue;
            }
            CountIncomeObject countIncomeObj = new CountIncomeObject();
            countIncomeObj.income = incomes[currentIncome];
            countIncomeObj.count = countIncomes[currentIncome];
            sw3.WriteLine(JsonConvert.SerializeObject(countIncomeObj));
            sw3.Flush();
        }
    }

    // Course/state -> number of students per income bracket.
    using (StreamWriter sw4 = new StreamWriter(outputDir + "CourseStateIncomeCount.json"))
    {
        foreach (string currentCourse in courseStateIncomeCount.Keys)
        {
            foreach (string currentState in courseStateIncomeCount[currentCourse].Keys)
            {
                CourseStateIncomeCountObject courseStateIncCountObj = new CourseStateIncomeCountObject();
                foreach (string currentIncome in courseStateIncomeCount[currentCourse][currentState].Keys)
                {
                    if (courses.ContainsKey(currentCourse))
                    {
                        courseStateIncCountObj.course = courses[currentCourse];
                    }
                    else
                    {
                        Console.WriteLine("Curso não encontrado na lista");
                        continue;
                    }
                    if (states.ContainsKey(currentState))
                    {
                        courseStateIncCountObj.state = states[currentState];
                    }
                    else
                    {
                        Console.WriteLine("Estado não encontrado na lista");
                        continue;
                    }
                    var incomeCount = courseStateIncomeCount[currentCourse][currentState][currentIncome];
                    switch (currentIncome)
                    {
                        case "a": courseStateIncCountObj.incomes.a = incomeCount; break;
                        case "b": courseStateIncCountObj.incomes.b = incomeCount; break;
                        case "c": courseStateIncCountObj.incomes.c = incomeCount; break;
                        case "d": courseStateIncCountObj.incomes.d = incomeCount; break;
                        case "e": courseStateIncCountObj.incomes.e = incomeCount; break;
                        case "f": courseStateIncCountObj.incomes.f = incomeCount; break;
                        case "g": courseStateIncCountObj.incomes.g = incomeCount; break;
                        default: courseStateIncCountObj.incomes.naoInformado = incomeCount; break;
                    }
                }
                sw4.WriteLine(JsonConvert.SerializeObject(courseStateIncCountObj));
                sw4.Flush();
            }
        }
    }

    // State -> number of students per income bracket.
    using (StreamWriter sw5 = new StreamWriter(outputDir + "StateIncomeCount.json"))
    {
        foreach (string currentState in stateIncomeCount.Keys)
        {
            StateIncomeCountObject stateIncomeCountObj = new StateIncomeCountObject();
            foreach (string currentIncome in stateIncomeCount[currentState].Keys)
            {
                if (states.ContainsKey(currentState))
                {
                    stateIncomeCountObj.state = states[currentState];
                }
                else
                {
                    Console.WriteLine("Estado não encontrado na lista");
                    continue;
                }
                var incomeCount = stateIncomeCount[currentState][currentIncome];
                switch (currentIncome)
                {
                    case "a": stateIncomeCountObj.incomes.a = incomeCount; break;
                    case "b": stateIncomeCountObj.incomes.b = incomeCount; break;
                    case "c": stateIncomeCountObj.incomes.c = incomeCount; break;
                    case "d": stateIncomeCountObj.incomes.d = incomeCount; break;
                    case "e": stateIncomeCountObj.incomes.e = incomeCount; break;
                    case "f": stateIncomeCountObj.incomes.f = incomeCount; break;
                    case "g": stateIncomeCountObj.incomes.g = incomeCount; break;
                    default: stateIncomeCountObj.incomes.naoInformado = incomeCount; break;
                }
            }
            sw5.WriteLine(JsonConvert.SerializeObject(stateIncomeCountObj));
            sw5.Flush();
        }
    }

    // Course -> number of students per income bracket.
    using (StreamWriter sw6 = new StreamWriter(outputDir + "CourseIncomeCount.json"))
    {
        foreach (string currentCourse in courseIncomeCount.Keys)
        {
            CourseIncomeCountObject courseIncomeCountObj = new CourseIncomeCountObject();
            foreach (string currentIncome in courseIncomeCount[currentCourse].Keys)
            {
                if (courses.ContainsKey(currentCourse))
                {
                    courseIncomeCountObj.course = courses[currentCourse];
                }
                else
                {
                    Console.WriteLine("Curso não encontrado na lista");
                    continue;
                }
                var incomeCount = courseIncomeCount[currentCourse][currentIncome];
                switch (currentIncome)
                {
                    case "a": courseIncomeCountObj.incomes.a = incomeCount; break;
                    case "b": courseIncomeCountObj.incomes.b = incomeCount; break;
                    case "c": courseIncomeCountObj.incomes.c = incomeCount; break;
                    case "d": courseIncomeCountObj.incomes.d = incomeCount; break;
                    case "e": courseIncomeCountObj.incomes.e = incomeCount; break;
                    case "f": courseIncomeCountObj.incomes.f = incomeCount; break;
                    case "g": courseIncomeCountObj.incomes.g = incomeCount; break;
                    default: courseIncomeCountObj.incomes.naoInformado = incomeCount; break;
                }
            }
            sw6.WriteLine(JsonConvert.SerializeObject(courseIncomeCountObj));
            sw6.Flush();
        }
    }

    // State/course -> average grade.
    using (StreamWriter sw7 = new StreamWriter(outputDir + "StateCourseGrade.json"))
    {
        foreach (string currentState in stateCourseGrades.Keys)
        {
            foreach (string currentCourse in stateCourseGrades[currentState].Keys)
            {
                if (!states.ContainsKey(currentState))
                {
                    Console.WriteLine("Estado não encontrado na lista");
                    continue;
                }
                if (!courses.ContainsKey(currentCourse))
                {
                    Console.WriteLine("Curso não encontrado na lista");
                    continue;
                }
                StateCourseGradeObject stateCourseGradeObj = new StateCourseGradeObject();
                stateCourseGradeObj.state = states[currentState];
                stateCourseGradeObj.course = courses[currentCourse];
                stateCourseGradeObj.grade = CalculateStateCourseAvgGrade(stateCourseGrades, currentState, currentCourse);
                sw7.WriteLine(JsonConvert.SerializeObject(stateCourseGradeObj));
                sw7.Flush();
            }
        }
    }
    #endregion

    Console.WriteLine("Arquivos finalizados");
}
/// <summary>
/// Loads the weather-events CSV (extracting it from the bundled zip on first
/// use) and parses it into <see cref="DatasetWeatherEvent"/> records.
/// </summary>
/// <returns>All parsed weather events, fully materialized.</returns>
/// <exception cref="System.IO.FileNotFoundException">
/// The executing-assembly directory cannot be resolved, or the dataset zip is missing.
/// </exception>
/// <exception cref="ArgumentException">
/// The CSV is still missing after extraction (likely a corrupt zip).
/// </exception>
public IEnumerable<DatasetWeatherEvent> ParseWeatherEvents()
{
    var currentDirectory = System.IO.Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);

    // Nullity check, although this is known not to be null.
    if (string.IsNullOrWhiteSpace(currentDirectory))
    {
        throw new System.IO.FileNotFoundException("Current Directory is null");
    }

    // The extract location differs between build configurations.
#if DEBUG
    var weatherReportCsvFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationDebug, WeatherCsv));
#endif
#if !DEBUG
    var weatherReportCsvFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationRelease, WeatherCsv));
#endif

    // If the CSV is missing, extract it from the zip.
    if (!System.IO.File.Exists(weatherReportCsvFullLocation))
    {
#if DEBUG
        var weatherReportZipLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, WeatherZipDebug));
#endif
#if !DEBUG
        var weatherReportZipLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, WeatherZipRelease));
#endif
        if (!System.IO.File.Exists(weatherReportZipLocation))
        {
            throw new System.IO.FileNotFoundException("Zip is missing from the folder FIWAREHub.Datasets");
        }

#if DEBUG
        var extractLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationDebug));
#endif
#if !DEBUG
        var extractLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationRelease));
#endif
        System.IO.Compression.ZipFile.ExtractToDirectory(weatherReportZipLocation, extractLocation);

        if (!System.IO.File.Exists(weatherReportCsvFullLocation))
        {
            throw new ArgumentException("CSV Not found even after extraction attempt. Possible zip corruption. Please try downloading the datasets again.");
        }
    }

    var engine = new FileHelperEngine<DatasetWeatherEvent>();
    // Materialize so callers can enumerate repeatedly without re-reading the file.
    // (The original also computed an unused 'severityScale' list here; removed.)
    var weatherEvents = engine
        .ReadFile(weatherReportCsvFullLocation)
        .ToList();

    return weatherEvents;
}
/// <summary>
/// Loads asset rows from Data\data.csv (via FileHelpers) and creates each one in
/// the content system: sets its properties, links it to a content repository,
/// saves it, applies its lifecycle status, and queues a fetch job for its file.
/// </summary>
/// <returns>
/// true when every asset was created; false on the first asset whose save fails
/// validation (after logging all of that asset's validation failures).
/// </returns>
static async Task<bool> CreateAssetsFromCSV()
{
    // Use the FileHelpers library to load data from CSV next to the executable.
    var engine = new FileHelperEngine<Asset>();
    var filePath = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), @"Data\data.csv");
    var assets = engine.ReadFile(filePath);

    // Import all the things...
    var contentRepositories = await GetDefinitionItems("M.Content.Repository");
    var enUs = CultureInfo.GetCultureInfo("en-US");

    foreach (var asset in assets)
    {
        // Creates a new asset (in memory; will not persist until explicitly saved).
        var newEntity = await _client.EntityFactory.CreateAsync(Constants.Asset.DefinitionName);
        newEntity.SetPropertyValue("FileName", asset.File);
        newEntity.SetPropertyValue("Title", asset.Title);
        newEntity.SetPropertyValue("Description", enUs, asset.Description);

        // Assign the asset to a repo.
        // NOTE(review): the 'as' cast assumes the relation always exists and has
        // this shape; a mismatch would surface as a NullReferenceException here —
        // confirm whether a guard with a clearer error is wanted.
        var contentRepositoryToAssetRelation = newEntity.GetRelation("ContentRepositoryToAsset", RelationRole.Child) as IChildToManyParentsRelation;
        contentRepositoryToAssetRelation.Add((long)contentRepositories.Single(s => s.Identifier == asset.ContentRepositoryToAsset).Id);

        try
        {
            // Persist the asset information.
            var entityId = await _client.Entities.SaveAsync(newEntity);
            Console.WriteLine(String.Format("Created - Name: {0}, EntityId: {1}", asset.Title, entityId));

            // Set the lifecycle status (assets must exist prior to this action).
            switch (asset.FinalLifeCycleStatusToAsset)
            {
                case "M.Final.LifeCycle.Status.UnderReview":
                    await _client.Assets.FinalLifeCycleManager.SubmitAsync(entityId);
                    break;

                case "M.Final.LifeCycle.Status.Approved":
                    await _client.Assets.FinalLifeCycleManager.DirectPublishAsync(entityId);
                    break;
            }

            // Fetch jobs still need to be created in order to pull the asset
            // content... otherwise, they will just sit there, empty and alone.
            var fetchJobRequest = new WebFetchJobRequest("Fetch file for entity.", entityId);
            fetchJobRequest.Urls.Add(new Uri(asset.File, UriKind.Absolute));
            var jobId = await _client.Jobs.CreateFetchJobAsync(fetchJobRequest);
            Console.WriteLine(String.Format("Created Fetch Job - EntityId: {0} JobId: {1}", entityId, jobId));
        }
        catch (ValidationException e)
        {
            // Sad face... Bug fix: the original returned from inside this loop,
            // so only the FIRST validation failure was ever printed. Log them
            // all, then abort the import.
            foreach (var failure in e.Failures)
            {
                Console.WriteLine(String.Format("Failure - Source: {0}, Message: {1}", failure.Source, failure.Message));
            }
            return false;
        }
    }
    return true;
}
/// <summary>
/// Imports deal rows from a CSV file into the object graph: each row is mapped
/// onto a stage (found or created under StageStructure), its order, a delivery
/// unit/item per contract date and material, and a cashless payment unit per
/// contract date. Finally, delivery/payment units that were not touched by this
/// import are deleted from the object space.
/// </summary>
/// <param name="os">Object space used to look up orders/materials and to delete stale units.</param>
/// <param name="file_name">Path of the CSV file to import (first line is a header).</param>
/// <exception cref="ArgumentException">A required field is empty, or an order/material code is unknown.</exception>
public void Import(IObjectSpace os, String file_name)
{
    FileHelperEngine<DealDataImport> engine = new FileHelperEngine<DealDataImport>();
    engine.Options.IgnoreFirstLines = 1;  // skip the CSV header row
    engine.Options.IgnoreEmptyLines = true;
    // DealDataImport[] deal_data = engine.ReadStream(reader);
    DealDataImport[] deal_data = engine.ReadFile(file_name);

    // Caches of everything touched by this run; anything NOT in delivery_units /
    // payment_units at the end is treated as stale and deleted.
    IList<fmCOrder> orders = new List<fmCOrder>();
    IList<crmStage> stages = new List<crmStage>();
    IList<crmDeliveryUnit> delivery_units = new List<crmDeliveryUnit>();
    IList<crmDeliveryItem> delivery_items = new List<crmDeliveryItem>();
    IList<crmPaymentUnit> payment_units = new List<crmPaymentUnit>();
    // NOTE(review): payment_items is never added to (the code using it is
    // commented out below) — presumably left for a future payment-item import.
    IList<crmPaymentItem> payment_items = new List<crmPaymentItem>();
    IList<csMaterial> materials = os.GetObjects<csMaterial>();

    foreach (DealDataImport record in deal_data)
    {
        fmCOrder order = null;
        crmStage stage = null;
        crmDeliveryUnit delivery_unit = null;
        crmDeliveryItem delivery_item = null;
        crmPaymentUnit payment_unit = null;
        crmPaymentItem payment_item = null;

        if (String.IsNullOrEmpty(record.StageCode))
        {
            throw new ArgumentException("Stage Code is Empty", "StageCode");
        }

        // "Adv*" rows go to the fixed first stage; others are matched (or
        // created) by stage code and initialized on first sight this run.
        if (record.StageCode.Substring(0, 3) == "Adv")
        {
            stage = StageStructure.FirstStage;
        }
        else
        {
            stage = StageStructure.Stages.FirstOrDefault(x => x.Code == record.StageCode);
            if (stage == null)
            {
                stage = StageStructure.FirstStage.SubStagesCreate();
                stage.Code = record.StageCode;
            }
            if (!stages.Contains(stage))
            {
                stage.StageType = Contract.StageType.FINANCE;
                stage.DeliveryMethod = DeliveryMethod.UNITS_SHEDULE;
                stage.PaymentMethod = PaymentMethod.SCHEDULE;
                // stage.DateEnd = stage.DateBegin;
                // stage.DateFinish = stage.DateEnd;
                stages.Add(stage);
            }
        }

        // Non-advance rows also carry order/delivery information.
        if (record.StageCode.Substring(0, 3) != "Adv")
        {
            if (String.IsNullOrEmpty(record.OrderCode))
            {
                throw new ArgumentException("Order Code is Empty", "OrderCode");
            }
            order = orders.FirstOrDefault(x => x.Code == record.OrderCode);
            if (order == null)
            {
                order = os.FindObject<fmCOrder>(new BinaryOperator("Code", record.OrderCode, BinaryOperatorType.Equal));
                if (order == null)
                {
                    throw new ArgumentException("Order unknow", "OrderCode");
                }
                else
                {
                    orders.Add(order);
                }
                // NOTE(review): stage.Order is assigned only when the order was
                // just fetched from the object space; rows whose order is
                // already cached skip this — confirm this is intended.
                stage.Order = order;
            }

            if (record.DateContract == null)
            {
                throw new ArgumentException("Date Contract is Empty", "DateContract");
            }
            // One delivery unit per contract date; extend the stage end date as needed.
            delivery_unit = stage.DeliveryPlan.DeliveryUnits.FirstOrDefault(x => x.DatePlane == record.DateContract);
            if (record.DateContract > stage.DateEnd)
            {
                stage.DateEnd = (DateTime)record.DateContract;
            }
            if (delivery_unit == null)
            {
                delivery_unit = stage.DeliveryPlan.DeliveryUnitCreate();
                delivery_unit.DatePlane = (DateTime)record.DateContract;
            }
            if (!delivery_units.Contains(delivery_unit))
            {
                delivery_units.Add(delivery_unit);
            }
            delivery_unit.Order = order;

            if (record.Count == null)
            {
                throw new ArgumentException("Count is Empty", "Count");
            }
            if (record.Price == null)
            {
                throw new ArgumentException("Price is Empty", "Price");
            }
            if (String.IsNullOrEmpty(record.NomenclatureCode))
            {
                throw new ArgumentException("Nomenclature Code is Empty", "NomenclatureCode");
            }

            // "*I"/"*E" nomenclature codes are skipped; everything else must
            // resolve to a known material and becomes a delivery item whose
            // count accumulates across rows within this run.
            if (!record.NomenclatureCode.Contains("*I") && !record.NomenclatureCode.Contains("*E"))
            {
                csMaterial material = materials.FirstOrDefault(x => x.CodeTechnical == record.NomenclatureCode);
                if (material == null)
                {
                    throw new ArgumentException("Nomenclature unknow", "NomenclatureCode");
                }
                delivery_item = delivery_unit.DeliveryItems.FirstOrDefault(x => x.Nomenclature == material);
                if (delivery_item == null)
                {
                    delivery_item = delivery_unit.DeliveryItemsCreateMaterial();
                    ((crmDeliveryMaterial)delivery_item).Material = material;
                }
                delivery_item.CostCalculateMethod = CostCalculateMethod.CALC_COST;
                delivery_item.NDSCalculateMethod = NDSCalculateMethod.FROM_COST;
                delivery_item.FullCalculateMethod = FullCalculateMethod.CALC_FULL;
                delivery_item.Price = (Decimal)record.Price;
                delivery_item.CountUnit = delivery_item.Nomenclature.BaseUnit;
                if (delivery_items.Contains(delivery_item))
                {
                    // Seen before this run: accumulate the count.
                    delivery_item.CountValue += (Decimal)record.Count;
                }
                else
                {
                    // First time this run: reset the count to this row's value.
                    delivery_item.CountValue = (Decimal)record.Count;
                    delivery_items.Add(delivery_item);
                }
            }
        }

        if (record.DateContract == null)
        {
            throw new ArgumentException("Date Contract is Empty", "DateContract");
        }
        // One cashless payment unit per contract date; its full sum is reset on
        // first touch this run and accumulated on later rows.
        payment_unit = stage.PaymentPlan.PaymentUnits.FirstOrDefault(x => x.DatePlane == record.DateContract && x is crmPaymentCasheLess);
        if (payment_unit == null)
        {
            payment_unit = stage.PaymentPlan.PaymentCasheLessCreate();
            payment_unit.DatePlane = (DateTime)record.DateContract;
            if (payment_unit.DatePlane > stage.DateFinish)
            {
                stage.DateFinish = payment_unit.DatePlane;
            }
        }
        if (!payment_units.Contains(payment_unit))
        {
            ((crmPaymentCasheLess)payment_unit).SummFull = (Decimal)record.SummaPayment;
            payment_units.Add(payment_unit);
        }
        else
        {
            ((crmPaymentCasheLess)payment_unit).SummFull += (Decimal)record.SummaPayment;
        }

        // Disabled payment-item handling, kept from the original for reference:
        // payment_item = payment_unit.PaymentItems.FirstOrDefault(x => x.Order == order);
        // if (payment_item == null) {
        //     payment_item = payment_unit.PaymentItemsCreateMoney();
        // }
        //if (payment_unit.PaymentItems.Count == 0) {
        //    payment_item = payment_unit.PaymentItemsCreateMoney();
        //}
        //else {
        //    payment_item = payment_unit.PaymentItems[0];
        //}
        //if (payment_items.Contains(payment_item)) {
        //    payment_item.SummFull += (Decimal)record.SummaPayment;
        //    payment_item.AccountSumma += (Decimal)record.SummaPayment;
        //}
        //else {
        //    payment_item.SummFull = (Decimal)record.SummaPayment;
        //    payment_item.AccountSumma = (Decimal)record.SummaPayment;
        //    payment_items.Add(payment_item);
        //}
    }

    // Any delivery/payment unit on a touched stage that this import did NOT
    // reference is considered stale and removed.
    IList<crmDeliveryUnit> del_delivery_units = new List<crmDeliveryUnit>();
    IList<crmPaymentUnit> del_payment_units = new List<crmPaymentUnit>();
    foreach (crmStage stage in stages)
    {
        foreach (crmDeliveryUnit delivery_unit in stage.DeliveryPlan.DeliveryUnits)
        {
            if (!delivery_units.Contains(delivery_unit))
            {
                del_delivery_units.Add(delivery_unit);
            }
        }
        foreach (crmPaymentUnit payment_unit in stage.PaymentPlan.PaymentUnits)
        {
            if (!payment_units.Contains(payment_unit))
            {
                del_payment_units.Add(payment_unit);
            }
        }
    }
    os.Delete(del_delivery_units);
    os.Delete(del_payment_units);
}
/// <summary>
/// Seeds the database with default users and CSV-backed reference data
/// (aircraft, categories, types, images and the aircraft/category and
/// aircraft/type join tables). Each data set is only imported when its
/// table is still empty, so the method is safe to call at every startup.
/// </summary>
/// <param name="context">EF data context the seed rows are written to.</param>
/// <param name="userManager">Identity user manager used to create the default accounts.</param>
public static async Task SeedData(DataContext context, UserManager<AppUser> userManager)
{
    if (!userManager.Users.Any())
    {
        var users = new List<AppUser>
        {
            new AppUser { DisplayName = "Bob", UserName = "******", Email = "*****@*****.**", Image = "avatars/bob.jpg" },
            new AppUser { DisplayName = "Tom", UserName = "******", Email = "*****@*****.**", Image = "avatars/tom.jpg" },
            new AppUser { DisplayName = "Alice", UserName = "******", Email = "*****@*****.**", Image = "avatars/alice.jpg" }
        };

        foreach (var user in users)
        {
            // CreateAsync persists through the identity store with a hashed password.
            await userManager.CreateAsync(user, "password");
        }

        await context.SaveChangesAsync();
    }

    if (!context.Aircraft.Any())
    {
        var engine = new FileHelperEngine<Mapping.Aircraft>();
        var aircrafts = engine.ReadFile("Aircraft.csv");
        foreach (var aircraft in aircrafts)
        {
            context.Aircraft.Add(new Domain.Aircraft
            {
                Id = aircraft.Id,
                AircraftName = aircraft.AircraftName,
                Description = aircraft.Description,
                YearInService = aircraft.YearInService,
                Country = aircraft.Country
            });
        }
        await context.SaveChangesAsync();
    }

    if (!context.Category.Any())
    {
        var engine = new FileHelperEngine<Mapping.Category>();
        var categories = engine.ReadFile("Category.csv");
        foreach (var category in categories)
        {
            context.Category.Add(new Domain.Category
            {
                Id = category.Id,
                CategoryName = category.CategoryName
            });
        }
        await context.SaveChangesAsync();
    }

    if (!context.Type.Any())
    {
        var engine = new FileHelperEngine<Mapping.Type>();
        var types = engine.ReadFile("Type.csv");
        foreach (var type in types)
        {
            context.Type.Add(new Domain.Type
            {
                Id = type.Id,
                TypeName = type.TypeName
            });
        }
        await context.SaveChangesAsync();
    }

    if (!context.Images.Any())
    {
        var engine = new FileHelperEngine<Mapping.Image>();
        var images = engine.ReadFile("Images.csv");
        foreach (var image in images)
        {
            context.Images.Add(new Domain.Image
            {
                AircraftId = image.AircraftId,
                ImageUrl = image.ImageUrl
            });
        }
        // BUG FIX: this block previously never saved, so the seeded images
        // were discarded when the context was disposed.
        await context.SaveChangesAsync();
    }

    if (!context.AircraftCategory.Any())
    {
        var engine = new FileHelperEngine<Mapping.AircraftCategory>();
        var aircraftCategories = engine.ReadFile("AircraftCategory.csv");
        foreach (var aircraftCategory in aircraftCategories)
        {
            context.AircraftCategory.Add(new Domain.AircraftCategory
            {
                AircraftId = aircraftCategory.AircraftId,
                CategoryId = aircraftCategory.CategoryId
            });
        }
        await context.SaveChangesAsync();
    }

    if (!context.AircraftType.Any())
    {
        var engine = new FileHelperEngine<Mapping.AircraftType>();
        var aircraftTypes = engine.ReadFile("AircraftType.csv");
        foreach (var aircraftType in aircraftTypes)
        {
            context.AircraftType.Add(new Domain.AircraftType
            {
                AircraftId = aircraftType.AircraftId,
                TypeId = aircraftType.TypeId
            });
        }
        await context.SaveChangesAsync();
    }
}
/// <summary>
/// Imports every checked HHT stock-take packet shown in the list view:
/// validates the workplace, the stock-take header and every detail line,
/// then creates the stock-take worksheets (normal and HHT variants) and
/// moves the processed text files into the backup directory. All progress
/// and validation messages are appended to the log file.
/// </summary>
private void Save()
{
    int iCount = 1;
    // NOTE(review): isValid is shared across list items — once one packet
    // fails validation, every later packet is also skipped. Confirm that is
    // intended; otherwise it should be reset at the top of the loop.
    bool isValid = true;

    foreach (ListViewItem lvItem in lvPPCPacketList.Items)
    {
        if (lvItem.Checked)
        {
            string stktkNumber = string.Empty;
            Guid workplaceId = Guid.Empty;
            DateTime uploadedOn = DateTime.Now;
            List<ImportDetailsInfo> detailAllList = new List<ImportDetailsInfo>();
            decimal totalLine = 0, totalQty = 0, missingLine = 0, missingQty = 0;

            this.CheckHHTLog(lvItem.SubItems[4].Text);

            #region Load Header's detail info
            FileHelperEngine<ImportHeaderInfo> headerInfoEngine = new FileHelperEngine<ImportHeaderInfo>();
            headerInfoEngine.ErrorManager.ErrorMode = ErrorMode.SaveAndContinue;
            ImportHeaderInfo[] headerInfoList = headerInfoEngine.ReadFile(lvItem.SubItems[8].Text);
            if (headerInfoEngine.ErrorManager.ErrorCount > 0)
            {
                headerInfoEngine.ErrorManager.SaveErrors(logFile);
            }
            #endregion

            Utility.WriteLog("Date Create : " + uploadedOn.ToString("dd/MM/yyyy HH:mm:ss"), logFile);
            Utility.WriteLog("Session ID : " + uploadedOn.ToString("yyyyMMdd-HHmmss") + "-" + iCount.ToString().PadLeft(3, '0'), logFile);
            Utility.WriteLog("Upload Time : " + uploadedOn.ToString("dd/MM/yyyy HH:mm:ss"), logFile);
            Utility.WriteLog("HHT TRN# : " + lvItem.SubItems[2].Text, logFile);
            Utility.WriteLog("Location# : " + lvItem.SubItems[9].Text + " [Original]; " + lvItem.Text + " [Current]", logFile);
            Utility.WriteLog("Stock Take# : " + lvItem.SubItems[10].Text + " [Suggested]; " + lvItem.SubItems[1].Text + " [Current]", logFile);
            Utility.WriteLog("Process Detail : Import Data", logFile);
            Utility.WriteLog("Message :- ", logFile);
            Utility.WriteLog("=> Checking Loc# ", logFile);

            #region Check Workplace (Loc#)
            using (var ctx = new EF6.RT2020Entities())
            {
                var wp = ctx.Workplace.Where(x => x.WorkplaceCode == lvItem.Text).AsNoTracking().FirstOrDefault();
                if (wp != null)
                {
                    if (wp.Retired)
                    {
                        Utility.WriteLog(" [ERROR] Loc# was retired ", logFile);
                        isValid = false;
                    }
                    else
                    {
                        Utility.WriteLog(" [OK] ", logFile);
                        workplaceId = wp.WorkplaceId;
                    }
                }
                else
                {
                    Utility.WriteLog(" [ERROR] Loc# Not Found", logFile);
                    isValid = false;
                }
            }
            #endregion

            Utility.WriteLog(" RESULT : COMPLETED", logFile);
            Utility.WriteLog("=> Import Packet File ", logFile);

            #region Load details files
            string[] packetFiles = Directory.GetFiles(tempDirectory, "DT_" + lvItem.SubItems[2].Text + "*", SearchOption.TopDirectoryOnly);
            for (int i = 0; i < packetFiles.Length; i++)
            {
                Utility.WriteLog(@" " + (i + 1).ToString() + @") Packet => " + Path.GetFileNameWithoutExtension(packetFiles[i]) + " [" + packetFiles[i] + "] ", logFile);
            }
            #endregion

            Utility.WriteLog(" RESULT : COMPLETED", logFile);
            Utility.WriteLog("=> Checking (Header) ", logFile);

            stktkNumber = lvItem.SubItems[1].Text.Trim();

            #region checking Header info
            using (var ctx = new EF6.RT2020Entities())
            {
                var stktkHeader = ctx.StockTakeHeader.Where(x => x.TxNumber == lvItem.SubItems[1].Text.Trim()).FirstOrDefault();
                if (stktkHeader != null)
                {
                    if (!string.IsNullOrEmpty(stktkHeader.ADJNUM))
                    {
                        Utility.WriteLog(" [ERROR] The Stock Take Number was posted, cannot be used anymore. ", logFile);
                        isValid = false;
                    }
                    else if (!WorkplaceEx.GetWorkplaceCodeById(stktkHeader.WorkplaceId.Value).Equals(lvItem.Text.Trim()))
                    {
                        Utility.WriteLog(" [ERROR] The loc# in Stock Take Header must be as same as the selected one. ", logFile);
                        isValid = false;
                    }
                    else
                    {
                        //? Why compare the UploadedOn down to "seconds"
                        var txNumber = lvItem.SubItems[1].Text.Trim();
                        var hhtId = lvItem.SubItems[4].Text;
                        var hhtHeader = ctx.StocktakeHeader_HHT.Where(x => x.TxNumber == txNumber && x.HHTId == hhtId && x.UploadedOn.Value.ToString("yyyy-MM-dd HH:mm:ss") == uploadedOn.ToString("yyyy-MM-dd HH:mm:ss")).FirstOrDefault();
                        if (hhtHeader != null)
                        {
                            if (hhtHeader.PostedOn.Value.Year > 1900)
                            {
                                Utility.WriteLog(" [ERROR] The Stock Take (HHT) Number was posted, cannot be used anymore. ", logFile);
                                isValid = false;
                            }
                            else
                            {
                                Utility.WriteLog(" [ERROR] The Stock Take (HHT) Number existed. ", logFile);
                                isValid = false;
                            }
                        }
                        else
                        {
                            Utility.WriteLog(" [OK] ", logFile);
                        }
                    }
                }
                else
                {
                    Utility.WriteLog(" [OK] ", logFile);
                }
            }
            #endregion

            Utility.WriteLog("=> Checking (Detail) ", logFile);

            int iCountBarcode = 0;

            #region checking details info
            for (int iHeader = 0; iHeader < headerInfoList.Length; iHeader++)
            {
                ImportHeaderInfo headerInfo = headerInfoList[iHeader];

                // BUG FIX: reset the per-shelf counter. Previously it kept
                // accumulating across shelves, inflating both the per-shelf
                // log figures and the missingLine total.
                iCountBarcode = 0;

                FileHelperEngine<ImportDetailsInfo> detailInfoEngine = new FileHelperEngine<ImportDetailsInfo>();
                detailInfoEngine.ErrorManager.ErrorMode = ErrorMode.SaveAndContinue;
                string detailPacket = Path.Combine(tempDirectory, "DT_" + lvItem.SubItems[2].Text + "_" + headerInfo.ShelfId + ".TXT");
                ImportDetailsInfo[] detailInfoList = detailInfoEngine.ReadFile(detailPacket);
                // BUG FIX: inspect the DETAIL engine's errors here — the old
                // code re-checked headerInfoEngine, so detail parse errors
                // were never written to the log.
                if (detailInfoEngine.ErrorManager.ErrorCount > 0)
                {
                    detailInfoEngine.ErrorManager.SaveErrors(logFile);
                }

                Utility.WriteLog(" => Checking Shelf (" + headerInfo.ShelfId + " - " + headerInfo.ShelfName + ")", logFile);

                for (int iDetail = 0; iDetail < detailInfoList.Length; iDetail++)
                {
                    ImportDetailsInfo detailInfo = detailInfoList[iDetail];
                    if (string.IsNullOrEmpty(detailInfo.Barcode))
                    {
                        iCountBarcode++;
                        missingQty += detailInfo.Qty;
                        Utility.WriteLog(" [ERROR] Barcode does not exist. ", logFile);
                    }
                    else
                    {
                        Guid productId = ProductBarcodeEx.GetProductIdByBarcode(detailInfo.Barcode);
                        if (productId == System.Guid.Empty)
                        {
                            iCountBarcode++;
                            missingQty += detailInfo.Qty;
                            Utility.WriteLog(" [ERROR] Barcode (" + detailInfo.Barcode + ") does not exist. ", logFile);
                        }
                        else
                        {
                            if (detailInfo.Qty <= 0)
                            {
                                Utility.WriteLog(" [ERROR] Barcode (" + detailInfo.Barcode + ") QTY <= 0 ", logFile);
                                isValid = false;
                            }
                            else
                            {
                                Utility.WriteLog(" [OK] Barcode (" + detailInfo.Barcode + ") QTY > 0 ", logFile);
                            }
                        }
                    }

                    totalLine++;
                    totalQty += detailInfo.Qty;
                    detailAllList.Add(detailInfo);
                }

                missingLine += iCountBarcode;
                if (iCountBarcode > 0)
                {
                    Utility.WriteLog(" [ERROR] Details of Shelf (" + headerInfo.ShelfId + " - " + headerInfo.ShelfName + ") has " + iCountBarcode.ToString() + " empty barcode.", logFile);
                }
                else
                {
                    Utility.WriteLog(" [OK] Details of Shelf (" + headerInfo.ShelfId + " - " + headerInfo.ShelfName + ") has 0 empty barcode.", logFile);
                }
            }
            #endregion

            Utility.WriteLog(" RESULT : COMPLETED", logFile);
            Utility.WriteLog("=> Save Packet", logFile);

            #region isValid: wrapping up
            if (isValid)
            {
                if (stktkNumber.Trim().Length == 0)
                {
                    stktkNumber = SystemInfoHelper.Settings.QueuingTxNumber(EnumHelper.TxType.STK);
                }
                Utility.WriteLog(" [OK] System Queue ", logFile);

                if (stktkNumber.Length > 0)
                {
                    // Stock take header
                    System.Guid stktkheaderId = CreateStockTakeHeader(stktkNumber.Trim(), workplaceId);
                    Utility.WriteLog(" [OK] Create Worksheet (Stock Take - Header)", logFile);

                    // Stock take details
                    if (stktkheaderId != System.Guid.Empty)
                    {
                        CreatedStockTakeDetail(stktkheaderId, stktkNumber.Trim(), detailAllList, workplaceId, uploadedOn);
                    }
                    Utility.WriteLog(" [OK] Create Worksheet (Stock Take - Detail)", logFile);

                    // Stock take header (HHT)
                    System.Guid hhtHeaderId = CreateStockTakeHHTHeader(stktkNumber.Trim(), lvItem.SubItems[4].Text, uploadedOn, workplaceId, lvItem.SubItems[2].Text, totalLine, totalQty, missingLine, missingQty);
                    Utility.WriteLog(" [OK] Create Worksheet (HHT Data Review - Header)", logFile);

                    // Stock take details (HHT)
                    if (hhtHeaderId != System.Guid.Empty)
                    {
                        CreateStockTakeHHTDetails(hhtHeaderId, stktkNumber.Trim(), lvItem.SubItems[4].Text, uploadedOn, detailAllList, lvItem.SubItems[2].Text);
                    }
                    Utility.WriteLog(" [OK] Create Worksheet (HHT Data Review - Detail)", logFile);
                    Utility.WriteLog(" [OK] Barcode Matching", logFile);
                    Utility.WriteLog(" [OK] Counting Missing Data", logFile);
                    Utility.WriteLog(" RESULT : COMPLETED", logFile);

                    // Backup text files
                    if (!Directory.Exists(backupDirectory))
                    {
                        Directory.CreateDirectory(backupDirectory);
                    }

                    // Header file
                    File.Move(lvItem.SubItems[8].Text, Path.Combine(backupDirectory, Path.GetFileName(lvItem.SubItems[8].Text)));
                    for (int i = 0; i < packetFiles.Length; i++)
                    {
                        File.Move(packetFiles[i], Path.Combine(backupDirectory, Path.GetFileName(packetFiles[i])));
                    }
                    Utility.WriteLog("=> Backup Data RESULT : COMPLETED", logFile);
                }
            }
            #endregion

            iCount++;
        }
    }
}
/// <summary>
/// Imports deal schedule rows from a FileHelpers-formatted file: for every
/// row it resolves the order and stage, creates or updates delivery units
/// and items plus the cashless payment unit for the planned date, and
/// finally deletes the delivery/payment units and items that existed before
/// the import but were not referenced by the file.
/// Throws ArgumentException (Russian user-facing messages) on invalid rows.
/// </summary>
/// <param name="os">Object space used to look up and delete persistent objects.</param>
/// <param name="file_name">Path of the import file (first line is skipped as a header).</param>
public void Import(IObjectSpace os, String file_name)
{
    FileHelperEngine<DealDataImport> engine = new FileHelperEngine<DealDataImport>();
    engine.Options.IgnoreFirstLines = 1;
    engine.Options.IgnoreEmptyLines = true;

    Int32 line = 0;
    DealDataImport[] deal_data = engine.ReadFile(file_name);

    IList<fmCOrder> orders = new List<fmCOrder>();
    IList<crmStage> stages = new List<crmStage>();
    IList<crmDeliveryUnit> delivery_units = new List<crmDeliveryUnit>();
    IList<crmDeliveryItem> delivery_items = new List<crmDeliveryItem>();
    IList<crmPaymentUnit> payment_units = new List<crmPaymentUnit>();
    IList<crmPaymentItem> payment_items = new List<crmPaymentItem>();
    IList<csNomenclature> materials = os.GetObjects<csNomenclature>();

    foreach (DealDataImport record in deal_data)
    {
        fmCOrder order = null;
        crmStage stage = null;
        crmDeliveryUnit delivery_unit = null;
        crmDeliveryItem delivery_item = null;
        crmPaymentUnit payment_unit = null;
        line++;

        // Resolve the order by code, caching hits so repeated rows for the
        // same order do not query the object space again.
        if (!String.IsNullOrEmpty(record.OrderCode))
        {
            order = orders.FirstOrDefault(x => x.Code == record.OrderCode);
            if (order == null)
            {
                order = os.FindObject<fmCOrder>(new BinaryOperator("Code", record.OrderCode, BinaryOperatorType.Equal));
                if (order == null)
                {
                    throw new ArgumentException("Неизвестный заказ: " + record.OrderCode + " Строка: " + line, "OrderCode");
                }
                else
                {
                    orders.Add(order);
                }
            }
        }

        if (String.IsNullOrEmpty(record.StageCode))
        {
            throw new ArgumentException("Номер этапа пустой. Строка: " + line, "StageCode");
        }

        // BUG FIX: Substring(0, 3) threw ArgumentOutOfRangeException for stage
        // codes shorter than 3 characters (only non-emptiness is validated
        // above); StartsWith is safe and otherwise equivalent.
        bool isAdvanceStage = record.StageCode.StartsWith("Adv");

        if (isAdvanceStage)
        {
            // "Adv*" rows are booked against the predefined first stage.
            stage = StageStructure.FirstStage;
        }
        else
        {
            stage = StageStructure.Stages.FirstOrDefault(x => x.Code == record.StageCode);
            if (stage == null)
            {
                stage = StageStructure.FirstStage.SubStagesCreate();
                stage.Code = record.StageCode;
            }
            if (!stages.Contains(stage))
            {
                // First time this stage is touched in this import: (re)apply
                // the standard scheduling settings.
                stage.Order = order;
                stage.StageType = Contract.StageType.FINANCE;
                stage.DeliveryMethod = DeliveryMethod.UNITS_SHEDULE;
                stage.PaymentMethod = PaymentMethod.SCHEDULE;
                stages.Add(stage);
            }
        }

        if (!isAdvanceStage)
        {
            if (order == null)
            {
                throw new ArgumentException("Заказ не найден", "OrderCode");
            }
            if (record.DateContract == null)
            {
                throw new ArgumentException("Незаполнена плановая дата исполнения. Строка: " + line, "DateContract");
            }

            delivery_unit = stage.DeliveryPlan.DeliveryUnits.FirstOrDefault(x => x.DatePlane == record.DateContract);
            if (record.DateContract > stage.DateEnd)
            {
                stage.DateEnd = (DateTime)record.DateContract;
            }
            if (delivery_unit == null)
            {
                delivery_unit = stage.DeliveryPlan.DeliveryUnitCreate();
                delivery_unit.DatePlane = (DateTime)record.DateContract;
            }
            if (!delivery_units.Contains(delivery_unit))
            {
                delivery_units.Add(delivery_unit);
            }
            delivery_unit.Order = order;

            if (record.Count == null)
            {
                throw new ArgumentException("Кол-во пустое. Строка: " + line, "Count");
            }
            if (record.Price == null)
            {
                throw new ArgumentException("Цена пустая. Строка: " + line, "Price");
            }
            if (String.IsNullOrEmpty(record.NomenclatureCode))
            {
                throw new ArgumentException("Номенклатура пустая. Строка: " + line, "NomenclatureCode");
            }

            // Codes containing "*I" or "*E" have no nomenclature entry and are
            // skipped for delivery-item creation.
            if (!record.NomenclatureCode.Contains("*I") && !record.NomenclatureCode.Contains("*E"))
            {
                csNomenclature nomenclature = materials.FirstOrDefault(x => x.CodeTechnical == record.NomenclatureCode);
                if (nomenclature == null)
                {
                    throw new ArgumentException("Номенклатура: " + record.NomenclatureCode + " ненайдена. Строка: " + line, "NomenclatureCode");
                }
                delivery_item = delivery_unit.DeliveryItems.FirstOrDefault(x => x.Nomenclature == nomenclature);
                if (delivery_item == null)
                {
                    // The concrete item type depends on the nomenclature kind.
                    if (nomenclature is csMaterial)
                    {
                        delivery_item = delivery_unit.DeliveryItemsCreateMaterial();
                        ((crmDeliveryMaterial)delivery_item).Material = nomenclature as csMaterial;
                    }
                    else if (nomenclature is csService)
                    {
                        delivery_item = delivery_unit.DeliveryItemsCreateService();
                        ((crmDeliveryService)delivery_item).Service = nomenclature as csService;
                    }
                    else
                    {
                        throw new ArgumentException("Номенклатура: " + record.NomenclatureCode + " неверного типа. Строка: " + line, "NomenclatureCode");
                    }
                }
                delivery_item.CostCalculateMethod = CostCalculateMethod.CALC_COST;
                delivery_item.NDSCalculateMethod = NDSCalculateMethod.FROM_COST;
                delivery_item.FullCalculateMethod = FullCalculateMethod.CALC_FULL;
                delivery_item.Price = (Decimal)record.Price;
                delivery_item.CountUnit = delivery_item.Nomenclature.BaseUnit;
                // Accumulate counts when several rows hit the same item.
                if (delivery_items.Contains(delivery_item))
                {
                    delivery_item.CountValue += (Decimal)record.Count;
                }
                else
                {
                    delivery_item.CountValue = (Decimal)record.Count;
                    delivery_items.Add(delivery_item);
                }
            }
        }

        if (record.DateContract == null)
        {
            throw new ArgumentException("Дата плановая исполнения не заполнена. Строка: " + line, "DateContract");
        }

        // Find or create the cashless payment unit for this planned date.
        payment_unit = stage.PaymentPlan.PaymentUnits.FirstOrDefault(x => x.DatePlane == record.DateContract && x is crmPaymentCasheLess);
        if (payment_unit == null)
        {
            payment_unit = stage.PaymentPlan.PaymentCasheLessCreate();
            payment_unit.DatePlane = (DateTime)record.DateContract;
            if (payment_unit.DatePlane > stage.DateFinish)
            {
                stage.DateFinish = payment_unit.DatePlane;
            }
        }
        payment_unit.Order = order;
        // First row for this unit sets the sum; later rows accumulate.
        if (!payment_units.Contains(payment_unit))
        {
            ((crmPaymentCasheLess)payment_unit).SummFull = (Decimal)record.SummaPayment;
            payment_units.Add(payment_unit);
        }
        else
        {
            ((crmPaymentCasheLess)payment_unit).SummFull += (Decimal)record.SummaPayment;
        }
    }

    // Everything that existed before the import but was not referenced by the
    // file is collected for deletion.
    IList<crmDeliveryUnit> del_delivery_units = new List<crmDeliveryUnit>();
    IList<crmDeliveryItem> del_delivery_items = new List<crmDeliveryItem>();
    IList<crmPaymentUnit> del_payment_units = new List<crmPaymentUnit>();
    IList<crmPaymentItem> del_payment_items = new List<crmPaymentItem>();
    foreach (crmStage stage in stages)
    {
        foreach (crmDeliveryUnit delivery_unit in stage.DeliveryPlan.DeliveryUnits)
        {
            if (!delivery_units.Contains(delivery_unit))
            {
                del_delivery_units.Add(delivery_unit);
                foreach (crmDeliveryItem item in delivery_unit.DeliveryItems)
                {
                    del_delivery_items.Add(item);
                }
            }
            else
            {
                foreach (crmDeliveryItem item in delivery_unit.DeliveryItems)
                {
                    if (!delivery_items.Contains(item))
                    {
                        del_delivery_items.Add(item);
                    }
                }
            }
        }
        foreach (crmPaymentUnit payment_unit in stage.PaymentPlan.PaymentUnits)
        {
            if (!payment_units.Contains(payment_unit))
            {
                del_payment_units.Add(payment_unit);
                foreach (crmPaymentItem item in payment_unit.PaymentItems)
                {
                    del_payment_items.Add(item);
                }
            }
            else
            {
                foreach (crmPaymentItem item in payment_unit.PaymentItems)
                {
                    if (!payment_items.Contains(item))
                    {
                        del_payment_items.Add(item);
                    }
                }
            }
        }
    }

    // PaymentItems are deliberately NOT deleted: PaymentCasheLessCreate
    // creates its own item, so only the units themselves are removed.
    os.Delete(del_payment_units);
    os.Delete(del_delivery_items);
    os.Delete(del_delivery_units);
}
/// <summary>
/// Processes each BSE equity bhavcopy CSV: merges the delivery quantity from
/// the matching SCBSEALL*.TXT file into the open-interest field, rewrites the
/// file in the final Ticker,Name,Date,... layout, copies it into every
/// requested output-format folder and finally deletes the working file.
/// </summary>
/// <param name="strBSECSVArr">Full paths of the BSE equity CSV files to process.</param>
/// <param name="strSCBTXTArr">Full paths of the candidate SCBSEALL TXT files.</param>
/// <param name="strOutputFormat">Comma-separated list of output format names.</param>
/// <param name="strOutputFolder">Root folder of the per-format output trees.</param>
/// <param name="AddMessageToLog">Callback used to report missing input files.</param>
public void ExecuteBSEEQUITYProcessing(string[] strBSECSVArr, string[] strSCBTXTArr, string strOutputFormat, string strOutputFolder, Action<string> AddMessageToLog)
{
    FileHelperEngine engineBSECSV = new FileHelperEngine(typeof(BSECSV));
    // NOTE(review): the builder result is unused; the call is kept in case
    // BuildNSECMPFile has side effects — confirm and drop if it has none.
    DelimitedClassBuilder cb = BuildNSECMPFile();
    FileHelperEngine engineSCBTXT = new FileHelperEngine(typeof(SCBTXT));

    foreach (string obj in strBSECSVArr)
    {
        // The file name encodes the trade date: EQddmmyy*.csv
        string[] words = obj.Split('\\');
        string strbseequityfilename = words[words.Length - 1];
        string strday = strbseequityfilename.Substring(2, 2);
        string strmon = strbseequityfilename.Substring(4, 2);
        string stryear = strbseequityfilename.Substring(6, 2);
        string scbtxtfilename = "SCBSEALL" + strbseequityfilename.Substring(2, 4) + ".TXT";

        if (!File.Exists(obj))
        {
            AddMessageToLog("File " + strbseequityfilename + " does not exist!");
            continue;
        }
        string SCBSETXTfilenamewithpath = GetFileNameWithPath(strSCBTXTArr, scbtxtfilename);
        if (!File.Exists(SCBSETXTfilenamewithpath))
        {
            AddMessageToLog("File " + scbtxtfilename + " does not exist!");
            continue;
        }

        BSECSV[] resbsecsv = engineBSECSV.ReadFile(obj) as BSECSV[];
        SCBTXT[] resscbtxt = engineSCBTXT.ReadFile(SCBSETXTfilenamewithpath) as SCBTXT[];

        // Copy OI (delivery quantity) from the MTO file; first match wins.
        int iTotalRows = resbsecsv.Length;
        for (int i = 0; i < iTotalRows; i++)
        {
            for (int j = 0; j < resscbtxt.Length; j++)
            {
                if (resbsecsv[i].sc_code == resscbtxt[j].scripcode)
                {
                    resbsecsv[i].openint = resscbtxt[j].deliveryqty;
                    break;
                }
            }
        }

        // Build the final rows in the output layout.
        BSECSVFINAL[] finalarr = new BSECSVFINAL[resbsecsv.Length];
        int icntr = 0;
        while (icntr < resbsecsv.Length)
        {
            finalarr[icntr] = new BSECSVFINAL();
            finalarr[icntr].ticker = resbsecsv[icntr].sc_code;
            finalarr[icntr].name = resbsecsv[icntr].sc_name;
            // Date reassembled as yyyyMMdd from the ddmmyy file-name fields.
            finalarr[icntr].date = "20" + stryear + strmon + strday;
            finalarr[icntr].open = resbsecsv[icntr].open;
            finalarr[icntr].high = resbsecsv[icntr].high;
            finalarr[icntr].low = resbsecsv[icntr].low;
            finalarr[icntr].close = resbsecsv[icntr].close;
            finalarr[icntr].volume = resbsecsv[icntr].no_of_shrs;
            if ((resbsecsv[icntr].openint) == null)
            {
                resbsecsv[icntr].openint = 0;
            }
            finalarr[icntr].openint = resbsecsv[icntr].openint;
            icntr++;
        }

        FileHelperEngine engineBSECSVFINAL = new FileHelperEngine(typeof(BSECSVFINAL));
        engineBSECSVFINAL.HeaderText = "Ticker,Name,Date,Open,High,Low,Close,Volume,OPENINT";
        engineBSECSVFINAL.WriteFile(obj, finalarr);

        string folder;
        words = strOutputFormat.Split(',');
        foreach (string item in words)
        {
            if (item == "StdCSV" || item == "Metastock" || item == "Ninja" || item == "FCharts")
            {
                folder = Path.Combine(strOutputFolder, item, "BSE", "Equity", strbseequityfilename);
                File.Copy(obj, folder, true);
            }
            else if (item == "Amibroker")
            {
                // NOTE(review): IgnoreFirstLines stays set once this branch
                // runs, so a non-Amibroker format listed AFTER "Amibroker"
                // copies the header-less rewrite — confirm the intended
                // ordering of strOutputFormat.
                engineBSECSVFINAL.Options.IgnoreFirstLines = 1;
                engineBSECSVFINAL.WriteFile(obj, finalarr);
                folder = Path.Combine(strOutputFolder, item, "BSE", "Equity", strbseequityfilename);
                File.Copy(obj, folder, true);
            }
        }
        File.Delete(obj);
    }
}
public void FileNotFound() { engine = new FileHelperEngine(typeof(SampleType)); engine.ReadFile("No foun343333d this file.txt"); }
/// <summary>
/// FileSystemWatcher change callback: parses the changed file and, when its
/// records identify a printable document (doc type, printer and date), copies
/// it to the configured output directory ("Pathout" app setting). Any failure
/// — parse or copy — falls back to (re)starting the PPV Windows service.
/// </summary>
/// <param name="source">Watcher that raised the event (unused).</param>
/// <param name="e">Event data carrying the full path of the changed file.</param>
public static void AlCambiar(object source, FileSystemEventArgs e)
{
    string destinationDirectory = ConfigurationManager.AppSettings.Get("Pathout");
    string fileToCopy = e.FullPath.ToString();

    try
    {
        var engine = new FileHelperEngine<Documents>();
        var records = engine.ReadFile(e.FullPath);

        // Only forward files whose records identify a document type, a
        // printer and a date.
        if (Documents.EsDoc(records[0].tipoDoc) && Documents.EsPrint(records[2].tipoDoc) && Documents.EsFecha(records[3].tipoDoc))
        {
            try
            {
                File.Copy(fileToCopy, destinationDirectory + Path.GetFileName(fileToCopy));
            }
            catch
            {
                // Copy failed — the consuming service may be down.
                EnsurePpvServiceRunning();
            }
        }
    }
    catch
    {
        // Parse failed — same recovery path as a failed copy.
        EnsurePpvServiceRunning();
    }
}

/// <summary>
/// Starts the "PPVWindowsService" service when it is currently stopped and
/// waits until it reports Running. InvalidOperationException is swallowed,
/// matching the original inline recovery code (service missing / access
/// denied are treated as best-effort).
/// </summary>
private static void EnsurePpvServiceRunning()
{
    ServiceController sc = new ServiceController();
    sc.ServiceName = "PPVWindowsService";
    if (sc.Status == ServiceControllerStatus.Stopped)
    {
        try
        {
            // Start the service, and wait until its status is "Running".
            sc.Start();
            sc.WaitForStatus(ServiceControllerStatus.Running);
        }
        catch (InvalidOperationException)
        {
        }
    }
}
protected virtual void BuildQueries() { var Engine = new FileHelperEngine <T>(); var r = Engine.ReadFile(ResumeName); }
/// <summary> /// Use an engine to read an array of objects /// </summary> /// <param name="engine">Engine to read file</param> /// <param name="pathElements">List of directories and a filename in Data area</param> /// <returns>objects from file</returns> public static T[] ReadTest <T>(FileHelperEngine <T> engine, params string[] pathElements) where T : class { return(engine.ReadFile(GetPath(pathElements))); }
public List <T> Load <T>(string path) where T : class { var engine = new FileHelperEngine <T>(Encoding.UTF8); return(engine.ReadFile(path).ToList()); }
/// <summary>
/// Reads "unterbrechungen-strom.csv" (skipping the header line) and converts
/// every row into an Unterbrechung with a single UnterbrechungDetail, adding
/// the result to the unterbrechungen collection. String codes in the CSV are
/// mapped onto the corresponding enums, falling back to NichtGesetzt.
/// </summary>
public void UnterbrechungenEinlesen()
{
    FileHelperEngine csvEngine = new FileHelperEngine(typeof(UnterbrechungStrom));
    csvEngine.Options.IgnoreFirstLines = 1;

    UnterbrechungStrom[] rows = csvEngine.ReadFile("unterbrechungen-strom.csv") as UnterbrechungStrom[];
    foreach (UnterbrechungStrom row in rows)
    {
        Unterbrechung outage = new Unterbrechung();
        outage.LfdNr = row.LfdNr;
        outage.VSU_1_14 = row.VSU_1_14;
        outage.VSU_1_15 = row.VSU_1_15;
        outage.VSU_1_16 = row.VSU_1_16;
        outage.VSU_1_17 = row.VSU_1_17;
        outage.VSU_1_18 = row.VSU_1_18;
        outage.VSU_1_19 = row.VSU_1_19;
        outage.VSU_1_36 = row.VSU_1_36;

        // Map the cause code onto the enum; unknown codes become NichtGesetzt.
        switch (row.StoerungsAnlass)
        {
            case "AE":
                outage.StoerungsAnlass = StoerungsAnlassEnum.AE;
                break;
            case "ED":
                outage.StoerungsAnlass = StoerungsAnlassEnum.ED;
                break;
            case "ZBN":
                outage.StoerungsAnlass = StoerungsAnlassEnum.ZBN;
                break;
            case "RWS":
                outage.StoerungsAnlass = StoerungsAnlassEnum.RWS;
                break;
            case "HG":
                outage.StoerungsAnlass = StoerungsAnlassEnum.HG;
                break;
            case "ZW":
                outage.StoerungsAnlass = StoerungsAnlassEnum.ZW;
                break;
            case "S":
                outage.StoerungsAnlass = StoerungsAnlassEnum.S;
                break;
            default:
                outage.StoerungsAnlass = StoerungsAnlassEnum.NichtGesetzt;
                break;
        }

        switch (row.VSU_1_20)
        {
            // NOTE(review): "N" maps to U while "G" maps to G — the mapping
            // looks asymmetric; confirm it is intended.
            case "N":
                outage.Unterbrechungsart = UnterbrechungsArtEnum.U;
                break;
            case "G":
                outage.Unterbrechungsart = UnterbrechungsArtEnum.G;
                break;
            default:
                outage.Unterbrechungsart = UnterbrechungsArtEnum.NichtGesetzt;
                break;
        }

        UnterbrechungDetail detail = new UnterbrechungDetail();
        detail.KTrafo = row.KTrafo;
        detail.KTrafo_Produkt = row.KTrafo_Produkt;
        switch (row.NetzebeneID)
        {
            case "NS":
                detail.NetzebeneID = NetzebeneEnum.NS;
                break;
            case "MS":
                detail.NetzebeneID = NetzebeneEnum.MS;
                break;
            case "HS":
                detail.NetzebeneID = NetzebeneEnum.HS;
                break;
            case "HoeS":
                detail.NetzebeneID = NetzebeneEnum.HoeS;
                break;
            default:
                detail.NetzebeneID = NetzebeneEnum.NichtGesetzt;
                break;
        }
        detail.NTrafo = row.NTrafo;
        detail.NTrafo_Produkt = row.NTrafo_Produkt;
        detail.VSU_1_13 = row.VSU_1_13;

        outage.Details = new[] { detail };
        unterbrechungen.Add(outage);
    }
}
/// <summary>
/// Loads the accidents CSV data set — extracting it from the bundled zip when
/// the CSV is not yet on disk — joins each report with the statistic-friendly
/// weather mappings, and returns combined FIWARE reports for the two states
/// with the most accidents in 2017-2018.
/// </summary>
/// <returns>Combined weather + traffic reports for the two top states.</returns>
/// <exception cref="System.IO.FileNotFoundException">
/// Current directory, zip or weather-mappings json cannot be located.
/// </exception>
/// <exception cref="ArgumentException">
/// CSV still missing after extraction (possible zip corruption).
/// </exception>
public IEnumerable<FiwareCombinedReport> ParseAccidentsDataset()
{
    var currentDirectory = System.IO.Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);

    // Nullity check, although this is known not to be null
    if (string.IsNullOrWhiteSpace(currentDirectory))
    {
        throw new System.IO.FileNotFoundException("Current Directory is null");
    }

    // Paired "#if DEBUG / #if !DEBUG" blocks collapsed into #if/#else.
#if DEBUG
    var accidentCsvFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationDebug, AccidentCsv));
#else
    var accidentCsvFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationRelease, AccidentCsv));
#endif
    var csvExists = System.IO.File.Exists(accidentCsvFullLocation);

    // If the csv is missing, extract it from the zip.
    if (!csvExists)
    {
#if DEBUG
        var accidentZipFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, AccidentZipDebug));
#else
        var accidentZipFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, AccidentZipRelease));
#endif
        var zipExists = System.IO.File.Exists(accidentZipFullLocation);
        if (!zipExists)
        {
            throw new System.IO.FileNotFoundException("Zip is missing from the folder FIWAREHub.Datasets");
        }

#if DEBUG
        var extractLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationDebug));
#else
        var extractLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, ExtractLocationRelease));
#endif
        System.IO.Compression.ZipFile.ExtractToDirectory(accidentZipFullLocation, extractLocation);

        csvExists = System.IO.File.Exists(accidentCsvFullLocation);
        if (!csvExists)
        {
            throw new ArgumentException("CSV Not found even after extraction attempt. Possible zip corruption. Please try downloading the datasets again.");
        }
    }

    // Deserialization of statistic-friendly weather mappings
#if DEBUG
    var mappingsFileLocation = MappingsLocationDebug;
#else
    var mappingsFileLocation = MappingsLocationRelease;
#endif
    var mappingsFullLocation = System.IO.Path.GetFullPath(System.IO.Path.Combine(currentDirectory, mappingsFileLocation));
    var mappingExists = System.IO.File.Exists(mappingsFullLocation);
    if (!mappingExists)
    {
        throw new System.IO.FileNotFoundException("Weather mappings json is missing.");
    }

    var fileAsString = System.IO.File.ReadAllText(mappingsFullLocation);
    var weatherMappings = Newtonsoft.Json.JsonConvert.DeserializeObject<WeatherMappings>(fileAsString);

    // File helper engine to deserialize CSV data set
    var engine = new FileHelperEngine<DatasetAccidentReport>();
    var accidents = engine
        .ReadFile(accidentCsvFullLocation)
        .ToList();

    // Hoisted out of the query so the array is not re-allocated per element.
    var relevantYears = new[] { 2017, 2018 };

    // Two top states with most accidents
    var accidentsOfTwoTopStates = accidents
        // Year 2017, 2018
        .Where(a => relevantYears.Contains(a.StartTime.GetValueOrDefault().Year))
        // Omits accident reports without weather condition
        .Where(a => !string.IsNullOrWhiteSpace(a.WeatherCondition))
        .GroupBy(a => a.State)
        .OrderByDescending(g => g.Count())
        .Take(2)
        .SelectMany(g => g.ToList())
        .Select((ar, idx) => new FiwareCombinedReport
        {
            FiwareWeatherReport = new FiwareWeatherReport(ar.WeatherCondition, weatherMappings, ar, idx + 1),
            FiwareTrafficDataReport = new FiwareTrafficReport(ar, idx + 1)
        })
        .ToList();

    return accidentsOfTwoTopStates;
}
public ReadCsv() { var engine = new FileHelperEngine <CsvRecord>(); result = engine.ReadFile("atrax.csv"); }
/// <summary> /// Must Return the records from the DataSource (DB, Excel, etc) /// </summary> /// <returns>The extracted records.</returns> public override object[] ExtractRecords() { return(mEngine.ReadFile(mFileName)); }
/// <summary> /// Import the sessions in this providers type /// This may throw an exception if there are File I/O issues /// </summary> /// <param name="location">The location of the csv file</param> /// <returns>The list of sessions</returns> public List <Session> loadSessions(string location) { Uri uri = null; try { uri = new Uri(location); } catch (Exception e) { throw new Exception("Unable to parse location: " + e.Message); } FileHelperEngine <CsvRecord> engine = new FileHelperEngine <CsvRecord>(); List <CsvRecord> csvList = null; List <Session> sessionList = null; if (uri.Scheme == Uri.UriSchemeFile) { try { csvList = new List <CsvRecord>(engine.ReadFile(uri.LocalPath)); } catch { throw new Exception("Unable to parse sessions file." + Environment.NewLine + "The file may be corrupted or from" + Environment.NewLine + "a previous version of PuTTY Session Manager."); } } else if (uri.Scheme == Uri.UriSchemeHttp || uri.Scheme == Uri.UriSchemeHttps) { Stream s = getRemoteCsvFile(uri); if (s != null) { try { csvList = new List <CsvRecord>(engine.ReadStream(new StreamReader(s))); } catch { throw new Exception("Unable to parse sessions file." + Environment.NewLine + "The file may be corrupted or from" + Environment.NewLine + "a previous version of PuTTY Session Manager."); } finally { if (s != null) { s.Close(); } } } } else { throw new Exception("Unable to parse location: unsupported protocol"); } if (csvList != null) { sessionList = csvList.ConvertAll(new Converter <CsvRecord, Session>(CsvToSessionConvertor)); } return(sessionList); }
/// <summary>
/// Builds an OLife document for the fund matching the given source lookup.
/// Loads fund master data and 36 months of performance from the App_Data CSV
/// exports, then attaches sector allocations and fund prices as OLifE extensions.
/// </summary>
/// <param name="sourceLookup">Lookup whose SourceId is matched against the fund's JSE code.</param>
/// <returns>An OLife document containing zero or one InvestmentProduct.</returns>
public static OLife LoadBySourceLookup(SourceLookup sourceLookup)
{
    // Load the fund master data and the performance history CSV exports.
    FileHelperEngine engine = new FileHelperEngine(typeof(Funds));
    Funds[] fundsexport = (Funds[])engine.ReadFile(HttpContext.Current.Server.MapPath("~/App_Data/Funds.csv"));
    engine = new FileHelperEngine(typeof(FundPerformance));
    FundPerformance[] fundPerformance = (FundPerformance[])engine.ReadFile(HttpContext.Current.Server.MapPath("~/App_Data/FundPerformance.csv"));

    OLife olife = new OLife();
    List <OLifeBase> lstProduct = new List <OLifeBase>();

    //THIS WILL CHANGE ON LIVE
    //int SourceId = int.Parse(sourceLookup.SourceId);
    //var funds = fundsexport.Where(t => t.TrustNo == SourceId).ToList();
    //var sfund = fundsexport.FirstOrDefault(t => t.TrustNo == SourceId);
    var sfund = fundsexport.SingleOrDefault(t => t.JSECode == sourceLookup.SourceId);
    //foreach (var sfund in funds)
    //{
    if (sfund != null)
    {
        InvestmentProduct fund = new InvestmentProduct()
        {
            id = String.Concat("STLB_InvestmentProduct_", sfund.TrustNo),
            //InvestProductSysKey = new String[] { String.Concat("Silica_FundCode{", sfund.TrustNo.ToString(), "}") },
            InvestProductSysKey = new String[] { String.Concat("Silica_FundCode{", sfund.JSECode.ToString(), "}") },
            ProductCode = sourceLookup.ProductCode,
            FullName = sfund.FundName,
            SaleEffectiveDate = sfund.QuarterEnd
        };

        OLifEExtension ext = new OLifEExtension();
        List <OLifEExtension> extlst = new List <OLifEExtension>();
        List <FundPrice> fundprices = new List <FundPrice>();
        List <SectorAllocation> sectors = new List <SectorAllocation>();

        ext.VendorCode = "STANLIB";
        // Local market sector allocations.
        sectors.Add(new SectorAllocation { JSECode = sfund.JSECode, MarketSector = "Resources", Local = sfund.Resources, LocalSpecified = true });
        sectors.Add(new SectorAllocation { JSECode = sfund.JSECode, MarketSector = "Financials", Local = sfund.Financials, LocalSpecified = true });
        sectors.Add(new SectorAllocation { JSECode = sfund.JSECode, MarketSector = "Industrials", Local = sfund.Industrials, LocalSpecified = true });
        sectors.Add(new SectorAllocation { JSECode = sfund.JSECode, MarketSector = "Alt X", Local = sfund.AltX, LocalSpecified = true });
        sectors.Add(new SectorAllocation { JSECode = sfund.JSECode, MarketSector = "Property", Local = sfund.Property, LocalSpecified = true });
        sectors.Add(new SectorAllocation { JSECode = sfund.JSECode, MarketSector = "Bonds", Local = sfund.Bonds, LocalSpecified = true });
        sectors.Add(new SectorAllocation { JSECode = sfund.JSECode, MarketSector = "Cash", Local = sfund.Cash, LocalSpecified = true });
        // Foreign market sector allocations.
        sectors.Add(new SectorAllocation { JSECode = sfund.JSECode, MarketSector = "Foreign Equity", Foreign = sfund.ForeignEquity, ForeignSpecified = true });
        sectors.Add(new SectorAllocation { JSECode = sfund.JSECode, MarketSector = "Foreign Bonds", Foreign = sfund.ForeignBonds, ForeignSpecified = true });
        sectors.Add(new SectorAllocation { JSECode = sfund.JSECode, MarketSector = "Foreign Property", Foreign = sfund.ForeignProperty, ForeignSpecified = true });
        sectors.Add(new SectorAllocation { JSECode = sfund.JSECode, MarketSector = "Foreign Cash", Foreign = sfund.ForeignCash, ForeignSpecified = true });
        ext.Items = (object[])sectors.ToArray();
        extlst.Add(ext);

        // Second extension: one FundPrice per month (M1..M36) if performance data exists.
        ext = new OLifEExtension();
        var fundprice = fundPerformance.FirstOrDefault(t => t.JSECode.Trim() == sfund.JSECode.ToString());
        if (fundprice != null)
        {
            // NOTE(review): some months (M5, M7, M9, M11, M15, M26, M36) omit
            // Name/Type in the original code — preserved as-is; confirm whether intentional.
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M1"), Performance = fundprice.M1, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M2"), Performance = fundprice.M2, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M3"), Performance = fundprice.M3, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M4"), Performance = fundprice.M4, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M5"), Performance = fundprice.M5 });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M6"), Performance = fundprice.M6, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M7"), Performance = fundprice.M7 });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M8"), Performance = fundprice.M8, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M9"), Performance = fundprice.M9 });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M10"), Performance = fundprice.M10, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M11"), Performance = fundprice.M11 });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M12"), Performance = fundprice.M12, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M13"), Performance = fundprice.M13, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M14"), Performance = fundprice.M14, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M15"), Performance = fundprice.M15 });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M16"), Performance = fundprice.M16, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M17"), Performance = fundprice.M17, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M18"), Performance = fundprice.M18, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M19"), Performance = fundprice.M19, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M20"), Performance = fundprice.M20, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M21"), Performance = fundprice.M21, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M22"), Performance = fundprice.M22, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M23"), Performance = fundprice.M23, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M24"), Performance = fundprice.M24, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M25"), Performance = fundprice.M25, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M26"), Performance = fundprice.M26 });
            // FIX: the M27 entry originally looked up the date with the duplicated
            // key "M26" (copy/paste bug), pairing M27's performance with M26's date.
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M27"), Performance = fundprice.M27, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M28"), Performance = fundprice.M28, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M29"), Performance = fundprice.M29, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M30"), Performance = fundprice.M30, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M31"), Performance = fundprice.M31, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M32"), Performance = fundprice.M32, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M33"), Performance = fundprice.M33, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M34"), Performance = fundprice.M34, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M35"), Performance = fundprice.M35, Name = sfund.FundName, Type = PriceType.Price });
            fundprices.Add(new FundPrice() { JSECode = fundprice.JSECode, Date = LookupDate("M36"), Performance = fundprice.M36 });
            ext.Items = (object[])fundprices.ToArray();
            extlst.Add(ext);
        }
        fund.OLifEExtension = extlst.ToArray();
        lstProduct.Add(fund);
    }
    //}
    olife.Items = lstProduct.ToArray();
    return(olife);
}
/// <summary>
/// LINQ demo: loads a roster from Roster.csv (via FileHelpers) and walks
/// through Find, FindAll, GroupBy, Average, OrderByDescending and Take.
/// </summary>
static void Main(string[] args)
{
    // Convert the CSV file to an object list using the FileHelpers library.
    var engine = new FileHelperEngine <RosterItem>();
    // Skip the header line of the file.
    engine.Options.IgnoreFirstLines = 1;
    List <RosterItem> myRosterItemList = engine.ReadFile("Roster.csv").ToList();

    Console.WriteLine("");
    Console.WriteLine("*** Now let's use linq to find a player from the list by id ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    int playerId = 0;
    Console.WriteLine("Enter a player ID:");
    string answer = Console.ReadLine();
    // TryParse leaves playerId at 0 when the answer is not a number.
    int.TryParse(answer, out playerId);
    if (playerId > 0)
    {
        // Find a player by their id -- USING FIND.
        RosterItem playerById = myRosterItemList.Find(x => x.ID == playerId);
        if (playerById != null)
        {
            Console.WriteLine($"ID: {playerById.ID} FirstName: {playerById.FirstName} LastName: {playerById.LastName}");
        }
        else
        {
            Console.WriteLine("No matching ID.");
        }
    }
    else
    {
        Console.WriteLine("Error: Could not parse your answer into a number.");
    }

    Console.WriteLine("");
    Console.WriteLine("*** Now let's find all players by team color ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    Console.WriteLine("Enter a Team Color:");
    string teamColor = Console.ReadLine();
    // Find all players on a team -- USING FINDALL.
    List <RosterItem> playersByColor = myRosterItemList.FindAll(x => x.TeamColor == teamColor);
    Console.WriteLine("First Name Last Name Position Team Color");
    Console.WriteLine("-------------------------------------------------");
    foreach (var player in playersByColor)
    {
        // {value,-13}: left-aligned with a minimum width of 13 characters
        // (a positive width would right-align the column).
        Console.WriteLine($"{player.FirstName,-13}{player.LastName,-13}{player.Position,-13}{player.TeamColor,-13}");
    }

    Console.WriteLine("");
    Console.WriteLine("*** Now report player count by team color ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    // Count players per team color -- USING GROUPBY (deferred IEnumerable query).
    var numberOfPlayerByTeamColor = from p in myRosterItemList
                                    group p.TeamColor by p.TeamColor into g
                                    select new { TeamColor = g.Key, Count = g.Count() };
    Console.WriteLine("Team Color Number of Players");
    Console.WriteLine("-------------------------------------------------");
    foreach (var item in numberOfPlayerByTeamColor)
    {
        Console.WriteLine($"{item.TeamColor,-13}{item.Count,-13}");
    }

    // FIX: the original computed myRosterItemList.Average(x => x.TeamColor.Count()),
    // which averages the CHARACTER LENGTH of each player's team-color string —
    // not the number of players per team. Average the group counts instead.
    double averagePlayersPerTeam = numberOfPlayerByTeamColor.Average(x => x.Count);
    Console.WriteLine("");
    Console.WriteLine($"The average number of players by team is: {averagePlayersPerTeam}");

    Console.WriteLine("");
    Console.WriteLine("*** Now let's list the top 3 teams with the most number of players ***");
    Console.WriteLine("Press Enter to continue...");
    Console.ReadLine();

    Console.WriteLine("Team Color Number of Players");
    Console.WriteLine("------------------------------");
    // Reuse the grouped query and take the 3 largest teams -- USING TAKE.
    foreach (var item in numberOfPlayerByTeamColor.OrderByDescending(x => x.Count).Take(3))
    {
        Console.WriteLine($"{item.TeamColor,-13}{item.Count,-13}");
    }

    // More linq examples: http://stackoverflow.com/documentation/linq/topics
    // also see: http://linqsamples.com/
    Console.ReadLine();
}
/// <summary>
/// Parses the file at <paramref name="filePath"/> into a list of records of type <typeparamref name="T"/>.
/// </summary>
/// <param name="filePath">Path of the delimited/fixed-length file to read.</param>
/// <returns>All parsed records as a list.</returns>
public static List <T> Read <T>(string filePath)
{
    return new FileHelperEngine <T>().ReadFile(filePath).ToList();
}
/// <summary>
/// Processes NSE equity bhavcopy files: for each NSE file, merges delivery
/// quantity (OI) from the matching MTO file and security names from the SEC
/// file, rewrites the file in a normalized CSV layout, converts it to the
/// NSECMPFINAL layout, then copies the result into each requested output
/// format folder and deletes the working file.
/// </summary>
/// <param name="strMTOArr">Candidate MTO (delivery) file paths.</param>
/// <param name="strNSEArr">NSE equity bhavcopy file paths to process.</param>
/// <param name="strNSESEC">Path of the NSE securities master file.</param>
/// <param name="strOutputFormat">Comma-separated list of output format names.</param>
/// <param name="strOutputFolder">Root folder for the per-format output trees.</param>
/// <param name="AddMessageToLog">Callback used to report missing files/folders.</param>
public void ExecuteNSEEQUITYProcessing(string[] strMTOArr, string[] strNSEArr, string strNSESEC, string strOutputFormat, string strOutputFolder, Action <string> AddMessageToLog)
{
    FileHelperEngine engineMTO = new FileHelperEngine(typeof(NSEMTO));
    // NOTE(review): cb is never used below — presumably BuildNSECMPFile() is
    // kept for a side effect; confirm before removing.
    DelimitedClassBuilder cb = BuildNSECMPFile();
    FileHelperEngine engineCMP = new FileHelperEngine(typeof(NSECMP));
    FileHelperEngine engineSEC = new FileHelperEngine(typeof(NSESEC));
    foreach (string obj in strNSEArr)
    {
        //Get NSE Equity Filename day, month, year
        // Filenames look like "...cmDDMMMYYYY..."; "cm" anchors the date fields.
        int index = obj.IndexOf("cm");
        string day = obj.Substring(index + 2, 2);
        string monthname = obj.Substring(index + 4, 3);
        string year = obj.Substring(index + 7, 4);
        // Convert the three-letter month name to its number via a dummy date parse.
        int month = Convert.ToDateTime("01-" + monthname + "-2011").Month;
        if (month < 10)
        {
            monthname = "0";
        }
        else
        {
            monthname = "";
        }
        monthname += month.ToString();
        // Matching MTO (delivery) file is named MTO_DDMMYYYY.DAT.
        string MTOfilename = "MTO_" + day + monthname + year + ".DAT";
        string MTOfilenamewithpath = GetFileNameWithPath(strMTOArr, MTOfilename);
        if (!File.Exists(MTOfilenamewithpath))
        {
            AddMessageToLog("File " + MTOfilenamewithpath + " does not exist!");
            continue;
        }
        NSEMTO[] resmto = engineMTO.ReadFile(MTOfilenamewithpath) as NSEMTO[];
        if (!File.Exists(obj))
        {
            AddMessageToLog("File " + obj + " does not exist!");
            continue;
        }
        NSECMP[] rescmp = engineCMP.ReadFile(obj) as NSECMP[];
        if (!File.Exists(strNSESEC))
        {
            AddMessageToLog("File " + strNSESEC + " does not exist!");
            continue;
        }
        NSESEC[] ressec = engineSEC.ReadFile(strNSESEC) as NSESEC[];
        int iTotalRows = rescmp.Length;
        // Enrich EQ/BE rows with delivery quantity and the full security name.
        for (int i = 0; i < iTotalRows; i++)
        {
            if (rescmp[i].Series == "EQ" || rescmp[i].Series == "BE")
            {
                //Copy OI from MTO
                for (int j = 0; j < resmto.Length; j++)
                {
                    if ((resmto[j].NameOfSecurity == (string)rescmp[i].Symbol) && (resmto[j].series == (string)rescmp[i].Series))
                    {
                        rescmp[i].OI = resmto[j].DeliverableQty;
                        break;
                    }
                }
                //Copy Security Name from SEC
                for (int j = 0; j < ressec.Length; j++)
                {
                    if ((ressec[j].Symbol == (string)rescmp[i].Symbol))
                    {
                        rescmp[i].SecurityName = ressec[j].SecurityName;
                        break;
                    }
                }
            }
        }
        //engineCMP.HeaderText = "Symbol,Series,Open,High,Low,Close,Last,PrevClose,Tottrdqty,Tottrdval,Timestamp,Totaltrades,Isin,OI,SecurityName";
        //Dump File data
        // Rewrite the NSE file in place with the normalized header.
        engineCMP.HeaderText = "Ticker,Series,Open,High,Low,Close,Last,PrevClose,Volume,Tottrdval,Date,Totaltrades,Isin,OPENINT,NAME";
        engineCMP.WriteFile(obj, rescmp);
        int totrows = 0;
        int itmp = 0;
        int cnt = 0;
        //Calculate number of rows which have series as EQ or BE and are not NULL
        while (cnt < rescmp.Length)
        {
            if (rescmp[cnt].Series == "EQ" || rescmp[cnt].Series == "BE")
            {
                totrows++;
            }
            cnt++;
        }
        // Build the final (condensed) record set containing only EQ/BE rows.
        NSECMPFINAL[] finalarr = new NSECMPFINAL[totrows];
        DateTime myDate;
        itmp = 0;
        int icntr = 0;
        while (icntr < rescmp.Length)
        {
            if (rescmp[icntr].Series == "EQ" || rescmp[icntr].Series == "BE")
            {
                finalarr[itmp] = new NSECMPFINAL();
                finalarr[itmp].Ticker = rescmp[icntr].Symbol;
                finalarr[itmp].Name = rescmp[icntr].SecurityName;
                myDate = DateTime.Parse(rescmp[icntr].Timestamp);
                finalarr[itmp].Date = String.Format("{0:yyyyMMdd}", myDate);
                finalarr[itmp].Open = rescmp[icntr].Open;
                finalarr[itmp].High = rescmp[icntr].High;
                finalarr[itmp].Low = rescmp[icntr].Low;
                finalarr[itmp].Close = rescmp[icntr].Close;
                finalarr[itmp].Volume = rescmp[icntr].Tottrdqty;
                finalarr[itmp].OpenInt = rescmp[icntr].OI;
                itmp++;
            }
            icntr++;
        }
        // Overwrite the working file again, now in the NSECMPFINAL layout.
        FileHelperEngine engineCMPFINAL = new FileHelperEngine(typeof(NSECMPFINAL));
        engineCMPFINAL.HeaderText = "Ticker,Name,Date,Open,High,Low,Close,Volume,Openint";
        engineCMPFINAL.WriteFile(obj, finalarr);
        //FileHelpers.CsvOptions options = new FileHelpers.CsvOptions("ImportRecord", ',', obj);
        //options.HeaderLines = 1;
        //FileHelperEngine test = new FileHelpers.CsvEngine(options);
        ////DataTable header = test.ReadStringAsDT(FileHelpers.CommonEngine.RawReadFirstLines(obj, 1));
        ////test.Options.IgnoreFirstLines = 0;
        //DataTable dttest = test.ReadFileAsDT(obj);
        string[] words = null;
        words = strOutputFormat.Split(',');
        //Get Filename
        index = obj.IndexOf("cm");
        string fname = obj.Substring(index, 19);
        string folder;
        // Distribute the processed file to each requested output format folder.
        foreach (string item in words)
        {
            string outputfoldername = Path.Combine(strOutputFolder, item, "NSE", "Equity");
            if (item == "StdCSV" || item == "Metastock" || item == "Ninja" || item == "FCharts")
            {
                folder = Path.Combine(strOutputFolder, item, "NSE", "Equity", fname);
                if (!Directory.Exists(outputfoldername))
                {
                    AddMessageToLog("Directory " + outputfoldername + " does not exist!");
                }
                else
                {
                    File.Copy(obj, folder, true);
                }
            }
            else if (item == "Amibroker")
            {
                // Amibroker wants the file without the header line, so rewrite
                // once more with the first line ignored before copying.
                engineCMPFINAL.Options.IgnoreFirstLines = 1;
                engineCMPFINAL.WriteFile(obj, finalarr);
                folder = Path.Combine(strOutputFolder, item, "NSE", "Equity", fname);
                if (!Directory.Exists(outputfoldername))
                {
                    AddMessageToLog("Directory " + outputfoldername + " does not exist!");
                }
                else
                {
                    File.Copy(obj, folder, true);
                }
            }
        }
        // The working file has been copied everywhere it is needed; remove it.
        File.Delete(obj);
    }
}
/// <summary>
/// Reads the test-data file located by <paramref name="pathElements"/> (resolved
/// via <c>GetPath</c>) using the supplied engine.
/// </summary>
public static object[] ReadTest(FileHelperEngine engine, params string[] pathElements)
    => engine.ReadFile(GetPath(pathElements));
/// <summary>
/// Create a new PriceImport object based on the input file.
/// </summary>
/// <param name="newfilename">File with the CSV to import</param>
/// <param name="description">Description for the new import process</param>
/// <param name="haveHeader">Indicates if the file has header line</param>
/// <param name="separationChar">Indicates the separation character in the CSV file</param>
/// <param name="path">Indicates the path of the CSV file</param>
/// <param name="originalfilename">Original (user-visible) name of the uploaded file</param>
/// <returns>The persisted PriceImport with its final ImportStatus set</returns>
public PriceImport Create(string newfilename, string description, bool haveHeader, char separationChar, string path, string originalfilename)
{
    Utils.GetLogger().Debug(string.Format("[[Product Import]] Start {0}", description));
    // Build a FileHelpers record class matching the file's delimiter/header and parse it.
    DelimitedClassBuilder cb = CreateClassBuilder(separationChar, haveHeader);
    FileHelperEngine engine = new FileHelperEngine(cb.CreateRecordClass());
    object[] items = engine.ReadFile(path + newfilename);
    if (items.Length <= 0)
    {
        throw new EmptyImportationFileException("No se encontraron registros para ingresar.");
    }
    Utils.GetLogger().Debug(string.Format("[[Product Import]] Starting to get database data {0}", description));
    // TODO: We should only get the necessary fields for each object and not the whole object.
    provlist = ControllerManager.Provider.GetActives() as List <Provider>;
    catlist = ControllerManager.CategoryBase.GetAll() as List <CategoryBase>;
    currlist = ControllerManager.Currency.GetAll() as List <Currency>;
    // Get Needed PriceBase Data (code/provider-code are lower-cased and trimmed in SQL
    // so the in-memory comparisons below can use lower-cased values directly).
    IQuery q = NHibernateSession.CreateQuery("select lower(trim(P.Code)), lower(trim(PB.ProviderCode)), PB.Provider.ID, PB.ID FROM PriceBase PB JOIN PB.Product P");
    q.SetResultTransformer(new NHibernate.Transform.AliasToBeanConstructorResultTransformer(typeof(PriceBaseFound).GetConstructors()[0]));
    List <PriceBaseFound> lst = q.List <PriceBaseFound>() as List <PriceBaseFound>;
    // Clear the memory
    NHibernateSession.Flush();
    NHibernateSession.Clear();
    Utils.GetLogger().Info(string.Format("[[Product Import]] Ready {0}", description));
    List <PriceImportLog> lstDuplicates = new List <PriceImportLog>(items.Length);
    // Start the transaction
    this.BeginTransaction();
    // Create the PriceImport item
    PriceImport pi = new PriceImport();
    pi.ImportStatus = ImportStatus.Invalid;
    pi.File = originalfilename;
    pi.Description = description;
    pi.DateImported = DateTime.Now;
    pi.HaveHeader = haveHeader;
    pi.SeparationChar = separationChar;
    Save(pi);
    bool error = false;
    bool atLeastOneValid = false;
    // Turn every parsed CSV row into a PriceImportLog, validating as we go.
    for (int i = 0; i < items.Length; i++)
    {
        string originalline = "";
        // Create the item
        PriceImportLog lr = new PriceImportLog();
        lr.CodGrundfos = GetValue("CodGrundfos", items[i]).ToString();
        lr.CodProvider = GetValue("CodProv", items[i]).ToString();
        lr.Model = GetValue("Modelo", items[i]).ToString();
        lr.Description = GetValue("Descripcion", items[i]).ToString();
        lr.Provider = GetValue("Proveedor", items[i]).ToString();
        lr.Frequency = GetValue("Frecuencia", items[i]).ToString();
        lr.TP = (decimal?)GetValue("TP", items[i]);
        lr.TPCurrency = GetValue("MonedaTP", items[i]).ToString();
        lr.GRP = (decimal?)GetValue("GRP", items[i]);
        lr.GRPCurrency = GetValue("MonedaGRP", items[i]).ToString();
        lr.PL = (decimal?)GetValue("PL", items[i]);
        lr.PLCurrency = GetValue("MonedaPL", items[i]).ToString();
        lr.Cat1 = GetValue("Familia", items[i]).ToString();
        lr.Cat2 = GetValue("Tipo", items[i]).ToString();
        lr.Cat3 = GetValue("Linea", items[i]).ToString();
        lr.OriginalLine = originalline;
        lr.FileIndex = i;
        // Check if at least the required fields are completed, otherwise ignore.
        if (string.IsNullOrEmpty(lr.Provider) || (string.IsNullOrEmpty(lr.CodGrundfos) && string.IsNullOrEmpty(lr.CodProvider)))
        {
            // Evict so the incomplete row is never persisted by the session.
            NHibernateSession.Evict(lr);
            error = true;
            continue;
        }
        // Check the whole item data
        CheckLogResultItem(lr);
        // Flag duplicates: same Grundfos code or same provider code for the same provider.
        if (lr.CodGrundfos == string.Empty || lr.CodProvider == string.Empty)
        {
            if (lstDuplicates.Exists(delegate(PriceImportLog record)
            {
                if ((record.CodGrundfos != string.Empty && record.CodGrundfos == lr.CodGrundfos && record.Provider == lr.Provider) ||
                    (record.CodProvider != string.Empty && record.CodProvider == lr.CodProvider && record.Provider == lr.Provider))
                {
                    return(true);
                }
                return(false);
            }))
            {
                SetErrorOnItem(lr, Resource.Business.GetString("DuplicatedProduct"));
            }
        }
        lstDuplicates.Add(lr);
        if (lr.Status != PriceImportLogStatus.Error)
        {
            // Try to match the row against an existing PriceBase (by Grundfos
            // code when present, otherwise by provider code) for this provider.
            string codGrundFos = lr.CodGrundfos.Trim();
            string codProvider = lr.CodProvider.Trim();
            Provider prov = FindInMemory(provlist, lr.Provider);
            PriceBaseFound pb;
            if (!string.IsNullOrEmpty(codGrundFos))
            {
                codGrundFos = codGrundFos.ToLower();
                pb = lst.Find(delegate(PriceBaseFound record)
                {
                    if (record.Code == codGrundFos && record.ProviderId == prov.ID)
                    {
                        return(true);
                    }
                    return(false);
                });
            }
            else
            {
                codProvider = codProvider.ToLower();
                pb = lst.Find(delegate(PriceBaseFound record)
                {
                    if (record.CodeProvider == codProvider && record.ProviderId == prov.ID)
                    {
                        return(true);
                    }
                    return(false);
                });
            }
            if (pb != null)
            {
                // Existing price base found: this row will modify it rather than create one.
                lr.Status = PriceImportLogStatus.Modify;
                lr.ParsedPriceBase = new PriceBase(pb.PriceBaseId);
                // Blank columns mean "keep existing" -> clear the parsed values.
                if (string.IsNullOrEmpty(GetValue("Frecuencia", items[i]).ToString()))
                {
                    lr.Frequency = string.Empty;
                    lr.ParsedFrequency = null;
                }
                if (string.IsNullOrEmpty(GetValue("MonedaTP", items[i]).ToString()))
                {
                    lr.TPCurrency = string.Empty;
                }
                if (string.IsNullOrEmpty(GetValue("MonedaGRP", items[i]).ToString()))
                {
                    lr.GRPCurrency = string.Empty;
                }
                if (string.IsNullOrEmpty(GetValue("MonedaPL", items[i]).ToString()))
                {
                    lr.PLCurrency = string.Empty;
                }
            }
        }
        // Save the item
        lr.PriceImport = pi;
        NHibernateSession.Save(lr);
        if (lr.Status != PriceImportLogStatus.Error)
        {
            atLeastOneValid = true;
        }
        else
        {
            error = true;
        }
    }
    // Update the status of the PriceImport item
    if (error)
    {
        pi.ImportStatus = (atLeastOneValid) ? ImportStatus.VerifiedSomeInvalid : ImportStatus.Invalid;
    }
    else
    {
        pi.ImportStatus = ImportStatus.Verified;
    }
    // Commit Changes
    CommitChanges();
    Utils.GetLogger().Info(string.Format("[[Product Import]] Finished with Identifier {0} and status {1}", pi.ID, pi.ImportStatus));
    // Archive the processed file under the import's database id.
    File.Move(@path + newfilename, @path + pi.ID + ".csv");
    return(pi);
}
/// <summary>
/// Actor behavior while a file is being processed. New ReadFile requests for
/// other files are stashed; WorkFile parses the CSV and fans each unique row
/// out to the worker router; LineComplete marks rows done and, once all rows
/// are processed, resets state and switches back to WaitingToWork.
/// </summary>
private void Working()
{
    Receive <ReadFile>(file =>
    {
        if (_currentFile == file.Args.FullPath)
        {
            // Same file we are already processing — nothing to do.
            // NOTE(review): "Recieved" is misspelled in these log messages (runtime
            // strings, left untouched here).
            LogToEverything(Context, $"Already working Recieved File {file.Args.FullPath}");
        }
        else
        {
            // Different file: defer it until we return to WaitingToWork.
            LogToEverything(Context, $"Stashing Recieved File {file.Args.FullPath}");
            Stash.Stash();
        }
    });
    Receive <WorkFile>(file =>
    {
        LogToEverything(Context, $"Working File {file.Args.FullPath}");
        var engine = new FileHelperEngine <FileModel>();
        var records = engine.ReadFile(file.Args.FullPath);
        int x = 0;  // running row index, stored on each UserRecord
        foreach (var record in records)
        {
            var userRecord = new UserRecord(record.AdUserName, x++);
            if (!_records.ContainsKey(userRecord.AdUserName))
            {
                // First time we see this user: track it and hand the line to a worker.
                LogToEverything(Context, $"Working Row {userRecord.AdUserName} ");
                _workerRouter.Tell(new ProcessLine(userRecord.AdUserName));
                _records.Add(userRecord.AdUserName, userRecord);
            }
            else
            {
                LogToEverything(Context, $"Duplicate Row {userRecord.AdUserName} ");
            }
        }
    });
    Receive <LineComplete>(user =>
    {
        if (_records.ContainsKey(user.UserName))
        {
            LogToEverything(Context, $"The line has been processed for {user.UserName}");
            _records[user.UserName].Processed = true;
        }
        // When every tracked row is processed, reset and go back to waiting.
        if (_records.Values.All(x => x.Processed))
        {
            LogToEverything(Context, $"File has been processed: {_currentFile} ");
            _currentFile = "";
            _records = new Dictionary <string, UserRecord>();
            Become(WaitingToWork);
            // Replay any ReadFile messages stashed while we were busy.
            Stash.UnstashAll();
        }
    });
    // Anything else is unexpected in this state — log it loudly.
    ReceiveAny(task => { _logger.Error(" [x] Oh Snap! Unhandled message: \r\n{0}", task); });
}
/// <summary>
/// Benchmark: counts "EGYPT" rows by scanning a flat file with FileHelpers
/// versus querying an indexed NDatabase store built from the same data.
/// </summary>
static void Main()
{
    Console.WriteLine("1. Clean old data.");
    OdbFactory.Delete("FileOut.ndb");
    OdbFactory.Delete("FileOut.txt");
    TimeSnapshot();

    Console.WriteLine("2. Read input file.");
    var engine = new FileHelperEngine(typeof(ExportData));
    var res = engine.ReadFile("FileIn.txt") as ExportData[];
    TimeSnapshot();

    Console.WriteLine("3. Prepare NDatabase db.");
    using (var odb = OdbFactory.Open("FileOut.ndb"))
    {
        Console.WriteLine("3a. Store items into NDatabase.");
        foreach (var exportData in res)
        {
            odb.Store(exportData);
        }
        TimeSnapshot();
        Console.WriteLine("3b. Create index on NDatabase.");
        odb.IndexManagerFor <ExportData>().AddIndexOn("countryIndex", new[] { "CountryOrArea" });
    }
    TimeSnapshot();

    Console.WriteLine("4. Prepare final input file.");
    // FIX: this write was commented out in the original, yet step 5 reads
    // FileOut.txt — a file deleted in step 1 and never recreated, so the
    // FileHelpers benchmark would fail. Actually produce the file here.
    engine.WriteFile("FileOut.txt", res);
    TimeSnapshot();

    Console.WriteLine("5. Start counting EGYPT by FileHelpers.");
    var stopwatch = new Stopwatch();
    stopwatch.Start();
    res = engine.ReadFile("FileOut.txt") as ExportData[];
    var count = res.Count(x => x.CountryOrArea.Equals("EGYPT"));
    stopwatch.Stop();
    Console.WriteLine("Egypt items: {0}", count);
    Console.WriteLine("Elapsed: {0} ms", stopwatch.ElapsedMilliseconds); // FIX: "Ellapsed" typo

    Console.WriteLine("6. Start counting EGYPT by NDatabase."); // FIX: step number was a duplicated "5."
    stopwatch.Reset();
    stopwatch.Start();
    long count2;
    using (var odb = OdbFactory.Open("FileOut.ndb"))
    {
        count2 = (from data in odb.AsQueryable <ExportData>()
                  where data.CountryOrArea.Equals("EGYPT")
                  select data).Count();
    }
    stopwatch.Stop();
    Console.WriteLine("Egypt items: {0}", count2);
    Console.WriteLine("Elapsed: {0} ms", stopwatch.ElapsedMilliseconds);
    TimeSnapshot();
}
public void InNewLine2rw()
{
    engine = new FileHelperEngine(typeof(InNewLineType2));

    // Round-trip: parse the file, serialize it back to a string, re-parse it,
    // and verify the re-parsed records match the expected data.
    var parsed = engine.ReadFile(TestCommon.TestPath(@"Good\InNewLine2.txt"));
    string serialized = engine.WriteString(parsed);
    InNewLineType2[] res = (InNewLineType2[])engine.ReadString(serialized);

    Assert.AreEqual(3, res.Length);
    Assert.AreEqual(3, engine.TotalRecords);

    Assert.AreEqual("166.90.252.2", res[0].IpAddress);
    Assert.AreEqual("67.105.166.35", res[1].IpAddress);
    Assert.AreEqual("67.105.166.35", res[2].IpAddress);

    Assert.AreEqual(111, res[0].FieldLast);
    Assert.AreEqual(222, res[1].FieldLast);
    Assert.AreEqual(333, res[2].FieldLast);
}
/// <summary>
/// Loads a property template from a definition folder: reads the names file
/// rows and, per class, picks up optional background-pattern SVG, icon SVG and
/// example image, width percentage and right-side form.
/// </summary>
/// <param name="path">Path to property definition folder</param>
/// <returns>The loaded Property with all its classes.</returns>
/// <exception cref="InvalidDataException">Thrown when the names file is missing.</exception>
private Property LoadPropertyTemplate(string path)
{
    // The folder name doubles as the (case-insensitive) property identifier.
    // FIX: the original lower-cased propID twice in a row; once is enough.
    string propID = Path.GetFileName(path).ToLowerInvariant();

    string namesFileFullPath = Path.Combine(path, NamesFile);
    string muticlassFileFullPath = Path.Combine(path, MulticlassFile);
    string backgroundImagesDirFullPath = Path.Combine(path, BackgroundFillFolder);
    string iconDirFullPath = Path.Combine(path, IconsFolder);
    string exampleImagesDirFullPath = Path.Combine(path, ExampleImagesFolder);

    Dictionary <string, Class> loadedClasses = new Dictionary <string, Class>();

    if (!File.Exists(namesFileFullPath))
    {
        throw new InvalidDataException(String.Format("Не найден файл {0} с именами классов свойства слоя {1}", namesFileFullPath, propID));
    }

    var engine = new FileHelperEngine <NamesFileRow>();
    var rows = engine.ReadFile(namesFileFullPath);
    // NOTE: the original also computed an unused Directory.Exists flag for the
    // background folder; removed as dead code.
    foreach (NamesFileRow row in rows)
    {
        string classID = row.ID.ToLowerInvariant();

        // Width percentage: default to 100%, warn and clamp on out-of-range values.
        if (!row.WidthPercentage.HasValue)
        {
            row.WidthPercentage = 100.0;
        }
        else if (row.WidthPercentage < 0 || row.WidthPercentage > 100.0)
        {
            MessageBox.Show(string.Format("значение ширна крапа вне допустимых значений. допустимый интервал 0 - 100 (%), в файле задано {0}. Будет использовано значение 100%", row.WidthPercentage), "Ширина крапа", MessageBoxButton.OK, MessageBoxImage.Warning);
            row.WidthPercentage = 100.0;
        }

        // Trying to load corresponding background SVG image.
        string backgroundPatternSVG = null;
        string bgClassBgPatternPath = Path.Combine(backgroundImagesDirFullPath, string.Format("{0}.svg", classID));
        if (File.Exists(bgClassBgPatternPath))
        {
            backgroundPatternSVG = File.ReadAllText(bgClassBgPatternPath);
        }

        // Trying to load corresponding icon SVG.
        string iconSVG = null;
        string iconClassPath = Path.Combine(iconDirFullPath, string.Format("{0}.svg", classID));
        if (File.Exists(iconClassPath))
        {
            iconSVG = File.ReadAllText(iconClassPath);
        }

        // Optional example image (JPEG) for the class.
        ImageSource exampleImage = null;
        string exampleImagePath = Path.Combine(exampleImagesDirFullPath, string.Format("{0}.jpg", classID));
        if (File.Exists(exampleImagePath))
        {
            exampleImage = new BitmapImage(new Uri(Path.GetFullPath(exampleImagePath)));
        }

        // Right-side form: defaults to Straight; warn on unknown template values.
        RightSideFormEnum rightSide = RightSideFormEnum.Straight;
        if (!string.IsNullOrEmpty(row.RightSideForm))
        {
            switch (row.RightSideForm.ToLowerInvariant())
            {
                case "прямая":
                    rightSide = RightSideFormEnum.Straight;
                    break;
                case "ступеньки":
                    rightSide = RightSideFormEnum.Steps;
                    break;
                case "волна":
                    rightSide = RightSideFormEnum.Wave;
                    break;
                default:
                    MessageBox.Show(
                        string.Format("Форма правой границы крапа \"{0}\", указанная в шаблоне, не поддерживается. Будет использована прямая форма. ({1})", row.RightSideForm, row.ToString()),
                        "Форма правой границы крапа", MessageBoxButton.OK, MessageBoxImage.Warning);
                    break;
            }
        }

        loadedClasses.Add(row.ID.ToLowerInvariant(), new Class()
        {
            ID = classID,
            Acronym = row.Acronym,
            ShortName = row.Name,
            BackgroundPatternSVG = backgroundPatternSVG,
            IconSVG = iconSVG,
            Description = row.Description,
            WidthRatio = row.WidthPercentage.Value * 0.01, // percent to ratio
            ExampleImage = exampleImage,
            RightSideForm = rightSide
        });
    }

    return(new Property()
    {
        ID = propID,
        Classes = loadedClasses.Select(p => p.Value).ToArray(),
        Name = propID,
        IsMulticlass = File.Exists(muticlassFileFullPath)
    });
}