public void AppendOneToFile()
{
    // Write two records, append one more, then verify all three round-trip.
    engine = new FileHelperEngine(typeof (SampleType));

    var written = new SampleType[]
    {
        new SampleType { Field1 = DateTime.Now.AddDays(1).Date, Field2 = "je", Field3 = 0 },
        new SampleType { Field1 = DateTime.Now.Date, Field2 = "ho", Field3 = 2 },
    };
    engine.WriteFile(@"test.txt", written);

    // Append a single extra record to the already-written file.
    var extra = new SampleType { Field1 = DateTime.Now.Date, Field2 = "h2", Field3 = 2 };
    engine.AppendToFile(@"test.txt", extra);

    var readBack = (SampleType[]) engine.ReadFile(@"test.txt");

    Assert.AreEqual(3, readBack.Length);
    Assert.AreEqual(written[0].Field1, readBack[0].Field1);
    Assert.AreEqual(written[1].Field1, readBack[1].Field1);
    Assert.AreEqual(DateTime.Now.Date, readBack[2].Field1);
}
private static void aaa()
{
    // Build a FileHelpers record class at runtime from the UnitTask table
    // schema, then dump the table contents to a delimited file.
    string sql = ConfigurationManager.ConnectionStrings["test"].ConnectionString;
    SqlConnection cn = new SqlConnection(sql);
    try
    {
        using (SqlCommand command = new SqlCommand("SELECT TOP 1 * FROM [dbo].[UnitTask];", cn))
        {
            cn.Open();

            // Create the FileHelpers record class dynamically.
            // Alternatively there is a 'FixedClassBuilder'.
            DelimitedClassBuilder cb = new DelimitedClassBuilder("UnitTask", ",");
            cb.IgnoreFirstLines = 1;
            cb.IgnoreEmptyLines = true;

            // Populate the fields from the column schema. FIX: the reader is now
            // disposed even when schema access throws (the original leaked it on
            // the exception path and never disposed the command either).
            using (SqlDataReader reader = command.ExecuteReader())
            {
                DataTable schemaTable = reader.GetSchemaTable();
                foreach (DataRow row in schemaTable.Rows)
                {
                    cb.AddField(row.Field<string>("ColumnName"), row.Field<Type>("DataType"));
                    cb.LastField.TrimMode = TrimMode.Both;
                }
            }

            // Load the dynamically created class into a FileHelpers engine.
            FileHelperEngine engine = new FileHelperEngine(cb.CreateRecordClass());

            DataSet CustomersDataSet = new DataSet();
            // FIX: adapter and command builder are IDisposable; dispose them.
            using (SqlDataAdapter da_local = new SqlDataAdapter("SELECT * FROM [dbo].[UnitTask]", cn))
            using (SqlCommandBuilder cmdBuilder = new SqlCommandBuilder(da_local))
            {
                da_local.Fill(CustomersDataSet, "UnitTask");
            }

            DataTable dt = CustomersDataSet.Tables["UnitTask"];
            engine.WriteFile("c:\\UnitTask.csv", dt.Rows);
        }
    }
    catch (Exception ex)
    {
        // ex.Message is already a string; the redundant .ToString() is gone.
        Console.WriteLine(ex.Message);
    }
    finally
    {
        cn.Close();
    }
}
//-> /File
//-> Finally you must to instantiate a FileHelperEngine and write the file
public override void Run()
{
    //-> File:Example.cs
    var engine = new FileHelperEngine<Orders>();

    // Two in-memory orders to serialize.
    var orders = new List<Orders>
    {
        new Orders() { OrderID = 1, CustomerID = "AIRG", Freight = 82.43M, OrderDate = new DateTime(2009, 05, 01) },
        new Orders() { OrderID = 2, CustomerID = "JSYV", Freight = 12.22M, OrderDate = new DateTime(2009, 05, 02) },
    };

    engine.WriteFile("Output.Txt", orders);
    //-> /File

    Console.WriteLine(engine.WriteString(orders));
}
public void WriteFile2()
{
    // Round-trip two records through the maxRecords overload
    // (-10 exercises the negative-count argument) and verify the length.
    var engine = new FileHelperEngine<SampleType>();

    var records = new SampleType[]
    {
        new SampleType { Field1 = DateTime.Now.AddDays(1), Field2 = "je", Field3 = 0 },
        new SampleType { Field1 = DateTime.Now, Field2 = "ho", Field3 = 2 },
    };

    engine.WriteFile(@"miprueba.txt", records, -10);
    records = (SampleType[]) engine.ReadFile(@"miprueba.txt");

    if (File.Exists(@"miprueba.txt"))
        File.Delete(@"miprueba.txt");

    Assert.AreEqual(2, records.Length);
}
//-> /File
//-> Now just create some records and write them with the Engine
public override void Run()
{
    //-> File:Example.cs
    var engine = new FileHelperEngine<Customer>();

    // Two in-memory customers to serialize.
    var customers = new List<Customer>
    {
        new Customer() { CustId = 1, Name = "Antonio Moreno Taquería", AddedDate = new DateTime(2009, 05, 01) },
        new Customer() { CustId = 2, Name = "Berglunds snabbköp", AddedDate = new DateTime(2009, 05, 02) },
    };

    engine.WriteFile("Output.Txt", customers);
    //-> /File

    Console.WriteLine(engine.WriteString(customers));
}
public void Export()
{
    // Materialize the projection before the context is disposed.
    IEnumerable<Record> records;
    using (var context = new Context())
    {
        records = context.Companies.Select(x => new Record
        {
            Name = x.Name,
            Cvr = x.Cvr,
            Type = x.Type,
            Legislation = x.Legislation,
            Profit = x.Revenue,
            Losses = x.Losses,
            Tax = x.TaxPaid,
            FossilProfit = x.FossilProfit,
            FossilLosses = x.FossilLosses,
            FossilTax = x.FossilTaxPaid,
            IsSubsidiary = x.IsSubsidiary,
            Subsidiaries = x.Subsidiaries,
        }).ToList();
    }

    // Header columns mirror the Record projection above, in order.
    var headerColumns = new[]
    {
        "Name", "Cvr", "Type", "Legislation", "Profit", "Losses", "Tax",
        "FossilProfit", "FossilLosses", "FossilTax", "IsSubsidiary", "Subsidiaries"
    };

    var engine = new FileHelperEngine<Record>()
    {
        HeaderText = string.Join(",", headerColumns),
    };
    engine.WriteFile("out.csv", records);
}
public void WriteFileNotifyPercent()
{
    // Reset the fields the progress callback updates, then write two records
    // with the Progress event attached.
    actualPerc = 0;
    actualAdd = 50;

    var engine = new FileHelperEngine<SampleType>();
    engine.Progress += ProgressChangePercent;

    var records = new SampleType[]
    {
        new SampleType { Field1 = DateTime.Now.AddDays(1), Field2 = "je", Field3 = 0 },
        new SampleType { Field1 = DateTime.Now, Field2 = "ho", Field3 = 2 },
    };

    engine.WriteFile("prog1.txt", records);

    if (File.Exists("prog1.txt"))
        File.Delete("prog1.txt");
}
/// <summary>
/// Process the delimited file twice,
/// once with the simple engine, once with Async
/// </summary>
/// <remarks>
/// This is a simple sample of using the Normal engine and the Async engine
/// </remarks>
public void ReadWrite()
{
    var engine = new FileHelperEngine<Orders>();

    // Read the whole file in one call, then write it straight back out.
    Orders[] res = engine.ReadFile(@"C:\TestIn.txt");
    engine.WriteFile(@"C:\TestOut.txt", res);

    foreach (Orders order in res)
    {
        Console.WriteLine("Order Info:");
        Console.WriteLine(order.CustomerID + " - " + order.OrderDate.ToString("dd/MM/yy"));
    }

    // The async engine streams records one at a time instead.
    var asyncEngine = new FileHelperAsyncEngine<Orders>();
    asyncEngine.BeginReadFile(@"C:\TestIn.txt");
    while (asyncEngine.ReadNext() != null)
    {
        Orders ord = asyncEngine.LastRecord;
        Console.WriteLine(ord.CustomerID);
    }
}
public void AppendToFile()
{
    var engine = new FileHelperEngine<SampleType>();

    var written = new SampleType[]
    {
        new SampleType { Field1 = DateTime.Now.AddDays(1).Date, Field2 = "je", Field3 = 0 },
        new SampleType { Field1 = DateTime.Now.Date, Field2 = "ho", Field3 = 2 },
    };

    // Write the pair, then append the same pair: four records in total.
    engine.WriteFile(@"test.txt", written);
    engine.AppendToFile(@"test.txt", written);

    var readBack = (SampleType[]) engine.ReadFile(@"test.txt");

    Assert.AreEqual(4, readBack.Length);
    Assert.AreEqual(written[0].Field1, readBack[0].Field1);
    Assert.AreEqual(written[1].Field1, readBack[1].Field1);
    Assert.AreEqual(written[0].Field1, readBack[2].Field1);
    Assert.AreEqual(written[1].Field1, readBack[3].Field1);
}
/// <summary>
/// Writes the quotes to <paramref name="path"/> as a delimited file with the
/// engine's generated header, and returns the number of records written.
/// </summary>
public static int Write(this List<ProcessedQuote> stock, string path)
{
    var fileEngine = new FileHelperEngine<ProcessedQuote>();
    fileEngine.HeaderText = fileEngine.GetFileHeader();
    fileEngine.WriteFile(path, stock);
    // List<T> exposes Count directly; the LINQ Count() extension re-walked the list.
    return stock.Count;
}
/// <summary>
/// Builds a timestamped export folder, converts every product model/style pair
/// into an ExportItem on a background task, writes them to products.csv and
/// clears the downloads directory. Returns the export folder path.
/// </summary>
public static Task<string> ExportModelsAsync(
    IEnumerable<ProductModel> models,
    string mainDir,
    IEnumerable<string> additionalDirs,
    float priceMultiple,
    float priceAdd,
    IProgress<int> tracker)
{
    // Create the export folder and its images subfolder up front, before the
    // background work starts.
    string exportFolderName = DateTime.Now.ToString("dd-MM-yyyy HH-mm-ss");
    string exportPath = Path.Combine(Constants.ExportsDir, exportFolderName);
    if (!Directory.Exists(exportPath))
        Directory.CreateDirectory(exportPath);

    string imagesPath = Path.Combine(exportPath, "images");
    if (!Directory.Exists(imagesPath))
        Directory.CreateDirectory(imagesPath);

    return Task.Factory.StartNew<string>(() =>
    {
        var exports = new List<ExportItem>();
        if (!String.IsNullOrEmpty(mainDir))
            exports.Add(new ExportItem() { Name = mainDir });

        int cnt = 1;
        foreach (ProductModel model in models)
        {
            IEnumerable<string> sizes = model.AllSizes.Select(a => a.Trim());
            string sizeStr = BuildSize(sizes);

            // One export row per style of the model, all sharing the size string.
            foreach (ProductStyle style in model.Styles)
            {
                ExportItem item = CreateExportItem(model, style, additionalDirs, imagesPath, priceMultiple, priceAdd);
                item.Size = sizeStr;
                exports.Add(item);
            }

            tracker.Report(cnt++);
        }

        // Write all collected items to products.csv.
        var engine = new FileHelperEngine<ExportItem>(Encoding.UTF8);
        engine.HeaderText = ExportItem.HEADERS;
        engine.WriteFile(Path.Combine(exportPath, "products.csv"), exports);

        // Remove every downloaded temp file.
        var downloadDir = new DirectoryInfo(Constants.DownloadsDir);
        foreach (var file in downloadDir.GetFiles())
            file.Delete();

        return exportPath;
    });
}
static void Main()
{
    Console.WriteLine("1. Clean old data.");
    OdbFactory.Delete("FileOut.ndb");
    OdbFactory.Delete("FileOut.txt");
    TimeSnapshot();

    Console.WriteLine("2. Read input file.");
    var engine = new FileHelperEngine(typeof(ExportData));
    var res = engine.ReadFile("FileIn.txt") as ExportData[];
    TimeSnapshot();

    Console.WriteLine("3. Prepare NDatabase db.");
    using (var odb = OdbFactory.Open("FileOut.ndb"))
    {
        Console.WriteLine("3a. Store items into NDatabase.");
        foreach (var exportData in res)
            odb.Store(exportData);
        TimeSnapshot();

        Console.WriteLine("3b. Create index on NDatabase.");
        odb.IndexManagerFor<ExportData>().AddIndexOn("countryIndex", new[] { "CountryOrArea" });
    }
    TimeSnapshot();

    Console.WriteLine("4. Prepare final input file.");
    engine.WriteFile("FileOut.txt", res);
    TimeSnapshot();

    // Benchmark 1: count matching records by re-reading the flat file.
    Console.WriteLine("5. Start counting EGYPT by FileHelpers.");
    var stopwatch = new Stopwatch();
    stopwatch.Start();
    res = engine.ReadFile("FileOut.txt") as ExportData[];
    var count = res.Count(x => x.CountryOrArea.Equals("EGYPT"));
    stopwatch.Stop();
    Console.WriteLine("Egypt items: {0}", count);
    Console.WriteLine("Ellapsed: {0} ms", stopwatch.ElapsedMilliseconds);

    // Benchmark 2: count matching records via the indexed NDatabase query.
    Console.WriteLine("5. Start counting EGYPT by NDatabase.");
    stopwatch.Reset();
    stopwatch.Start();
    long count2;
    using (var odb = OdbFactory.Open("FileOut.ndb"))
    {
        count2 = (from data in odb.AsQueryable<ExportData>()
                  where data.CountryOrArea.Equals("EGYPT")
                  select data).Count();
    }
    stopwatch.Stop();
    Console.WriteLine("Egypt items: {0}", count2);
    Console.WriteLine("Ellapsed: {0} ms", stopwatch.ElapsedMilliseconds);
    TimeSnapshot();
}
//-> Name:After Write Event Handling
//-> Description:Show how to implement write after event
//-> File:RunEngine.cs
/// <summary>
/// Run a record through engine using the write event to filter out unwanted details
/// </summary>
public override void Run()
{
    var engine = new FileHelperEngine<OrdersFixed>();
    var result = engine.ReadFile("Input.txt");

    // Attach the filtering handler before writing.
    engine.AfterWriteRecord += AfterWriteEvent;
    engine.WriteFile("output.txt", result);
}
public void WriteOptionalRead()
{
    engine = new FileHelperEngine(typeof (QuoteMode1));
    QuoteMode1[] parsed = Common.ReadTest(engine, @"Good\QuoteMode1.txt") as QuoteMode1[];

    // Round-trip through a temp file and validate the re-read records.
    engine.WriteFile("quotetemp1.txt", parsed);
    parsed = engine.ReadFile("quotetemp1.txt") as QuoteMode1[];
    ValidateData(parsed);

    if (File.Exists("quotetemp1.txt"))
        File.Delete("quotetemp1.txt");
}
//-> /File
//-> Run a record through engine using the write event to filter out unwanted details
public override void Run()
{
    //-> File:RunEngine.cs
    var engine = new FileHelperEngine<OrdersFixed>();

    // Read every record and write it straight back out.
    var result = engine.ReadFile("Input.txt");
    engine.WriteFile("output.txt", result);
    //-> /File
}
public void WriteOptionalRead()
{
    var engine = new FileHelperEngine<QuoteMode1>();
    QuoteMode1[] parsed = TestCommon.ReadTest<QuoteMode1>(engine, "Good", "QuoteMode1.txt");

    // Round-trip through a temp file and validate the re-read records.
    engine.WriteFile("quotetemp1.txt", parsed);
    parsed = engine.ReadFile("quotetemp1.txt") as QuoteMode1[];
    ValidateData(parsed);

    if (File.Exists("quotetemp1.txt"))
        File.Delete("quotetemp1.txt");
}
public void OrdersQuotedWrite()
{
    var engine = new FileHelperEngine<OrdersQuotedType>();
    OrdersQuotedType[] orders = TestCommon.ReadTest<OrdersQuotedType>(engine, "Good", "QuotedOrders.txt");

    // Round-trip through a temp file; quoted customer IDs (embedded commas and
    // quotes) must survive the write/read cycle intact.
    engine.WriteFile("temp2.txt", orders);
    orders = (OrdersQuotedType[]) engine.ReadFile("temp2.txt");

    Assert.AreEqual(6, orders.Length);
    Assert.AreEqual("VINET", orders[0].CustomerID);
    Assert.AreEqual("TO,SP", orders[1].CustomerID);
    Assert.AreEqual("HA\"AR", orders[2].CustomerID);
    Assert.AreEqual("VICTE", orders[3].CustomerID);
    Assert.AreEqual("S\"U\"P\"\"", orders[4].CustomerID);
    Assert.AreEqual("HANAR", orders[5].CustomerID);
}
//-> /File
//-> FileOut:output.txt
//-> <b>Important</b>You can use lambda expressions instead of event methods, for example:
//-> File:RunEngineLambda.cs
public void RunLambda()
{
    var engine = new FileHelperEngine<OrdersFixed>();
    var result = engine.ReadFile("Input.txt");

    // Skip low-freight orders before they are written.
    engine.BeforeWriteRecord += (eng, e) =>
    {
        if (e.Record.Freight < 40)
            e.SkipThisRecord = true;
    };

    // Redact one customer's formatted line after it is produced.
    engine.AfterWriteRecord += (eng, e) =>
    {
        if (e.Record.CustomerID == "HANAR")
            e.RecordLine = "Insufficient Access";
    };

    engine.WriteFile("output.txt", result);
}
/// <summary>
/// Writes one histogram file per species, named
/// "&lt;fileName-without-extension&gt;_&lt;LatinName&gt;&lt;extension&gt;".
/// </summary>
public void Serialize(string fileName)
{
    var engine = new FileHelperEngine(typeof (ModeThreshholdHistogramRecord));
    foreach (var species in ModeBinnedExposureDictionary.Exposures.Keys)
    {
        // One record per mode of this species.
        var records = new List<ModeThreshholdHistogramRecord>();
        foreach (var mode in ModeBinnedExposureDictionary.Exposures[species].Keys)
        {
            var record = new ModeThreshholdHistogramRecord { ModeName = mode.ModeName };
            records.Add(record);
        }

        // BUG FIX: the original used Path.Combine(baseName, "_", latinName, ext),
        // which treats "_" and the extension as directory segments and yields a
        // path like "base\_\Species\.txt". Build "<base>_<species><ext>" instead.
        var outfile = Path.GetFileNameWithoutExtension(fileName) + "_" +
                      species.LatinName + Path.GetExtension(fileName);
        engine.WriteFile(outfile, records);
    }
}
public static void Main(string[] args)
{
    // Command-line options: output file, TFS server, and server path.
    var parameters = new OptionSet
    {
        { "file=", param => OutputLocation = param },
        { "server=", param => TfsServer = param },
        { "path=", param => TfsPath = param },
    };
    parameters.Parse(args);

    // Pull the modified-files history and convert each change to a CSV entry.
    var historyExporter = new TfsHistoryExporter(TfsServer);
    var changes = historyExporter.RetrieveAllModifiedFilesForPath(TfsPath);
    var csvEntries = changes.Select(change => BuildHistoryCsvEntry(change)).ToList();

    var csvEngine = new FileHelperEngine<ModifiedFilesCsvEntry>();
    csvEngine.WriteFile(OutputLocation, csvEntries);
}
static void Main()
{
    FileHelperEngine engine = new FileHelperEngine(typeof(Customer));

    // Read the records, then write them back.
    Customer[] custs = (Customer[]) engine.ReadFile("yourfile.txt");
    engine.WriteFile("yourfile.txt", custs);

    // On .NET 2.0 or greater, prefer the generic engine:
    //   FileHelperEngine engine = new FileHelperEngine<Customer>();
    //   Customer[] custs = engine.ReadFile("yourfile.txt");   // no cast needed
    //   engine.WriteFile("yourfile.txt", custs);
}
public void NameConverterTest2()
{
    var engine = new FileHelperEngine<CustomConvClass>();
    CustomConvClass[] parsed = TestCommon.ReadTest<CustomConvClass>(engine, "Good", "CustomConverter1.txt");
    Assert.AreEqual(5, parsed.Length);

    // Round-trip through a temp file; the custom converter must reproduce
    // every field on both write and re-read.
    engine.WriteFile("tmpCC.txt", parsed);
    parsed = (CustomConvClass[]) engine.ReadFile("tmpCC.txt");
    Assert.AreEqual(5, parsed.Length);

    for (int i = 0; i < 5; i++)
    {
        Assert.AreEqual("Argentina", parsed[i].Country);
        Assert.AreEqual("Meli", parsed[i].Names.LastName);
        Assert.AreEqual("Marcos", parsed[i].Names.FirstName);
        Assert.AreEqual(25, parsed[i].Age);
    }

    if (File.Exists("tmpCC.txt"))
        File.Delete("tmpCC.txt");
}
static void Main()
{
    FileHelperEngine engine = new FileHelperEngine(typeof(Customer));

    // Read the records, then write them back.
    Customer[] custs = (Customer[]) engine.ReadFile("yourfile.txt");
    engine.WriteFile("yourfile.txt", custs);

    // On .NET 2.0 or greater, prefer the generic engine:
    //   FileHelperEngine engine = new FileHelperEngine<Customer>();
    //   Customer[] custs = engine.ReadFile("yourfile.txt");   // no cast needed
    //   engine.WriteFile("yourfile.txt", custs);
}
/// <summary>
/// Dumps every SystemPOS row from the database to "&lt;path&gt;system.txt" and
/// returns the rows mapped to DTOs; failures are reported via the response
/// object rather than thrown.
/// </summary>
public async Task <ServiceResponse <List <GetSystemPOSDto> > > DBtoFileSystem(string path)
{
    var serviceResponse = new ServiceResponse<List<GetSystemPOSDto>>();
    try
    {
        var engine = new FileHelperEngine(typeof(SystemPOS));
        var ListSystem = await _context.SystemPOSs.ToListAsync();

        engine.WriteFile(path + "system.txt", ListSystem);
        serviceResponse.Data = _mapper.Map<List<GetSystemPOSDto>>(ListSystem);
    }
    catch (Exception ex)
    {
        serviceResponse.Success = false;
        serviceResponse.Message = ex.Message;
    }
    return serviceResponse;
}
static void EventHandling()
{
    Console.WriteLine("Reading Using EventHandlers ...");
    Console.WriteLine();

    // These two handlers are wired up before any file is touched.
    FileHelperEngine engine = new FileHelperEngine(typeof(Customer));
    engine.BeforeReadRecord += new BeforeReadRecordHandler(BeforeReadRecord);
    engine.AfterWriteRecord += new AfterWriteRecordHandler(AfterWriteRecord);

    Customer[] customers = (Customer[]) engine.ReadFile(@"..\Data\CustomersDelimited.txt");

    // Since version 1.4.0 this can even be written as a single line:
    //   Cliente[] clientes = (Cliente[]) CommonEngine.ReadFile(typeof(Cliente), @"..\Data\ClientesDelimitados.txt");

    // Application code goes here.
    foreach (Customer cli in customers)
        Console.WriteLine("Customer: " + cli.CustId.ToString() + " - " + cli.Name);
    Console.ReadLine();

    Console.WriteLine("Writing data to a delimited file...");
    Console.WriteLine();

    // Write the data back out.
    engine.WriteFile("temp.txt", customers);
    Console.WriteLine("Data successful written !!!");
    Console.ReadLine();

    if (File.Exists("temp.txt"))
    {
        File.Delete("temp.txt");
    }
}
//-> /File
//-> FileOut:output.txt
//-> <b>Important</b>You can use lambda expressions instead of event methods, for example:
//-> File:RunEngineLambda.cs
public void RunLambda()
{
    var engine = new FileHelperEngine <OrdersFixed>();
    var result = engine.ReadFile("Input.txt");

    // Drop low-freight orders before they reach the output file.
    engine.BeforeWriteRecord += (eng, e) =>
    {
        if (e.Record.Freight < 40)
        {
            e.SkipThisRecord = true;
        }
    };

    // Replace one customer's formatted line after it is produced.
    engine.AfterWriteRecord += (eng, e) =>
    {
        if (e.Record.CustomerID == "HANAR")
        {
            e.RecordLine = "Insufficient Access";
        }
    };

    engine.WriteFile("output.txt", result);
}
/// <summary>
/// Collects position and size data for every Rectangle on the canvas and
/// writes the collection to Output.Txt.
/// </summary>
private void GetRectangle()
{
    var engine = new FileHelperEngine<Objekt>();
    var objekty = new List<Objekt>();

    foreach (Rectangle rect in canvas.Children.OfType<Rectangle>())
    {
        double leftpos = Canvas.GetLeft(rect);
        double toppos = Canvas.GetTop(rect);
        objekty.Add(new Objekt()
        {
            name = rect.Name,
            LeftPos = leftpos,
            TopPos = toppos,
            height = rect.Height,
            width = rect.Width
        });
    }

    // BUG FIX: WriteFile was inside the loop, rewriting the entire output file
    // once per rectangle. Write the collected records a single time instead.
    engine.WriteFile("Output.Txt", objekty);
}
public void WriteFile()
{
    // Writing two records must succeed; the file is cleaned up afterwards.
    var engine = new FileHelperEngine<SampleType>();

    var records = new SampleType[]
    {
        new SampleType { Field1 = DateTime.Now.AddDays(1), Field2 = "je", Field3 = 0 },
        new SampleType { Field1 = DateTime.Now, Field2 = "ho", Field3 = 2 },
    };

    engine.WriteFile(@"prueba.txt", records);

    if (File.Exists(@"prueba.txt"))
        File.Delete(@"prueba.txt");
}
static void Delimited()
{
    Console.WriteLine("Reading Delimited File...");
    Console.WriteLine();

    // These two lines are the FileHelpers library in action.
    FileHelperEngine engine = new FileHelperEngine(typeof(Customer));
    Customer[] customers = (Customer[]) engine.ReadFile(@"..\Data\CustomersDelimited.txt");

    // Since version 1.4.0 this can even be written as a single line:
    //   Cliente[] clientes = (Cliente[]) CommonEngine.ReadFile(typeof(Cliente), @"..\Data\ClientesDelimitados.txt");

    // Application code goes here.
    foreach (Customer cli in customers)
    {
        Console.WriteLine();
        Console.WriteLine("Customer: " + cli.CustId.ToString() + " - " + cli.Name);
        Console.WriteLine("Added Date: " + cli.AddedDate.ToString("d-M-yyyy"));
        Console.WriteLine("Balance: " + cli.Balance.ToString());
        Console.WriteLine();
        Console.WriteLine("-----------------------------");
    }
    Console.ReadLine();

    Console.WriteLine("Writing data to a delimited file...");
    Console.WriteLine();

    // Write the data back out.
    engine.WriteFile("temp.txt", customers);
    Console.WriteLine("Data successful written !!!");
    Console.ReadLine();

    if (File.Exists("temp.txt"))
        File.Delete("temp.txt");
}
private void Button_Click_1(object sender, RoutedEventArgs e)
{
    try
    {
        // A fresh player with the entered name, plus an (empty) item list.
        List<Item> items = new List<Item>();
        Player player = new Player(JmenoTB.Text, 100, 0, 0);

        var engine = new FileHelperEngine<Player>();
        var players = new List<Player> { player };
        engine.WriteFile(GetDBFilePath("Player.txt"), players);

        var engineItems = new FileHelperEngine<Item>();
        engineItems.WriteFile("Items.txt", items);

        MessageBox.Show("Podařilo se! Klikni na 'Pokračovat'");
        JmenoTB.Clear();
    }
    catch
    {
        // Best-effort: any failure is reported to the user via a message box.
        MessageBox.Show("Něco se pokazilo");
    }
}
/// <summary>
/// Builds one histogram record per scenario species (with per-emitter bin
/// data from ModeDictionary) and writes them all to <paramref name="outFile"/>.
/// </summary>
public void Serialize(string outFile)
{
    var engine = new FileHelperEngine<SourceModeThreshholdHistogramFileRecord>();
    var records = new List<SourceModeThreshholdHistogramFileRecord>();

    // PERF FIX: the original called ScenarioSpecies.ToList() (and
    // ModeDictionary[...].ToList()) on every single index access inside both
    // loops, re-materializing the collections O(n^2) times. Hoist them once.
    var speciesList = _simulation.Scenario.ScenarioSpecies.ToList();
    for (int i = 0; i < speciesList.Count; i++)
    {
        var species = speciesList[i];
        var modes = ModeDictionary[species.Guid];

        var record = new SourceModeThreshholdHistogramFileRecord
        {
            SpeciesName = species.LatinName,
            ScenarioName = _simulation.Scenario.Name,
            LocationName = _simulation.Scenario.Location.Name,
            NumberOfEmitters = modes.Count,
            SoundEmitterData = new List<Tuple<string, int[]>>(),
        };

        var modeList = modes.ToList();
        for (int j = 0; j < record.NumberOfEmitters; j++)
        {
            var curMode = modeList[j];
            record.SoundEmitterData.Add(new Tuple<string, int[]>(curMode.Key.ModeName, curMode.Value.Bins));
        }

        records.Add(record);
    }

    engine.WriteFile(outFile, records);
}
static void Main(string[] args)
{
    // Read the input rows, expand each one into the objects it generates,
    // and write the combined result.
    var engine = new FileHelperEngine<Event>();
    var records = engine.ReadFile("Input.csv");

    var finalResults = new List<Event>();
    foreach (var record in records)
    {
        var createdObjects = CreatedObjects(SplitString, record.Artist, record.City, record.Date, record.Photo);
        finalResults.AddRange(createdObjects);
    }

    var writeEngine = new FileHelperEngine<Event>();
    writeEngine.WriteFile("Output.csv", finalResults);
}
public void DiscardWriteRead()
{
    var engine = new FileHelperEngine<DiscardType1>();
    DiscardType1[] parsed = engine.ReadFile(FileTest.Good.DiscardFirst1.Path);

    // Write with a fresh header, then read back; HeaderText should reflect
    // what was found in the file, not the value set just before reading.
    engine.HeaderText = "This is a new header....";
    engine.WriteFile("tempo.txt", parsed);

    engine.HeaderText = "none none";
    var reRead = (DiscardType1[]) engine.ReadFile(@"tempo.txt");

    Assert.AreEqual(parsed.Length, reRead.Length);
    Assert.AreEqual(mExpectedShortHeaderText, engine.HeaderText);

    if (File.Exists("tempo.txt"))
        File.Delete("tempo.txt");

    Assert.AreEqual(4, parsed.Length);
    Assert.AreEqual(new DateTime(1314, 12, 11), parsed[0].Field1);
}
public void DiscardWriteRead()
{
    engine = new FileHelperEngine(typeof(DiscardType1));
    DiscardType1[] parsed = (DiscardType1[]) TestCommon.ReadTest(engine, @"Good\DiscardFirst1.txt");

    // Write with a fresh header, then read back; HeaderText should hold the
    // header read from the file (with its trailing newline), not the value
    // set just before reading.
    engine.HeaderText = "This is a new header....";
    engine.WriteFile("tempo.txt", parsed);

    engine.HeaderText = "none none";
    DiscardType1[] reRead = (DiscardType1[]) engine.ReadFile(@"tempo.txt");

    Assert.AreEqual(parsed.Length, reRead.Length);
    Assert.AreEqual("This is a new header....\r\n", engine.HeaderText);

    if (File.Exists("tempo.txt"))
        File.Delete("tempo.txt");

    Assert.AreEqual(4, parsed.Length);
    Assert.AreEqual(new DateTime(1314, 12, 11), parsed[0].Field1);
}
public void WriteFile()
{
    // Writing two records must succeed; the file is cleaned up afterwards.
    var engine = new FileHelperEngine <SampleType>();

    var records = new SampleType[]
    {
        new SampleType { Field1 = DateTime.Now.AddDays(1), Field2 = "je", Field3 = 0 },
        new SampleType { Field1 = DateTime.Now, Field2 = "ho", Field3 = 2 },
    };

    engine.WriteFile(@"prueba.txt", records);

    if (File.Exists(@"prueba.txt"))
    {
        File.Delete(@"prueba.txt");
    }
}
/// <summary>
/// Prepends a pseudo-record whose every field equals its own column name (so
/// the first CSV line acts as a header row) and writes all contacts to the
/// resolved data file path.
/// </summary>
private static void WriteToCsv(string fileName, List <ContactDataCsv> contacts)
{
    // NOTE(review): SecondaryHome is " SecondaryHome" with a leading space in
    // the original. Preserved here, but it looks like a typo — confirm whether
    // downstream consumers depend on that exact header text.
    contacts.Insert(0, new ContactDataCsv
    {
        Id = "Id",
        FirstName = "FirstName",
        MiddleName = "MiddleName",
        LastName = "LastName",
        Nickname = "Nickname",
        Photo = "Photo",
        Title = "Title",
        Company = "Company",
        Address = "Address",
        TelephoneHome = "TelephoneHome",
        TelephoneMobile = "TelephoneMobile",
        TelephoneWork = "TelephoneWork",
        TelephoneFax = "TelephoneFax",
        EMail = "EMail",
        EMail2 = "EMail2",
        EMail3 = "EMail3",
        Homepage = "Homepage",
        Birthday = "Birthday",
        Anniversary = "Anniversary",
        Group = "Group",
        Secondary = "Secondary",
        SecondaryAddress = "SecondaryAddress",
        SecondaryHome = " SecondaryHome",
        Notes = "Notes",
        TestObjectInstanceName = "TestObjectInstanceName"
    });

    var engine = new FileHelperEngine<ContactDataCsv>();
    engine.WriteFile(HelperBase.GetDataFileFullPath(fileName), contacts);
}
/// <summary>
/// Execute engine and write out records we define in memory delimited by |
/// </summary>
public override void Run()
{
    //-> {Example.File:Example.cs}
    var engine = new FileHelperEngine<Customer>();

    // Two in-memory customers to serialize.
    var customers = new List<Customer>
    {
        new Customer() { CustId = 1, Name = "Antonio Moreno Taquería", AddedDate = new DateTime(2009, 05, 01) },
        new Customer() { CustId = 2, Name = "Berglunds snabbköp", AddedDate = new DateTime(2009, 05, 02) },
    };

    engine.WriteFile("Output.Txt", customers);
    //-> {/Example.File}

    Console.WriteLine(engine.WriteString(customers));
}
public void NameConverterTest2()
{
    var engine = new FileHelperEngine <CustomConvClass>();
    CustomConvClass[] parsed = TestCommon.ReadTest<CustomConvClass>(engine, "Good", "CustomConverter1.txt");
    Assert.AreEqual(5, parsed.Length);

    // Round-trip through a temp file; the custom converter must reproduce
    // every field on both write and re-read.
    engine.WriteFile("tmpCC.txt", parsed);
    parsed = (CustomConvClass[]) engine.ReadFile("tmpCC.txt");
    Assert.AreEqual(5, parsed.Length);

    for (int i = 0; i < 5; i++)
    {
        Assert.AreEqual("Argentina", parsed[i].Country);
        Assert.AreEqual("Meli", parsed[i].Names.LastName);
        Assert.AreEqual("Marcos", parsed[i].Names.FirstName);
        Assert.AreEqual(25, parsed[i].Age);
    }

    if (File.Exists("tmpCC.txt"))
    {
        File.Delete("tmpCC.txt");
    }
}
public void DiscardWriteRead()
{
    var engine = new FileHelperEngine <DiscardLastType1>();
    DiscardLastType1[] parsed = TestCommon.ReadTest<DiscardLastType1>(engine, "Good", "DiscardLast1.txt");

    // Write with the expected footer, then read back; FooterText should hold
    // the footer found in the file, not the value set just before reading.
    engine.FooterText = expectedShortFooterText;
    engine.WriteFile("tempo.txt", parsed);

    engine.FooterText = "none none";
    DiscardLastType1[] reRead = engine.ReadFile(@"tempo.txt");

    Assert.AreEqual(parsed.Length, reRead.Length);
    Assert.AreEqual(expectedShortFooterText, engine.FooterText);

    if (File.Exists("tempo.txt"))
        File.Delete("tempo.txt");

    Assert.AreEqual(4, parsed.Length);
    Assert.AreEqual(new DateTime(1314, 12, 11), parsed[0].Field1);
}
public void WriteNull()
{
    engine = new FileHelperEngine(typeof(SampleType));

    // Three records with every field left at its default value.
    var records = new SampleType[] { new SampleType(), new SampleType(), new SampleType() };

    engine.WriteFile("tempNull.txt", records);
    records = (SampleType[]) engine.ReadFile("tempNull.txt");

    Assert.AreEqual(3, records.Length);
    Assert.AreEqual(3, engine.TotalRecords);
    Assert.AreEqual(0, engine.ErrorManager.ErrorCount);

    // Default-valued fields round-trip as "empty" values.
    Assert.AreEqual(DateTime.MinValue, records[0].Field1);
    Assert.AreEqual("", records[0].Field2);
    Assert.AreEqual(0, records[0].Field3);

    if (File.Exists("tempNull.txt"))
    {
        File.Delete("tempNull.txt");
    }
}
static void Main(string[] args)
{
    // Reads the ENADE 2014 microdata CSV, aggregates grades and income counts by
    // state / course / income range, and writes the aggregations out as CSV
    // (via FileHelpers) and as line-delimited JSON (via Json.NET).
    #region Codes to names dictionaries
    // IBGE state codes -> state abbreviations.
    Dictionary<string, string> states = new Dictionary<string, string>
    {
        { "11", "RO" }, { "12", "AC" }, { "13", "AM" }, { "14", "RR" }, { "15", "PA" },
        { "16", "AP" }, { "17", "TO" }, { "21", "MA" }, { "22", "PI" }, { "23", "CE" },
        { "24", "RN" }, { "25", "PB" }, { "26", "PE" }, { "27", "AL" }, { "28", "SE" },
        { "29", "BA" }, { "31", "MG" }, { "32", "ES" }, { "33", "RJ" }, { "35", "SP" },
        { "41", "PR" }, { "42", "SC" }, { "43", "RS" }, { "50", "MS" }, { "51", "MT" },
        { "52", "GO" }, { "53", "DF" }
    };
    // ENADE course codes -> course names.
    Dictionary<string, string> courses = new Dictionary<string, string>
    {
        { "21", "ARQUITETURA E URBANISMO" },
        { "72", "TECNOLOGIA EM ANÁLISE E DESENVOLVIMENTO DE SISTEMAS" },
        { "73", "TECNOLOGIA EM AUTOMAÇÃO INDUSTRIAL" },
        { "76", "TECNOLOGIA EM GESTÃO DA PRODUÇÃO INDUSTRIAL" },
        { "79", "TECNOLOGIA EM REDES DE COMPUTADORES" },
        { "701", "MATEMÁTICA(BACHARELADO)" }, { "702", "MATEMÁTICA(LICENCIATURA)" },
        { "903", "LETRAS - PORTUGUÊS(BACHARELADO)" }, { "904", "LETRAS - PORTUGUÊS(LICENCIATURA)" },
        { "905", "LETRAS - PORTUGUÊS E INGLÊS(LICENCIATURA)" }, { "906", "LETRAS - PORTUGUÊS E ESPANHOL(LICENCIATURA)" },
        { "1401", "FÍSICA(BACHARELADO)" }, { "1402", "FÍSICA(LICENCIATURA)" },
        { "1501", "QUÍMICA(BACHARELADO)" }, { "1502", "QUÍMICA(LICENCIATURA)" },
        { "1601", "CIÊNCIAS BIOLÓGICAS(BACHARELADO)" }, { "1602", "CIÊNCIAS BIOLÓGICAS(LICENCIATURA)" },
        { "2001", "PEDAGOGIA(LICENCIATURA)" },
        { "2401", "HISTÓRIA(BACHARELADO)" }, { "2402", "HISTÓRIA(LICENCIATURA)" },
        { "2501", "ARTES VISUAIS(LICENCIATURA)" },
        { "3001", "GEOGRAFIA(BACHARELADO)" }, { "3002", "GEOGRAFIA(LICENCIATURA)" },
        { "3201", "FILOSOFIA(BACHARELADO)" }, { "3202", "FILOSOFIA(LICENCIATURA)" },
        { "3502", "EDUCAÇÃO FÍSICA(LICENCIATURA)" },
        { "4004", "CIÊNCIA DA COMPUTAÇÃO(BACHARELADO)" }, { "4005", "CIÊNCIA DA COMPUTAÇÃO(LICENCIATURA)" },
        { "4006", "SISTEMAS DE INFORMAÇÃO" },
        { "4301", "MÚSICA(LICENCIATURA)" },
        { "5401", "CIÊNCIAS SOCIAIS(BACHARELADO)" }, { "5402", "CIÊNCIAS SOCIAIS(LICENCIATURA)" },
        { "5710", "ENGENHARIA CIVIL" }, { "5806", "ENGENHARIA ELÉTRICA" },
        { "5809", "ENGENHARIA DE COMPUTAÇÃO" }, { "5814", "ENGENHARIA DE CONTROLE E AUTOMAÇÃO" },
        { "5902", "ENGENHARIA MECÂNICA" }, { "6008", "ENGENHARIA QUÍMICA" },
        { "6009", "ENGENHARIA DE ALIMENTOS" }, { "6208", "ENGENHARIA DE PRODUÇÃO" },
        { "6306", "ENGENHARIA" }, { "6307", "ENGENHARIA AMBIENTAL" }, { "6405", "ENGENHARIA FLORESTAL" }
    };
    // Income range codes -> human readable descriptions (empty code = not informed).
    Dictionary<string, string> incomes = new Dictionary<string, string>
    {
        { "a", "Até 1,5 salário mínimo" }, { "b", "De 1,5 a 3 salários mínimos" },
        { "c", "De 3 a 4,5 salários mínimos" }, { "d", "De 4,5 a 6 salários mínimos" },
        { "e", "De 6 a 10 salários mínimos" }, { "f", "De 10 a 30 salários mínimos" },
        { "g", "Acima de 30 salários mínimos" }, { "", "Não Informado" }
    };
    #endregion

    // Aggregation maps, keyed by the raw codes from the microdata file.
    Dictionary<string, List<double>> stateGrades = new Dictionary<string, List<double>>();
    Dictionary<string, Dictionary<string, List<double>>> courseIncomeGrades = new Dictionary<string, Dictionary<string, List<double>>>();
    Dictionary<string, Dictionary<string, Dictionary<string, List<double>>>> courseStateIncomeGrades = new Dictionary<string, Dictionary<string, Dictionary<string, List<double>>>>();
    Dictionary<string, int> countIncomes = new Dictionary<string, int>();
    Dictionary<string, Dictionary<string, Dictionary<string, int>>> courseStateIncomeCount = new Dictionary<string, Dictionary<string, Dictionary<string, int>>>();
    Dictionary<string, Dictionary<string, int>> stateIncomeCount = new Dictionary<string, Dictionary<string, int>>();
    Dictionary<string, Dictionary<string, int>> courseIncomeCount = new Dictionary<string, Dictionary<string, int>>();
    Dictionary<string, Dictionary<string, List<double>>> stateCourseGrades = new Dictionary<string, Dictionary<string, List<double>>>();

    // --- Local helpers: replace the previously duplicated ContainsKey/Add blocks ---

    // Returns dict[key], creating and storing a fresh TValue when the key is absent.
    // Single lookup via TryGetValue instead of ContainsKey + indexer.
    TValue GetOrAdd<TValue>(Dictionary<string, TValue> dict, string key) where TValue : new()
    {
        if (!dict.TryGetValue(key, out TValue value))
        {
            value = new TValue();
            dict.Add(key, value);
        }
        return value;
    }

    // Parses notaGeral and stores notaGeral / 10 in the list. Blank grades are
    // skipped, but the list itself still exists (created by GetOrAdd), matching
    // the original behavior of registering the key even without a grade.
    void AddGrade(List<double> grades, string notaGeral)
    {
        if (!String.IsNullOrWhiteSpace(notaGeral))
        {
            grades.Add(Double.Parse(notaGeral) / 10);
        }
    }

    // Increments counts[key], starting from zero when the key is absent.
    void Increment(Dictionary<string, int> counts, string key)
    {
        counts.TryGetValue(key, out int current);
        counts[key] = current + 1;
    }

    // Average of the collected grades rounded to 2 decimals.
    // NOTE(review): an empty list yields NaN (0.0 / 0), exactly as the original code did.
    double AverageOf(List<double> grades)
    {
        double sum = 0;
        foreach (double grade in grades)
        {
            sum += grade;
        }
        return Math.Round(sum / grades.Count, 2);
    }

    FileHelperEngine<EnadeObject> readEngine = new FileHelperEngine<EnadeObject>();
    int count = 1;
    Console.WriteLine("Reading file and coverting to object");
    var records = readEngine.ReadFile("C:\\Users\\leobr\\Dropbox\\UFF\\Visualização de dados\\microdados_enade_2014\\2.DADOS\\microdados_enade_2014_editado.csv");
    foreach (EnadeObject currentRecord in records)
    {
        Console.WriteLine("Getting record " + count + " from " + records.Length);
        string uf = currentRecord.codigoUF;
        string curso = currentRecord.codigoCurso;
        string renda = currentRecord.faixaDeRenda;
        string nota = currentRecord.notaGeral;

        // One line per aggregation, instead of the original nested ContainsKey pyramids.
        AddGrade(GetOrAdd(stateGrades, uf), nota);                                       // state -> grades
        AddGrade(GetOrAdd(GetOrAdd(courseIncomeGrades, curso), renda), nota);            // course -> income -> grades
        AddGrade(GetOrAdd(GetOrAdd(GetOrAdd(courseStateIncomeGrades, curso), uf), renda), nota); // course -> state -> income -> grades
        Increment(countIncomes, renda);                                                  // income -> count
        Increment(GetOrAdd(GetOrAdd(courseStateIncomeCount, curso), uf), renda);         // course -> state -> income -> count
        Increment(GetOrAdd(stateIncomeCount, uf), renda);                                // state -> income -> count
        Increment(GetOrAdd(courseIncomeCount, curso), renda);                            // course -> income -> count
        AddGrade(GetOrAdd(GetOrAdd(stateCourseGrades, uf), curso), nota);                // state -> course -> grades
        count++;
    }

    Console.WriteLine("Writing files");
    const string outputDir = "C:\\Projetos\\UFF\\VisEnade\\DataWorker\\DataWorker\\DataWorker\\Output\\";

    #region CSV FILES
    List<WriteFileObject> stateGradesRecords = new List<WriteFileObject>();
    List<WriteFileObject> courseIncomeGradesRecords = new List<WriteFileObject>();
    FileHelperEngine<WriteFileObject> writeEngine = new FileHelperEngine<WriteFileObject>();

    // States/grades file
    foreach (string currentState in stateGrades.Keys)
    {
        // Translate the state code to its abbreviation; skip unknown codes.
        if (!states.ContainsKey(currentState))
        {
            Console.WriteLine("Estado não encontrado na lista");
            continue;
        }
        WriteFileObject record = new WriteFileObject();
        record.state = states[currentState];
        record.average = AverageOf(stateGrades[currentState]);
        stateGradesRecords.Add(record);
    }
    writeEngine.WriteFile(outputDir + "StatesGrades.csv", stateGradesRecords);

    // Course/Income/Grades file
    foreach (string currentCourse in courseIncomeGrades.Keys)
    {
        foreach (string currentIncome in courseIncomeGrades[currentCourse].Keys)
        {
            // Translate course and income codes to names; skip unknown codes.
            if (!courses.ContainsKey(currentCourse))
            {
                Console.WriteLine("Curso não encontrado na lista");
                continue;
            }
            if (!incomes.ContainsKey(currentIncome))
            {
                Console.WriteLine("Faixa de renda não encontrada na lista");
                continue;
            }
            WriteFileObject record = new WriteFileObject();
            record.course = courses[currentCourse];
            record.income = incomes[currentIncome];
            record.average = AverageOf(courseIncomeGrades[currentCourse][currentIncome]);
            courseIncomeGradesRecords.Add(record);
        }
    }
    writeEngine.WriteFile(outputDir + "CourseIncomeGrades.csv", courseIncomeGradesRecords);
    #endregion

    #region JSON FILES
    // Each JSON file is line-delimited: one serialized object per line.
    // FIX: the original checked for unknown course/state codes inside the inner
    // loop but wrote the object after it, so unknown codes produced empty JSON
    // objects; the checks are now hoisted so those entries are skipped entirely.
    // StreamWriters are wrapped in using blocks so they are closed on exceptions.

    // Course -> average grade per income range.
    using (StreamWriter sw = new StreamWriter(outputDir + "courseIncomeGrade.json"))
    {
        foreach (string currentCourse in courseIncomeGrades.Keys)
        {
            if (!courses.ContainsKey(currentCourse))
            {
                Console.WriteLine("Curso não encontrado na lista");
                continue;
            }
            CourseIncomeObject ciObj = new CourseIncomeObject();
            ciObj.course = courses[currentCourse];
            foreach (string currentIncome in courseIncomeGrades[currentCourse].Keys)
            {
                switch (currentIncome)
                {
                    case "a": ciObj.incomes.a = CalculateAvgGrade(courseIncomeGrades, currentCourse, currentIncome); break;
                    case "b": ciObj.incomes.b = CalculateAvgGrade(courseIncomeGrades, currentCourse, currentIncome); break;
                    case "c": ciObj.incomes.c = CalculateAvgGrade(courseIncomeGrades, currentCourse, currentIncome); break;
                    case "d": ciObj.incomes.d = CalculateAvgGrade(courseIncomeGrades, currentCourse, currentIncome); break;
                    case "e": ciObj.incomes.e = CalculateAvgGrade(courseIncomeGrades, currentCourse, currentIncome); break;
                    case "f": ciObj.incomes.f = CalculateAvgGrade(courseIncomeGrades, currentCourse, currentIncome); break;
                    case "g": ciObj.incomes.g = CalculateAvgGrade(courseIncomeGrades, currentCourse, currentIncome); break;
                    default: ciObj.incomes.naoInformado = CalculateAvgGrade(courseIncomeGrades, currentCourse, currentIncome); break;
                }
            }
            sw.WriteLine(JsonConvert.SerializeObject(ciObj));
        }
    }

    // Course -> state -> average grade per income range.
    using (StreamWriter sw2 = new StreamWriter(outputDir + "courseStateIncomeGrade.json"))
    {
        foreach (string currentCourse in courseStateIncomeGrades.Keys)
        {
            if (!courses.ContainsKey(currentCourse))
            {
                Console.WriteLine("Curso não encontrado na lista");
                continue;
            }
            foreach (string currentState in courseStateIncomeGrades[currentCourse].Keys)
            {
                if (!states.ContainsKey(currentState))
                {
                    Console.WriteLine("Estado não encontrado na lista");
                    continue;
                }
                CourseStateIncomeObject csiObj = new CourseStateIncomeObject();
                csiObj.course = courses[currentCourse];
                csiObj.state = states[currentState];
                foreach (string currentIncome in courseStateIncomeGrades[currentCourse][currentState].Keys)
                {
                    switch (currentIncome)
                    {
                        case "a": csiObj.incomes.a = CalculateCourseStateAvgGrade(courseStateIncomeGrades, currentCourse, currentState, currentIncome); break;
                        case "b": csiObj.incomes.b = CalculateCourseStateAvgGrade(courseStateIncomeGrades, currentCourse, currentState, currentIncome); break;
                        case "c": csiObj.incomes.c = CalculateCourseStateAvgGrade(courseStateIncomeGrades, currentCourse, currentState, currentIncome); break;
                        case "d": csiObj.incomes.d = CalculateCourseStateAvgGrade(courseStateIncomeGrades, currentCourse, currentState, currentIncome); break;
                        case "e": csiObj.incomes.e = CalculateCourseStateAvgGrade(courseStateIncomeGrades, currentCourse, currentState, currentIncome); break;
                        case "f": csiObj.incomes.f = CalculateCourseStateAvgGrade(courseStateIncomeGrades, currentCourse, currentState, currentIncome); break;
                        case "g": csiObj.incomes.g = CalculateCourseStateAvgGrade(courseStateIncomeGrades, currentCourse, currentState, currentIncome); break;
                        default: csiObj.incomes.naoInformado = CalculateCourseStateAvgGrade(courseStateIncomeGrades, currentCourse, currentState, currentIncome); break;
                    }
                }
                sw2.WriteLine(JsonConvert.SerializeObject(csiObj));
            }
        }
    }

    // Income range -> number of students.
    using (StreamWriter sw3 = new StreamWriter(outputDir + "countIncome.json"))
    {
        foreach (string currentIncome in countIncomes.Keys)
        {
            if (!incomes.ContainsKey(currentIncome))
            {
                Console.WriteLine("Renda não encontrada na lista");
                continue;
            }
            CountIncomeObject countIncomeObj = new CountIncomeObject();
            countIncomeObj.income = incomes[currentIncome];
            countIncomeObj.count = countIncomes[currentIncome];
            sw3.WriteLine(JsonConvert.SerializeObject(countIncomeObj));
        }
    }

    // Course -> state -> student count per income range.
    using (StreamWriter sw4 = new StreamWriter(outputDir + "CourseStateIncomeCount.json"))
    {
        foreach (string currentCourse in courseStateIncomeCount.Keys)
        {
            if (!courses.ContainsKey(currentCourse))
            {
                Console.WriteLine("Curso não encontrado na lista");
                continue;
            }
            foreach (string currentState in courseStateIncomeCount[currentCourse].Keys)
            {
                if (!states.ContainsKey(currentState))
                {
                    Console.WriteLine("Estado não encontrado na lista");
                    continue;
                }
                CourseStateIncomeCountObject courseStateIncCountObj = new CourseStateIncomeCountObject();
                courseStateIncCountObj.course = courses[currentCourse];
                courseStateIncCountObj.state = states[currentState];
                foreach (string currentIncome in courseStateIncomeCount[currentCourse][currentState].Keys)
                {
                    switch (currentIncome)
                    {
                        case "a": courseStateIncCountObj.incomes.a = courseStateIncomeCount[currentCourse][currentState][currentIncome]; break;
                        case "b": courseStateIncCountObj.incomes.b = courseStateIncomeCount[currentCourse][currentState][currentIncome]; break;
                        case "c": courseStateIncCountObj.incomes.c = courseStateIncomeCount[currentCourse][currentState][currentIncome]; break;
                        case "d": courseStateIncCountObj.incomes.d = courseStateIncomeCount[currentCourse][currentState][currentIncome]; break;
                        case "e": courseStateIncCountObj.incomes.e = courseStateIncomeCount[currentCourse][currentState][currentIncome]; break;
                        case "f": courseStateIncCountObj.incomes.f = courseStateIncomeCount[currentCourse][currentState][currentIncome]; break;
                        case "g": courseStateIncCountObj.incomes.g = courseStateIncomeCount[currentCourse][currentState][currentIncome]; break;
                        default: courseStateIncCountObj.incomes.naoInformado = courseStateIncomeCount[currentCourse][currentState][currentIncome]; break;
                    }
                }
                sw4.WriteLine(JsonConvert.SerializeObject(courseStateIncCountObj));
            }
        }
    }

    // State -> student count per income range.
    using (StreamWriter sw5 = new StreamWriter(outputDir + "StateIncomeCount.json"))
    {
        foreach (string currentState in stateIncomeCount.Keys)
        {
            if (!states.ContainsKey(currentState))
            {
                Console.WriteLine("Estado não encontrado na lista");
                continue;
            }
            StateIncomeCountObject stateIncomeCountObj = new StateIncomeCountObject();
            stateIncomeCountObj.state = states[currentState];
            foreach (string currentIncome in stateIncomeCount[currentState].Keys)
            {
                switch (currentIncome)
                {
                    case "a": stateIncomeCountObj.incomes.a = stateIncomeCount[currentState][currentIncome]; break;
                    case "b": stateIncomeCountObj.incomes.b = stateIncomeCount[currentState][currentIncome]; break;
                    case "c": stateIncomeCountObj.incomes.c = stateIncomeCount[currentState][currentIncome]; break;
                    case "d": stateIncomeCountObj.incomes.d = stateIncomeCount[currentState][currentIncome]; break;
                    case "e": stateIncomeCountObj.incomes.e = stateIncomeCount[currentState][currentIncome]; break;
                    case "f": stateIncomeCountObj.incomes.f = stateIncomeCount[currentState][currentIncome]; break;
                    case "g": stateIncomeCountObj.incomes.g = stateIncomeCount[currentState][currentIncome]; break;
                    default: stateIncomeCountObj.incomes.naoInformado = stateIncomeCount[currentState][currentIncome]; break;
                }
            }
            sw5.WriteLine(JsonConvert.SerializeObject(stateIncomeCountObj));
        }
    }

    // Course -> student count per income range.
    using (StreamWriter sw6 = new StreamWriter(outputDir + "CourseIncomeCount.json"))
    {
        foreach (string currentCourse in courseIncomeCount.Keys)
        {
            if (!courses.ContainsKey(currentCourse))
            {
                Console.WriteLine("Curso não encontrado na lista");
                continue;
            }
            CourseIncomeCountObject courseIncomeCountObj = new CourseIncomeCountObject();
            courseIncomeCountObj.course = courses[currentCourse];
            foreach (string currentIncome in courseIncomeCount[currentCourse].Keys)
            {
                switch (currentIncome)
                {
                    case "a": courseIncomeCountObj.incomes.a = courseIncomeCount[currentCourse][currentIncome]; break;
                    case "b": courseIncomeCountObj.incomes.b = courseIncomeCount[currentCourse][currentIncome]; break;
                    case "c": courseIncomeCountObj.incomes.c = courseIncomeCount[currentCourse][currentIncome]; break;
                    case "d": courseIncomeCountObj.incomes.d = courseIncomeCount[currentCourse][currentIncome]; break;
                    case "e": courseIncomeCountObj.incomes.e = courseIncomeCount[currentCourse][currentIncome]; break;
                    case "f": courseIncomeCountObj.incomes.f = courseIncomeCount[currentCourse][currentIncome]; break;
                    case "g": courseIncomeCountObj.incomes.g = courseIncomeCount[currentCourse][currentIncome]; break;
                    default: courseIncomeCountObj.incomes.naoInformado = courseIncomeCount[currentCourse][currentIncome]; break;
                }
            }
            sw6.WriteLine(JsonConvert.SerializeObject(courseIncomeCountObj));
        }
    }

    // State -> course -> average grade.
    using (StreamWriter sw7 = new StreamWriter(outputDir + "StateCourseGrade.json"))
    {
        foreach (string currentState in stateCourseGrades.Keys)
        {
            if (!states.ContainsKey(currentState))
            {
                Console.WriteLine("Estado não encontrado na lista");
                continue;
            }
            foreach (string currentCourse in stateCourseGrades[currentState].Keys)
            {
                if (!courses.ContainsKey(currentCourse))
                {
                    Console.WriteLine("Curso não encontrado na lista");
                    continue;
                }
                StateCourseGradeObject stateCourseGradeObj = new StateCourseGradeObject();
                stateCourseGradeObj.state = states[currentState];
                stateCourseGradeObj.course = courses[currentCourse];
                stateCourseGradeObj.grade = CalculateStateCourseAvgGrade(stateCourseGrades, currentState, currentCourse);
                sw7.WriteLine(JsonConvert.SerializeObject(stateCourseGradeObj));
            }
        }
    }
    #endregion
    Console.WriteLine("Arquivos finalizados");
}
public void Write <T>(string path, IEnumerable <T> records) where T : class { var engine = new FileHelperEngine <T>(Encoding.UTF8); engine.WriteFile(path, records); }
private static void writeRecords(IEnumerable <NetBankRecordWrite> recordWrites, string pathWrite) { var engine = new FileHelperEngine <NetBankRecordWrite>(); engine.WriteFile(pathWrite, recordWrites); }
public async Task RunAsync(string startDate, string endDate)
{
    // Pulls all paid consignment requests in [startDate, endDate] from the OST
    // API (paged), aggregates each request's charges into SAP FI posting lines,
    // and writes them as a delimited file terminated by an "EOF" line.
    var engine = new FileHelperEngine<SapFiDelimited>();
    var sapFiFile = new List<SapFiDelimited>();
    var consigmentRequests = new List<ConsigmentRequest>();

    // Method-scoped client; disposed when the paging loop finishes.
    using (var client = new HttpClient())
    {
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", m_ostAdminToken);
        var requestInvoice = true;
        var invoicePage = 1;
        var invoiceSize = 20;
        while (requestInvoice)
        {
            var requestUri = new Uri($"{m_ostBaseUrl}/api/consigment-requests/paid-all-within-range/{startDate}/{endDate}/?size={invoiceSize}&page={invoicePage}");
            string output;
            using (var response = await client.GetAsync(requestUri))
            {
                Console.WriteLine($"RequestUri: {requestUri}");
                Console.WriteLine($"Status: {(int)response.StatusCode} {response.ReasonPhrase}");
                if (!response.IsSuccessStatusCode)
                {
                    // Abort the whole run on any failed page; status was logged above.
                    return;
                }
                output = await response.Content.ReadAsStringAsync();
            }
            // Parse the page once (was parsed four times per page).
            var page = JObject.Parse(output);
            var json = page.SelectToken("_results");
            consigmentRequests.AddRange(json.Select(jtok => jtok.ToJson().DeserializeFromJson<ConsigmentRequest>()));
            Console.WriteLine($"Invoice count: {consigmentRequests.Count} .....");
            var invoiceCount = page.SelectToken("_count").Value<int>();
            invoicePage = page.SelectToken("_page").Value<int>();
            invoiceSize = page.SelectToken("_size").Value<int>();
            // Stop once the current page reaches or passes the total count.
            if (invoicePage * invoiceSize >= invoiceCount)
            {
                requestInvoice = false;
            }
            invoicePage++;
        }
    }

    var sequenceNumberCount = 1;
    foreach (var consigmentRequest in consigmentRequests)
    {
        // Per-request accumulators, split by domestic vs international product.
        decimal domesticSubTotal = 0;
        decimal domesticBaseRateTotal = 0;
        decimal domesticHandlingSurchargeTotal = 0;
        decimal domesticFuelSurchargeTotal = 0;
        decimal domesticGstTotal = 0;
        decimal domesticInsuranceTotal = 0;
        decimal domesticInsurancePosCoverageTotal = 0;
        var domesticProductCount = 0;
        var domesticInsuranceProductCount = 0;
        decimal internationalSubTotal = 0;
        decimal internationalBaseRateTotal = 0;
        decimal internationalHandlingSurchargeTotal = 0;
        decimal internationalFuelSurchargeTotal = 0;
        decimal internationalGstTotal = 0;
        decimal internationalInsuranceTotal = 0;
        var internationalProductCount = 0;
        var internationalInsuranceProductCount = 0;

        foreach (var consigment in consigmentRequest.Consignments)
        {
            if (consigment.Produk.IsInternational)
            {
                internationalSubTotal += consigment.Bill.SubTotal3;
                internationalBaseRateTotal += consigment.Bill.BaseRate;
                internationalProductCount += 1;
            }
            else
            {
                domesticSubTotal += consigment.Bill.SubTotal3;
                domesticBaseRateTotal += consigment.Bill.BaseRate;
                domesticProductCount += 1;
            }
            // Fuel / handling surcharges.
            foreach (var c in consigment.Bill.AddOnsC)
            {
                if (consigment.Produk.IsInternational)
                {
                    if (c.Code.Equals("S13") || c.Name.Equals("International Fuel Surcharge - OD"))
                    {
                        internationalFuelSurchargeTotal += c.Charge;
                    }
                    if (c.Code.Equals("S14") || c.Name.Equals("International Handling Surcharge - OD"))
                    {
                        internationalHandlingSurchargeTotal += c.Charge;
                    }
                }
                else
                {
                    if (c.Code.Equals("S12") || c.Name.Equals("Domestic Fuel Surcharge - OD"))
                    {
                        domesticFuelSurchargeTotal += c.Charge;
                    }
                    if (c.Code.Equals("S11") || c.Name.Equals("Domestic Handling Surcharge - OD"))
                    {
                        domesticHandlingSurchargeTotal += c.Charge;
                    }
                }
            }
            // GST add-ons (domestic total is recomputed locally below, see #6275).
            foreach (var d in consigment.Bill.AddOnsD)
            {
                if (consigment.Produk.IsInternational)
                {
                    if (d.Code.Equals("S02") || d.Name.Equals("GST Output Tax - Cus"))
                    {
                        internationalGstTotal += d.Charge;
                    }
                }
                else
                {
                    if (d.Code.Equals("S01") || d.Name.Equals("GST Output Tax - Cus"))
                    {
                        domesticGstTotal += d.Charge;
                    }
                }
            }
            // Ezisend insurance.
            foreach (var a in consigment.Bill.AddOnsA)
            {
                if (!a.Code.Equals("V29") && !a.Name.Equals("Ezisend Insurance - General"))
                {
                    continue;
                }
                if (consigment.Produk.IsInternational)
                {
                    internationalInsuranceTotal += a.Charge;
                    internationalInsuranceProductCount += 1;
                }
                else
                {
                    domesticInsuranceTotal += a.Charge;
                    domesticInsuranceProductCount += 1;
                }
            }
            // Pos Coverage applies to domestic consignments only.
            foreach (var b in consigment.Bill.AddOnsB)
            {
                if (consigment.Produk.IsInternational)
                {
                    continue;
                }
                if (b.Code.Equals("V33") || b.Name.Equals("Pos Coverage"))
                {
                    domesticInsurancePosCoverageTotal += b.Charge;
                }
            }
        }

        // Locally calculate GST to avoid rounding #6275; international is zero-rated.
        domesticGstTotal = GstCalculation(domesticSubTotal);
        internationalGstTotal = 0;
        const decimal pickupCharge = 5.00m;
        var pickupChargeGst = decimal.Multiply(pickupCharge, 0.06m);

        // Factory for one SAP FI line; the header fields are identical for every
        // line of this consignment request (was 11 copy-pasted initializers).
        SapFiDelimited Line(string postingKey, string accountNumber, decimal amount, int qty, string taxCode, string referenceKey, string text)
        {
            return new SapFiDelimited()
            {
                DocumentDate = consigmentRequest.CreatedDate,
                PostingDate = consigmentRequest.Payment.Date,
                DocumentType = "XN",
                Currency = "MYR",
                ExchangeRate = string.Empty,
                Reference = "OST",
                DocumentHeaderText = string.Empty,
                PostingKey = postingKey,
                AccountNumber = accountNumber,
                Amount = amount,
                CostCenter = "11523003",
                Quantity = qty,
                TaxCode = taxCode,
                Assignment = consigmentRequest.ReferenceNo,
                ReferenceKey = referenceKey,
                Text = text,
                SequenceNumber = sequenceNumberCount
            };
        }

        var productQty = domesticProductCount + internationalProductCount;
        var insuranceQty = domesticInsuranceProductCount + internationalInsuranceProductCount;
        var gstQty = domesticProductCount + 1;
        // Candidate lines in the exact order the original appended them
        // (international base/handling/fuel before insurance, GST and Pos Coverage).
        var candidates = new List<SapFiDelimited>
        {
            // Debit: grand total collected for this request.
            Line("40", "273621", domesticSubTotal + domesticGstTotal + internationalSubTotal + internationalGstTotal + pickupCharge + pickupChargeGst, productQty, "OS", "MP00003", "1Alliance Online"),
            Line("50", "620301", pickupCharge, 1, "SR", "C305101", $"{1}Courier Pickup Service"),
            Line("50", "620102", domesticBaseRateTotal, domesticProductCount, "SR", "C001101", $"{domesticProductCount}Next Day Delivery (NDD)"),
            Line("50", "620102", domesticHandlingSurchargeTotal, domesticProductCount, "SR", "C501101", $"{domesticProductCount}Domestic Handling Surcharge - OD"),
            Line("50", "620402", domesticFuelSurchargeTotal, domesticProductCount, "SR", "C500101", $"{domesticProductCount}Domestic Fuel Surcharge - OD"),
            Line("50", "620104", internationalBaseRateTotal, internationalProductCount, "ZR", "C002101", $"{internationalProductCount}Express Mail Service (EMS)"),
            Line("50", "620104", internationalHandlingSurchargeTotal, internationalProductCount, "ZR", "C002103", $"{internationalProductCount}International Handling Surcharge - OD"),
            Line("50", "620404", internationalFuelSurchargeTotal, internationalProductCount, "ZR", "C002102", $"{internationalProductCount}International Fuel Surcharge - OD"),
            Line("50", "515921", domesticInsuranceTotal + internationalInsuranceTotal, insuranceQty, "SR", "C306102", $"{insuranceQty}Ezisend Insurance - General"),
            Line("50", "542402", domesticGstTotal + pickupChargeGst, gstQty, "SR", "GSTS102", $"{gstQty}GST Output Tax - Cus"),
            Line("50", "631404", domesticInsurancePosCoverageTotal, domesticProductCount, "SR", "C025100", $"{domesticProductCount}Pos Coverage")
        };
        // Only lines with a positive amount are posted.
        foreach (var line in candidates)
        {
            if (line.Amount > 0)
            {
                sapFiFile.Add(line);
            }
        }
        sequenceNumberCount++;
    }

    var path = $@"{m_ostSapFolder}\OST_CACC_HQ_{DateTime.Now:yyyyMMdd-HHmmss}.txt";
    engine.WriteFile(path, sapFiFile);
    // SAP expects an explicit EOF marker as the last line.
    using (StreamWriter sw = File.AppendText(path))
    {
        sw.WriteLine("EOF");
    }
}
/// <summary>
/// Scans a hard-coded repos folder for Visual Studio solutions, pairs each .sln with a
/// matching .exe build output, persists a per-solution data.csv via FileHelpers and binds
/// the collected Projects to the VypisVsech list control.
/// </summary>
/// <remarks>
/// NOTE(review): the root path is hard-coded to a user profile directory — consider making
/// it configurable. The empty catch around ReadDescription is a deliberate best-effort
/// fallback to "none" — confirm that swallowing all exceptions there is intended.
/// </remarks>
public void test() {
    //var engine = new FileHelperEngine<Projects>();
    var dir = new DirectoryInfo(@"C:\Users\vanca\source\repos");
    FileInfo[] files = dir.GetFiles();
    var engine = new FileHelperEngine <Projects>();
    DirectoryInfo[] dirs = dir.GetDirectories();
    ObservableCollection <Projects> filesCol = new ObservableCollection <Projects>();
    // Walk every solution file below the root folder.
    foreach (var item in dir.GetFiles("*.sln", SearchOption.AllDirectories)) {
        string projectPath = item.DirectoryName;
        var dir2 = new DirectoryInfo(@item.DirectoryName);
        //Debug.WriteLine(item.DirectoryName);
        Projects project = new Projects("sad", "asd", "sad", "das"); // placeholder instance; not used below
        string path = projectPath + "\\data.csv";
        //Debug.WriteLine(projectPath);
        // Create a data.csv next to the solution if it does not exist yet.
        if (!System.IO.File.Exists(@path)) {
            using (System.IO.FileStream fs = System.IO.File.Create(@path)) {
                fs.WriteByte(0); // single placeholder byte so the file is non-empty
            }
        }
        // Find executables whose file name matches the solution name.
        foreach (var item2 in dir2.GetFiles("*.exe", SearchOption.AllDirectories)) {
            if (System.IO.Path.GetFileNameWithoutExtension(item.Name) == System.IO.Path.GetFileNameWithoutExtension(item2.Name)) {
                // Skip executables that live directly under an "obj" folder (intermediate build output).
                string smtn = Directory.GetParent(item2.DirectoryName).ToString();
                string fldr = smtn.Substring(smtn.Length - 3);
                if (!(fldr == "obj")) {
                    // "parent-folder/file.exe" used as a short display path.
                    string prettyPath = System.IO.Path.GetFileName(System.IO.Path.GetDirectoryName(item2.FullName)).ToString() + "/" + item2.Name.ToString();
                    string fullName = item2.FullName.ToString();//System.IO.Path.GetFileNameWithoutExtension(item2.Name) + ".sln"
                    string desc = "none"; // default when no description can be read
                    try {
                        desc = ReadDescription(path);
                    }
                    catch { } // best effort: keep "none" if the csv cannot be read/parsed
                    Projects projekt = new Projects(prettyPath, fullName, item2.Name, projectPath, desc);
                    filesCol.Add(projekt);
                    List <Projects> listProject = new List <Projects>() { projekt };
                    // Persist the single project record back to the solution's data.csv.
                    engine.WriteFile(projectPath + "\\data.csv", listProject);
                    //filesCol.Add(System.IO.Path.GetFileName(System.IO.Path.GetDirectoryName(item2.FullName)) + "/" + item2.Name);
                    //filesCol.Add(System.IO.Path.GetFileName(System.IO.Path.GetDirectoryName(item2.FullName)) + "/" + item2.Name);
                }
            }
        }
    }
    // Bind everything that was found to the UI list.
    VypisVsech.ItemsSource = filesCol;
    /*var carMake = files
     * .Where(item => item.Extension == ".exe")
     * .Select(item => item);
     * foreach (var item in carMake)
     * {
     * }*/
}
/// <summary>
/// Size-structured plankton food-web model: integrates nitrate and the biomass of 8 size
/// classes (even indices = obligate autotrophs, odd = obligate heterotrophs that graze the
/// paired autotroph) forward in time with an Euler scheme, sweeping over 10 nitrate supply
/// rates, and writes the final biomasses per supply rate to bmass_out_test.csv.
/// </summary>
static void Main(string[] args)
{
    // set number of trophic levels
    var nLevels = 8;
    // Flag for Holling type I or Holling type II grazing
    // 1 for Holling I, 2 for Holling II
    // Results for Holling type II looks strange - have to investigate.
    var holling = 1;
    // set time step parameters
    var dt = 0.01;
    var maxTime = 10000.0;
    var nStepMax = Convert.ToInt32(Math.Round(maxTime / dt));
    var timeOut = 1000.0;
    var nStepOut = nStepMax * timeOut / maxTime; // steps between two output snapshots
    var nStepOutMax = Convert.ToInt32(nStepMax / nStepOut);
    // Initialize working arrays
    var bmassN = new double[] { 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };
    var dBmassNDt = new double[nLevels];
    // Initialize output arrays
    var bmassN_Out = new double[nLevels, nStepOutMax];
    var autotrophy_Out = new double[nLevels, nStepOutMax];
    var heterotrophy_Out = new double[nLevels, nStepOutMax];
    var predation_Out = new double[nLevels, nStepOutMax];
    var respiration_Out = new double[nLevels, nStepOutMax];
    var time_Out = new double[nStepOutMax];
    var nitrate_Out = new double[nStepOutMax];
    // Define cell volumes (each autotroph/heterotroph pair shares a volume)
    var cellVol = new double[nLevels];
    cellVol[0] = Math.Pow(10.0, 1.0);
    cellVol[1] = Math.Pow(10.0, 1.0);
    cellVol[2] = Math.Pow(10.0, 1.5);
    cellVol[3] = Math.Pow(10.0, 1.5);
    cellVol[4] = Math.Pow(10.0, 2.0);
    cellVol[5] = Math.Pow(10.0, 2.0);
    cellVol[6] = Math.Pow(10.0, 2.5);
    cellVol[7] = Math.Pow(10.0, 2.5);
    // Print cell volumes to screen
    Console.WriteLine("[{0}]", string.Join(", ", cellVol));
    // Insert code that prints cell volumes to a file
    // Initialize arrays for cell quotas
    var quotaCarbon = new double[nLevels];
    var quotaNitrate = new double[nLevels];
    var aCell = 18.7;
    var bCell = 0.89;
    // Calculate carbon cell quota for each size group
    for (var i = 0; i < quotaCarbon.Length; i++) {
        quotaCarbon[i] = aCell * Math.Pow(cellVol[i], bCell);
    }
    // Calculate nitrate cell quota for each size group based on Redfield proportions
    for (var i = 0; i < quotaNitrate.Length; i++) {
        quotaNitrate[i] = quotaCarbon[i] * (16.0 / 106.0);
    }
    // Convert nitrate quota to micromoles N cell-1
    for (var i = 0; i < quotaNitrate.Length; i++) {
        quotaNitrate[i] = quotaNitrate[i] * 1.0e6 / 1.0e15;
    }
    // Initiate arrays for autotrophic traits
    var vmaxN = new double[nLevels];
    var kn = new double[nLevels];
    var kResp = new double[nLevels];
    var specVmaxN = new double[nLevels];
    // Set even numbered types to be obligate autotrophs
    vmaxN[0] = 9.1e-9 * Math.Pow(cellVol[0], 0.67);
    vmaxN[2] = 9.1e-9 * Math.Pow(cellVol[2], 0.67);
    vmaxN[4] = 9.1e-9 * Math.Pow(cellVol[4], 0.67);
    vmaxN[6] = 9.1e-9 * Math.Pow(cellVol[6], 0.67);
    // Set odd numbered types to be obligate heterotrophs
    vmaxN[1] = 0.0;
    vmaxN[3] = 0.0;
    vmaxN[5] = 0.0;
    vmaxN[7] = 0.0;
    // Set half-saturation constant for all types
    for (var i = 0; i < kn.Length; i++) {
        kn[i] = 0.17 * Math.Pow(cellVol[i], 0.27);
    }
    // Set mortality term to be the same for all types
    for (var i = 0; i < kResp.Length; i++) {
        kResp[i] = 0.03;
    }
    // Biomass-specific maximum uptake rate: vmaxN per unit nitrate quota.
    for (var i = 0; i < specVmaxN.Length; i++) {
        specVmaxN[i] = vmaxN[i] / quotaNitrate[i];
    }
    // Print autotrophic trait arrays to console
    Console.WriteLine("vmaxN:");
    Console.WriteLine("[{0}]", string.Join(", ", vmaxN));
    Console.WriteLine("specVmaxN:");
    Console.WriteLine("[{0}]", string.Join(", ", specVmaxN));
    Console.WriteLine("kn:");
    Console.WriteLine("[{0}]", string.Join(", ", kn));
    // Initiate arrays for heterotrophic traits
    var gmax = new double[nLevels, nLevels];
    var gmax1 = new double[nLevels, nLevels];
    var kbN = new double[nLevels, nLevels];
    var gamma = new double[nLevels, nLevels];
    var eye = new double[nLevels, nLevels];
    var grazInteract = new double[nLevels, nLevels];
    // Populate grazing interaction matrix: [prey, predator] pairs that interact.
    grazInteract[0, 1] = 1.0;
    grazInteract[2, 3] = 1.0;
    grazInteract[4, 5] = 1.0;
    grazInteract[6, 7] = 1.0;
    // Figure out how to print a matrix to the console
    var aGraz = 0.5;
    var bGraz = -0.16;
    // Calculate maximum grazing rate for all types
    for (var i = 0; i < gmax.GetLength(0); i++) {
        for (var j = 0; j < gmax.GetLength(1); j++) {
            gmax[i, j] = aGraz * Math.Pow(cellVol[j], bGraz) * grazInteract[i, j];
            eye[i, j] = i; // row-index helper matrix
        }
    }
    // Set gmax1 = grazing eate for Holling I ((mol N l-1 day)-1)
    for (var i = 0; i < gmax1.GetLength(0); i++) {
        for (var j = 0; j < gmax1.GetLength(1); j++) {
            // Ask Mick about this.. gmax and gmax1 are identical(?)
            gmax1[i, j] = gmax[i, j] / 1.0;
        }
    }
    // Calculate half-saturation constant for grazing (from Ward et al. (2013))
    for (var i = 0; i < kbN.GetLength(0); i++) {
        for (var j = 0; j < kbN.GetLength(1); j++) {
            kbN[i, j] = 0.5 * (16.0 / 106.0) * grazInteract[i, j]; // micromol N biomass L-1
        }
    }
    // Set trophic transfer efficiency
    for (var i = 0; i < gamma.GetLength(0); i++) {
        for (var j = 0; j < gamma.GetLength(1); j++) {
            gamma[i, j] = 0.1 * grazInteract[i, j];
        }
    }
    // Working arrays and variables
    var heterotrophy = new double[nLevels];
    var autotrophy = new double[nLevels];
    var predation = new double[nLevels];
    var respiration = new double[nLevels];
    var imax = nLevels;
    var jmax = nLevels;
    var kmax = nLevels;
    // Create FileHelper engine for writing csv output
    var headerLine = @"nsupply,bmass0,bmass1,bmass2,bmass3,hmass0,hmass1,hmass2,hmass3";
    var engine = new FileHelperEngine <WriteOutput>() { HeaderText = headerLine };
    // Make instance for the csv writer engine to hold output before file is written to disk.
    var finalOutput = new List <WriteOutput>();
    // Start loop over snitrate (nitrate supply rate)
    var sStepMax = 10;
    for (var sStep = 0; sStep < sStepMax; sStep++) {
        var sNitrate = 0.02 + sStep * 0.06; // supply rate swept from 0.02 in steps of 0.06
        Console.WriteLine("sNitrate = {0}", sNitrate); // Place holder is not working
        // Initiate time related variables within the loop
        var nCount = 0;
        var nOut = 0;
        var time = 0.0;
        var dBmassNDtOld = 0.0;
        var dNitrateDtOld = 0.0;
        var dNitrateDt = 0.0;
        var prod = new double[nLevels];
        // Re-initialize biomass (convert to micromoles N l-1??)
        // NOTE(review): bmassN carries over from the previous supply-rate iteration, so each
        // sweep rescales the previous end state by 1e-2 rather than resetting to 1.0 — confirm.
        for (var i = 0; i < bmassN.Length; i++) {
            bmassN[i] = bmassN[i] * 1.0e-2;
        }
        // Set nitrate concentration
        var nitrate = 1.0;
        // Start main time loop
        for (var nStep = 0; nStep < nStepMax; nStep++) {
            // Evaluate autotrophy and respiration
            for (var i = 0; i < autotrophy.Length; i++) {
                autotrophy[i] = (vmaxN[i] / quotaNitrate[i]) * (nitrate / (nitrate + kn[i])) * bmassN[i];
            }
            // Maintain respiration/background loss
            for (var i = 0; i < respiration.Length; i++) {
                respiration[i] = kResp[i] * bmassN[i];
            }
            // Evaluate heterotrophy
            for (var i = 0; i < imax; i++) {
                //
                var sum1 = 0.0;
                var limita = 0.0;
                // Gains of type i from grazing on every prey type j.
                for (var j = 0; j < jmax; j++) {
                    if (holling == 1) {
                        sum1 = sum1 + (gamma[j, i] * gmax1[j, i] * bmassN[i] * bmassN[j]);
                    }
                    if (holling == 2) {
                        limita = bmassN[j] / (bmassN[j] + kbN[j, i]);
                        sum1 = sum1 + (gamma[j, i] * gmax[j, i] * limita * bmassN[i]);
                    }
                    heterotrophy[i] = sum1;
                }
                // Predation by all others (ask Mick)
                var sum2 = 0.0;
                var limitb = 0.0;
                for (var k = 0; k < kmax; k++) {
                    if (holling == 1) {
                        sum2 = sum2 + (gmax1[i, k] * bmassN[k] * bmassN[i]);
                    }
                    if (holling == 2) {
                        limitb = bmassN[i] / (bmassN[i] + kbN[i, k]);
                        sum2 = sum2 + (gmax[i, k] * limitb * bmassN[k]);
                    }
                    predation[i] = sum2;
                }
            }
            // To avoid numerical problems, do not let biomass decline below a low threshold value
            for (var i = 0; i < nLevels; i++) {
                if (bmassN[i] < 1.0e-25) {
                    respiration[i] = 0.0;
                    predation[i] = 0.0;
                }
            }
            // Evaluate rates of change
            dNitrateDt = -autotrophy.Sum() + sNitrate;
            for (var i = 0; i < dBmassNDt.Length; i++) {
                dBmassNDt[i] = autotrophy[i] + heterotrophy[i] - respiration[i] - predation[i];
            }
            // Store total growth
            for (var i = 0; i < prod.Length; i++) {
                prod[i] = autotrophy[i] + heterotrophy[i];
            }
            // Euler forward step
            var bmassNNew = new double[nLevels];
            double nitrateNew;
            for (var i = 0; i < bmassNNew.Length; i++) {
                bmassNNew[i] = bmassN[i] + dBmassNDt[i] * dt;
                bmassN[i] = bmassNNew[i];
            }
            nitrateNew = nitrate + dNitrateDt * dt;
            nitrate = nitrateNew;
            // Increment time
            time = time + dt;
            // Print to screen and put diagnostics into an array
            nCount = nCount + 1;
            if (nCount == nStepOut) {
                // Print values to screen
                string screenOut1 = "";
                screenOut1 += "Time: " + Math.Round(time, 0) + "; Nitrate: " + nitrate.ToString("0.00E0") + "; Biomass grp0:" + bmassN[0].ToString("0.00E0") + "; Biomass grp1:" + bmassN[1].ToString("0.00E0") + "; Biomass grp2:" + bmassN[2].ToString("0.00E0") + "; Biomass grp3:" + bmassN[3].ToString("0.00E0");
                Console.WriteLine(screenOut1);
                // nCount is the counter to decide whether to write output - reset here
                nCount = 0;
                // nOut is the position in the output array
                time_Out[nOut] = time;
                nitrate_Out[nOut] = nitrate;
                for (var i = 0; i < bmassN.Length; i++) {
                    bmassN_Out[i, nOut] = bmassN[i];
                    autotrophy_Out[i, nOut] = autotrophy[i];
                    heterotrophy_Out[i, nOut] = heterotrophy[i];
                    predation_Out[i, nOut] = predation[i];
                    respiration_Out[i, nOut] = respiration[i];
                }
                // Increment the output counter
                nOut = nOut + 1;
            }
        } // End main time loop
        //Console.WriteLine("end off loop");
        // Save output of each nitrate supply rate loop to csv writing engine
        // (the last output-array column = end state; even rows are autotrophs, odd heterotrophs).
        finalOutput.Add(new WriteOutput() {
            nsupply = sNitrate,
            bmass0 = bmassN_Out[0, (bmassN_Out.GetLength(1) - 1)],
            bmass1 = bmassN_Out[2, (bmassN_Out.GetLength(1) - 1)],
            bmass2 = bmassN_Out[4, (bmassN_Out.GetLength(1) - 1)],
            bmass3 = bmassN_Out[6, (bmassN_Out.GetLength(1) - 1)],
            hmass0 = bmassN_Out[1, (bmassN_Out.GetLength(1) - 1)],
            hmass1 = bmassN_Out[3, (bmassN_Out.GetLength(1) - 1)],
            hmass2 = bmassN_Out[5, (bmassN_Out.GetLength(1) - 1)],
            hmass3 = bmassN_Out[7, (bmassN_Out.GetLength(1) - 1)],
        });
        // Print matrix to console
        //
        //var rowCount = gmax.GetLength(0);
        //var colCount = gmax.GetLength(1);
        //for (int row = 0; row < rowCount; row++)
        //{
        //    for (int col = 0; col < colCount; col++)
        //        Console.Write(String.Format("{0}\t", gmax[row, col]));
        //    Console.WriteLine();
        //}
    }
    // Write output to csv file on disk.
    engine.WriteFile("bmass_out_test.csv", finalOutput);
}
/// <summary>
/// Maps every entry of CarDetailsObjList to a flat CarDetailsClass record and writes the
/// result as a CSV file; encoding, file extension and (optionally) the maker suffix come
/// from the configuration. Errors are logged via Core.Log and not rethrown.
/// </summary>
public void ExportCarDetails()
{
    //var engine = new FileHelperEngine<CarDetailsClass>(Encoding.UTF8);
    var engine = new FileHelperEngine <CarDetailsClass>(Encoding.GetEncoding(this.Core.Conf.DefaultCSVEncoding)) {
        HeaderText = typeof(CarDetailsClass)
            .GetCsvHeader()
    };
    var cardetails = new List <CarDetailsClass>();
    // Copy each source object into the flat CSV record type. Trailing comments give the
    // original German field names (translated) with sample values.
    foreach (CarDetailsObj car in CarDetailsObjList) {
        cardetails.Add(new CarDetailsClass() {
            ModelId = car.ModelID,
            ModelTypeId = car.ModelTypeID,
            Maker = car.Maker, // make ("Marke"), e.g. VW
            Model = car.Model, // model ("Modell"), e.g. up! 1.0
            Type = car.Type, // type ("Typ"), e.g. take up!
            Series = car.Series, // series ("Baureihe"), e.g. up!
            InternalClassName = car.InternalClassName, // manufacturer-internal series designation, e.g. AA
            ModelStart = car.ModelStart, // model start, e.g. 06/2016
            ModelEnd = car.ModelEnd, // model end
            SeriesStart = car.SeriesStart, // series start, e.g. 06/2016
            SeriesEnd = car.SeriesEnd, // series end
            HSN = car.HSN, // HSN key number, e.g. 0603
            TSN = car.TSN, // TSN key number, e.g. BGU
            TSN2 = car.TSN2, // TSN key number 2
            CarTax = car.CarTax, // vehicle tax per year, e.g. 32 Euro
            CO2Class = car.CO2Class, // CO2 efficiency class, e.g. C
            BasePrice = car.BasePrice, // base price, e.g. 9975 Euro
            // engine and drivetrain
            EngineType = car.EngineType, // engine type, e.g. Otto (spark ignition)
            Fuel = car.Fuel, // fuel type, e.g. Super
            Fuel2 = car.Fuel2, // fuel type (2nd drive), "-" if none
            EmissionControl = car.EmissionControl, // emission control, e.g. regulated
            EngineDesign = car.EngineDesign, // engine layout, e.g. inline
            Cylinder = car.Cylinder, // number of cylinders, e.g. 3
            FuelType = car.FuelType, // mixture preparation, e.g. injection
            Charge = car.Charge, // forced induction, e.g. none
            Valves = car.Valves, // number of valves, e.g. 4
            Cubic = car.Cubic, // displacement, e.g. 999 ccm
            PowerKW = car.PowerKW, // power in kW, e.g. 44
            PowerPS = car.PowerPS, // power in PS, e.g. 60
            MaxPower = car.MaxPower, // maximum power at rpm, e.g. 5000 U/min
            TurningMoment = car.TurningMoment, // torque, e.g. 95 Nm
            MaxTurningMoment = car.MaxTurningMoment, // maximum torque at rpm, e.g. 3000 U/min
            TypeOfDrive = car.TypeOfDrive, // drive type, e.g. front
            Gearing = car.Gearing, // transmission type, e.g. manual
            Gears = car.Gears, // number of gears, e.g. 5
            StartStopAutomatic = car.StartStopAutomatic, // start/stop system, "-" if none
            EmissionClass = car.EmissionClass, // emission class, e.g. Euro 6b
            // dimensions and weights
            Length = car.Length, // length, e.g. 3600 mm
            Width = car.Width, // width, e.g. 1645 mm
            Height = car.Height, // height, e.g. 1504 mm
            // body and chassis
            Chassis = car.Chassis, // body style, e.g. hatchback
            Doors = car.Doors, // number of doors, e.g. 3
            CarClass = car.CarClass, // vehicle class, e.g. microcar (like the Twingo)
            Seats = car.Seats, // number of seats, e.g. 4
            SpeedUp = car.SpeedUp, // acceleration
            MaxSpeed = car.MaxSpeed, // top speed
            Tank = car.Tank, // tank size, e.g. 35 l
            Tank2 = car.Tank2 // tank size (2nd drive), "-" if none
        });
    }
    try {
        Directory.CreateDirectory(ROOT);
        string maker = string.Empty;
        // For single-maker imports the maker name becomes part of the file name.
        if (this.Core.Importer.CurrentType == (int)ImporterBase.ImporterType.ADAC_CURRENTMAKER) {
            maker = string.Format("_{0}", this.Core.Conf.MakerName);
        }
        string fileName = Path.Combine(ROOT, string.Format("{0}{1}.{2}", this.carDetailsFileName, maker, this.Core.Conf.DefaultCSVFileExtension));
        engine.WriteFile(fileName, cardetails);
    }
    catch (Exception ex) {
        // Log and swallow: an export failure must not crash the caller.
        this.Core.Log.Error(string.Format("CSVExporter::ExportCarDetails : {0}", ex.Message));
    }
}
/// <summary>
/// Joins sales (first 100 rows) with products, product categories, locations and tickets,
/// computes the tax amount per sale, and exports the combined rows to
/// salesAndWeightData.csv in the test-results folder.
/// </summary>
public void SwingSet()
{
    // Do a basic products to locations join and return information
    using (var factory = FactoryBuilder.CreateSessionFactory(userName, passWord)) {
        using (var session = factory.OpenSession()) {
            // Materialize each table up front; the joins below run in memory (LINQ to Objects).
            var products = session.Query<products>().ToList<products>();
            var productcategories = session.Query<productcategory>().ToList<productcategory>();
            var sales = session.Query<sales>().Take<sales>(100).ToList<sales>();
            var locations = session.Query<location>().ToList<location>();
            var tickets = session.Query<tickets>().ToList<tickets>();
            // Attach the category name to every product.
            var productsWithCategory =
                from p in products
                join c in productcategories on p.productcategory equals c.id
                select new ProductsWithCategory { PId = p.id, PCat = c.name, PName = p.name };
            // Combine sales with product, location and ticket details.
            var results =
                from s in sales
                join p in productsWithCategory.ToList<ProductsWithCategory>() on s.productid equals p.PId
                join l in locations on s.location equals l.id
                join t in tickets on s.ticketid equals t.id
                select new SalesAndWeight {
                    SaleDate = Convert.ToDouble(s.datetime),
                    Category = p.PCat,
                    Product = p.PName,
                    Weight = s.weight,
                    Total = Convert.ToDouble(s.total),
                    Refunded = s.refunded,
                    Deleted = s.deleted,
                    UserId = t.userid,
                    Location = l.name,
                    Strain = s.strain,
                    TaxCategory = s.taxcat,
                    TaxPerc = Convert.ToDouble(s.tax),
                    Price = s.price,
                    TaxAmount = s.price * Convert.ToDouble(s.tax) // tax derived from price * rate, not stored
                };
            // NOTE(review): 'results' is a deferred query whose closures still reference these
            // lists, so nulling the locals does not free them before WriteFile enumerates —
            // confirm whether this clean-up is actually needed.
            products = null;
            productcategories = null;
            tickets = null;
            sales = null;
            locations = null;
            var engine = new FileHelperEngine(typeof(SalesAndWeight));
            // engine.HeaderText = typeof(SalesAndWeight).GetCsvHeader();
            engine.WriteFile(BioTrackEngine.TestResultsFolder + "\\salesAndWeightData.csv", results);
        }
    }
}
/// <summary>
/// Serializes the given records to <paramref name="pathWrite"/> using FileHelpers.
/// </summary>
/// <param name="recordWrites">Records to write out.</param>
/// <param name="pathWrite">Destination file path.</param>
private static void writeRecords(IEnumerable<NetBankRecordWrite> recordWrites, string pathWrite)
{
    // A fresh engine per call keeps this helper stateless.
    FileHelperEngine<NetBankRecordWrite> writer = new FileHelperEngine<NetBankRecordWrite>();
    writer.WriteFile(pathWrite, recordWrites);
}
/// <summary>
/// Scrapes Google's canonical country list, keeps a fixed set of country/continent IDs,
/// appends synthetic continent entries and writes everything to db/Nodes.csv via FileHelpers.
/// </summary>
static void Main(string[] args)
{
    // The docs site requires TLS 1.2; configure it before the request is made.
    ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls12;
    string baseHtml = "";
    // FIX: WebClient is IDisposable and was never disposed; scope it in a using block.
    using (WebClient client = new WebClient())
    {
        byte[] pageContent = client.DownloadData("https://developers.google.com/public-data/docs/canonical/countries_csv");
        UTF8Encoding utf = new UTF8Encoding();
        baseHtml = utf.GetString(pageContent);
    }
    HtmlDocument htmlDoc = new HtmlDocument();
    htmlDoc.LoadHtml(baseHtml);
    var allCountryList = new List <Country>();
    // First table row is the header — skip it.
    var trs = htmlDoc.DocumentNode.SelectNodes("//table//tr").Skip(1);
    foreach (var tr in trs)
    {
        var tds = tr.SelectNodes("td");
        // Cell mapping per the assignments below: [0]=id, [1]=latitude, [2]=longitude, [3]=name.
        allCountryList.Add(new Country() { Id = tds[0].InnerText, Name = tds[3].InnerText, Latitude = tds[1].InnerText, Longitude = tds[2].InnerText });
    }
    var selectedCountryIds = new List <String>() { "RO", "AT", "BE", "BG", "CH", "CY", "DK", "EE", "FR", "FI", "DE", "GR", "IE", "IT", "LU", "LV", "LT", "MT", "NL", "PL", "PT", "GB", "CZ", "SK", "SI", "ES", "SE", "HU", "HR", "RU", "BY", "RS", "MK", "ME", "MD", "NO", "TR", "UA", "AFR", "NAM", "SAM", "ASI", "OCE" };
    var selectedCountries = allCountryList.Where(c => selectedCountryIds.Contains(c.Id)).ToList();
    selectedCountries.Sort((c1, c2) => c1.Id.CompareTo(c2.Id));
    // Continent pseudo-entries are appended manually after the sort, so they end up
    // at the tail of the output file.
    selectedCountries.AddRange(new List <Country>() {
        new Country() { Id = "AFR", Name = "Africa", Latitude = "15.454166", Longitude = "18.732207" },
        new Country() { Id = "NAM", Name = "America", Latitude = "37.09024", Longitude = "-95.712891" },
        new Country() { Id = "SAM", Name = "South America", Latitude = "-14.235004", Longitude = "-51.92528" },
        new Country() { Id = "ASI", Name = "Asia", Latitude = "35.86166", Longitude = "104.195397" },
        new Country() { Id = "OCE", Name = "Oceania", Latitude = "-0.789275", Longitude = "113.921327" }
    });
    //Write to CSV
    var engine = new FileHelperEngine <Country> { HeaderText = "Id,Label,Lat,Lon" };
    engine.WriteFile("..//..//..//db//Nodes.csv", selectedCountries);
}
/// <summary>
/// Post-processes NSE equity bhavcopy (CMP) files: merges deliverable quantity from the
/// matching MTO file and the security name from the SEC master into each EQ/BE row,
/// rewrites the file with a charting-friendly header, builds a reduced EQ/BE-only record
/// set, and copies the results into the configured output-format folders.
/// Progress and errors are reported through <paramref name="AddMessageToLog"/>.
/// </summary>
public void ExecuteNSEEQUITYProcessing(string[] strMTOArr, string[] strNSEArr, string strNSESEC, string strOutputFormat, string strOutputFolder, Action <string> AddMessageToLog)
{
    FileHelperEngine engineMTO = new FileHelperEngine(typeof(NSEMTO));
    DelimitedClassBuilder cb = BuildNSECMPFile(); // NOTE(review): built but never used below — confirm whether it is needed
    FileHelperEngine engineCMP = new FileHelperEngine(typeof(NSECMP));
    FileHelperEngine engineSEC = new FileHelperEngine(typeof(NSESEC));
    foreach (string obj in strNSEArr) {
        //Get NSE Equity Filename day, month, year
        int index = obj.IndexOf("cm");
        string day = obj.Substring(index + 2, 2);
        string monthname = obj.Substring(index + 4, 3);
        string year = obj.Substring(index + 7, 4);
        // Convert the three-letter month name into a zero-padded month number.
        int month = Convert.ToDateTime("01-" + monthname + "-2011").Month;
        if (month < 10) {
            monthname = "0";
        }
        else {
            monthname = "";
        }
        monthname += month.ToString();
        // Derive the matching MTO (delivery data) file name for the same trading day.
        string MTOfilename = "MTO_" + day + monthname + year + ".DAT";
        string MTOfilenamewithpath = GetFileNameWithPath(strMTOArr, MTOfilename);
        if (!File.Exists(MTOfilenamewithpath)) {
            AddMessageToLog("File " + MTOfilenamewithpath + " does not exist!");
            continue;
        }
        NSEMTO[] resmto = engineMTO.ReadFile(MTOfilenamewithpath) as NSEMTO[];
        if (!File.Exists(obj)) {
            AddMessageToLog("File " + obj + " does not exist!");
            continue;
        }
        NSECMP[] rescmp = engineCMP.ReadFile(obj) as NSECMP[];
        if (!File.Exists(strNSESEC)) {
            AddMessageToLog("File " + strNSESEC + " does not exist!");
            continue;
        }
        NSESEC[] ressec = engineSEC.ReadFile(strNSESEC) as NSESEC[];
        int iTotalRows = rescmp.Length;
        // Enrich every EQ/BE row with deliverable quantity (MTO) and security name (SEC).
        // NOTE(review): the inner linear scans make this O(n*m); a dictionary keyed on
        // symbol would make the lookups O(1) if the files grow large.
        for (int i = 0; i < iTotalRows; i++) {
            if (rescmp[i].Series == "EQ" || rescmp[i].Series == "BE") {
                //Copy OI from MTO
                for (int j = 0; j < resmto.Length; j++) {
                    if ((resmto[j].NameOfSecurity == (string)rescmp[i].Symbol) && (resmto[j].series == (string)rescmp[i].Series)) {
                        rescmp[i].OI = resmto[j].DeliverableQty;
                        break;
                    }
                }
                //Copy Security Name from SEC
                for (int j = 0; j < ressec.Length; j++) {
                    if ((ressec[j].Symbol == (string)rescmp[i].Symbol)) {
                        rescmp[i].SecurityName = ressec[j].SecurityName;
                        break;
                    }
                }
            }
        }
        //engineCMP.HeaderText = "Symbol,Series,Open,High,Low,Close,Last,PrevClose,Tottrdqty,Tottrdval,Timestamp,Totaltrades,Isin,OI,SecurityName";
        //Dump File data
        engineCMP.HeaderText = "Ticker,Series,Open,High,Low,Close,Last,PrevClose,Volume,Tottrdval,Date,Totaltrades,Isin,OPENINT,NAME";
        // Overwrite the source file in place with the enriched rows.
        engineCMP.WriteFile(obj, rescmp);
        int totrows = 0;
        int itmp = 0;
        int cnt = 0;
        //Calculate number of rows which have series as EQ or BE and are not NULL
        while (cnt < rescmp.Length) {
            if (rescmp[cnt].Series == "EQ" || rescmp[cnt].Series == "BE") {
                totrows++;
            }
            cnt++;
        }
        NSECMPFINAL[] finalarr = new NSECMPFINAL[totrows];
        DateTime myDate;
        itmp = 0;
        int icntr = 0;
        // Build the reduced EQ/BE-only record set with yyyyMMdd-formatted dates.
        while (icntr < rescmp.Length) {
            if (rescmp[icntr].Series == "EQ" || rescmp[icntr].Series == "BE") {
                finalarr[itmp] = new NSECMPFINAL();
                finalarr[itmp].Ticker = rescmp[icntr].Symbol;
                finalarr[itmp].Name = rescmp[icntr].SecurityName;
                myDate = DateTime.Parse(rescmp[icntr].Timestamp);
                finalarr[itmp].Date = String.Format("{0:yyyyMMdd}", myDate);
                finalarr[itmp].Open = rescmp[icntr].Open;
                finalarr[itmp].High = rescmp[icntr].High;
                finalarr[itmp].Low = rescmp[icntr].Low;
                finalarr[itmp].Close = rescmp[icntr].Close;
                finalarr[itmp].Volume = rescmp[icntr].Tottrdqty;
                finalarr[itmp].OpenInt = rescmp[icntr].OI;
                itmp++;
            }
            icntr++;
        }
        FileHelperEngine engineCMPFINAL = new FileHelperEngine(typeof(NSECMPFINAL));
        engineCMPFINAL.HeaderText = "Ticker,Name,Date,Open,High,Low,Close,Volume,Openint";
        // Overwrite the source file again, this time with the reduced record set.
        engineCMPFINAL.WriteFile(obj, finalarr);
        //FileHelpers.CsvOptions options = new FileHelpers.CsvOptions("ImportRecord", ',', obj);
        //options.HeaderLines = 1;
        //FileHelperEngine test = new FileHelpers.CsvEngine(options);
        ////DataTable header = test.ReadStringAsDT(FileHelpers.CommonEngine.RawReadFirstLines(obj, 1));
        ////test.Options.IgnoreFirstLines = 0;
        //DataTable dttest = test.ReadFileAsDT(obj);
        string[] words = null;
        words = strOutputFormat.Split(',');
        //Get Filename
        index = obj.IndexOf("cm");
        string fname = obj.Substring(index, 19);
        string folder;
        // Fan the processed file out to each requested output-format folder.
        foreach (string item in words) {
            string outputfoldername = Path.Combine(strOutputFolder, item, "NSE", "Equity");
            if (item == "StdCSV" || item == "Metastock" || item == "Ninja" || item == "FCharts") {
                folder = Path.Combine(strOutputFolder, item, "NSE", "Equity", fname);
                if (!Directory.Exists(outputfoldername)) {
                    AddMessageToLog("Directory " + outputfoldername + " does not exist!");
                }
                else {
                    File.Copy(obj, folder, true);
                }
            }
            else if (item == "Amibroker") {
                // AmiBroker variant: rewrite with IgnoreFirstLines set — presumably to drop
                // the header line from the output; confirm against FileHelpers semantics.
                engineCMPFINAL.Options.IgnoreFirstLines = 1;
                engineCMPFINAL.WriteFile(obj, finalarr);
                folder = Path.Combine(strOutputFolder, item, "NSE", "Equity", fname);
                if (!Directory.Exists(outputfoldername)) {
                    AddMessageToLog("Directory " + outputfoldername + " does not exist!");
                }
                else {
                    File.Copy(obj, folder, true);
                }
            }
        }
        // The working copy is no longer needed once it has been distributed.
        File.Delete(obj);
    }
}
/// <summary>
/// Writes <paramref name="values"/> to <paramref name="path"/> using the class-level engine.
/// </summary>
/// <remarks>
/// NOTE(review): <paramref name="delimiter"/> is currently unused — presumably the engine
/// is preconfigured with its delimiter; confirm whether this parameter should be applied
/// here or removed from the signature.
/// </remarks>
public virtual void WriteData(IList <T> values, string path, string delimiter)
{
    //engine.HeaderText = headerText;
    engine.WriteFile(path, values);
}
/// <summary>
/// Reads every config file passed on the command line, extracts part nodes that carry a
/// "ModuleDataTransmitter" module (antennas) and writes them as "{Name}.csv".
/// Prints usage and exits when no file paths are supplied.
/// </summary>
static void Main(string[] args)
{
    var filePaths = args?.ToList();
    if (filePaths == null || filePaths.Count == 0) {
        Common.PrintUsage(Name);
        return;
    }
    Console.WriteLine($"{Name}:");
    Console.WriteLine();
    var results = new List <ExportRecord>();
    var parser = new Parser();
    foreach (var filePath in filePaths) {
        Console.WriteLine($"Path: {filePath}");
        using (var stream = File.Open(filePath, FileMode.Open)) {
            Console.WriteLine($"Length: {stream.Length}");
            var configFile = parser.ParseConfigFile(filePath, stream);
            var nodes = configFile.RootNode.Descendants();
            var partNodes = nodes.Where(x => x.Type == NodeType.Part).ToList();
            Console.WriteLine(
                $"Part nodes: {partNodes.Count}"
            );
            // A part is an antenna when any of its modules is named "ModuleDataTransmitter".
            // FIX: materialize the query once — previously this deferred IEnumerable was
            // enumerated twice (Count() below and AddRange after it), re-running the filter.
            var partsWithAntennas = partNodes
                .Where(x => x.Nodes
                    .Any(n => n.Type == NodeType.Module
                        && n.AttributeDefinitions.Any(ad => ad.Name == "name"
                            && ad.Value == "ModuleDataTransmitter"
                        )
                    )
                )
                .ToList();
            Console.WriteLine(
                $"Part nodes with antennas: {partsWithAntennas.Count}"
            );
            results.AddRange(ConvertNodesToExportRecords(
                filePath,
                partsWithAntennas
            ));
        }
        Console.WriteLine($"Finished {filePath}");
        Console.WriteLine();
    }
    Console.WriteLine($"Writing {Name}.csv");
    var engine = new FileHelperEngine <ExportRecord>();
    engine.HeaderText = engine.GetFileHeader();
    engine.WriteFile($"{Name}.csv", results);
    Console.WriteLine("Completed.");
    Console.WriteLine();
}
/// <summary>
/// Pulls all consignment requests picked up between <paramref name="startDate"/> and
/// <paramref name="endDate"/> from the OST API (paged), flattens them into the ESoc
/// delimited layout — one header record ("9") per batch plus one item record ("1") per
/// consignment, with a new batch started after every 20 items — and writes the result
/// as a timestamped text file.
/// </summary>
public async Task RunAsync(string startDate, string endDate)
{
    var engine = new FileHelperEngine <ESocDelimited>();
    var eSocFiles = new List <ESocDelimited>();
    var consigmentRequests = new List <ConsigmentRequest>();
    var client = new HttpClient();
    client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", m_ostAdminToken);
    // Page through the pickup API until every consignment request in the range is loaded.
    bool requestPickup = true;
    int pickupCount = 0;
    int pickupPage = 1;
    int pickupSize = 20;
    while (requestPickup) {
        var requestUri = new Uri($"{m_ostBaseUrl}/api/consigment-requests/pickedup-all-within-range/{startDate}/{endDate}/?size={pickupSize}&page={pickupPage}");
        var response = await client.GetAsync(requestUri);
        var output = string.Empty;
        if (response.IsSuccessStatusCode) {
            Console.WriteLine($"RequestUri: {requestUri.ToString()}");
            Console.WriteLine($"Status: {(int)response.StatusCode} {response.ReasonPhrase.ToString()}");
            output = await response.Content.ReadAsStringAsync();
        }
        else {
            // Abort the whole export on the first failed request.
            Console.WriteLine($"RequestUri: {requestUri.ToString()}");
            Console.WriteLine($"Status: {(int)response.StatusCode} {response.ReasonPhrase.ToString()}");
            return;
        }
        var json = JObject.Parse(output).SelectToken("_results");
        foreach (var jtok in json) {
            var consigmentRequest = jtok.ToJson().DeserializeFromJson <ConsigmentRequest>();
            consigmentRequests.Add(consigmentRequest);
            Console.WriteLine($"Pickup Number: {consigmentRequest.Pickup.Number} .....");
        }
        Console.WriteLine($"Pickup count: {consigmentRequests.Count} .....");
        // Stop when the current page reaches or passes the server-reported total.
        pickupCount = JObject.Parse(output).SelectToken("_count").Value <int>();
        pickupPage = JObject.Parse(output).SelectToken("_page").Value <int>();
        pickupSize = JObject.Parse(output).SelectToken("_size").Value <int>();
        if ((pickupPage * pickupSize) >= pickupCount) {
            requestPickup = false;
        }
        pickupPage++;
    }
    // Flatten each request into ESoc records: a "9" header row followed by "1" item rows.
    int sequenceNumberCount = 1;
    foreach (var consigmentRequest in consigmentRequests) {
        var eSocFileHeader = new ESocDelimited {
            Indicator = "9", // header record marker
            OrderType = "01",
            SalesOrg = "1000",
            DistributionChannel = "60",
            Division = "10",
            SoldToPartyAccountNumber = consigmentRequest.UserId,
            CourierIdHeader = "00392557", //TODO
            CourierNameHeader = "YUSRI", //TODO
            ConsignmentAcceptanceTimeStamp = consigmentRequest.CreatedDate,
            BranchCodeHeader = "5312", //TODO
            // "-" marks fields that only apply to item records.
            CourierIdItem = "-",
            ShipToPartyPostcode = "-",
            ProductCodeMaterial = "-",
            Quantity = "-",
            BranchCodeItem = "-",
            Agent = "-",
            ConNoteNumberParent = "-",
            ConNoteNumberChild = "-",
            Weight = "-",
            CustomerDeclaredWeight = "-",
            VolumetricDimension = "-",
            VolumetricWeight = "-",
            ValueAdded = "-",
            SurchargeCode = "-",
            SumInsured = "-",
            SubAccountRef = "-",
            RecipientRefNumber = "-",
            Zone = "-",
            CountryCode = "-",
            ItemCategoryType = "-",
            MpsIndicator = "-",
            OddItemAmount = "-",
            OddItemDescription = "-",
            PickupNumber = consigmentRequest.Pickup.Number,
            Mhl = "0",
            Batch = string.Format("{0:00000}", sequenceNumberCount)
        };
        eSocFiles.Add(eSocFileHeader);
        int itemCount = 1;
        foreach (var consigment in consigmentRequest.Consignments) {
            var eSocFileItem = new ESocDelimited {
                Indicator = "1", // item record marker
                OrderType = "-",
                SalesOrg = "-",
                DistributionChannel = "-",
                Division = "-",
                SoldToPartyAccountNumber = consigmentRequest.UserId,
                CourierIdHeader = "-",
                CourierNameHeader = "-",
                ConsignmentAcceptanceTimeStamp = consigmentRequest.CreatedDate,
                BranchCodeHeader = "-",
                CourierIdItem = "00392557", //TODO
                ShipToPartyPostcode = (consigment.Produk.IsInternational) ? "-" : consigment.Penerima.Address.Postcode,
                // International shipments get a distinct material code.
                ProductCodeMaterial = (consigment.Produk.IsInternational) ? "80000001" : "80000000",
                Quantity = "1",
                BranchCodeItem = "5312", //TODO
                Agent = "01",
                ConNoteNumberParent = consigment.ConNote,
                ConNoteNumberChild = "-", //TODO
                Weight = "-", //TODO
                CustomerDeclaredWeight = consigment.Produk.Weight.ToString("0.000"),
                VolumetricDimension = $"{consigment.Produk.Length.ToString("0.00")}x{consigment.Produk.Width.ToString("0.00")}x{consigment.Produk.Height.ToString("0.00")}",
                VolumetricWeight = GetVolumetricWeight(consigment).ToString("0.000"),
                ValueAdded = "1101",
                SurchargeCode = "0101",
                SumInsured = consigment.Produk.ValueAddedDeclaredValue.ToString("0.00"),
                SubAccountRef = consigmentRequest.UserId,
                RecipientRefNumber = "-",
                Zone = "01",
                CountryCode = consigment.Penerima.Address.Country,
                ItemCategoryType = consigment.Produk.ItemCategory,
                MpsIndicator = (consigment.IsMps) ? "02" : "01",
                OddItemAmount = "0.00",
                OddItemDescription = "-",
                PickupNumber = "-",
                Mhl = "-",
                Batch = string.Format("{0:00000}", sequenceNumberCount)
            };
            itemCount++;
            eSocFiles.Add(eSocFileItem);
            // Start a new batch (repeat the header record) after every 20 items.
            if (itemCount > 20) {
                itemCount = 1;
                eSocFiles.Add(eSocFileHeader);
                sequenceNumberCount++;
            }
        }
        sequenceNumberCount++;
    }
    // File name encodes the timestamp and the number of item (non-header) records.
    var path = $@"{m_eSocFolder}\est_esoc_hq_{DateTime.Now:yyyyMMdd-HHmmss}_{eSocFiles.Count - (sequenceNumberCount - 1)}.txt";
    engine.WriteFile(path, eSocFiles);
    await Task.Delay(100);
}
/// <summary>
/// Round-trip test: records written with a footer must read back with the same count,
/// and reading the file must replace the engine's placeholder FooterText with the
/// footer stored in the file.
/// </summary>
public void DiscardWriteRead()
{
    const string tempFile = "tempo.txt";

    var engine = new FileHelperEngine<DiscardLastType1>();
    DiscardLastType1[] firstRead = TestCommon.ReadTest<DiscardLastType1>(engine, "Good", "DiscardLast1.txt");

    // Write the records out with the expected footer attached.
    engine.FooterText = expectedShortFooterText;
    engine.WriteFile(tempFile, firstRead);

    // Set a placeholder footer; reading the temp file back must overwrite it.
    engine.FooterText = "none none";
    DiscardLastType1[] secondRead = engine.ReadFile(@"tempo.txt");

    Assert.AreEqual(firstRead.Length, secondRead.Length);
    Assert.AreEqual(expectedShortFooterText, engine.FooterText);

    // Clean up the temporary file before the remaining content checks.
    if (File.Exists(tempFile))
        File.Delete(tempFile);

    Assert.AreEqual(4, firstRead.Length);
    Assert.AreEqual(new DateTime(1314, 12, 11), firstRead[0].Field1);
}
/// <summary>
/// Benchmark: loads a flat file with FileHelpers, stores the records into an indexed
/// NDatabase object database, then compares counting "EGYPT" records via a full
/// FileHelpers re-read against an NDatabase LINQ query, printing elapsed times.
/// </summary>
static void Main()
{
    Console.WriteLine("1. Clean old data.");
    OdbFactory.Delete("FileOut.ndb");
    OdbFactory.Delete("FileOut.txt");
    TimeSnapshot();
    Console.WriteLine("2. Read input file.");
    var engine = new FileHelperEngine(typeof(ExportData));
    var res = engine.ReadFile("FileIn.txt") as ExportData[];
    TimeSnapshot();
    Console.WriteLine("3. Prepare NDatabase db.");
    using (var odb = OdbFactory.Open("FileOut.ndb")) {
        Console.WriteLine("3a. Store items into NDatabase.");
        foreach (var exportData in res) {
            odb.Store(exportData);
        }
        TimeSnapshot();
        Console.WriteLine("3b. Create index on NDatabase.");
        // Index on the country field so the query below can use it.
        odb.IndexManagerFor <ExportData>().AddIndexOn("countryIndex", new[] { "CountryOrArea" });
    }
    TimeSnapshot();
    Console.WriteLine("4. Prepare final input file.");
    // To Write Use:
    engine.WriteFile("FileOut.txt", res);
    TimeSnapshot();
    Console.WriteLine("5. Start counting EGYPT by FileHelpers.");
    var stopwatch = new Stopwatch();
    stopwatch.Start();
    // FileHelpers approach: re-parse the entire flat file, then count in memory.
    res = engine.ReadFile("FileOut.txt") as ExportData[];
    var count = res.Count(x => x.CountryOrArea.Equals("EGYPT"));
    stopwatch.Stop();
    Console.WriteLine("Egypt items: {0}", count);
    Console.WriteLine("Ellapsed: {0} ms", stopwatch.ElapsedMilliseconds);
    Console.WriteLine("5. Start counting EGYPT by NDatabase.");
    stopwatch.Reset();
    stopwatch.Start();
    // NDatabase approach: LINQ query against the indexed object database.
    long count2;
    using (var odb = OdbFactory.Open("FileOut.ndb")) {
        count2 = (from data in odb.AsQueryable <ExportData>() where data.CountryOrArea.Equals("EGYPT") select data).Count();
    }
    stopwatch.Stop();
    Console.WriteLine("Egypt items: {0}", count2);
    Console.WriteLine("Ellapsed: {0} ms", stopwatch.ElapsedMilliseconds);
    TimeSnapshot();
}
static void EventHandling()
{
    Console.WriteLine("Reading Using EventHandlers ...");
    Console.WriteLine();

    // Set up the FileHelpers engine and attach the read/write event hooks.
    FileHelperEngine engine = new FileHelperEngine(typeof(Customer));
    engine.BeforeReadRecord += new BeforeReadRecordHandler(BeforeReadRecord);
    engine.AfterWriteRecord += new AfterWriteRecordHandler(AfterWriteRecord);

    Customer[] customers = (Customer[]) engine.ReadFile(@"..\Data\CustomersDelimited.txt");

    // Since version 1.4.0 the read can even be written in a single line:
    // Customer[] customers = (Customer[]) CommonEngine.ReadFile(typeof(Customer), @"..\Data\CustomersDelimited.txt");

    // Application-specific processing of the records goes here.
    foreach (Customer customer in customers)
    {
        Console.WriteLine("Customer: " + customer.CustId.ToString() + " - " + customer.Name);
    }
    Console.ReadLine();

    Console.WriteLine("Writing data to a delimited file...");
    Console.WriteLine();

    // Write the same records back out to a temp file.
    engine.WriteFile("temp.txt", customers);
    Console.WriteLine("Data successful written !!!");
    Console.ReadLine();

    if (File.Exists("temp.txt"))
        File.Delete("temp.txt");
}
static void Main(string[] args)
{
    Console.WriteLine("IPG Academic Roundtable-Survey Data Processor Started");

    // Read the raw survey export, skipping the header row.
    var engine = new FileHelperEngine<SurveyResponse>();
    engine.Options.IgnoreFirstLines = 1;
    var result = engine.ReadFile(@"C:\Users\jabarnwe\Source\Repos\OSSCELA-IPG-Roundtable\IPG-Roundtable\PatentDataAcquisition\Data\IPG_Academic_Roundtable-Wisdom_of_Crowds_on_Patents_and_Open_Source_v2.csv");

    List<OrganizationIPSentiment> orgIPSentiment = new List<OrganizationIPSentiment>();

    // Helper: creates one per-organization record, appends it to the flat
    // output list, and returns it so the caller can set the two sentiment
    // columns. Replaces 13 copy-pasted create/add/assign blocks; the list
    // order is identical to the original code (Add happens before the
    // sentiment properties are set, which is equivalent for a reference type).
    Func<string, OrganizationIPSentiment> addOrganization = organization =>
    {
        OrganizationIPSentiment entry = new OrganizationIPSentiment();
        entry.Organization = organization;
        orgIPSentiment.Add(entry);
        return entry;
    };

    foreach (var response in result)
    {
        // Progress/debug echo of one raw column, kept from the original.
        Console.WriteLine(response.Amazon_Supports_Open_Source);

        var amazon = addOrganization("Amazon");
        amazon.PatentSupport = response.Amazon_Supports_Patent_Rights;
        amazon.OpenSourceSupport = response.Amazon_Supports_Open_Source;

        var apple = addOrganization("Apple");
        apple.PatentSupport = response.Apple_Supports_Patent_Rights;
        apple.OpenSourceSupport = response.Apple_Supports_Open_Source;

        var cisco = addOrganization("Cisco");
        cisco.PatentSupport = response.Cisco_Supports_Patent_Rights;
        cisco.OpenSourceSupport = response.Cisco_Supports_Open_Source;

        var facebook = addOrganization("Facebook");
        facebook.PatentSupport = response.Facebook_Supports_Patent_Rights;
        facebook.OpenSourceSupport = response.Facebook_Supports_Open_Source;

        // Fujitsu was disabled in the original code (no sentiment columns
        // were mapped for it), so it is intentionally omitted here:
        // var fujitsu = addOrganization("Fujitsu");

        var google = addOrganization("Google");
        google.PatentSupport = response.Google_Supports_Patent_Rights;
        google.OpenSourceSupport = response.Google_Supports_Open_Source;

        var huawei = addOrganization("Huawei");
        huawei.PatentSupport = response.Huawei_Supports_Patent_Rights;
        huawei.OpenSourceSupport = response.Huawei_Supports_Open_Source;

        var ibm = addOrganization("IBM");
        ibm.PatentSupport = response.IBM_Supports_Patent_Rights;
        ibm.OpenSourceSupport = response.IBM_Supports_Open_Source;

        var intel = addOrganization("Intel");
        intel.PatentSupport = response.Intel_Supports_Patent_Rights;
        intel.OpenSourceSupport = response.Intel_Supports_Open_Source;

        var microsoft = addOrganization("Microsoft");
        microsoft.PatentSupport = response.Microsoft_Supports_Patent_Rights;
        microsoft.OpenSourceSupport = response.Microsoft_Supports_Open_Source;

        var oracle = addOrganization("Oracle");
        oracle.PatentSupport = response.Oracle_Supports_Patent_Rights;
        oracle.OpenSourceSupport = response.Oracle_Supports_Open_Source;

        var qualcomm = addOrganization("Qualcomm");
        qualcomm.PatentSupport = response.Qualcomm_Supports_Patent_Rights;
        qualcomm.OpenSourceSupport = response.Qualcomm_Supports_Open_Source;

        var samsung = addOrganization("Samsung");
        samsung.PatentSupport = response.Samsung_Supports_Patent_Rights;
        samsung.OpenSourceSupport = response.Samsung_Supports_Open_Source;
    }

    // Emit the reshaped (organization, patent, open-source) records with a
    // generated header row.
    var engine2 = new FileHelperEngine<OrganizationIPSentiment>();
    engine2.HeaderText = engine2.GetFileHeader();
    engine2.WriteFile(@"C:\Users\jabarnwe\Source\Repos\OSSCELA-IPG-Roundtable\IPG-Roundtable\PatentDataAcquisition\Data\IPG_Academic_Roundtable-Patents_and_Open_Source_Sentiment.csv", orgIPSentiment);
    Console.ReadLine();
}
/// <summary>
/// Converts BSE equity bhavcopy CSV files into a final Ticker/OHLCV/OpenInt
/// layout (merging delivery quantity from the matching SCBSEALL*.TXT file)
/// and copies the result into per-output-format folders.
/// </summary>
/// <param name="strBSECSVArr">Full paths of the BSE equity CSV files to process.</param>
/// <param name="strSCBTXTArr">Full paths of the candidate SCBSEALL*.TXT files.</param>
/// <param name="strOutputFormat">Comma-separated list of output format names.</param>
/// <param name="strOutputFolder">Root folder under which per-format copies are placed.</param>
/// <param name="AddMessageToLog">Callback used to report missing input files.</param>
public void ExecuteBSEEQUITYProcessing(string[] strBSECSVArr, string[] strSCBTXTArr, string strOutputFormat, string strOutputFolder, Action<string> AddMessageToLog)
{
    FileHelperEngine engineBSECSV = new FileHelperEngine(typeof(BSECSV));
    // NOTE(review): cb is never used below; BuildNSECMPFile() may be called
    // for side effects only — confirm before removing.
    DelimitedClassBuilder cb = BuildNSECMPFile();
    FileHelperEngine engineSCBTXT = new FileHelperEngine(typeof(SCBTXT));
    foreach (string obj in strBSECSVArr)
    {
        // Extract day/month/year from the file name; assumes names like
        // "EQddmmyy..." (date starts at index 2) — TODO confirm format.
        string[] words = obj.Split('\\');
        string strbseequityfilename = words[words.Length - 1];
        string strday = strbseequityfilename.Substring(2, 2);
        string strmon = strbseequityfilename.Substring(4, 2);
        string stryear = strbseequityfilename.Substring(6, 2);
        int index = obj.IndexOf("EQ");   // NOTE(review): unused
        string dt = strbseequityfilename.Substring(2, 6);   // NOTE(review): unused (date string kept for the commented parsing below)
        // Matching delivery file is keyed on day+month only (Substring(2, 4)).
        string scbtxtfilename = "SCBSEALL" + strbseequityfilename.Substring(2, 4) + ".TXT";
        if (!File.Exists(obj))
        {
            AddMessageToLog("File " + strbseequityfilename + " does not exist!");
            continue;
        }
        string SCBSETXTfilenamewithpath = GetFileNameWithPath(strSCBTXTArr, scbtxtfilename);
        if (!File.Exists(SCBSETXTfilenamewithpath))
        {
            AddMessageToLog("File " + scbtxtfilename + " does not exist!");
            continue;
        }
        BSECSV[] resbsecsv = engineBSECSV.ReadFile(obj) as BSECSV[];
        SCBTXT[] resscbtxt = engineSCBTXT.ReadFile(SCBSETXTfilenamewithpath) as SCBTXT[];
        int iTotalRows = resbsecsv.Length;
        // O(n*m) join: copy delivery quantity into openint where scrip codes match.
        for (int i = 0; i < iTotalRows; i++)
        {
            //Copy OI from MTO
            for (int j = 0; j < resscbtxt.Length; j++)
            {
                if (resbsecsv[i].sc_code == resscbtxt[j].scripcode)
                {
                    resbsecsv[i].openint = resscbtxt[j].deliveryqty;
                    break;
                }
            }
        }
        int totrows = 0;   // NOTE(review): totrows/itmp/cnt/myDate are unused leftovers
        int itmp = 0;
        int cnt = 0;
        BSECSVFINAL[] finalarr = new BSECSVFINAL[resbsecsv.Length];
        DateTime myDate;
        itmp = 0;
        int icntr = 0;
        // Build the output rows in the final column layout.
        while (icntr < resbsecsv.Length)
        {
            finalarr[icntr] = new BSECSVFINAL();
            finalarr[icntr].ticker = resbsecsv[icntr].sc_code;
            finalarr[icntr].name = resbsecsv[icntr].sc_name;
            //myDate = Convert.ToDateTime(dt);
            //myDate = DateTime.ParseExact(dt, "ddMMyyyy", CultureInfo.InvariantCulture);
            //myDate=Convert.ToDateTime(strday + "-"+ strmon + "-20" + stryear);
            //finalarr[itmp].date = myDate.ToString("yyyyMMdd"); //String.Format("{0:yyyyMMdd}", dt);
            // Date built as yyyyMMdd by string concatenation; assumes the
            // two-digit year is 20xx.
            finalarr[icntr].date = "20" + stryear + strmon + strday; // String.Format("{0:yyyyMMdd}", myDate);
            finalarr[icntr].open = resbsecsv[icntr].open;
            finalarr[icntr].high = resbsecsv[icntr].high;
            finalarr[icntr].low = resbsecsv[icntr].low;
            finalarr[icntr].close = resbsecsv[icntr].close;
            finalarr[icntr].volume = resbsecsv[icntr].no_of_shrs;
            // Default unmatched open interest to 0; the null comparison
            // implies openint is a nullable/reference type — TODO confirm.
            if ((resbsecsv[icntr].openint) == null)
            {
                resbsecsv[icntr].openint = 0;
            }
            finalarr[icntr].openint = resbsecsv[icntr].openint; //enint;
            icntr++;
        }
        FileHelperEngine engineBSECSVFINAL = new FileHelperEngine(typeof(BSECSVFINAL));
        engineBSECSVFINAL.HeaderText = "Ticker,Name,Date,Open,High,Low,Close,Volume,OPENINT";
        // NOTE(review): the input file is overwritten in place with the
        // converted data, then copied out and finally deleted below.
        engineBSECSVFINAL.WriteFile(obj, finalarr);
        string folder;
        words = null;
        words = strOutputFormat.Split(',');
        foreach (string item in words)
        {
            if (item == "StdCSV" || item == "Metastock" || item == "Ninja" || item == "FCharts")
            {
                folder = Path.Combine(strOutputFolder, item, "BSE", "Equity", strbseequityfilename);
                File.Copy(obj, folder, true);
            }
            else if (item == "Amibroker")
            {
                // NOTE(review): IgnoreFirstLines is a *read* option in
                // FileHelpers; presumably the intent was to omit the header
                // when rewriting for Amibroker — verify the produced file.
                engineBSECSVFINAL.Options.IgnoreFirstLines = 1;
                engineBSECSVFINAL.WriteFile(obj, finalarr);
                folder = Path.Combine(strOutputFolder, item, "BSE", "Equity", strbseequityfilename);
                File.Copy(obj, folder, true);
            }
        }
        File.Delete(obj);
    }
}
/// <summary>
/// Runs an earnings-driven backtest for one quarter: filters earnings signals,
/// joins them with daily price data and SPY, computes per-record beta, applies
/// the exit strategies, and appends a per-day P&amp;L report to DailyReport\R.csv.
/// </summary>
/// <param name="quarter">Encoded as year*10 + quarter number, e.g. 20131 = Q1 2013.</param>
/// <param name="threshold">Minimum absolute earnings-move threshold for a signal.</param>
/// <param name="buyinpower">Per-position buying power used for the bp column.</param>
private static void backtest(int quarter, decimal threshold, int buyinpower)
{
    TimeSpan st = DateTime.Now.TimeOfDay;
    #region hoilday dictionary
    // Load market holidays (one yyyyMMdd date per line) into a lookup keyed
    // by the raw string. NOTE(review): the loop exits only via the break on
    // an empty line — it assumes the file ends with a blank line; a null
    // (EOF without blank line) would make ParseExact throw. earningDatas is
    // never used afterwards.
    bool flag3;
    Dictionary<string, DateTime> strs3 = new Dictionary<string, DateTime>();
    List<EarningData> earningDatas = new List<EarningData>();
    FileStream fileStream1 = new FileStream("holiday.txt", FileMode.Open);
    StreamReader streamReader1 = new StreamReader(fileStream1);
    do
    {
        string str1 = streamReader1.ReadLine();
        if (!(str1 == ""))
        {
            strs3[str1] = DateTime.ParseExact(str1, "yyyyMMdd", CultureInfo.InvariantCulture, DateTimeStyles.None);
            flag3 = (str1 == null ? false : str1 != "");
        }
        else
        {
            break;
        }
    }
    while (flag3);
    streamReader1.Close();
    streamReader1.Dispose();
    fileStream1.Close();
    fileStream1.Dispose();
    #endregion
    #region get the quarter
    // Decode the quarter argument into begin/end date strings (yyyyMMdd).
    // The end date is mid-month (the 15th) of the quarter's last month.
    int year = quarter / 10;
    string begindate = year.ToString();
    string enddate = year.ToString();
    if (quarter % 10 == 1) { begindate = string.Concat(begindate, "0101"); enddate = string.Concat(enddate, "0315"); }
    if (quarter % 10 == 2) { begindate = string.Concat(begindate, "0401"); enddate = string.Concat(enddate, "0615"); }
    if (quarter % 10 == 3) { begindate = string.Concat(begindate, "0701"); enddate = string.Concat(enddate, "0915"); }
    if (quarter % 10 == 4) { begindate = string.Concat(begindate, "1001"); enddate = string.Concat(enddate, "1215"); }
    #endregion
    #region create bio tech symbols Dic
    // Load symbols to exclude (one per line in bio.csv) into a set-like
    // dictionary; stops at the first empty line or EOF.
    bool flag2;
    Dictionary<string, string> bioArry = new Dictionary<string, string>();
    FileStream fileStream = new FileStream("bio.csv", FileMode.Open);
    StreamReader streamReader = new StreamReader(fileStream);
    streamReader.BaseStream.Seek((long)0, SeekOrigin.Begin);
    string str2 = streamReader.ReadLine();
    while (true)
    {
        flag2 = (str2 == null ? false : str2 != "");
        if (!flag2)
        {
            break;
        }
        bioArry[str2] = str2;
        str2 = streamReader.ReadLine();
    }
    streamReader.Close();
    streamReader.Dispose();
    fileStream.Close();
    fileStream.Dispose();
    #endregion
    #region get right the earingdata
    // Load the quarter's daily price data and the earnings-date file, then
    // keep signals that: fall inside the quarter window, clear the absolute
    // threshold, broke the prior high (long) or low (short), are not in the
    // excluded bio list, and trade above 5. The filtered set is written to
    // temp.csv and immediately re-read (presumably to normalize types via
    // the FileHelpers round trip — TODO confirm this is intentional).
    FileHelperEngine EngineDBq = new FileHelperEngine(typeof(DailyData1));
    DailyData1[] resDB = (DailyData1[])EngineDBq.ReadFile(string.Format("Daily\\DailyQua\\{0}.csv", quarter));
    FileHelperEngine EngineEarning = new FileHelperEngine(typeof(unionearingdate));
    unionearingdate[] unionearingdateArray = (unionearingdate[])EngineEarning.ReadFile("Daily\\final.csv");
    var selectEarningdata = unionearingdateArray.Where(d => d.date >= DateTime.ParseExact(begindate, "yyyyMMdd", CultureInfo.InvariantCulture, DateTimeStyles.None) && d.date <= DateTime.ParseExact(enddate, "yyyyMMdd", CultureInfo.InvariantCulture, DateTimeStyles.None)).
        Where(d => Math.Abs(d.threshold) >= threshold).
        Where(d => (d.high >= d.maxhigh && d.threshold > 0) || (d.low <= d.minlow && d.threshold < 0)).
        Where(d => { bool symbolExit; symbolExit = !bioArry.ContainsKey(d.symbol); return symbolExit; }).
        Where(d => d.high >= 5);
    EngineEarning.WriteFile("Daily\\temp.csv", selectEarningdata);
    unionearingdate[] earningdata = (unionearingdate[])EngineEarning.ReadFile("Daily\\temp.csv");
    #endregion
    #region combin earning and spy in daily report
    #region select data from DB
    // Cross join: for each signal, take all daily rows of that symbol from
    // the signal date through the end of the quarter.
    var spy = resDB.Where(d => d.symbol == "SPY");
    var selectDB = from db in resDB
                   from e in earningdata
                   where (db.symbol == e.symbol) && (db.date >= e.date)
                   select new DailyDataout { symbol = db.symbol, open = db.open, high = db.high, low = db.low, close = db.close, volume = db.volume, adv = db.adv, threshold = e.threshold, date = db.date };
    selectDB = selectDB.Where(dd => dd.date <= DateTime.ParseExact(enddate, "yyyyMMdd", CultureInfo.InvariantCulture, DateTimeStyles.None));
    FileHelperEngine EngineOut = new FileHelperEngine(typeof(DailyDataout));
    EngineOut.HeaderText = "symbol,date,open,high,low,close,volume,Exist signal price,pricein,priceout,threshold,spy close,spy open,spy high, spy low,adv,win10,lose10,overnight pnl,day pnl,pnl,spy overnight pnl,spy day pnl, spy pnl,bp,beta";
    EngineOut.WriteFile("Daily\\temp.csv", selectDB);
    #endregion
    #region combine spy
    // Re-read the joined rows and attach the same-day SPY OHLC to each one.
    DailyDataout[] selectD = (DailyDataout[])EngineOut.ReadFile("Daily\\temp.csv");
    var combinspy = from db in selectD
                    join e in spy on db.date equals e.date
                    select new DailyDataout { symbol = db.symbol, open = db.open, high = db.high, low = db.low, close = db.close, volume = db.volume, adv = db.adv, threshold = db.threshold, date = db.date, spyclose = e.close, spyopen = e.open, spylow = e.low, spyhigh = e.high };
    #endregion
    // Hard-coded switch: false selects the close-to-open beta variant below.
    bool yc2tc = false;
    #region add beta
    if (yc2tc)
    {
        #region get beta close to close
        // Close-to-close relative return vs SPY, in percent; needs the
        // previous row of the same symbol, so the first row of each symbol
        // gets no beta.
        List<DailyDataout> newcombinspy = new List<DailyDataout>();
        string symbol = "";
        decimal beta = 0;
        decimal yesterdayClose = 0;
        decimal yesterdaySPYClose = 0;
        foreach (var data in combinspy)
        {
            // NOTE(review): tempdailyout is immediately re-assigned to the
            // same reference as data, so the new DailyDataout() is discarded
            // and the source row itself is mutated.
            DailyDataout tempdailyout = new DailyDataout();
            tempdailyout = data;
            if (tempdailyout.symbol == symbol)
            {
                beta = ((tempdailyout.close / yesterdayClose) / (tempdailyout.spyclose / yesterdaySPYClose) - 1) * 100;
                tempdailyout.beta = beta;
                yesterdayClose = tempdailyout.close;
                yesterdaySPYClose = tempdailyout.spyclose;
            }
            else
            {
                symbol = tempdailyout.symbol;
                yesterdayClose = tempdailyout.close;
                yesterdaySPYClose = tempdailyout.spyclose;
            }
            newcombinspy.Add(tempdailyout);
        }
        #endregion
        EngineOut.WriteFile("Daily\\temp.csv", newcombinspy);
    }
    else
    {
        #region get beta close to open
        // Intraday (close/open) relative return vs SPY, in percent; computed
        // for every row, so the else branch is identical except it does not
        // store beta into the first row of a new symbol.
        List<DailyDataout> newcombinspy = new List<DailyDataout>();
        string symbol = "";
        decimal beta = 0;
        foreach (var data in combinspy)
        {
            // NOTE(review): same reference-aliasing pattern as above.
            DailyDataout tempdailyout = new DailyDataout();
            tempdailyout = data;
            if (tempdailyout.symbol == symbol)
            {
                beta = ((tempdailyout.close / tempdailyout.open) / (tempdailyout.spyclose / tempdailyout.spyopen) - 1) * 100;
                tempdailyout.beta = beta;
            }
            else
            {
                symbol = tempdailyout.symbol;
                beta = ((tempdailyout.close / tempdailyout.open) / (tempdailyout.spyclose / tempdailyout.spyopen) - 1) * 100;
            }
            newcombinspy.Add(tempdailyout);
        }
        #endregion
        EngineOut.WriteFile("Daily\\temp.csv", newcombinspy);
    }
    #endregion
    #endregion
    #region date thrshold
    // Walk lastDate back to the last actual trading day before the quarter
    // end, accounting for weekends and the holiday set loaded above.
    DateTime lastDate = DateTime.ParseExact(enddate, "yyyyMMdd", CultureInfo.InvariantCulture, DateTimeStyles.None);
    if (lastDate.DayOfWeek == DayOfWeek.Sunday)
    {
        if (strs3.ContainsKey(lastDate.AddDays(-2).ToString("yyyyMMdd")) || strs3.ContainsKey(lastDate.AddDays(-3).ToString("yyyyMMdd"))) { lastDate = lastDate.AddDays(-4); }
        else lastDate = lastDate.AddDays(-3);
    }
    else if (lastDate.DayOfWeek == DayOfWeek.Saturday)
    {
        if (strs3.ContainsKey(lastDate.AddDays(-1).ToString("yyyyMMdd")) || strs3.ContainsKey(lastDate.AddDays(-2).ToString("yyyyMMdd"))) { lastDate = lastDate.AddDays(-3); }
        else lastDate = lastDate.AddDays(-2);
    }
    else if (lastDate.DayOfWeek == DayOfWeek.Monday)
    {
        if (strs3.ContainsKey(lastDate.ToString("yyyyMMdd")) || strs3.ContainsKey(lastDate.AddDays(-3).ToString("yyyyMMdd"))) { lastDate = lastDate.AddDays(-4); }
        else lastDate = lastDate.AddDays(-3);
    }
    else if (lastDate.DayOfWeek == DayOfWeek.Tuesday)
    {
        if (strs3.ContainsKey(lastDate.ToString("yyyyMMdd")) || strs3.ContainsKey(lastDate.AddDays(-1).ToString("yyyyMMdd"))) { lastDate = lastDate.AddDays(-4); }
        else lastDate = lastDate.AddDays(-1);
    }
    else
    {
        if (strs3.ContainsKey(lastDate.ToString("yyyyMMdd")) || strs3.ContainsKey(lastDate.AddDays(-1).ToString("yyyyMMdd"))) { lastDate = lastDate.AddDays(-2); }
        else lastDate = lastDate.AddDays(-1);
    }
    #endregion
    // Run the exit strategy and per-symbol grouping; these write the
    // DailyReport\{quarter}.{threshold}.D.csv file read back below.
    strategy2ExitbySumBeta(quarter, threshold, buyinpower, (decimal)5, lastDate);
    DailyGroupbysymbol(quarter, threshold);
    #region create daily report
    //FileHelperEngine enginDR = new FileHelperEngine(typeof(DailyDataout));
    //enginDR.WriteFile(string.Format("DailyReport\\{0}.{1}.D.csv", quarter, Convert.ToInt32(threshold * new decimal(100))), ReportEaring);
    // Aggregate per-record results into one row per trading day.
    DailyDataout[] ReportEaring = (DailyDataout[])EngineOut.ReadFile(string.Format("DailyReport\\{0}.{1}.D.csv", quarter, Convert.ToInt32(threshold * new decimal(100))));
    var DailyReport = ReportEaring.GroupBy(d => d.date).Select(d => new DailyReport { date = d.Key, pnl = d.Sum(dd => dd.pnl) + d.Sum(dd => dd.spypnl), pnlovernight = d.Sum(dd => dd.overnightpnl) + d.Sum(dd => dd.overnightspypnl), pnlday = d.Sum(dd => dd.daypnl) + d.Sum(dd => dd.dayspypnl), longcount = d.Where(dd => dd.threshold > 0).Count(), shortcount = d.Where(dd => dd.threshold < 0).Count(), longpnl = d.Where(dd => dd.threshold > 0).Sum(dd => dd.pnl), shortpnl = d.Where(dd => dd.threshold < 0).Sum(dd => dd.pnl), qua = quarter, lose10 = d.Sum(dd => dd.lose10), win10 = d.Sum(dd => dd.win10), hspypnl = d.Sum(dd => dd.spypnl), }).OrderBy(d => d.date);
    #endregion
    #region get the bp and accpnl daily
    // Running cumulative P&L plus buying power = max(long, short) * 2 * bp.
    List<DailyReport> newDailyReport = new List<DailyReport>();
    decimal accPnl = 0;
    foreach (var date in DailyReport)
    {
        // NOTE(review): same aliasing pattern — the new DailyReport() is
        // discarded and the grouped row itself is mutated.
        DailyReport tempdailyreport = new DailyReport();
        tempdailyreport = date;
        accPnl = accPnl + date.pnl;
        tempdailyreport.accPnl = accPnl;
        tempdailyreport.bp = Math.Max(date.longcount, date.shortcount) * 2 * buyinpower;
        newDailyReport.Add(tempdailyreport);
    }
    #endregion
    #region get the dollar sharpe
    // Dollar Sharpe: average daily P&L over annualized running standard
    // deviation. avgPnl divides total P&L by the number of *active* days
    // (pnl != 0); division by the active-day count can throw if there are
    // none — assumes at least one non-zero day.
    decimal avgPnl = newDailyReport.Select(d => d.accPnl).Last() / newDailyReport.Where(d => d.pnl != 0).Count();
    List<DailyReport> finalDailyReport = new List<DailyReport>();
    decimal SD = 0;
    int count = 1;
    foreach (var date in newDailyReport)
    {
        DailyReport tempdailyreport = new DailyReport();
        tempdailyreport = date;
        SD = SD + (date.pnl - avgPnl) * (date.pnl - avgPnl);
        // Skipped on the first row (no variance yet). date and
        // tempdailyreport alias the same object, so this lands in the output.
        if (count != 1) date.dollarSharp = avgPnl / (decimal)(Math.Sqrt(Convert.ToDouble(SD) / ((count - 1) * 252)));
        finalDailyReport.Add(tempdailyreport);
        count++;
    }
    FileHelperEngine fileHelperEngine5 = new FileHelperEngine(typeof(DailyReport));
    fileHelperEngine5.HeaderText = "date,longpnl,shortpnl,longcount,shortcount,quarter,dollarsharp,bp,win10,lose10,spy hedge pnl,pnl overnight, pnl day,pnl,acc pnl";
    // NOTE(review): the format string "DailyReport\\R.csv" has no
    // placeholders, so the quarter/threshold arguments are ignored — every
    // quarter appends to the same R.csv. Confirm that is intended.
    if (!File.Exists("DailyReport\\R.csv"))
    {
        fileHelperEngine5.WriteFile(string.Format("DailyReport\\R.csv", quarter, Convert.ToInt32(threshold * new decimal(100))), finalDailyReport);
    }
    else
    {
        fileHelperEngine5.AppendToFile(string.Format("DailyReport\\R.csv", quarter, Convert.ToInt32(threshold * new decimal(100))), finalDailyReport);
    }
    #endregion
    TimeSpan en = DateTime.Now.TimeOfDay;
    Console.WriteLine(en - st);
}