private static void GenGameDocs_OnCommand(CommandEventArgs e)
{
    csv = new CsvFile();
    AppDomain.CurrentDomain.GetAssemblies()
        .SelectMany(a => a.GetTypes())
        .Where(t => t.IsClass && t.Namespace == "Server.Mobiles" &&
                    typeof(Mobiles.BaseCreature).IsAssignableFrom(t))
        .ToList()
        .ForEach(t => ConsumeType(t, HandleBaseCreature));
    csv.Write("Creatures.csv");

    csv = new CsvFile();
    AppDomain.CurrentDomain.GetAssemblies()
        .SelectMany(a => a.GetTypes())
        .Where(t => t.IsClass && t.Namespace == "Server.Items" &&
                    typeof(Items.BaseWeapon).IsAssignableFrom(t))
        .ToList()
        .ForEach(t => ConsumeType(t, HandleBaseWeapon));
    csv.Write("Weapons.csv");

    csv = new CsvFile();
    AppDomain.CurrentDomain.GetAssemblies()
        .SelectMany(a => a.GetTypes())
        .Where(t => t.IsClass && t.Namespace == "Server.Items" &&
                    typeof(Items.BaseArmor).IsAssignableFrom(t))
        .ToList()
        .ForEach(t => ConsumeType(t, HandleBaseArmor));
    csv.Write("Armor.csv");
}
public static CsvFile Download(AbsoluteUri location)
{
    if (null == location)
    {
        throw new ArgumentNullException("location");
    }

    CsvFile csv = null;
    var request = WebRequest.Create((Uri)location);
    using (var response = request.GetResponse())
    {
        using (var stream = response.GetResponseStream())
        {
            if (null != stream)
            {
                using (var reader = new StreamReader(stream))
                {
#if NET20
                    var file = new FileInfo(StringExtensionMethods.FormatWith("{0}.csv", AlphaDecimal.Random()));
                    FileInfoExtensionMethods.Create(file, reader.ReadToEnd());
#else
                    var file = new FileInfo("{0}.csv".FormatWith(AlphaDecimal.Random()));
                    file.Create(reader.ReadToEnd());
#endif
                    csv = new CsvFile(file);
                }
            }
        }
    }

    return csv;
}
private static void dummy()
{
    using (var csvFile = new CsvFile<Client>("clients.csv"))
    {
        for (int i = 0; i < 1000000; i++)
        {
            var user = Client.RandomClient();
            csvFile.Append(user);
        }
    }
}
public void SaveData()
{
    // Write each location to a row in the CSV file
    using (var file = new CsvFile<PennySquisherLocation>(CsvFile))
    {
        Locations.ForEach(l => file.Append(l));
    }

    // Also write the data to JSON
    var data = JsonConvert.SerializeObject(this);
    File.WriteAllText(JsonFile, data);
}
/// <summary>
/// Writes csv content to a file
/// </summary>
/// <param name="csvFile">CsvFile</param>
/// <param name="filePath">File path</param>
/// <param name="encoding">Encoding</param>
public void WriteCsv(CsvFile csvFile, string filePath, Encoding encoding)
{
    if (File.Exists(filePath))
        File.Delete(filePath);

    using (StreamWriter writer = new StreamWriter(filePath, false, encoding ?? Encoding.Default))
    {
        WriteToStream(csvFile, writer);
        writer.Flush();
        writer.Close();
    }
}
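A minimal usage sketch for the overload above (not part of the original example; the file name and values are illustrative). Passing an explicit UTF-8 encoding avoids the machine-dependent Encoding.Default fallback:

CsvFile csvFile = new CsvFile();
csvFile.Headers.Add("Name");
csvFile.Headers.Add("City");
CsvRecord record = new CsvRecord();
record.Fields.Add("Ada");
record.Fields.Add("London");
csvFile.Records.Add(record);

using (CsvWriter writer = new CsvWriter())
{
    // Explicit UTF-8 rather than null, so output does not depend on the OS ANSI code page.
    writer.WriteCsv(csvFile, "people.csv", Encoding.UTF8);
}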
public void AddCsv(string filename)
{
    CsvFile file = new CsvFile(filename);
    if (!CsvFiles.Contains(file))
    {
        CsvFiles.Add(file);
        csvD.InitialDirectory = Path.GetDirectoryName(filename);
        databaseD.InitialDirectory = csvD.InitialDirectory;
        if (string.IsNullOrWhiteSpace(outputTB.Text))
        {
            outputTB.Text = csvD.InitialDirectory;
        }
        CheckMods(file);
        Application.DoEvents();
    }
}
private static void CreateRandomClientsCsvFile()
{
    Console.WriteLine();
    Console.WriteLine("Creating Random Clients...");
    using (var csvFile = new CsvFile<Client>("clients.csv"))
    {
        for (int i = 0; i < 1000000; i++)
        {
            var user = Client.RandomClient();
            user.ClientId = i + 100;
            csvFile.Append(user);
            if ((i + 1) % 1000 == 0)
                Console.Write(string.Format("Writing {0} ({1}/{2})\r", "clients.csv", i + 1, 1000000));
        }
    }
    Console.WriteLine();
}
public static void SetMappers(List<Mapper> mappers)
{
    var csvFile = new CsvFile();
    csvFile.Headers.Add("RegexPattern");
    csvFile.Headers.Add("Category");
    foreach (var mapper in mappers)
    {
        var record = new CsvRecord();
        record.Fields.Add(mapper.RegexPattern);
        record.Fields.Add(mapper.Category);
        csvFile.Records.Add(record);
    }

    using (CsvWriter writer = new CsvWriter())
        writer.WriteCsv(csvFile, "t" + path);
    File.Delete(path);
    File.Move("t" + path, path);
}
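The Delete-then-Move swap above leaves a brief window where the target file does not exist. A hedged sketch of an alternative using the same temp-file convention (assuming path is a plain file path on the same volume): File.Replace performs the swap as a single operation and can keep a backup of the original.

using (CsvWriter writer = new CsvWriter())
    writer.WriteCsv(csvFile, "t" + path);

if (File.Exists(path))
    File.Replace("t" + path, path, path + ".bak"); // swap with backup; no missing-file window
else
    File.Move("t" + path, path);                   // first write: nothing to replace yet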
private static void WriteHjAndQTables(string shiftFileName, string qFileName, UsgsRatingTable ratingTable)
{
    CsvFile.WriteToCSV(ratingTable.hjTable, shiftFileName, false);
    CsvFile.WriteToCSV(ratingTable.qTable, qFileName, false);
}
public IActionResult Download(string password)
{
    if (!string.Equals(password, Environment.GetEnvironmentVariable("PASSWORD_EXPORT")))
    {
        throw new Exception("Password incorrect.");
    }

    var modelTypeCsvFilenameGetter = new ModelTypeCsvFilenameGetter();
    var consentCsvFile = new CsvFile<ConsentCsvModel, ConsentCsvModel.Map>(modelTypeCsvFilenameGetter);
    var demographicsCsvFile = new CsvFile<DemographicsCsvModel, DemographicsCsvModel.Map>(modelTypeCsvFilenameGetter);
    var psqiCsvFile = new CsvFile<PSQICsvModel, PSQICsvModel.Map>(modelTypeCsvFilenameGetter);
    var epworthCsvFile = new CsvFile<EpworthCsvModel, EpworthCsvModel.Map>(modelTypeCsvFilenameGetter);
    var stanfordCsvFile = new CsvFile<StanfordCsvModel, StanfordCsvModel.Map>(modelTypeCsvFilenameGetter);
    var phaseSetsCsvFile = new CsvFile<PhaseSetsCsvModel, PhaseSetsCsvModel.Map>(modelTypeCsvFilenameGetter);
    var responseDataCsvFile = new CsvFile<ResponseDataCsvModel, ResponseDataCsvModel.Map>(modelTypeCsvFilenameGetter);
    var sessionMetaCsvFile = new CsvFile<SessionMetaCsvModel, SessionMetaCsvModel.Map>(modelTypeCsvFilenameGetter);
    var sleepQuestionsCsvFile = new CsvFile<SleepQuestionsCsvModel, SleepQuestionsCsvModel.Map>(modelTypeCsvFilenameGetter);

    var consentCsvFilePath = consentCsvFile.Write(GetCsvConsents());
    var demographicsCsvFilePath = demographicsCsvFile.Write(GetCsvDemographics());
    var psqiCsvFilePath = psqiCsvFile.Write(GetCsvPsqi());
    var epworthCsvFilePath = epworthCsvFile.Write(GetCsvEpworth());
    var stanfordCsvFilePath = stanfordCsvFile.Write(GetCsvStanford());
    var phaseSetsCsvFilePath = phaseSetsCsvFile.Write(GetCsvPhaseSets());
    var sessionMetaCsvFilePath = sessionMetaCsvFile.Write(GetSessionMetaCsv());
    var sleepQuestionsCsvFilePath = sleepQuestionsCsvFile.Write(GetSleepQuestionsCsv());

    var zipFilePaths = new List<string>
    {
        consentCsvFilePath,
        demographicsCsvFilePath,
        psqiCsvFilePath,
        epworthCsvFilePath,
        stanfordCsvFilePath,
        phaseSetsCsvFilePath,
        sessionMetaCsvFilePath,
        sleepQuestionsCsvFilePath
    };

    var sessionIDs = _dbContext.ResponseDatas.Select(x => x.SessionID).Distinct();
    foreach (var sessionID in sessionIDs)
    {
        var sessionResponses = _dbContext.ResponseDatas.Where(x => Guid.Equals(sessionID, x.SessionID));
        var sessionFirstResponse = sessionResponses.First();
        var participantID = sessionFirstResponse.ParticipantID;
        var testName = sessionFirstResponse.TestName;
        var lastWhenOfThisSession = sessionResponses.Select(x => x.WhenUtc).Max();
        var lastWhenOfAllSessionsOfThisTest = _dbContext.ResponseDatas
            .Where(x => string.Equals(participantID, x.ParticipantID) && string.Equals(testName, x.TestName))
            .Select(x => x.WhenUtc)
            .Max();
        var responseData = sessionResponses.OrderBy(x => x.WhenUtc).First();
        var filename = $"{responseData.ParticipantID}_{responseData.TestName}{(DateTime.Equals(lastWhenOfThisSession, lastWhenOfAllSessionsOfThisTest) ? string.Empty : $"_{responseData.WhenUtc.ToString("yyyyMMddHHmmss")}")}";
        var responseDataCsvFilePath = responseDataCsvFile.WithFilename(filename).Write(GetCsvResponseData(sessionID));
        zipFilePaths.Add(responseDataCsvFilePath);
    }

    var zipFiles = zipFilePaths.Select(x => (x, System.IO.File.ReadAllBytes(x))).ToList();
    var zipArchive = GetZipArchive(zipFiles);
    zipFilePaths.ForEach(System.IO.File.Delete);

    var content = new System.IO.MemoryStream(zipArchive);
    var contentType = "APPLICATION/octet-stream";
    var fileName = $"MemoryStudy_{DateTime.Now.ToString("yyyyMMddHHmmss")}.zip";
    return File(content, contentType, fileName);
}
public ActionResult Index(HttpPostedFileBase upload)
{
    if (upload != null && upload.ContentLength > 0)
    {
        if (upload.FileName.EndsWith(".csv"))
        {
            try
            {
                CsvFile csv = new CsvFile(Path.Combine(Server.MapPath("~/App_Data"), Path.GetFileName(upload.FileName)));
                upload.SaveAs(csv.path);
                var tableRows = csv.readFile();
                var newEmployees = new List<employeeCons>();
                foreach (string[] tableRow in tableRows)
                {
                    int column = 0;
                    var newEmployee = new employeeCons();
                    bool addEmployee = true;
                    foreach (string tableColumn in tableRow)
                    {
                        switch (column)
                        {
                            case 0:
                                // Staff IDs have at most 5 digits (some have fewer),
                                // so only an upper bound is enforced here.
                                if (tableColumn.Length > 5)
                                {
                                    addEmployee = false;
                                    break;
                                }
                                int ID;
                                int.TryParse(tableColumn, out ID);
                                newEmployee.employeeObject.staffID = ID;
                                break;
                            case 1:
                                newEmployee.employeeObject.firstName = tableColumn;
                                break;
                            case 2:
                                newEmployee.employeeObject.lastName = tableColumn;
                                break;
                            case 3:
                                // The username can't be empty, must contain a '.', and can't contain spaces.
                                if (tableColumn == "" || !tableColumn.Contains(".") || tableColumn.Contains(" "))
                                {
                                    addEmployee = false;
                                    break;
                                }
                                // If there's nothing after the dot, the username isn't valid.
                                string afterDot = tableColumn.Substring(tableColumn.LastIndexOf('.') + 1);
                                if (afterDot == "")
                                {
                                    addEmployee = false;
                                    break;
                                }
                                // Usernames cannot have more than one '.'.
                                if (tableColumn.Count(x => x == '.') > 1)
                                {
                                    addEmployee = false;
                                    break;
                                }
                                newEmployee.employeeObject.userName = tableColumn;
                                break;
                            case 4:
                                newEmployee.employeeObject.designation = tableColumn;
                                break;
                            case 5:
                                newEmployee.employeeObject.deptName = tableColumn;
                                break;
                            case 6:
                                newEmployee.employeeObject.gender = tableColumn[0];
                                break;
                            case 7:
                                /* DateTime startDate = DateTime.ParseExact(tableColumn, "MM/dd/yyyy",
                                       System.Globalization.CultureInfo.InvariantCulture); */
                                DateTime startDate = DateTime.Parse(tableColumn);
                                newEmployee.employeeObject.empStartDate = startDate;
                                break;
                            case 8:
                                newEmployee.employeeObject.email = tableColumn;
                                break;
                            case 9:
                            case 10:
                            case 11:
                                newEmployee.roles.Add(tableColumn);
                                break;
                            case 12:
                                newEmployee.employeeObject.phoneNo = tableColumn;
                                break;
                            case 13:
                                int annual;
                                int.TryParse(tableColumn, out annual);
                                newEmployee.balances.annual = annual;
                                break;
                            case 14:
                                int maternity;
                                int.TryParse(tableColumn, out maternity);
                                newEmployee.balances.maternity = maternity;
                                break;
                            case 15:
                                int sick;
                                int.TryParse(tableColumn, out sick);
                                newEmployee.balances.sick = sick;
                                break;
                            case 16:
                                int compassionate;
                                int.TryParse(tableColumn, out compassionate);
                                newEmployee.balances.compassionate = compassionate;
                                break;
                            case 17:
                                int dil;
                                int.TryParse(tableColumn, out dil);
                                newEmployee.balances.daysInLieue = dil;
                                break;
                            case 18:
                                int hours;
                                int.TryParse(tableColumn, out hours);
                                newEmployee.balances.shortLeaveHours = hours;
                                break;
                        } // end of switch

                        if (!addEmployee)
                        {
                            break;
                        }
                        newEmployee.employeeObject.password = RandomPassword.Generate(7, 7);
                        column++;
                    } // end of tableColumn foreach

                    if (addEmployee)
                    {
                        newEmployees.Add(newEmployee);
                    }
                    column = 0;
                } // end of tableRow foreach

                foreach (var employee in newEmployees)
                {
                    var connectionString = ConfigurationManager.ConnectionStrings["DefaultConnection"].ConnectionString;

                    // Check if an employee with the same ID or username already exists.
                    // If one does, move on to the next employee.
                    string queryString = "SELECT Employee_ID, User_Name FROM dbo.Employee WHERE Employee_ID = " +
                        employee.employeeObject.staffID + " OR User_Name = '" + employee.employeeObject.userName + "'";
                    using (var connection = new SqlConnection(connectionString))
                    {
                        var command = new SqlCommand(queryString, connection);
                        connection.Open();
                        using (var reader = command.ExecuteReader())
                        {
                            bool toContinue = false;
                            while (reader.Read())
                            {
                                toContinue = true;
                                break;
                            }
                            if (toContinue)
                            {
                                continue;
                            }
                        }
                        connection.Close();
                    }
                    // End check for existing employee

                    // Insert employee data into the database table
                    queryString = "INSERT INTO dbo.Employee (Employee_ID, First_Name, " +
                        "Last_Name, User_Name, Password, Designation, Email, Gender, PH_No, " +
                        "Emp_Start_Date, Account_Status) VALUES('" + employee.employeeObject.staffID +
                        "', '" + employee.employeeObject.firstName + "', '" + employee.employeeObject.lastName +
                        "', '" + employee.employeeObject.userName +
                        "', '" + employee.employeeObject.password + "', '" + employee.employeeObject.designation +
                        "', '" + employee.employeeObject.email + "', '" + employee.employeeObject.gender +
                        "', '" + employee.employeeObject.phoneNo + "', '" + employee.employeeObject.empStartDate +
                        "', '" + "True" + "')";
                    using (var connection = new SqlConnection(connectionString))
                    {
                        var command = new SqlCommand(queryString, connection);
                        connection.Open();
                        using (var reader = command.ExecuteReader())
                            connection.Close();
                    }
                    // End employee insertion

                    // Employee balances insertion.
                    // Let's assume that all the leave types and their appropriate max
                    // balances will be included in the database from the get-go.
                    for (int i = 0; i <= 5; i++)
                    {
                        int balance = 0;
                        int employeeID = employee.employeeObject.staffID;
                        int leaveID = 0;
                        switch (i)
                        {
                            case 0: balance = employee.balances.annual;          leaveID = 1; break;
                            case 1: balance = employee.balances.maternity;       leaveID = 2; break;
                            case 2: balance = employee.balances.sick;            leaveID = 3; break;
                            case 3: balance = employee.balances.compassionate;   leaveID = 4; break;
                            case 4: balance = employee.balances.daysInLieue;     leaveID = 5; break;
                            case 5: balance = employee.balances.shortLeaveHours; leaveID = 6; break;
                        } // end of switch

                        queryString = "INSERT INTO dbo.Leave_Balance (Employee_ID, Leave_ID, Balance) " +
                            "VALUES ('" + employeeID + "', '" + leaveID + "', '" + balance + "')";
                        using (var connection = new SqlConnection(connectionString))
                        {
                            var command = new SqlCommand(queryString, connection);
                            connection.Open();
                            using (var reader = command.ExecuteReader())
                                connection.Close();
                        }
                    }
                    // End employee balances insertion

                    // Employee roles insertion
                    foreach (var role in employee.roles)
                    {
                        int roleID = 0;
                        queryString = "SELECT Role_ID FROM dbo.Role WHERE Role_Name = '" + role + "'";
                        using (var connection = new SqlConnection(connectionString))
                        {
                            var command = new SqlCommand(queryString, connection);
                            connection.Open();
                            using (var reader = command.ExecuteReader())
                            {
                                while (reader.Read())
                                {
                                    roleID = (int)reader[0];
                                }
                            }
                            connection.Close();
                        }
                        if (roleID == 0)
                        {
                            continue;
                        }

                        queryString = "INSERT INTO dbo.Employee_Role (Employee_ID, Role_ID) " +
                            "VALUES ('" + employee.employeeObject.staffID + "', '" + roleID + "')";
                        using (var connection = new SqlConnection(connectionString))
                        {
                            var command = new SqlCommand(queryString, connection);
                            connection.Open();
                            using (var reader = command.ExecuteReader())
                                connection.Close();
                        }
                    }
                    // End employee roles insertion
                }

                /* I tried having only a single connection open for adding all of the entries;
                 * it gave an error saying I need to close the previous connection for a new one.
                 *
                 * var connectionString = ConfigurationManager.ConnectionStrings["DefaultConnection"].ConnectionString;
                 *
                 * using (var connection = new SqlConnection(connectionString))
                 * {
                 *     connection.Open();
                 *     foreach (var employee in newEmployees)
                 *     {
                 *         string queryString = "INSERT INTO dbo.Employee (Employee_ID, First_Name, " +
                 *             "Last_Name, User_Name, Password, Designation, Email, Gender, PH_No, " +
                 *             "Emp_Start_Date, Account_Status) VALUES('" + employee.employeeObject.staffID +
                 *             "', '" + employee.employeeObject.firstName + "', '" + employee.employeeObject.lastName +
                 *             "', '" + employee.employeeObject.userName +
                 *             "', '" + employee.employeeObject.password + "', '" + employee.employeeObject.designation +
                 *             "', '" + employee.employeeObject.email + "', '" + employee.employeeObject.gender +
                 *             "', '" + employee.employeeObject.phoneNo + "', '" + employee.employeeObject.empStartDate +
                 *             "', '" + "True" + "')";
                 *
                 *         var command = new SqlCommand(queryString, connection);
                 *         command.ExecuteReader();
                 *     }
                 *     connection.Close();
                 * }*/

                ViewBag.Message = "File uploaded successfully";
            }
            catch (Exception ex)
            {
                ViewBag.Message = "ERROR:" + ex.Message.ToString();
            }
        }
        else
        {
            ViewBag.Message = "File type not supported";
            ModelState.AddModelError("upload", "The file type is not supported");
            return View();
        }
    }
    else
    {
        ViewBag.Message = "You have not specified a file";
    }
    tempEmp.firstName = upload.FileName;
    return View(tempEmp);
}
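Every query in the example above is built by string concatenation, so any CSV field containing a quote can break or subvert the SQL. A hedged sketch of the existence check rewritten with ADO.NET parameters (same dbo.Employee table and columns as above; requires using System.Data.SqlClient); the INSERT statements would take parameters the same way and run via ExecuteNonQuery rather than ExecuteReader:

private static bool EmployeeExists(string connectionString, int staffId, string userName)
{
    const string query = "SELECT 1 FROM dbo.Employee WHERE Employee_ID = @id OR User_Name = @userName";
    using (var connection = new SqlConnection(connectionString))
    using (var command = new SqlCommand(query, connection))
    {
        command.Parameters.AddWithValue("@id", staffId);
        command.Parameters.AddWithValue("@userName", userName);
        connection.Open();
        using (var reader = command.ExecuteReader())
            return reader.Read(); // any row means the employee already exists
    }
}

// In the employee loop, the check then collapses to:
// if (EmployeeExists(connectionString, employee.employeeObject.staffID, employee.employeeObject.userName)) continue;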
private void ProcessPage(string[] lines, DateTime t1, DateTime t2)
{
    /*
     * Wed Jan 19 06:57:29 PST 2011 NRCS National Water and Climate Center - Provisional Data - subject to revision
     * Site Id,Date,Time (),WTEQ.I-1 (in) ,PREC.I-1 (in) ,TOBS.I-1 (degC) ,TMAX.D-1 (degC) ,TMIN.D-1 (degC) ,TAVG.D-1 (degC) ,
     * 679,2000-10-01,, 0.0, 0.0, 7.6, 9.1, 7.6, 8.5,
     * 679,2000-10-02,, 0.0, 0.4, 0.0, 9.2, 0.0, 4.4,
     * ...
     * 679,2001-09-30,, 0.0, 76.4, 10.0, 15.4, 4.2, 9.2,
     * 679,2001-09-30,23:59, 0.2,-99.9, 13.3, 20.4, 9.4, 14.0,
     */
    int idxTime = 2; // index of the time stamp (used to ignore the 23:59 entry)

    TextFile tf = new TextFile(lines);
    int idx = tf.IndexOf("Site Id");
    if (idx < 0)
    {
        return;
    }

    // find the column index for the parameter
    string[] tokens = CsvFile.ParseCSV(tf[idx]);
    int idxData = Array.IndexOf(tokens, parameterName);
    if (idxData < 0)
    {
        return;
    }

    for (int i = idx + 1; i < tf.Length; i++)
    {
        if (tf[i].Trim() == "")
        {
            continue;
        }
        string[] data = CsvFile.ParseCSV(tf[i]);
        if (data[idxData].Trim() == "")
        {
            continue;
        }
        if (data[idxTime].IndexOf("23:59") >= 0)
        {
            continue;
        }
        DateTime t;
        if (!DateTime.TryParse(data[1], out t))
        {
            Logger.WriteLine("Skipping data '" + tf[i] + "'");
            continue;
        }

        /* FROM NRCS:
         * Daily sensors report a summary value for the previous day.
         * Hourly sensors report a summary value for the previous hour.
         * Instantaneous sensors are included with both Daily and Hourly sensor selections.
         */
        // Hydromet convention:
        // midnight values (00:00) are reported as the previous day's daily value,
        // and daily snotel is reported for the previous day,
        // so we would move back 1 day in both cases.
        // Just using NRCS data as it is to avoid confusion.
        //if (AdjustDates)
        //{
        //    t = t.AddDays(-1);
        //}

        if (t < t1 || t > t2)
        {
            continue;
        }
        double value;
        if (!double.TryParse(data[idxData], out value))
        {
            Logger.WriteLine("Skipping data '" + tf[i] + "'");
        }
        // missing values are -99.9
        if (System.Math.Abs(value + 99.9) < 0.1)
        {
            AddMissing(t);
        }
        else
        {
            Add(t, value);
        }
    }
}
public ActionResult Index(IEnumerable<HttpPostedFileBase> fileNames, string rbGrp)
{
    string UserName = Session["UserName"].ToString();
    string name = rbGrp.ToString();
    if (name == "Delayed")
    {
        foreach (HttpPostedFileBase fileAB in fileNames)
        {
            if (fileAB != null && fileAB.ContentLength > 0)
            {
                String FileExtn = System.IO.Path.GetExtension(fileAB.FileName);
                if (!(FileExtn == ".csv" || FileExtn == ".CSV"))
                {
                    ViewBag.Error = "Only CSV files are allowed!";
                    return View();
                }
                else
                {
                    DataTable dt = new DataTable();
                    if (ModelState.IsValid)
                    {
                        List<FileDetail> fileDetails = new List<FileDetail>();
                        List<CsvFile> listcsvfiles = new List<CsvFile>();
                        List<CsvFile> Csvfiles = new List<CsvFile>();
                        string root = "~/App_Data/Delayed/CsvFile/UserName-" + UserName + "/";
                        var dirName = "~/App_Data/Delayed/CsvFile/UserName-" + UserName + "/Date-" +
                            string.Format("{0:dd-MM-yyyy}", DateTime.Now) + "/";
                        if (!Directory.Exists(root))
                        {
                            System.IO.Directory.CreateDirectory(Server.MapPath(root));
                        }
                        if (!Directory.Exists(dirName))
                        {
                            System.IO.Directory.CreateDirectory(Server.MapPath(dirName));
                        }
                        var fileName = Path.GetFileNameWithoutExtension(fileAB.FileName);
                        var fileNameWithExt = Path.GetFileNameWithoutExtension(fileAB.FileName) +
                            DateTime.Now.ToString("dd-MM-yyyy_hh-mm-ss") + ".csv";
                        var path = Path.Combine(Server.MapPath(dirName),
                            fileName + DateTime.Now.ToString("dd-MM-yyyy_hh-mm-ss") + ".csv");
                        fileAB.SaveAs(path);
                        FileDetail fileDetail = new FileDetail()
                        {
                            FileName = fileNameWithExt,
                            Extension = Path.GetExtension(fileNameWithExt),
                            Id = Guid.NewGuid()
                        };
                        fileDetails.Add(fileDetail);
                        DelayedUpload upload = new DelayedUpload();
                        upload.FileDetails = fileDetails;
                        db.DelayedUploads.Add(upload);
                        db.SaveChanges();
                        ViewBag.Message = "Successfully uploaded files to the server.";
                    }
                }
            }
            else
            {
                ViewBag.Error = "Please upload a file.";
                return View();
            }
        }
    }
    else
    {
        foreach (HttpPostedFileBase fileAB in fileNames)
        {
            if (fileAB != null && fileAB.ContentLength > 0)
            {
                String FileExtn = System.IO.Path.GetExtension(fileAB.FileName);
                if (!(FileExtn == ".csv" || FileExtn == ".CSV"))
                {
                    ViewBag.Error = "Only CSV files are allowed!";
                    return View();
                }
                else
                {
                    DataTable dt = new DataTable();
                    if (ModelState.IsValid)
                    {
                        List<FileDetail> fileDetails = new List<FileDetail>();
                        List<CsvFile> listcsvfiles = new List<CsvFile>();
                        string root = "~/App_Data/Instant/CsvFile/UserName-" + UserName + "/";
                        var dirName = "~/App_Data/Instant/CsvFile/UserName-" + UserName + "/Date-" +
                            string.Format("{0:dd-MM-yyyy}", DateTime.Now) + "/";
                        if (!Directory.Exists(root))
                        {
                            System.IO.Directory.CreateDirectory(Server.MapPath(root));
                        }
                        if (!Directory.Exists(dirName))
                        {
                            System.IO.Directory.CreateDirectory(Server.MapPath(dirName));
                        }
                        var fileName = Path.GetFileNameWithoutExtension(fileAB.FileName);
                        var fileNameWithExt = Path.GetFileNameWithoutExtension(fileAB.FileName) +
                            DateTime.Now.ToString("dd-MM-yyyy_hh-mm-ss") + ".csv";
                        FileDetail fileDetail = new FileDetail()
                        {
                            FileName = fileNameWithExt,
                            Extension = Path.GetExtension(fileNameWithExt),
                            Id = Guid.NewGuid()
                        };
                        fileDetails.Add(fileDetail);
                        var path = Path.Combine(Server.MapPath(dirName),
                            fileName + DateTime.Now.ToString("dd-MM-yyyy_hh-mm-ss") + ".csv");
                        fileAB.SaveAs(path);
                        dt = ProcessCSV(path);
                        DelayedUpload upload1 = new DelayedUpload();
                        upload1.FileDetails = fileDetails;
                        db.DelayedUploads.Add(upload1);
                        ViewBag.Message = ProcessBulkCopy(dt);
                        CsvFile upload = new CsvFile();
                        listcsvfiles.Add(upload);
                        db.csvfiles.Add(upload);
                        db.SaveChanges();
                        ViewBag.Message = "Successfully uploaded sample files!";
                    }
                }
            }
            else
            {
                ViewBag.Error = "Please select CSV files.";
                return View();
            }
        }
    }
    return View();
    //return RedirectToAction("csvFileUpload");
}
public CsvDataRow(CsvRow raw, CsvFile parent)
{
    this.raw = raw;
    this.parent = parent;
}
/// <summary>
/// Deserialize.
/// </summary>
public static CsvFile Deserialize(MemoryStream stream)
{
    int ver = FileMgr.ReadInt(stream);
    if (ver != CsvFileMgr.Version)
    {
        //LogMgr.Trace("Csv version {0} is wrong; the latest version is {1}", ver, CsvFileMgr.Version);
        return null;
    }

    // File name
    string name = FileMgr.ReadString(stream);
    CsvFile csv = new CsvFile(name);
    csv.primaryKey = FileMgr.ReadString(stream);

    // Primary key type
    var pkeyType = (LPC.LPCValue.ValueType)FileMgr.ReadInt(stream);

    // Column names
    int n = FileMgr.ReadInt(stream);
    csv.columns = new Dictionary<string, int>(n);
    for (int i = 0; i < n; i++)
    {
        string k = FileMgr.ReadString(stream);
        int v = FileMgr.ReadInt(stream);
        csv.columns.Add(k, v);
    }

    // Row count
    n = FileMgr.ReadInt(stream);
    csv.rows = new CsvRow[n];

    // Column index of the primary key
    int pkeyIdx = csv.columns[csv.primaryKey];

    // Primary key of each row
    for (var i = 0; i < n; i++)
    {
        var row = new CsvRow(csv);
        if (pkeyType == LPCValue.ValueType.INT)
        {
            int pkey = FileMgr.ReadInt(stream);
            row.Add(pkeyIdx, LPCValue.Create(pkey));
        }
        else
        {
            string pkey = FileMgr.ReadString(stream);
            row.Add(pkeyIdx, LPCValue.Create(pkey));
        }
        csv.AddRow(i, row);
    }

    // Row data
    for (int i = 0; i < n; i++)
    {
        int len = FileMgr.ReadInt(stream);
        csv.rows[i].rowData = new byte[len];
        stream.Read(csv.rows[i].rowData, 0, len);
    }
    return csv;
}
static void Main(string[] args)
{
    Logger.EnableLogger();
    if (args.Length < 2 || args.Length > 3)
    {
        Console.WriteLine("Usage: GetUsace site_list.csv hourly|daily [dump.db]");
        Console.WriteLine("Where: site_list.csv is a catalog of sites to import");
        Console.WriteLine("       hourly or daily data");
        Console.WriteLine("       dump.db creates a test pisces database for comparison to hydromet");
        return;
    }
    FileUtility.CleanTempPath();
    CsvFile csv = new CsvFile(args[0]);
    //interval,filename,cbtt,pcode,header1,header2,header3,header4,header5
    //instant,gcl_h.dat,GCL,FB,Forebay,(ft),,,
    //instant,gcl_h.dat,GCL,TW,Tailwatr,(ft),,,
    //instant,gcl_h.dat,GCL,QE,Generatn,Flow,(kcfs),,
    TimeSeriesDatabase db = null;
    if (args.Length == 3)
    {
        SQLiteServer svr = new SQLiteServer(args[2]);
        db = new TimeSeriesDatabase(svr);
    }
    var rows = csv.Select("interval = '" + args[1] + "'");
    var interval = TimeInterval.Daily;
    if (args[1] == "hourly")
    {
        interval = TimeInterval.Hourly;
    }
    Console.WriteLine("Processing " + rows.Length + " parameters");
    for (int i = 0; i < rows.Length; i++)
    {
        var url = rows[i]["url"].ToString();
        var cbtt = rows[i]["cbtt"].ToString();
        var pcode = rows[i]["pcode"].ToString();
        string[] headers = GetHeaders(rows[i]);
        var soffset = rows[i]["offset"].ToString();
        double offset = 0;
        if (soffset.Trim() != "")
        {
            offset = double.Parse(soffset);
        }
        var s = ProcessFile(url, interval, cbtt, pcode, offset, headers);
        if (db != null)
        {
            SaveToDatabase(args, db, cbtt, pcode, s);
        }
    }
}
/// <summary>
/// Run the code example.
/// </summary>
public void Run(AdManagerUser user)
{
    using (PublisherQueryLanguageService pqlService =
        user.GetService<PublisherQueryLanguageService>())
    {
        // First day of last month.
        System.DateTime lastMonth = System.DateTime.Now
            .AddDays(1 - System.DateTime.Now.Day)
            .AddMonths(-1);

        // Create statement to select MCM earnings for the prior month.
        StatementBuilder statementBuilder = new StatementBuilder()
            .Select("Month, ChildName, ChildNetworkCode, TotalEarningsCurrencyCode," +
                " TotalEarningsMicros, ParentPaymentCurrencyCode, ParentPaymentMicros," +
                " ChildPaymentCurrencyCode, ChildPaymentMicros, DeductionsMicros")
            .From("Mcm_Earnings")
            .Where("Month = :month")
            .OrderBy("ChildNetworkCode")
            .Limit(StatementBuilder.SUGGESTED_PAGE_LIMIT)
            .AddValue("month",
                DateTimeUtilities.FromDateTime(lastMonth, "America/New_York").date);

        int resultSetSize = 0;
        List<Row> allRows = new List<Row>();
        ResultSet resultSet;
        try
        {
            do
            {
                // Get earnings information.
                resultSet = pqlService.select(statementBuilder.ToStatement());

                // Collect all data from each page.
                allRows.AddRange(resultSet.rows);

                // Display results.
                Console.WriteLine(PqlUtilities.ResultSetToString(resultSet));

                statementBuilder.IncreaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT);
                resultSetSize = resultSet.rows == null ? 0 : resultSet.rows.Length;
            } while (resultSetSize == StatementBuilder.SUGGESTED_PAGE_LIMIT);

            Console.WriteLine("Number of results found: " + allRows.Count);

            // Optionally, save all rows to a CSV.
            // Get a string array representation of the data rows.
            resultSet.rows = allRows.ToArray();
            List<String[]> rows = PqlUtilities.ResultSetToStringArrayList(resultSet);

            // Write the contents to a csv file.
            CsvFile file = new CsvFile();
            file.Headers.AddRange(rows[0]);
            file.Records.AddRange(rows.GetRange(1, rows.Count - 1).ToArray());
            file.Write("Earnings_Report_" + this.GetTimeStamp() + ".csv");
        }
        catch (Exception e)
        {
            Console.WriteLine("Failed to get MCM earnings. Exception says \"{0}\"", e.Message);
        }
    }
}
private static void ValidateTest1Properties(CsvFile file)
{
    var properties = TypeDescriptor.GetProperties(file.Rows[0]);
    Assert.AreEqual(4, properties.Count);
    Assert.AreEqual("Text", properties[0].Name);
    Assert.AreEqual(typeof(int), properties[1].PropertyType);
    Assert.AreEqual(17, properties[1].GetValue(file.Rows[1]));
}
//[Test]
//public void PropertyDescriptor()
//{
//    var properties = TypeDescriptor.GetProperties(typeof(CsvFile.CsvRow));
//    Assert.AreEqual(0, properties.Count);
//}

private static void ValidateTest1(CsvFile file)
{
    Assert.AreEqual(4, file.Columns.Count);
    Assert.AreEqual(typeof(string), file.Columns[0].Type);
    Assert.AreEqual(typeof(int), file.Columns[1].Type);
    Assert.AreEqual("km", file.Columns[2].Unit);
    Assert.AreEqual(2, file.Rows.Count);
}
public CsvInternalImportOptions(CsvFile file, CsvImportOptions options)
{
    File = file;
    Options = options;
}
public object Import(RedirectsProviderFile file, CsvImportOptions options)
{
    if (file == null)
    {
        throw new ArgumentNullException(nameof(file));
    }
    if (options == null)
    {
        throw new ArgumentNullException(nameof(options));
    }

    // Determine the encoding
    Encoding encoding;
    switch (options.Encoding)
    {
        case CsvImportEncoding.Ascii:
            encoding = Encoding.ASCII;
            break;
        case CsvImportEncoding.Utf8:
            encoding = Encoding.UTF8;
            break;
        case CsvImportEncoding.Windows1252:
            encoding = Encoding.GetEncoding(1252);
            break;
        default:
            encoding = Encoding.GetEncoding(1252);
            //using (var reader = new StreamReader(file.InputStream, Encoding.Default, true))
            //{
            //    reader.Peek(); // you need this!
            //    encoding = reader.CurrentEncoding;
            //}
            break;
    }

    // Load the CSV file from the stream
    CsvFile csv;
    using (Stream stream = file.InputStream)
    {
        csv = CsvFile.Load(stream, encoding);
    }

    CsvInternalImportOptions io = new CsvInternalImportOptions(csv, options);

    // Determine the columns
    MapCsvColumns(io);

    // Parse the rows
    List<RedirectImportItem> redirects = ParseCsvRows(io);

    var service = new RedirectsImportService();
    foreach (var redirect in redirects)
    {
        service.Import(redirect, options);
    }
    return redirects;
}
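The commented-out block in the default branch hints at BOM-based detection. A minimal sketch of that approach (an illustrative helper, not part of the original provider; the probe consumes the start of the stream, so the stream must be rewound or reopened before the CSV is actually parsed):

Encoding DetectEncoding(Stream stream)
{
    // detectEncodingFromByteOrderMarks:true makes the reader sniff a BOM on first read;
    // the reader is intentionally not disposed so the caller's stream stays open.
    var reader = new StreamReader(stream, Encoding.GetEncoding(1252),
        detectEncodingFromByteOrderMarks: true);
    reader.Peek(); // force the reader to inspect the stream
    return reader.CurrentEncoding; // falls back to Windows-1252 when no BOM is found
}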
/// <summary>
/// Run the code example.
/// </summary>
public void Run(AdManagerUser user)
{
    using (PublisherQueryLanguageService pqlService =
        user.GetService<PublisherQueryLanguageService>())
    {
        // Create statement to select recent changes. Change_History only supports ordering
        // by descending ChangeDateTime. Offset is not supported. To page, use the change ID
        // of the earliest change as a pagination token. A date time range is required when
        // querying this table.
        System.DateTime endDateTime = System.DateTime.Now;
        System.DateTime startDateTime = endDateTime.AddDays(-1);
        StatementBuilder statementBuilder = new StatementBuilder()
            .Select("Id, ChangeDateTime, EntityId, EntityType, Operation, UserId")
            .From("Change_History")
            .Where("ChangeDateTime < :endDateTime AND ChangeDateTime > :startDateTime")
            .OrderBy("ChangeDateTime DESC")
            .AddValue("startDateTime",
                DateTimeUtilities.FromDateTime(startDateTime, "America/New_York"))
            .AddValue("endDateTime",
                DateTimeUtilities.FromDateTime(endDateTime, "America/New_York"))
            .Limit(StatementBuilder.SUGGESTED_PAGE_LIMIT);

        int resultSetSize = 0;
        List<Row> allRows = new List<Row>();
        ResultSet resultSet;
        do
        {
            resultSet = pqlService.select(statementBuilder.ToStatement());
            if (resultSet.rows != null && resultSet.rows.Length > 0)
            {
                // Get the earliest change ID in the result set.
                Row lastRow = resultSet.rows[resultSet.rows.Length - 1];
                string lastId = (string)PqlUtilities.GetValue(lastRow.values[0]);

                // Collect all changes from each page.
                allRows.AddRange(resultSet.rows);

                // Display results.
                Console.WriteLine(PqlUtilities.ResultSetToString(resultSet));

                // Use the earliest change ID in the result set to page.
                statementBuilder
                    .Where("Id < :id AND ChangeDateTime < :endDateTime AND " +
                        "ChangeDateTime > :startDateTime")
                    .AddValue("id", lastId);
            }
            resultSetSize = resultSet.rows == null ? 0 : resultSet.rows.Length;
        } while (resultSetSize == StatementBuilder.SUGGESTED_PAGE_LIMIT);

        Console.WriteLine("Number of results found: " + allRows.Count);

        // Optionally, save all rows to a CSV.
        // Get a string array representation of the data rows.
        resultSet.rows = allRows.ToArray();
        List<String[]> rows = PqlUtilities.ResultSetToStringArrayList(resultSet);

        // Write the contents to a csv file.
        CsvFile file = new CsvFile();
        file.Headers.AddRange(rows[0]);
        file.Records.AddRange(rows.GetRange(1, rows.Count - 1).ToArray());
        file.Write("recent_changes_" + this.GetTimeStamp() + ".csv");
    }
}
public ReportStrategy(string output)
{
    this.output = output;
    this.references = new Dictionary<string, HashSet<string>>();
    this.outFile = new CsvFile(Path.GetFileNameWithoutExtension(output) + "_1" + Path.GetExtension(output));
}
public void CsvWriter_WriteCsvFileObjectToStream()
{
    string content = string.Empty;
    using (MemoryStream memoryStream = new MemoryStream())
    {
        CsvFile csvFile = new CsvFile();
        csvFile.Populate(true, TEST_DATA_5);
        using (CsvWriter writer = new CsvWriter())
        {
            writer.WriteCsv(csvFile, memoryStream);
            using (StreamReader reader = new StreamReader(memoryStream))
            {
                content = reader.ReadToEnd();
            }
        }
    }
    Assert.IsTrue(string.Compare(content, TEST_DATA_5) == 0);
}
private void PopulateDataSet(CsvFile data) { DataSet = MakeDataSet(data); }
public async Task<IActionResult> GetMemberImportTemplate(Guid chapterId)
{
    CsvFile file = await _memberAdminService.GetMemberImportFile(GetMemberId(), chapterId);
    return CsvResult(file, "Members.csv");
}
/// <summary>
/// Writes csv content to a stream
/// </summary>
/// <param name="csvFile">CsvFile</param>
/// <param name="stream">Stream</param>
public void WriteCsv(CsvFile csvFile, Stream stream)
{
    WriteCsv(csvFile, stream, null);
}
static void Main(string[] argList)
{
    if (argList.Length == 0)
    {
        Usage();
        return;
    }
    DateTime t = DateTime.Now.Date;
    bool recentDataOnly = true; // defaults to using only the last 4 hours, unless dates are specified
    Arguments args = new Arguments(argList);
    if (args.Contains("t"))
    {
        recentDataOnly = false;
        if (!DateTime.TryParse(args["t"], out t))
        {
            Console.WriteLine("Error: invalid date '" + args["t"] + "'");
            Usage();
            return;
        }
    }
    DateTime t1 = t;
    DateTime t2 = t;
    if (args.Contains("t1"))
    {
        recentDataOnly = false;
        if (!DateTime.TryParse(args["t1"], out t1))
        {
            Console.WriteLine("Error: invalid date t1 '" + args["t1"] + "'");
            Usage();
            return;
        }
    }
    if (args.Contains("t2"))
    {
        recentDataOnly = false;
        if (!DateTime.TryParse(args["t2"], out t2))
        {
            Console.WriteLine("Error: invalid date t2 '" + args["t2"] + "'");
            Usage();
            return;
        }
    }
    if (!args.Contains("config"))
    {
        Console.WriteLine("Error: --config=filename.csv is required");
        Usage();
        return;
    }
    if (!args.Contains("output"))
    {
        Console.WriteLine("Error: --output=filename.txt is required");
        Usage();
        return;
    }

    // read config file:
    // cbtt,inel_id,inel_code,hydromet_pcode
    DataTable csv = new CsvFile(args["config"], CsvFile.FieldTypes.AllText);
    if (args.Contains("cbtt")) // filter for a specific site
    {
        Console.WriteLine("Filtering for cbtt = '" + args["cbtt"] + "'");
        csv = DataTableUtility.Select(csv, "cbtt='" + args["cbtt"] + "'", "");
    }
    t = t1;
    while (t <= t2)
    {
        ProcessDate(t, args, csv, recentDataOnly);
        t = t.AddDays(1).Date;
    }
}
private CsvFile CreateCsvFile(List<string> headers, List<string> fields)
{
    CsvFile csvFile = new CsvFile();
    headers.ForEach(header => csvFile.Headers.Add(header));

    CsvRecord record = new CsvRecord();
    fields.ForEach(field => record.Fields.Add(field));
    csvFile.Records.Add(record);

    return csvFile;
}
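A usage sketch for the helper above (the header names and values are illustrative, not from the original test), paired with the file-writing overload shown elsewhere in this set:

var headers = new List<string> { "RegexPattern", "Category" };
var fields = new List<string> { @"^AMZN.*", "Shopping" };
CsvFile csvFile = CreateCsvFile(headers, fields);

using (CsvWriter writer = new CsvWriter())
    writer.WriteCsv(csvFile, "mappers.csv");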
/// <summary>Save to a CSV file</summary>
/// <param name="file"></param>
public void SaveCsv(String file)
{
    using var csv = new CsvFile(file, true);
    csv.WriteLine(Columns);
    csv.WriteAll(Rows);
}
/// <summary>
/// Event handler: OpenFileDialog: the "OK" button was clicked.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void openFileDialog_FileOk(object sender, CancelEventArgs e)
{
    if (e.Cancel)
    {
        return;
    }

    List<string[]> items;
    if (CsvFile.Open(openFileDialog.FileName, '\t', out items))
    {
        tagBrowserControl.Clear();

        #region [ PARSE ROW ]

        foreach (string[] item in items)
        {
            string deviceName = "";
            string tagName = "";
            string fragmentLength = null;
            string readUpdateRate = null;
            string radix = null;
            string writeValue = null;

            switch (item.Length)
            {
                case 1:
                    tagName = item[0];
                    break;
                case 2:
                    deviceName = item[0];
                    tagName = item[1];
                    break;
                case 3:
                    deviceName = item[0];
                    tagName = item[1];
                    fragmentLength = item[2];
                    break;
                case 4:
                    deviceName = item[0];
                    tagName = item[1];
                    fragmentLength = item[2];
                    readUpdateRate = item[3];
                    break;
                case 6:
                    deviceName = item[0];
                    tagName = item[1];
                    fragmentLength = item[2];
                    readUpdateRate = item[3];
                    radix = item[4];      // was item[3] in the original, which duplicated readUpdateRate
                    writeValue = item[5]; // was item[4], which left the last column unused
                    break;
            }

            LogixTagHandler tag = new LogixTagHandler(tagName);
            if (fragmentLength != null)
            {
                UInt16 value;
                if (UInt16.TryParse(fragmentLength, out value))
                {
                    tag.Type.ArrayDimension.Value = value;
                }
                else
                {
                    // TODO Message.
                }
            }
            if (readUpdateRate != null)
            {
                UInt16 value;
                if (UInt16.TryParse(readUpdateRate, out value))
                {
                    tag.ReadUpdateRate = value;
                }
                else
                {
                    // TODO Message.
                }
            }
            if (radix != null && writeValue != null)
            {
                TagValueRadix tagValueRadix;
                if (Enum.TryParse<TagValueRadix>(radix, true, out tagValueRadix))
                {
                    //if (!tag.WriteValueControl.SetValueText(0, tagValueRadix, writeValue))
                    //{
                    //    // TODO Message.
                    //}
                }
                else
                {
                    // TODO Message.
                }
            }
            tagBrowserControl.Add(deviceName, tag);
        }

        #endregion
    }
    else
    {
        MessageBox.Show("Error! Can't open file!", "Registrator", MessageBoxButtons.OK, MessageBoxIcon.Stop);
    }
}
/// <summary>Load from a CSV file</summary>
/// <param name="file"></param>
public void LoadCsv(String file)
{
    using var csv = new CsvFile(file, false);
    Columns = csv.ReadLine();
    Rows = csv.ReadAll();
}
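A round-trip sketch for the SaveCsv/LoadCsv pair above (DbTable is a stand-in name for whatever type owns Columns and Rows; the file name is illustrative):

var table = new DbTable();          // hypothetical owner of SaveCsv/LoadCsv
// ... fill table.Columns and table.Rows ...
table.SaveCsv("cache.csv");         // header line from Columns, then every row

var copy = new DbTable();
copy.LoadCsv("cache.csv");          // first line back into Columns, the rest into Rows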
public string ProcessFiles(IEnumerable<InputFile> files)
{
    try
    {
        var result = new StringBuilder();
        _logger.LogInformation($"Start processing {files.Count()} files");
        result.AppendLine($"{files.Count()} files were uploaded:");

        // Group by customer name and data structure
        var parsedNames = files.Select(file =>
        {
            var csvFile = new CsvFile();
            var parseResult = csvFile.TryParseFileName(file.FileName);
            return new { ParseResult = parseResult, CsvFile = csvFile, file.OpenStream };
        });
        foreach (var wrongName in parsedNames.Where(file => !file.ParseResult))
        {
            result.AppendLine($"• File \"{wrongName.CsvFile.FileName}\" has wrong name format");
        }

        var groups = parsedNames
            .Where(file => file.ParseResult)
            .GroupBy(file => new { file.CsvFile.CustomerName, file.CsvFile.DataStructure });

        foreach (var group in groups)
        {
            var orderedFiles = group.OrderBy(file => file.CsvFile.Date);
            CsvFile previousFile = null;
            foreach (var file in orderedFiles)
            {
                using (var stream = file.OpenStream())
                {
                    var parseResult = file.CsvFile.TryParseData(stream);
                    if (!parseResult)
                    {
                        result.AppendLine($"• File \"{file.CsvFile.FileName}\" has wrong data format");
                    }
                    else
                    {
                        result.AppendLine($"• Analyse file \"{file.CsvFile.FileName}\":");
                        result.AppendLine($"\tNumber of columns: {file.CsvFile.NumberOfColumns}; Number of rows: {file.CsvFile.NumberOfRows}");
                        foreach (var column in file.CsvFile.Columns)
                        {
                            result.AppendLine($"\tColumn name: \"{column.ColumnName}\"; Number of unique values: {column.NumberOfUnique}; Most frequent value: \"{column.MostFrequentValue}\"");
                        }
                        if (previousFile != null)
                        {
                            var addedColumns = file.CsvFile.Columns.Select(c => c.ColumnName)
                                .Except(previousFile.Columns.Select(c => c.ColumnName));
                            if (addedColumns.Any())
                            {
                                result.AppendLine($"\t\tAdded columns: {string.Join(", ", addedColumns)}");
                            }
                            var removedColumns = previousFile.Columns.Select(c => c.ColumnName)
                                .Except(file.CsvFile.Columns.Select(c => c.ColumnName));
                            if (removedColumns.Any())
                            {
                                result.AppendLine($"\t\tRemoved columns: {string.Join(", ", removedColumns)}");
                            }
                        }
                        previousFile = file.CsvFile;

                        var customer = _context.Customers.FirstOrDefault(customer => customer.Name == file.CsvFile.CustomerName);
                        if (customer == null)
                        {
                            customer = new Customer { Name = file.CsvFile.CustomerName };
                            _context.Customers.Add(customer);
                        }
                        var structure = _context.DataStructures.FirstOrDefault(structure => structure.Name == file.CsvFile.DataStructure);
                        if (structure == null)
                        {
                            structure = new DataStructure { Name = file.CsvFile.DataStructure };
                            _context.DataStructures.Add(structure);
                        }
                        var fileEntity = new CsvFileEntity
                        {
                            FileName = file.CsvFile.FileName,
                            Customer = customer,
                            Structure = structure,
                            Date = file.CsvFile.Date,
                            ProcessingDate = DateTime.Now,
                            NumberOfColumns = file.CsvFile.NumberOfColumns,
                            NumberOfRows = file.CsvFile.NumberOfRows,
                            Columns = new List<CsvFileColumnEntity>()
                        };
                        foreach (var fileColumn in file.CsvFile.Columns)
                        {
                            var columnEntity = _context.Columns
                                .FirstOrDefault(column => column.Name == fileColumn.ColumnName &&
                                                          column.Customer == customer &&
                                                          column.Structure == structure);
                            if (columnEntity == null)
                            {
                                columnEntity = new CsvFileColumnEntity
                                {
                                    Name = fileColumn.ColumnName,
                                    Customer = customer,
                                    Structure = structure
                                };
                                _context.Columns.Add(columnEntity);
                            }
                            fileEntity.Columns.Add(columnEntity);
                        }
                        _context.Files.Add(fileEntity);
                        _context.SaveChanges();
                    }
                }
            }
        }
        return result.ToString();
    }
    catch (Exception exception)
    {
        _logger.LogError(exception, "Exception during processing files");
        return "Processing failed, please contact administrator";
    }
}
public static IPagedCsvFile Create(CsvFile file, int pageSize, int page = 1)
{
    return new PagedCsvFile(file, pageSize, page);
}
/// <summary>
/// Runs the code example.
/// </summary>
/// <param name="client">The Google Ads client.</param>
/// <param name="customerId">The Google Ads customer ID for which the call is made.</param>
/// <param name="outputFilePath">The path to which the CSV file is written.</param>
public void Run(GoogleAdsClient client, long customerId, string outputFilePath)
{
    GoogleAdsServiceClient googleAdsServiceClient =
        client.GetService(Services.V10.GoogleAdsService);

    // Create a query that retrieves campaigns.
    string query = @"
        SELECT
            campaign.id,
            campaign.name,
            segments.date,
            metrics.impressions,
            metrics.clicks,
            metrics.cost_micros
        FROM campaign
        WHERE
            segments.date DURING LAST_30_DAYS
            AND campaign.status = 'ENABLED'
        ORDER BY segments.date DESC";

    // Issues a search request.
    googleAdsServiceClient.SearchStream(customerId.ToString(), query,
        delegate(SearchGoogleAdsStreamResponse response)
        {
            if (response.Results.Count() == 0)
            {
                Console.WriteLine("No results found!");
                return;
            }

            CsvFile csvFile = new CsvFile();
            // Set the header for the CSV file.
            csvFile.Headers.AddRange(response.FieldMask.Paths);

            // Iterate over all returned rows and extract the information.
            foreach (GoogleAdsRow googleAdsRow in response.Results)
            {
                csvFile.Records.Add(new string[]
                {
                    googleAdsRow.Campaign.Id.ToString(),
                    googleAdsRow.Campaign.Name,
                    googleAdsRow.Segments.Date,
                    googleAdsRow.Metrics.Impressions.ToString(),
                    googleAdsRow.Metrics.Clicks.ToString(),
                    googleAdsRow.Metrics.CostMicros.ToString()
                });
            }

            if (outputFilePath == null)
            {
                outputFilePath =
                    Environment.GetFolderPath(Environment.SpecialFolder.UserProfile) +
                    Path.DirectorySeparatorChar + GetType().Name +
                    DateTime.Now.ToString("-yyyyMMMMdd-HHmmss") + ".csv";
            }
            else if (!outputFilePath.EndsWith(".csv"))
            {
                outputFilePath += ".csv";
            }

            // Create the file with the specified path, write all lines, and close it.
            csvFile.Write(outputFilePath);
            Console.WriteLine(
                $"Successfully wrote {response.Results.Count()} entries to {outputFilePath}.");
        }
    );
}
public async void AddCsv(string filename)
{
    CsvFile file = new CsvFile(filename);
    if (!CsvFiles.Contains(file))
    {
        CsvFiles.Add(file);
        csvD.InitialDirectory = Path.GetDirectoryName(filename);
        databaseD.InitialDirectory = csvD.InitialDirectory;
        if (string.IsNullOrWhiteSpace(outputTB.Text))
        {
            outputTB.Text = csvD.InitialDirectory;
        }
        var mods = await CheckMods(file);
        UsedModifications.UnionWith(mods);
        ignoreModsCLB.Items.Clear();
        ignoreModsCLB.Items.AddRange(UsedModifications.ToArray());
    }
}
private void ImportButton_Click(object sender, RoutedEventArgs e)
{
    var dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.FileName = "data"; // Default file name
    dlg.DefaultExt = ".csv"; // Default file extension
    dlg.Filter = "Text documents (.csv)|*.csv"; // Filter files by extension

    Nullable<bool> result = dlg.ShowDialog();
    if (!result.HasValue || !result.Value)
        return;

    string filename = dlg.FileName;
    var records = new List<List<string>>();
    using (CsvReader reader = new CsvReader(filename, Encoding.Default))
    {
        while (reader.ReadNextRecord())
            records.Add(reader.Fields);
    }
    if (records[0][0] == "Number")
        records.Remove(records[0]);

    //number,date,acc,amount,transactiontype,notes
    csvFile = new CsvFile();
    var headers = new List<string>
    {
        "Number", "Date", "Account", "Amount",
        "Transaction Type", "Notes", "Category", "Sub Category"
    };
    headers.ForEach(header => csvFile.Headers.Add(header));
    foreach (var fields in records)
    {
        var record = new CsvRecord();
        fields.ForEach(field => record.Fields.Add(field));
        SetCategory(record);
        csvFile.Records.Add(record);
    }
}
private Task<ISet<string>> CheckMods(CsvFile file)
{
    Task<ISet<string>> t = new Task<ISet<string>>(() =>
    {
        HashSet<string> localMods = new HashSet<string>();
        using (CsvReader reader = new CsvReader(new StreamReader(file.FilePath), true))
        {
            while (reader.ReadNextRecord())
            {
                string modLine = reader["Mods"];
                if (string.IsNullOrEmpty(modLine))
                    continue;
                string[] mods = modLine.Split(',');
                foreach (string modName in mods.Select(mod => mod.Split(':')[0]))
                {
                    localMods.Add(modName);
                }
            }
        }
        return localMods;
    });
    t.Start();
    return t;
}
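Creating a Task with its constructor and calling Start, as above, is the legacy pattern; Task.Run schedules the same work on the thread pool in one call. A sketch of the equivalent body (same CsvReader usage; assumes System.Linq and System.Threading.Tasks are imported, as the original already requires):

private Task<ISet<string>> CheckMods(CsvFile file)
{
    return Task.Run<ISet<string>>(() =>
    {
        var localMods = new HashSet<string>();
        using (var reader = new CsvReader(new StreamReader(file.FilePath), true))
        {
            while (reader.ReadNextRecord())
            {
                string modLine = reader["Mods"];
                if (string.IsNullOrEmpty(modLine))
                    continue;
                // "name:detail" entries, comma-separated; keep only the name part
                foreach (string modName in modLine.Split(',').Select(mod => mod.Split(':')[0]))
                    localMods.Add(modName);
            }
        }
        return localMods;
    });
}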
private static void UpdatesingleRatingTable(DataRow dRow, bool generateNewTables, CsvFile inputText,
    List<string> stationUpdateList, List<string> attachments, out string attachmentRecipients)
{
    string urlDownload = "";

    // Define parameters to be used for this checking iteration
    string cbtt = dRow["cbtt"].ToString().ToLower();
    attachmentRecipients = "";
    string stationID = dRow["site_id"].ToString();
    var email = dRow["email"].ToString();
    string agency = dRow["agency"].ToString();
    Console.Write(cbtt.PadRight(8, '.') + " " + agency.ToLower().PadLeft(5));

    // Check if the RDB is currently in the system
    string rdbFileName = Path.Combine(hydrometRTFs, cbtt + ".rdb");
    string shiftFileName = Path.Combine(Path.Combine(hydrometRTFs, "_hj_tables"), cbtt + "_hj.csv");
    string qFileName = Path.Combine(Path.Combine(hydrometRTFs, "_q_tables"), cbtt + "_q.csv");

    // Get the full rating-table file
    DataTable fullRatingTable;
    TextFile webRdbTable;
    TextFile fileRdbTable;
    if (agency == "USGS")
    {
        usgsRatingTable = new Reclamation.TimeSeries.Usgs.UsgsRatingTable(stationID);
        usgsRatingTable.CreateShiftAndFlowTablesFromWeb();
        usgsRatingTable.CreateFullRatingTableFromWeb();
        fullRatingTable = usgsRatingTable.fullRatingTable;
        webRdbTable = usgsRatingTable.webRdbTable;
        urlDownload = usgsRatingTable.downloadURL;
    }
    else if (agency == "OWRD")
    {
        var ratingTable = new Reclamation.TimeSeries.Owrd.OwrdRatingTables(stationID);
        fullRatingTable = ratingTable.fullRatingTable;
        webRdbTable = ratingTable.rawTable;
        urlDownload = ratingTable.downloadURL;
    }
    else if (agency == "IDPWR")
    {
        var ratingTable = new Reclamation.TimeSeries.IdahoPower.IdahoPowerRatingTables(cbtt);
        ratingTable.CreateFullRatingTableFromWeb();
        fullRatingTable = ratingTable.fullRatingTable;
        webRdbTable = ratingTable.webRdbTable;
        urlDownload = ratingTable.downloadURL;
    }
    else
    {
        throw new Exception(cbtt.ToUpper() + "'s rating table from " + agency + " is not supported.");
    }

    // Create new RDB files if the file does not currently exist
    if (!File.Exists(rdbFileName))
    {
        stationUpdateList.Add(@"<a href=""" + urlDownload + @""">" + cbtt + " (" + agency + " " +
            stationID + ")</a> updated existing table");
        if (agency == "USGS")
        {
            WriteHjAndQTables(shiftFileName, qFileName, usgsRatingTable);
        }
        WriteCsvFiles(fullRatingTable, cbtt);
        Console.WriteLine(" new table");
        webRdbTable.SaveAs(rdbFileName);
    }
    // Check the existing file for updates
    else
    {
        // Get the old RDB currently on file and copy it into temp. This is done to
        // enable overwriting if the web file has been updated.
        fileRdbTable = GetRDBTableFromFile(cbtt, hydrometRTFs);

        // Compare
        var diff = (TextFile.Compare(fileRdbTable, webRdbTable).Length != 0);
        if (agency == "USGS")
        {
            diff = UsgsRatingTable.Diff(fileRdbTable, webRdbTable);
        }

        // Save the new RDB file to the repository and generate new HJ and Q tables if the file was updated
        if (diff || generateNewTables)
        {
            stationUpdateList.Add(@"<a href=""" + urlDownload + @""">" + cbtt + " (" + agency + " " +
                stationID + ")</a> updated existing table");
            // Copy the old RDB to _Attic and save the new RDB to the repository
            if (!generateNewTables)
            {
                fileRdbTable.SaveAs(Path.Combine(Path.Combine(hydrometRTFs, "_attic"),
                    cbtt + DateTime.Now.ToString("_yyyy-MM-dd") + ".rdb")); //[JR] relies on the existence of an '_Attic' folder in the repository
                webRdbTable.SaveAs(rdbFileName);
            }
            if (agency == "USGS")
            {
                WriteHjAndQTables(shiftFileName, qFileName, usgsRatingTable);
            }
            WriteCsvFiles(fullRatingTable, cbtt);
            // Define which attachments to add to the mail message if the 'email' field in the input file is not blank
            if (email != "")
            {
                attachmentRecipients = email;
                attachments.Add(shiftFileName);
                attachments.Add(qFileName);
            }
            Console.WriteLine(" UPDATED");
        }
        else
        {
            Console.WriteLine(" current");
        }
    }
    //return urlDownload;
}
private void ReadFromFile(string filename)
{
    var tf = new TextFile(filename);
    Name = ReadStringToken(tf, "Name");
    StartYear = Convert.ToInt32(ReadStringToken(tf, "StartYear"));
    EndYear = Convert.ToInt32(ReadStringToken(tf, "EndYear"));
    this.aveargeFlowStation = ReadStringToken(tf, "AverageRunoff");
    this.coefficients = ReadCoefficients(tf);

    string pattern = @"(X|Y)[0-9]{1}[A-Za-z]?\s*,";
    int idx = tf.IndexOfRegex(pattern);
    while (idx > 0)
    {
        var term = CreateTerm(tf, idx);
        term.MonthNames = ReadDateRange(tf, idx);
        var isYterm = tf[idx].IndexOf("Y") == 0;
        if (isYterm)
        {
            YTerm = (RunoffForecastTerm)term;
        }
        else
        {
            XTerms.Add(term);
        }
        idx++;
        while (idx < tf.Length && !Regex.IsMatch(tf[idx], pattern))
        {
            if (tf[idx].Length > 0 && tf[idx].IndexOf("#") == 0 || tf[idx].IndexOf("\"#") == 0)
            { // comment line
                idx++;
                continue;
            }
            var tokens = CsvFile.ParseCSV(tf[idx]);
            if (tokens.Length <= 0)
            {
                break;
            }
            if (tokens[0].Trim() == "")
            {
                break;
            }
            term.siteNames.Add(tokens[0]);
            // remove any extra space between cbtt and pcode
            var tmp = TextFile.Split(tokens[1].Trim());
            if (tmp.Length != 2)
            { // might be an equation...
                term.cbttPodes.Add(tokens[1].Trim());
            }
            else
            {
                term.cbttPodes.Add(tmp[0].Trim().ToUpper() + " " + tmp[1].Trim().ToUpper());
            }
            term.siteWeights.Add(Convert.ToDouble(tokens[2]));
            if (isYterm)
            { // the y-term should not be weighted... put in 1's
                double[] w = Array.ConvertAll(term.MonthNames.ToArray(), s => 1.0);
                term.MonthlyWeights.Clear();
                term.MonthlyWeights.AddRange(w);
            }
            else
            {
                term.MonthlyWeights = ReadDoubles(tokens, 3, term.MonthNames.Count);
            }
            idx++;
        }
        if (idx >= tf.Length)
        {
            break;
        }
        idx = tf.IndexOfRegex(pattern, idx);
    }
}
private void ProcessFile(CsvFile csvFile, BlockingCollection<IReadOnlyList<DataSources.IObjectResolver>> chunkedSource)
{
    var file = new FileInfo(csvFile.FilePath);
    if (!file.Exists)
    {
        chunkedSource.Add(new List<EntityResolver<object[]>>());
        return;
    }

    var nameToIndexMap = new Dictionary<string, int>();
    var indexToMethodAccess = new Dictionary<int, Func<object[], object>>();
    var indexToNameMap = new Dictionary<int, string>();
    var endWorkToken = _context.EndWorkToken;

    // First pass: read only the header row and build the name/index maps.
    using (var stream = CreateStreamFromFile(file))
    {
        using (var reader = new StreamReader(stream))
        {
            SkipLines(reader, csvFile);
            using (var csvReader = new CsvReader(reader))
            {
                csvReader.Configuration.Delimiter = csvFile.Separator;
                csvReader.Read();
                var header = csvReader.Context.Record;
                for (var i = 0; i < header.Length; ++i)
                {
                    var headerName = csvFile.HasHeader
                        ? CsvHelper.MakeHeaderNameValidColumnName(header[i])
                        : string.Format(CsvHelper.AutoColumnName, i + 1);
                    nameToIndexMap.Add(headerName, i);
                    indexToNameMap.Add(i, headerName);
                    var i1 = i;
                    indexToMethodAccess.Add(i, row => row[i1]);
                }
            }
        }
    }

    // Second pass: stream the data rows out in chunks.
    using (var stream = CreateStreamFromFile(file))
    {
        using (var reader = new StreamReader(stream))
        {
            SkipLines(reader, csvFile);
            using (var csvReader = new CsvReader(reader))
            {
                csvReader.Configuration.BadDataFound = context => { };
                csvReader.Configuration.Delimiter = csvFile.Separator;
                int i = 1, j = 11;
                var list = new List<EntityResolver<object[]>>(100);
                var rowsToRead = 1000;
                const int rowsToReadBase = 100;
                if (csvFile.HasHeader)
                {
                    csvReader.Read(); // skip header
                }
                while (csvReader.Read())
                {
                    var rawRow = csvReader.Context.Record;
                    list.Add(new EntityResolver<object[]>(ParseRecords(rawRow, indexToNameMap), nameToIndexMap, indexToMethodAccess));
                    if (i++ < rowsToRead)
                    {
                        continue;
                    }
                    i = 1;
                    if (j > 1)
                    {
                        j -= 1;
                    }
                    rowsToRead = rowsToReadBase * j;
                    chunkedSource.Add(list, endWorkToken);
                    list = new List<EntityResolver<object[]>>(rowsToRead);
                }
                chunkedSource.Add(list, endWorkToken);
            }
        }
    }
}
/// <summary>
/// Serialize.
/// </summary>
public static void Serialize(MemoryStream stream, CsvFile csv)
{
    // Initialize the scratch buffer
    if (mSerBuffer == null)
    {
        mSerBuffer = new byte[1024 * 1024];
    }

    FileMgr.WriteInt(stream, CsvFileMgr.Version);
    FileMgr.WriteString(stream, csv.Name);
    FileMgr.WriteString(stream, csv.primaryKey);

    // Write the primary key type
    LPC.LPCValue.ValueType pkeyType = LPCValue.ValueType.INT;
    if (csv.rows.Length > 0)
    {
        var row = csv.rows[0];
        var pkey = row.Query<LPCValue>(csv.primaryKey);
        pkeyType = pkey.type;
        FileMgr.WriteInt(stream, (int)pkeyType);
    }

    // Column names
    FileMgr.WriteInt(stream, csv.columns.Count);
    foreach (var kv in csv.columns)
    {
        FileMgr.WriteString(stream, kv.Key);
        FileMgr.WriteInt(stream, kv.Value);
    }

    // Row count
    FileMgr.WriteInt(stream, csv.rows.Length);

    // Write each row's primary key
    for (int i = 0; i < csv.rows.Length; i++)
    {
        var row = csv.rows[i];
        if (pkeyType == LPCValue.ValueType.INT)
        {
            var pkey = row.Query<int>(csv.primaryKey);
            FileMgr.WriteInt(stream, pkey);
        }
        else
        {
            var pkey = row.Query<string>(csv.primaryKey);
            FileMgr.WriteString(stream, pkey);
        }
    }

    // Write each row's length and content
    for (int i = 0; i < csv.rows.Length; i++)
    {
        var row = csv.rows[i];
        int len = 0;
        for (var idx = 0; idx < csv.columns.Count; idx++)
        {
            len += LPCValue.SaveToBuffer(mSerBuffer, len, row.properties[idx]);
        }
        FileMgr.WriteInt(stream, len);
        stream.Write(mSerBuffer, 0, len);
    }

    // Release the buffer
    mSerBuffer = null;
}
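A round-trip sketch tying this Serialize to the Deserialize method shown earlier (assuming both methods live on CsvFileMgr; csv is an existing CsvFile). The stream must be rewound before reading back, since Deserialize starts by reading the version header:

using (var stream = new MemoryStream())
{
    CsvFileMgr.Serialize(stream, csv);
    stream.Position = 0;                 // rewind before reading the version header
    CsvFile copy = CsvFileMgr.Deserialize(stream);
}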
/// <summary>
/// Writes csv content to a stream
/// </summary>
/// <param name="csvFile">CsvFile</param>
/// <param name="stream">Stream</param>
/// <param name="encoding">Encoding</param>
public void WriteCsv(CsvFile csvFile, Stream stream, Encoding encoding)
{
    stream.Position = 0;
    _streamWriter = new StreamWriter(stream, encoding ?? Encoding.Default);
    WriteToStream(csvFile, _streamWriter);
    _streamWriter.Flush();
    stream.Position = 0;
}
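One caveat in the overload above: the StreamWriter is flushed but never disposed, presumably deliberately, so the caller's stream stays open. On .NET 4.5 and later that intent can be made explicit with the leaveOpen constructor; a sketch:

public void WriteCsv(CsvFile csvFile, Stream stream, Encoding encoding)
{
    stream.Position = 0;
    using (var writer = new StreamWriter(stream, encoding ?? Encoding.Default,
                                         bufferSize: 1024, leaveOpen: true))
    {
        // Disposing the writer flushes it but leaves the underlying stream open.
        WriteToStream(csvFile, writer);
    }
    stream.Position = 0;
}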
private static void SortClientsByCity()
{
    Console.WriteLine();
    Console.WriteLine("Sorting clients into a file for each City");
    int processed = 0;
    var files = new Dictionary<string, CsvFile<Client>>();
    foreach (var c in CsvFile.Read<Client>("clients.csv"))
    {
        processed++;
        if (processed % 1000 == 0)
            Console.Write(string.Format("\r{0} clients sorted.", processed));

        CsvFile<Client> csvFile;
        if (!files.TryGetValue(c.City ?? "Blank", out csvFile))
        {
            // Use the same fallback as the dictionary key, so a null City
            // does not produce a file with a null name.
            csvFile = new CsvFile<Client>(c.City ?? "Blank");
            files[c.City ?? "Blank"] = csvFile;
        }
        csvFile.Append(c);
    }
    foreach (var f in files.Values)
        f.Dispose();
    Console.WriteLine();
}
public void CsvFile_Indexers()
{
    CsvFile file = new CsvFile();
    file.Populate(true, TEST_DATA_2);
    Assert.IsTrue(file[0] == file.Records[0]);
    Assert.IsTrue(string.Compare(file[0, 1], "data, 2") == 0);
    Assert.IsTrue(string.Compare(file[0, "column two"], "data, 2") == 0);
}
public void CsvFile_PopulateFromStream()
{
    using (MemoryStream memoryStream = new MemoryStream(TEST_DATA_5.Length))
    {
        using (StreamWriter streamWriter = new StreamWriter(memoryStream))
        {
            streamWriter.Write(TEST_DATA_5);
            streamWriter.Flush();

            CsvFile file = new CsvFile();
            file.Populate(memoryStream, true);
            VerifyTestData5(file.Headers, file.Records);
        }
    }
}
public void CsvWriter_VerifyThatCarriageReturnsAreHandledCorrectlyInFieldValues()
{
    CsvFile csvFile = new CsvFile();
    csvFile.Headers.Add("header ,1");
    csvFile.Headers.Add("header\r\n2");
    csvFile.Headers.Add("header 3");

    CsvRecord record = new CsvRecord();
    record.Fields.Add("da,ta 1");
    record.Fields.Add("\"data\" 2");
    record.Fields.Add("data\n3");
    csvFile.Records.Add(record);

    string content = string.Empty;
    using (CsvWriter writer = new CsvWriter())
    {
        content = writer.WriteCsv(csvFile, Encoding.Default);
    }
    Assert.IsTrue(string.Compare(content,
        "\"header ,1\",\"header,2\",header 3\r\n\"da,ta 1\",\"\"\"data\"\" 2\",\"data,3\"\r\n") == 0);

    using (CsvWriter writer = new CsvWriter() { ReplaceCarriageReturnsAndLineFeedsFromFieldValues = false })
    {
        content = writer.WriteCsv(csvFile, Encoding.Default);
    }
    Assert.IsTrue(string.Compare(content,
        "\"header ,1\",header\r\n2,header 3\r\n\"da,ta 1\",\"\"\"data\"\" 2\",data\n3\r\n") == 0);
}
/// <summary>
/// Writes csv content to a string
/// </summary>
/// <param name="csvFile">CsvFile</param>
/// <param name="encoding">Encoding</param>
/// <returns>Csv content in a string</returns>
public string WriteCsv(CsvFile csvFile, Encoding encoding)
{
    string content = string.Empty;
    using (MemoryStream memoryStream = new MemoryStream())
    {
        using (StreamWriter writer = new StreamWriter(memoryStream, encoding ?? Encoding.Default))
        {
            WriteToStream(csvFile, writer);
            writer.Flush();
            memoryStream.Position = 0;
            using (StreamReader reader = new StreamReader(memoryStream, encoding ?? Encoding.Default))
            {
                content = reader.ReadToEnd();
                writer.Close();
                reader.Close();
                memoryStream.Close();
            }
        }
    }
    return content;
}
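Because the private WriteToStream helper (shown further down in this set) only needs a TextWriter, a string overload can skip the MemoryStream round trip entirely; a sketch of a simpler hypothetical overload, where the encoding is irrelevant since .NET strings are UTF-16 in memory:

public string WriteCsv(CsvFile csvFile)
{
    using (var writer = new StringWriter())
    {
        WriteToStream(csvFile, writer); // WriteToStream only needs a TextWriter
        return writer.ToString();
    }
}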
public ResultCreazioneFile GetIndirizzi(IList<PersonaComunicazioneDTO> persone)
{
    try
    {
        var message = string.Empty;
        var csvFile = new CsvFile();
        csvFile.Headers.Add("Nominativo");
        csvFile.Headers.Add("Descrizione");
        csvFile.Headers.Add("Indirizzo");
        csvFile.Headers.Add("Note indirizzo");
        csvFile.Headers.Add("Casella postale");
        csvFile.Headers.Add("Ufficio postale");
        csvFile.Headers.Add("CAP");
        csvFile.Headers.Add("Comune");
        csvFile.Headers.Add("Provincia");
        csvFile.Headers.Add("Tipo indirizzo");

        foreach (var dto in persone)
        {
            try
            {
                var persona = _daoFactory.GetPersonaDao().GetById(dto.ID, false);
                var indirizzo = persona.Recapito;
                if (dto.Indirizzo != null && dto.Indirizzo.RecapitoResidenza == TipoIndirizzo.Residenza)
                    indirizzo = persona.IndirizzoResidenza;
                if (indirizzo == null || indirizzo.Comune == null)
                    indirizzo = persona.IndirizzoResidenza;

                var rec = new CsvRecord();
                rec.Fields.Add(persona.DisplayName);
                rec.Fields.Add(indirizzo.Presso);
                rec.Fields.Add(indirizzo.GetIndirizzoCompleto());
                rec.Fields.Add(indirizzo.Localita);
                rec.Fields.Add(string.Empty);
                rec.Fields.Add(string.Empty);
                rec.Fields.Add(indirizzo.Cap);
                rec.Fields.Add(indirizzo.Comune.Descrizione);
                rec.Fields.Add(indirizzo.Comune.ProvinciaAppartenenza.Codice);
                rec.Fields.Add("F");
                csvFile.Records.Add(rec);
            }
            catch (Exception ex)
            {
                _log.Fatal("Errore inaspettato durante la creazione del file per gli indirizzi per le poste - " +
                    Library.Utility.GetMethodDescription() + " (Singola Persona) - persona:" + dto.ID, ex);
                throw;
            }
        }

        byte[] bytes;
        using (var memoryStream = new MemoryStream())
        {
            using (var writer = new CsvWriter())
            {
                writer.WriteCsv(csvFile, memoryStream);
                bytes = new byte[memoryStream.Length];
                memoryStream.Seek(0, SeekOrigin.Begin);
                memoryStream.Read(bytes, 0, bytes.Length);
                memoryStream.Close();
            }
        }
        return new ResultCreazioneFile(bytes, message);
    }
    catch (Exception ex)
    {
        _log.Fatal("Errore inaspettato durante la creazione del file per gli indirizzi per le poste - " +
            Library.Utility.GetMethodDescription(), ex);
        throw;
    }
}
public void CsvFile_PopulateFromFileWithoutHeader()
{
    CsvFile csvFile1;
    using (CsvReader reader = new CsvReader(Encoding.Default, TEST_DATA_5))
    {
        List<List<string>> records = new List<List<string>>();
        while (reader.ReadNextRecord())
            records.Add(reader.Fields);
        csvFile1 = CreateCsvFile(records[0], records[1]);
    }

    if (File.Exists(FilePath))
        File.Delete(FilePath);

    using (CsvWriter writer = new CsvWriter())
    {
        writer.WriteCsv(csvFile1, FilePath, Encoding.Default);
    }

    // Re-read the file, treating the first row as data rather than a header.
    CsvFile file = new CsvFile();
    file.Populate(FilePath, false);
    VerifyTestData5Alternative(file.Records);

    File.Delete(FilePath);
}
/// <summary>
/// Writes the Csv File
/// </summary>
/// <param name="csvFile">CsvFile</param>
/// <param name="writer">TextWriter</param>
private void WriteToStream(CsvFile csvFile, TextWriter writer)
{
    // Headers are written like any other record, first, when present.
    if (csvFile.Headers.Count > 0)
        WriteRecord(csvFile.Headers, writer);

    csvFile.Records.ForEach(record => WriteRecord(record.Fields, writer));
}
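WriteRecord itself is not shown in this file. A plausible sketch, consistent with the CRLF-terminated, comma-separated output asserted in the writer tests above (EscapeField is the hypothetical helper sketched earlier):

// Hypothetical sketch of WriteRecord, matching the expected output in the tests above.
private void WriteRecord(IList<string> fields, TextWriter writer)
{
    for (int i = 0; i < fields.Count; i++)
    {
        if (i > 0)
            writer.Write(',');
        writer.Write(EscapeField(fields[i], ReplaceCarriageReturnsAndLineFeedsFromFieldValues));
    }
    writer.Write("\r\n"); // records are CRLF-terminated per the test expectations
}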
public void CsvFile_PopulateFromString()
{
    CsvFile file = new CsvFile();
    file.Populate(true, TEST_DATA_5);
    VerifyTestData5(file.Headers, file.Records);
}
/// <summary>
/// Writes csv content to a file
/// </summary>
/// <param name="csvFile">CsvFile</param>
/// <param name="filePath">File path</param>
public void WriteCsv(CsvFile csvFile, string filePath)
{
    // Passing null falls back to Encoding.Default in the three-argument overload.
    WriteCsv(csvFile, filePath, null);
}
public void CsvWriter_WriteCsvFileObjectToFile()
{
    if (File.Exists(FilePath))
        File.Delete(FilePath);

    CsvFile csvFile = new CsvFile();
    csvFile.Populate(true, TEST_DATA_5);

    using (CsvWriter writer = new CsvWriter())
    {
        writer.WriteCsv(csvFile, FilePath);
    }

    // Round-trip: read the file back and verify it matches the original data.
    csvFile = new CsvFile();
    csvFile.Populate(FilePath, true);
    VerifyTestData5(csvFile.Headers, csvFile.Records);

    File.Delete(FilePath);
}
/// <summary>
/// Fetches the campaign stats.
/// </summary>
/// <param name="user">The AdWords user.</param>
/// <param name="startDate">The start date.</param>
/// <param name="endDate">The end date.</param>
/// <returns>A dictionary, with key as campaign ID, and value as the
/// campaign details.</returns>
public Dictionary<long, LocalCampaign> FetchCampaignStats(AdWordsUser user, string startDate, string endDate)
{
    Dictionary<long, LocalCampaign> campaigns = new Dictionary<long, LocalCampaign>();
    CsvFile csvFile = DownloadCampaignPerformanceReport(user, startDate, endDate);

    for (int i = 0; i < csvFile.Records.Count; i++)
    {
        var row = csvFile.Records[i];

        // Column layout of the downloaded campaign performance report.
        long campaignId = long.Parse(row[0]);
        string campaignName = row[1];
        long clicks = long.Parse(row[2]);
        long impressions = long.Parse(row[3]);
        long cost = long.Parse(row[4]);
        string searchBudgetLostIS = row[5];
        string contentBudgetLostIS = row[6];
        string searchIS = row[7];
        string contentIS = row[8];
        long totalBudget = long.Parse(row[9]);
        string network = row[10];
        string averageCpc = row[11];

        LocalCampaign campaign;
        if (!campaigns.TryGetValue(campaignId, out campaign))
        {
            campaigns[campaignId] = campaign = new LocalCampaign()
            {
                BudgetAmount = totalBudget,
                CampaignId = campaignId,
                CampaignName = campaignName
            };
        }

        // Report rows are split by network; record display and search stats separately.
        CampaignStat campaignStat = campaign.Stats;
        if (network == "Display Network")
        {
            campaignStat.DisplayStats.Clicks = clicks;
            campaignStat.DisplayStats.Impressions = impressions;
            campaignStat.DisplayStats.Cost = cost;
            campaignStat.DisplayStats.BudgetLostImpressionShare = NormalizeImpressionShare(contentBudgetLostIS);
            campaignStat.DisplayStats.AverageCpc = long.Parse(averageCpc);
            campaignStat.DisplayStats.ImpressionShare = NormalizeImpressionShare(contentIS);
        }
        else
        {
            campaignStat.SearchStats.Clicks = clicks;
            campaignStat.SearchStats.Impressions = impressions;
            campaignStat.SearchStats.Cost = cost;
            campaignStat.SearchStats.BudgetLostImpressionShare = NormalizeImpressionShare(searchBudgetLostIS);
            campaignStat.SearchStats.AverageCpc = long.Parse(averageCpc);
            campaignStat.SearchStats.ImpressionShare = NormalizeImpressionShare(searchIS);
        }
    }
    return campaigns;
}
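NormalizeImpressionShare is not shown here. Impression-share columns in these reports arrive as strings (e.g. "42.35%"), and may carry special markers; the exact markers and the numeric return type below are assumptions, so this is only a plausible sketch:

// Hypothetical sketch: convert an impression-share string such as "42.35%",
// "< 10%", or "--" into a number. The special values handled here are assumed,
// not taken from the report documentation.
private static double NormalizeImpressionShare(string impressionShare)
{
    string s = impressionShare.Trim().Replace("<", "").Replace(">", "").Replace("%", "").Trim();
    double value;
    return double.TryParse(s, out value) ? value : 0; // unparseable markers fall back to 0
}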
public void CsvWriter_WriteCsvFileObjectToString()
{
    CsvFile csvFile = new CsvFile();
    csvFile.Populate(true, TEST_DATA_5);

    string content = string.Empty;
    using (CsvWriter writer = new CsvWriter())
    {
        content = writer.WriteCsv(csvFile, Encoding.Default);
    }

    Assert.IsTrue(string.Compare(content, TEST_DATA_5) == 0);
}
/// <summary>
/// Checks minimum flows for Sites in the Rogue.
/// The minimum flows are based on:
/// 1) month of year
/// 2) state of system (how much water is in three reservoirs)
/// 3) interval (instant or 7-day moving average)
/// These criteria are stored in a file named after the site and parameter,
/// for example: emi_q.csv
/// </summary>
/// <returns></returns>
public override bool Check(DateTime t)
{
    this.t = t;
    string baseFileName = m_row.cbtt + "_" + m_row.pcode;
    var dir = GetPathToMinimumFlowFiles();
    var minFlowFileName = Path.Combine(dir, baseFileName + ".csv");
    string canalFileName = Path.Combine(dir, baseFileName + ".canal");

    if (File.Exists(canalFileName))
    {
        bool canalOn = IsCanalOn(canalFileName, t);
        if (!canalOn)
        {
            // If the canal is dry, minimum flows don't apply.
            Console.WriteLine("canal is dry, minimum flows don't apply at " + m_row.cbtt + " " + m_row.pcode);
            return m_clearCondition;
        }
    }

    CsvFile csv = new CsvFile(minFlowFileName);
    GetSeriesWithData(false);
    m_series.RemoveMissing();

    string month = t.ToString("MMMM");
    var state = DetermineSystemState(t);
    string msg = "State (Wet, Median, or Dry) of the Rogue system is : " + state.ToString();
    customMessageVariables.Add("%system_state", state.ToString());
    Details += "\n" + msg;
    Details += "\nThe date is : " + t.ToLongDateString();

    var rows = csv.Select("Month = '" + month + "'");
    CheckForErrors(t, minFlowFileName, csv, month, state, rows);

    object o = rows[0][state.ToString()];
    if (o == DBNull.Value)
    {
        // No criteria this month: this can clear an alarm, but won't create one.
        Details += "\nNo alert criteria this month";
        return m_clearCondition;
    }

    double limit = Convert.ToDouble(o);
    customMessageVariables.Add("%flow_target", o.ToString());
    Details += "\nThe alert criteria is value < " + limit.ToString("F0");

    foreach (var pt in m_series)
    {
        event_point = pt;
        if (pt.DateTime.ToString("MMMM") != month)
            continue; // only check the current month, since rates may change month to month

        if (m_clearCondition)
        {
            if (pt.Value > limit)
            {
                Details += "\nAlert clear condition found at :" + pt.ToString(true);
                return true; // clear
            }
        }
        else
        {
            if (pt.Value < limit)
            {
                Details += "\nAlert condition found at :" + pt.ToString(true);
                return true; // alert
            }
        }
    }
    return false;
}
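For reference, the criteria file that Check reads would need a Month column plus one column per system state (Wet, Median, Dry), since the lookup above is rows[0][state.ToString()] on a Month-filtered select. A hypothetical emi_q.csv illustrating only the layout; the flow values below are invented placeholders, and a blank cell is assumed to read back as DBNull, i.e. "no criteria this month":

Month,Wet,Median,Dry
January,1200,1000,800
July,1800,1500,1100
December,1200,,800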
private CsvFile CreateCsvFileFromDataTable(DataTable table)
{
    CsvFile file = new CsvFile();

    foreach (DataColumn column in table.Columns)
        file.Headers.Add(column.ColumnName);

    foreach (DataRow row in table.Rows)
    {
        CsvRecord record = new CsvRecord();
        foreach (object o in row.ItemArray)
        {
            if (o is DateTime)
                // "HH" is the 24-hour clock; the original "hh" folded afternoon
                // times into 1-12 with no AM/PM marker, making them ambiguous.
                record.Fields.Add(((DateTime)o).ToString("yyyy-MM-dd HH:mm:ss"));
            else
                record.Fields.Add(o.ToString());
        }
        file.Records.Add(record);
    }
    return file;
}
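A short usage sketch, assuming the CsvWriter overloads shown elsewhere in this file:

// Convert a DataTable to a CsvFile and write it to disk.
var table = new DataTable("People");
table.Columns.Add("Name", typeof(string));
table.Columns.Add("Joined", typeof(DateTime));
table.Rows.Add("Alice", new DateTime(2020, 5, 1, 14, 30, 0));

CsvFile csv = CreateCsvFileFromDataTable(table);
using (var writer = new CsvWriter())
{
    writer.WriteCsv(csv, "people.csv");
}
// people.csv contains: Name,Joined\r\nAlice,2020-05-01 14:30:00\r\n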
private void Act(CsvFile csvFile) { _sut.Write(_file, csvFile); }