/// <summary>
/// Loads the MCF csv files into a DataSet, then writes the combined data
/// as mcf.xml and the XSD schema as mcf.xsd in the same directory.
/// </summary>
/// <param name="path">directory containing the MCF csv files (one per table name)</param>
public static void CreateMcfDataSet(string path)
{
    DataSet ds = new DataSet("McfDataSet");
    foreach (var item in tableNames)
    {
        string fn = Path.Combine(path, item + ".csv");
        var csv = new CsvFile(fn);
        csv.TableName = item + "mcf";
        ds.Tables.Add(csv);
    }
    ds.WriteXml(Path.Combine(path, "mcf.xml"));
    // BUG FIX: the schema was previously written to "mcf.xml" as well,
    // clobbering the data file written on the line above.
    ds.WriteXmlSchema(Path.Combine(path, "mcf.xsd"));
}
/// <summary>
/// For each point in s1, looks up the matching timestamp in s2 and runs
/// 2D interpolation against the lookup table in the given csv file.
/// Points with no matching timestamp in s2 are marked missing.
/// Returns an empty series if either input is empty or the file is absent.
/// </summary>
public static Series FileLookupInterpolate2D(Series s1, Series s2, string fileName)
{
    var result = new Series();

    if (s1.IsEmpty || s2.IsEmpty)
    {
        Logger.WriteLine("FileLookupInterpolate2D - input series empty");
        return result;
    }
    if (!File.Exists(fileName))
    {
        Logger.WriteLine("FileLookupInterpolate2D - input fileName, file not found");
        return result;
    }

    CsvFile lookupTable = new CsvFile(fileName, CsvFile.FieldTypes.AllText);
    foreach (var pt in s1)
    {
        Point p = pt;
        var matchIndex = s2.IndexOf(pt.DateTime);
        p.Value = matchIndex < 0
            ? Point.MissingValueFlag
            : Interpoloate2D(lookupTable, pt.Value, s2[matchIndex].Value);
        result.Add(p);
    }
    return result;
}
/// <summary>
/// Round-trips a DataTable containing embedded commas and quotes through
/// WriteToCSV, then verifies CsvFile reads back identical column names
/// and cell values.
/// </summary>
public void CSVFile()
{
    DataTable expected = new DataTable();
    expected.Columns.Add("col, 1");// comma embedded in the column name on purpose
    expected.Columns.Add("col2");
    expected.Columns.Add("col3");

    DataRow row = expected.NewRow();
    row[0] = "abc";
    row[1] = "\"hi there,";
    row[2] = "this, or that'";
    expected.Rows.Add(row);

    string fn = FileUtility.GetTempPath() + "\\csvtest.csv";
    CsvFile.WriteToCSV(expected, fn, true);

    CsvFile actual = new CsvFile(fn);
    Assert.AreEqual(1, actual.Rows.Count, "row count");
    Assert.AreEqual(3, actual.Columns.Count, "column count");
    for (int c = 0; c < 3; c++)
    {
        Assert.AreEqual(expected.Columns[c].ColumnName, actual.Columns[c].ColumnName);
        Assert.AreEqual(row[c].ToString(), actual.Rows[0][c].ToString());
    }
}
/// <summary>
/// Imports USACE data for the sites listed in a csv catalog.
/// Usage: GetUsace site_list.csv hourly|daily [dump.pdb]
/// The optional third argument creates a test pisces (SQLite) database
/// for comparison to hydromet.
/// </summary>
static void Main(string[] args)
{
    Logger.EnableLogger();
    if (args.Length < 2 || args.Length > 3)
    {
        // fixed help-text typos: "houly" -> "hourly"; the optional file was
        // described inconsistently as both dump.pdb and dump.db.
        Console.WriteLine("Usage: GetUsace site_list.csv hourly|daily [dump.pdb] ");
        Console.WriteLine("Where: site_list.csv is a catalog of sites to import");
        Console.WriteLine("       hourly or daily data");
        Console.WriteLine("       dump.pdb creates a test pisces database for comparison to hydromet");
        return;
    }
    FileUtility.CleanTempPath();
    CsvFile csv = new CsvFile(args[0]);
    // expected csv layout:
    // interval,filename,cbtt,pcode,header1,header2,header3,header4,header5
    // instant,gcl_h.dat,GCL,FB,Forebay,(ft),,,
    // instant,gcl_h.dat,GCL,TW,Tailwatr,(ft),,,
    // instant,gcl_h.dat,GCL,QE,Generatn,Flow,(kcfs),,
    TimeSeriesDatabase db = null;
    if (args.Length == 3)
    {
        SQLiteServer svr = new SQLiteServer(args[2]);
        db = new TimeSeriesDatabase(svr);
    }
    // process only the rows matching the requested interval
    var rows = csv.Select("interval = '" + args[1] + "'");
    var interval = TimeInterval.Daily;
    if (args[1] == "hourly")
        interval = TimeInterval.Hourly;
    Console.WriteLine("Processing " + rows.Length + " parameters");
    for (int i = 0; i < rows.Length; i++)
    {
        var url = rows[i]["url"].ToString();
        var cbtt = rows[i]["cbtt"].ToString();
        var pcode = rows[i]["pcode"].ToString();
        string[] headers = GetHeaders(rows[i]);
        var soffset = rows[i]["offset"].ToString();
        double offset = 0; // blank offset column means no offset
        if (soffset.Trim() != "")
        {
            offset = double.Parse(soffset);
        }
        var s = ProcessFile(url, interval, cbtt, pcode, offset, true, headers);
        if (db != null)
        {
            SaveToDatabase(args, db, cbtt, pcode, s);
        }
    }
}
/// <summary>
/// Reads a csv file with "File Name" and "Text" columns and synthesizes
/// one wav file per row into an "output" subdirectory beside the input csv.
/// Usage: CreateWavFiles.exe file.csv
/// </summary>
static void Main(string[] args)
{
    if (args.Length != 1)
    {
        Console.WriteLine("Usage: CreateWavFiles.exe file.csv");
        return;
    }
    var dir = Path.GetDirectoryName(args[0]);
    CsvFile csv = new CsvFile(args[0]);
    dir = Path.Combine(dir, "output");
    // BUG FIX: SetOutputToWaveFile fails when the output directory does not
    // exist; CreateDirectory is a no-op when it already does.
    Directory.CreateDirectory(dir);

    // Initialize a new instance of the SpeechSynthesizer.
    using (SpeechSynthesizer synth = new SpeechSynthesizer())
    {
        Console.WriteLine("using voice :" + synth.Voice.Name);
        foreach (var item in synth.GetInstalledVoices())
        {
            Console.WriteLine(item.VoiceInfo.Name);
        }
        for (int i = 0; i < csv.Rows.Count; i++)
        {
            // Set a value for the speaking rate. Slower to Faster (-10 to 10)
            synth.Rate = -3;
            // Send synthesized audio to a wav file named by the csv row.
            string outputWavFileName = Path.Combine(dir, csv.Rows[i]["File Name"].ToString());
            Console.WriteLine(outputWavFileName);
            synth.SetOutputToWaveFile(outputWavFileName,
                new SpeechAudioFormatInfo(8000, AudioBitsPerSample.Sixteen, AudioChannel.Mono));
            // Build a prompt from the row's text and speak it into the file.
            PromptBuilder builder = new PromptBuilder();
            builder.AppendText(csv.Rows[i]["Text"].ToString());
            synth.Speak(builder);
        }
    }
    Console.WriteLine();
    Console.WriteLine("Press any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Checks Current Rating Table files against the USGS website.
/// Copies new USGS Rating Table if an update was made and moves the old Rating Table into an '_Attic' in the current repository
/// before regenerating new HJ and Q tables
/// </summary>
/// <param name="configFile">csv config file; each row must contain a "cbtt" column</param>
/// <param name="generateNewTables">Override to regenerate HJ and Q tables from current USGS website rating tables</param>
public static void UpdateRatingTables(string configFile, bool generateNewTables = false)
{
    Console.WriteLine("reading " + configFile);
    var inputText = new CsvFile(configFile, CsvFile.FieldTypes.AllText);
    var stationUpdateList = new List<string>();   // descriptions of tables that were updated
    var attachments = new List<string>();         // files to attach to the notification e-mail
    var errorMessages = new List<string>();       // per-station failures, reported at the end
    List<string> attachmentRecipientList = new List<string>(); // additional recipients for USGS attachments
    var cbtt = ""; // declared outside the loop so the catch block can report the failing station
    // Loop through each row in the input text file
    for (int k = 0; k < inputText.Rows.Count; k++)
    {
        try
        {
            var dRow = inputText.Rows[k];
            cbtt = dRow["cbtt"].ToString();
            string attachmentRecipients = "";
            UpdatesingleRatingTable(dRow, generateNewTables,stationUpdateList, attachments, out attachmentRecipients);
            // collect distinct extra recipients requested by individual stations
            if (attachmentRecipients != "" && !attachmentRecipientList.Contains(attachmentRecipients) )
                attachmentRecipientList.Add(attachmentRecipients);
        }
        catch (Exception e)
        {
            // a failure on one station must not stop processing the rest
            string msg = "Error: processing table "+cbtt+" \n"+ e.Message;
            errorMessages.Add(msg);
            Console.WriteLine(msg);
        }
    }
    // Send out e-mail notifications
    string subject = "Daily Rating Table Update Results " + DateTime.Now.ToString("MM-dd-yyyy");
    // only e-mail when something actually happened (updates or errors)
    if (stationUpdateList.Count > 0 || errorMessages.Count >0)
    {
        var emailMsg = "Daily Rating Table Update Results " + DateTime.Now.ToString("MM-dd-yyyy") + ": " + stationUpdateList.Count + " rating tables were updated.<br>";
        foreach (var item in stationUpdateList)
        {
            emailMsg += item + "<br>";
        }
        foreach (var item in errorMessages)
        {
            emailMsg += item + "<br>";
        }
        SendEmail(subject, emailMsg, attachments, attachmentRecipientList);
    }
}
/// <summary>
/// Builds a strongly typed McfDataSet by merging each MCF csv file
/// (one per table name) into it. Missing files are logged and skipped;
/// constraint violations are reported per table without aborting the load.
/// </summary>
/// <returns>the populated McfDataSet</returns>
public static McfDataSet GetDataSetFromCsvFiles( )
{
    Performance perf = new Performance();
    var dataSet = new McfDataSet();

    foreach (var name in tableNames)
    {
        var fileName = FileUtility.GetFileReference(name + ".csv");
        if (!File.Exists(fileName))
        {
            Logger.WriteLine("Error: file missing '" + fileName + "'");
            continue;
        }

        var table = new CsvFile(fileName);
        if (name == "site")
            FixLatLong(table); // patch null LAT/LONG values before merging

        table.TableName = name + "mcf";
        try
        {
            dataSet.Merge(table, true, MissingSchemaAction.Ignore);
        }
        catch (ConstraintException ce)
        {
            Console.WriteLine("Error in table " + name + "\n " + ce.Message);
            PrintTableErrors(table);
        }
    }
    return dataSet;
}
/// <summary>
/// Scans YakimaOthersAboveParker.csv for the row whose month/day matches t1
/// and computes (col1+col2)*wesw + (col3+col4+col5)*nscw from that row.
/// The result is never less than 200 (also the default when no row matches).
/// </summary>
private static double GetValue(DateTime t1, double wesw, double nscw)
{
    double result = 200.0; // default / floor value
    var csvPath = Path.Combine(FileUtility.GetExecutableDirectory(), "YakimaOthersAboveParker.csv");
    CsvFile lookup = new CsvFile(csvPath, CsvFile.FieldTypes.AutoDetect);

    foreach (DataRow row in lookup.Rows)
    {
        var rowDate = Convert.ToDateTime(row["DateTime"]);
        if (rowDate.Month != t1.Month || rowDate.Day != t1.Day)
            continue;

        // columns 1-2 scale wesw; columns 3-5 scale nscw
        double weswFactor = Convert.ToDouble(row[1]) + Convert.ToDouble(row[2]);
        double nscwFactor = Convert.ToDouble(row[3]) + Convert.ToDouble(row[4]) + Convert.ToDouble(row[5]);
        result = weswFactor * wesw + nscwFactor * nscw;
        // NOTE: intentionally no break — if multiple rows share a month/day,
        // the last match wins, matching the original behavior.
    }
    return System.Math.Max(200, result);
}
/// <summary>
/// Replaces null LAT/LONG values in the site table with "0" so that
/// downstream processing does not encounter missing coordinates.
/// </summary>
/// <param name="csv">site table containing LAT and LONG columns</param>
private static void FixLatLong(CsvFile csv)
{
    string[] coordinateColumns = { "LAT", "LONG" };
    foreach (DataRow row in csv.Rows)
    {
        foreach (var column in coordinateColumns)
        {
            if (row[column] == DBNull.Value)
                row[column] = "0";
        }
    }
}
/// <summary>
/// reads USGS instant streamflow data and saves in Hydromet format
/// </summary>
/// <param name="args">args[0] = site_list.csv catalog; args[1] = hours back to read</param>
static void Main(string[] args)
{
    if (args.Length != 2)
    {
        Console.WriteLine("Usage: ImportUsgs site_list.csv hoursBack");
        return;
    }
    int hoursBack = Convert.ToInt32(args[1]);
    Point.MissingValueFlag = 998877; // sentinel used by RemoveMissing() below
    bool errors = false;
    CsvFile csv = new CsvFile(args[0], CsvFile.FieldTypes.AllText);
    foreach (DataRow row in csv.Rows)
    {
        var interval = GetInterval(row);
        string site_id = row["site_id"].ToString();
        string usgs_parameter = row["usgs_parameter"].ToString();
        string pcode = row["pcode"].ToString();
        string cbtt = row["cbtt"].ToString();
        Series s;
        Console.WriteLine(cbtt);
        if (interval == TimeInterval.Daily)
        {
            // NOTE(review): "gageheight" is mapped to DailyMeanDischarge here,
            // which looks inconsistent with the irregular branch below where
            // gageheight maps to GageHeight — confirm the intended mapping.
            if (usgs_parameter.ToLower() == "gageheight")
                s = new UsgsDailyValueSeries(site_id, UsgsDailyParameter.DailyMeanDischarge);
            else
            {
                Logger.WriteLine("Error: the parameter '"+usgs_parameter+"' is not yet supported");
                s = new Series();
            }
            s.Read(DateTime.Now.AddHours(-hoursBack), DateTime.Now);
            TimeSeriesRouting.RouteDaily(s, cbtt, pcode, RouteOptions.Outgoing);
        }
        else if( interval == TimeInterval.Irregular)
        {
            if (usgs_parameter.ToLower() == "watertemp")
                s = new UsgsRealTimeSeries(site_id, UsgsRealTimeParameter.Temperature);
            else if (usgs_parameter.ToLower() == "gageheight")
                s = new UsgsRealTimeSeries(site_id, UsgsRealTimeParameter.GageHeight);
            else if (usgs_parameter.ToLower() == "discharge")
                s = new UsgsRealTimeSeries(site_id, UsgsRealTimeParameter.Discharge);
            else
            {
                Logger.WriteLine("Error: the parameter '" + usgs_parameter + "' is not yet supported");
                s = new Series();
            }
            try
            {
                s.Read(DateTime.Now.AddHours(-hoursBack), DateTime.Now);
                // "wf" pcode expects Fahrenheit; USGS temperature is Celsius
                if (usgs_parameter.ToLower() == "watertemp" && pcode.ToLower() == "wf")
                {
                    //(°C × 9/5) + 32 = °F
                    s = s * 9.0 / 5.0 + 32.0;
                }
                s.RemoveMissing();
                if( s.Count >0)
                    TimeSeriesRouting.RouteInstant(s, cbtt, pcode, RouteOptions.Outgoing);
            }
            catch(Exception e)
            {
                // remember the failure but keep processing remaining sites
                errors = true;
                Console.WriteLine(e.Message);
            }
        }
    }
    // fail the whole run (non-zero exit) if any site failed
    if (errors)
        throw new Exception("Error reading one or more sites");
}
/// <summary>
/// Appends the rows of a csv file to an existing strongly typed DataTable,
/// parsing each csv column as the corresponding DataTable column's type.
/// </summary>
/// <param name="filename">path of the csv file to read</param>
/// <param name="table">destination table whose schema drives the parse</param>
public static void ReadIntoTable(string filename, DataTable table)
{
    string[] dataTypes = table.Columns
        .Cast<DataColumn>()
        .Select(column => column.DataType.ToString())
        .ToArray();

    CsvFile csv = new CsvFile(filename, dataTypes);
    foreach (DataRow source in csv.Rows)
    {
        var destination = table.NewRow();
        destination.ItemArray = source.ItemArray;
        table.Rows.Add(destination);
    }
}
/// <summary>
/// Saves contents of DataTable to a comma separated file, preserving the
/// sort order of the table's DefaultView. String cells are always quoted;
/// embedded quotes are doubled so the output remains parseable csv.
/// </summary>
/// <param name="table">table to write (no-op when null)</param>
/// <param name="filename">destination csv file (overwritten)</param>
/// <param name="WriteSchema">when true, a second header row of column data types is written</param>
/// <param name="ConsoleOutput">when true, the csv is echoed to the console</param>
public static void Write(DataTable table, string filename, bool WriteSchema, bool ConsoleOutput)
{
    if (table == null)
    {
        return;
    }
    DataView view = table.DefaultView; // DefaultView preserves any sort applied to the table
    // using: the file is closed even when a write throws
    using (StreamWriter sr = new StreamWriter(filename, false))
    {
        int sz = view.Count;
        int cols = view.Table.Columns.Count;
        bool[] IsStringColumn = new bool[cols];

        // header row of encoded column names (console echo stays unencoded,
        // matching the original behavior)
        for (int c = 0; c < cols; c++)
        {
            string sep = c < cols - 1 ? "," : Environment.NewLine;
            sr.Write(CsvFile.EncodeCSVCell(table.Columns[c].ColumnName.Trim()) + sep);
            if (ConsoleOutput)
            {
                Console.Write(table.Columns[c].ColumnName.Trim() + sep);
            }
            IsStringColumn[c] = table.Columns[c].DataType == typeof(string);
        }

        // optional second row of column data types
        if (WriteSchema && cols > 0)
        {
            for (int c = 0; c < cols; c++)
            {
                sr.Write(table.Columns[c].DataType);
                if (c < cols - 1) sr.Write(",");
                if (ConsoleOutput)
                {
                    Console.Write(table.Columns[c].DataType);
                    if (c < cols - 1) Console.Write(",");
                }
            }
            sr.WriteLine();
            if (ConsoleOutput) Console.WriteLine();
        }

        // data rows
        for (int r = 0; r < sz; r++)
        {
            for (int c = 0; c < cols; c++)
            {
                string cell;
                if (IsStringColumn[c])
                {
                    // BUG FIX: double any embedded quotes; previously a cell
                    // containing '"' produced unparseable csv (see TO DO above).
                    cell = "\"" + view[r][c].ToString().Replace("\"", "\"\"") + "\"";
                }
                else
                {
                    cell = view[r][c].ToString();
                }
                sr.Write(cell);
                if (ConsoleOutput) Console.Write(cell);
                if (c < cols - 1)
                {
                    sr.Write(",");
                    if (ConsoleOutput) Console.Write(",");
                }
            }
            sr.WriteLine();
            if (ConsoleOutput) Console.WriteLine();
        }
    }
}
/// <summary>
/// Writes the CsvFile to disk; width/decimal size info is included in the
/// data-type row only when every column has a nonzero width.
/// </summary>
public static void Write(CsvFile table, string filename)
{
    bool sizeInfo = Array.TrueForAll(table.widths, w => w != 0);
    Write(table, filename, sizeInfo);
}
/// <summary>
/// Reads Raw RBMS Text file. Adds DateTime and Value Columns
/// Accepts 4-column rows (combined date/time) or 5-column rows
/// (separate Date and Time columns).
/// </summary>
/// <param name="filename">path to the RBMS csv file</param>
/// <returns>DataTable with combined DateTime and Value columns</returns>
/// <exception cref="Exception">when the file is empty or not 4/5 columns wide</exception>
private static DataTable ReadRBMSFile(string filename)
{
    TextFile tf = new TextFile(filename);
    int fieldCount = 0;
    if (tf.FileData.Length > 0)
    {
        // column count is determined from the first line only
        fieldCount = tf[0].Split(',').Length;
        if (fieldCount < 4 || fieldCount > 5)
        {
            // BUG FIX: added the missing space before "is" in the message
            throw new Exception("Error: " + filename + " is not a valid RBMS File. It should have 4 or 5 columns of data");
        }
    }
    else
    {
        // BUG FIX: corrected grammar ("was not data" -> "was no data")
        throw new Exception("Error: There was no data in the input file");
    }
    tf = null; // only the field count was needed from the TextFile

    CsvFile raw;
    if (fieldCount == 5)
    {
        // separate Date and Time columns, e.g.:
        // DH-438-RS , 1,01-apr-2000,00:00:54 , 870.290
        string[] dataTypes = new string[] { "String", "String", "String", "String", "String" };
        string[] fieldNames = new string[] { "DH", "Riser", "Date", "Time", "Measured" };
        raw = new CsvFile(filename, dataTypes, fieldNames);
    }
    else
    {
        // combined date/time column, e.g.:
        // DH-259-RS , 1,01-apr-1991 00:00:32 , 866.960
        string[] dataTypes = new string[] { "String", "String", "String", "String" };
        string[] fieldNames = new string[] { "DH", "Riser", "Date", "Measured" };
        raw = new CsvFile(filename, dataTypes, fieldNames);
    }
    return CreateDateValueTableForDatabase(raw);
}
/// <summary>
/// Saves contents of CsvFile to a comma separated file.
/// Row 1 holds column names, row 2 holds each column's data type
/// (optionally suffixed ":width.decimals"), and the remaining rows hold
/// the data with string cells wrapped in quotes.
/// </summary>
/// <param name="table">CsvFile to write</param>
/// <param name="filename">destination file (overwritten)</param>
/// <param name="sizeInfo">when true, width/decimal info is appended to each type</param>
private static void Write(CsvFile table, string filename, bool sizeInfo)
{
    if (debugOutput)
    {
        Console.Write("Writing to " + filename);
    }
    int sz = table.Rows.Count;
    int cols = table.Columns.Count;
    bool[] IsStringColumn = new bool[cols];
    // using: the file handle is released even when a write fails
    using (StreamWriter sr = new StreamWriter(filename, false))
    {
        // BUG FIX: guard the header/type rows; the old code indexed
        // table.Columns[c] after the loop and threw on a zero-column table.
        if (cols > 0)
        {
            for (int c = 0; c < cols; c++)
            {
                sr.Write(table.Columns[c].ColumnName.Trim());
                if (c < cols - 1)
                {
                    sr.Write(",");
                }
                if (table.Columns[c].DataType.ToString() == "System.String")
                {
                    IsStringColumn[c] = true;
                }
            }
            sr.WriteLine();
            for (int c = 0; c < cols; c++)
            {
                sr.Write(table.Columns[c].DataType);
                if (sizeInfo)
                {
                    sr.Write(":" + table.widths[c] + "." + table.decimals[c]);
                }
                if (c < cols - 1)
                {
                    sr.Write(",");
                }
            }
            sr.WriteLine();
        }
        for (int r = 0; r < sz; r++)
        {
            for (int c = 0; c < cols; c++)
            {
                if (IsStringColumn[c])
                {
                    sr.Write("\"" + table.Rows[r][c] + "\"");
                }
                else
                {
                    sr.Write(table.Rows[r][c]);
                }
                if (c < cols - 1)
                {
                    sr.Write(",");
                }
            }
            sr.WriteLine();
        }
    }
    if (debugOutput)
    {
        // BUG FIX: " done." used to print unconditionally, producing a stray
        // line even when debugOutput was off.
        Console.WriteLine(" done.");
    }
}
/// <summary>
/// Parses this file's csv lines into the table.
/// </summary>
/// <param name="dataTypes">System type name per column (e.g. "System.Int32")</param>
/// <param name="fieldNames">column names; must be the same length as dataTypes</param>
/// <param name="lineIndexToBeginRead">index of the first data line</param>
/// <param name="pattern">optional regex; non-matching lines are skipped ("" disables)</param>
private void ParseFile(string[] dataTypes, string[] fieldNames, int lineIndexToBeginRead, string pattern)
{
    if (debugOutput)
    {
        Console.WriteLine("reading " + this.Filename);
    }
    if (lines.Length <= 0 || lineIndexToBeginRead >= lines.Length)
    {
        return; // nothing to read
    }
    DataRow myDataRow;
    widths = new int[dataTypes.Length];
    decimals = new int[dataTypes.Length];
    // BUG FIX: the assert message used to read lines[1] unconditionally,
    // which threw on single-line files (assert arguments are always evaluated).
    Debug.Assert(fieldNames.Length == dataTypes.Length, "Error: in " + "'" + Filename + "'",
        "line 1:" + lines[0] + (lines.Length > 1 ? "\n" + "line 2:" + lines[1] : ""));
    int patternMatchFailureCount = 0;
    CreateColumns(dataTypes, fieldNames);
    int col = 0;
    int row = lineIndexToBeginRead;
    DataRowIO io = new DataRowIO(true);
    try
    {
        for (; row < lines.Length; row++)
        {
            if (lines[row].Trim() == "")
            {
                continue; // skip blank lines
            }
            if (pattern != "" && !Regex.IsMatch(lines[row], pattern))
            {
                patternMatchFailureCount++;
                continue;
            }
            string[] val = CsvFile.ParseCSV(lines[row]);
            myDataRow = this.NewRow();
            for (col = 0; col < fieldNames.Length; col++)
            {
                if (col >= val.Length)
                {
                    // short line: remaining columns become null
                    myDataRow[col] = DBNull.Value;
                    continue;
                }
                switch (dataTypes[col])
                {
                    // all integer flavors share the same save path
                    case "System.Int32":
                    case "System.Int16":
                    case "System.Integer":
                        io.SaveInt(myDataRow, fieldNames[col], val[col]);
                        break;
                    case "System.Double":
                        io.SaveDouble(myDataRow, fieldNames[col], val[col]);
                        break;
                    case "System.Single":
                        io.SaveFloat(myDataRow, fieldNames[col], val[col]);
                        break;
                    case "System.String":
                        myDataRow[col] = val[col];
                        break;
                    case "System.DateTime":
                        myDataRow[col] = Convert.ToDateTime(val[col]);
                        break;
                    case "System.Boolean":
                        // accept "", "0", "1", or anything Convert.ToBoolean handles
                        if (val[col].ToString() == "")
                        {
                            myDataRow[col] = DBNull.Value;
                        }
                        else if (val[col].ToString() == "0")
                        {
                            myDataRow[col] = false;
                        }
                        else if (val[col].ToString() == "1")
                        {
                            myDataRow[col] = true;
                        }
                        else
                        {
                            myDataRow[col] = Convert.ToBoolean(val[col]);
                        }
                        break;
                    default:
                        Console.WriteLine("Invalid selection : '" + dataTypes[col] + "'");
                        break;
                }
            }
            this.Rows.Add(myDataRow);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        // BUG FIX: the condition was inverted (lines.Length < row), which
        // would have indexed past the end of the array; only print the
        // offending line when row is a valid index.
        if (row < lines.Length)
        {
            Console.WriteLine("'" + lines[row] + "'");
        }
        throw new Exception("Error reading data at row " + row + " and column " + col + " file name = " + this.Filename);
    }
    if (patternMatchFailureCount > 0)
    {
        Logger.WriteLine(patternMatchFailureCount + " lines skipped that did not match pattern " + pattern);
    }
    if (io.Messages.Trim().Length > 0)
    {
        Logger.WriteLine(io.Messages);
    }
}
/// <summary>
/// Saves contents of CsvFile to a comma separated file.
/// The first row holds column names, the second holds column data types
/// (optionally suffixed ":width.decimals"), and the remaining rows hold
/// the data with string cells wrapped in quotes.
/// </summary>
/// <param name="table">CsvFile to write</param>
/// <param name="filename">destination file (overwritten)</param>
/// <param name="sizeInfo">when true, width/decimal info is appended to each type</param>
private static void Write(CsvFile table, string filename, bool sizeInfo)
{
    if (debugOutput)
        Console.Write("Writing to " + filename);
    // using: the stream is closed even when a write throws
    using (StreamWriter writer = new StreamWriter(filename, false))
    {
        int rowCount = table.Rows.Count;
        int columnCount = table.Columns.Count;
        bool[] quoted = new bool[columnCount];
        // BUG FIX: previously the type row indexed table.Columns[c] after the
        // loop and threw on a zero-column table.
        if (columnCount > 0)
        {
            for (int c = 0; c < columnCount; c++)
            {
                quoted[c] = table.Columns[c].DataType.ToString() == "System.String";
                writer.Write(table.Columns[c].ColumnName.Trim());
                if (c < columnCount - 1)
                    writer.Write(",");
            }
            writer.WriteLine();
            for (int c = 0; c < columnCount; c++)
            {
                writer.Write(table.Columns[c].DataType);
                if (sizeInfo)
                    writer.Write(":" + table.widths[c] + "." + table.decimals[c]);
                if (c < columnCount - 1)
                    writer.Write(",");
            }
            writer.WriteLine();
        }
        for (int r = 0; r < rowCount; r++)
        {
            for (int c = 0; c < columnCount; c++)
            {
                if (quoted[c])
                    writer.Write("\"" + table.Rows[r][c] + "\"");
                else
                    writer.Write(table.Rows[r][c]);
                if (c < columnCount - 1)
                    writer.Write(",");
            }
            writer.WriteLine();
        }
    }
    if (debugOutput)
        Console.WriteLine(" done."); // BUG FIX: was printed even when debugOutput was false
}
/// <summary>
/// update or insert new records in sitecatlog
/// </summary>
/// <param name="db">target time series database whose site catalog is updated</param>
/// <param name="csvFileName">input csv sitecatalog</param>
private static void UpdateSiteCatalog(TimeSeriesDatabase db, string csvFileName)
{
    var scRWIS = db.GetSiteCatalog();
    var scLocal = new CsvFile(csvFileName, CsvFile.FieldTypes.AllText);
    for (int i = 0; i < scLocal.Rows.Count; i++)
    {
        var localRow = scLocal.Rows[i];
        var siteID = localRow["siteid"].ToString();
        //check for existing record
        // NOTE(review): siteID is concatenated into the filter expression;
        // a siteid containing a quote would break the Select — confirm inputs.
        var rwisRows = scRWIS.Select("siteid='" + siteID + "'");
        if( rwisRows.Length == 1)
        {// update
            Console.WriteLine("updating record: " + siteID);
            var rwisRow = rwisRows[0];
            // copy every column except the key
            for (int c = 0; c < scRWIS.Columns.Count; c++)
            {
                var cn = scRWIS.Columns[c].ColumnName;
                if (cn == "siteid")
                    continue;
                if ( cn == "vertical_accuracy") // float
                {
                    // TryParse: unparseable or blank text becomes 0 rather than throwing
                    double x = 0;
                    double.TryParse(localRow[cn].ToString(), out x);
                    rwisRow[cn] = x;
                    continue;
                }
                rwisRow[cn] = localRow[cn];
            }
        }
        else
        {// insert
            Console.WriteLine("new record: "+siteID);
            // numeric fields default to 0 when blank/unparseable
            double vertical_accuracy = 0;
            double latitude = 0;
            double longitude = 0;
            double elevation = 0;
            double.TryParse(localRow["vertical_accuracy"].ToString(),out vertical_accuracy);
            double.TryParse(localRow["latitude"].ToString(), out latitude);
            double.TryParse(localRow["longitude"].ToString(), out longitude);
            double.TryParse(localRow["elevation"].ToString(), out elevation);
            scRWIS.AddsitecatalogRow(localRow["siteid"].ToString(),
                localRow["description"].ToString(),
                localRow["state"].ToString(), latitude, longitude, elevation,
                localRow["timezone"].ToString(), localRow["install"].ToString(),
                localRow["horizontal_datum"].ToString(), localRow["vertical_datum"].ToString(),
                vertical_accuracy, localRow["elevation_method"].ToString(),
                localRow["tz_offset"].ToString(), localRow["active_flag"].ToString(),
                localRow["type"].ToString(), localRow["responsibility"].ToString(),
                localRow["agency_region"].ToString());
        }
    }
    // persist all inserts/updates in a single save
    db.Server.SaveTable(scRWIS);
}
/// <summary>
/// Verifies CsvFile auto-detects column types: mixed text -> String,
/// numeric with decimals -> Double, TRUE -> Boolean, integers -> Int32.
/// </summary>
public void EstimateColumnTypes()
{
    string content = "txt, notint, double,bool,int\n" +
                     "text,12, 22.4,TRUE,1\n" +
                     "text2,1.2, 22.4,TRUE,2";
    string filename = Path.GetTempFileName();
    try
    {
        File.WriteAllText(filename, content);
        CsvFile db = new CsvFile(filename);
        Assert.AreEqual("System.String", db.Columns[0].DataType.ToString(), "col1 ");
        Assert.AreEqual("System.Double", db.Columns[1].DataType.ToString(), "col2 ");
        Assert.AreEqual("System.Double", db.Columns[2].DataType.ToString(), "col3 ");
        Assert.AreEqual("System.Boolean", db.Columns[3].DataType.ToString(), "col4 ");
        Assert.AreEqual("System.Int32", db.Columns[4].DataType.ToString(), "col5 ");
        Assert.AreEqual(2, db.Rows.Count, "row count");
    }
    finally
    {
        // BUG FIX: GetTempFileName creates the file on disk; the test
        // previously leaked one temp file per run.
        File.Delete(filename);
    }
}
/// <summary>
/// Imports site catalog from CSV file
/// requies same format as created from Export command
/// This is called on newly created 'empty' database.
/// </summary>
/// <param name="filename">seriescatalog csv created by the Export command</param>
/// <param name="importSeriesData">when true, also loads each series' data from a
/// csv named after its TableName in the same directory as <paramref name="filename"/></param>
public void ImportCsvDump(string filename, bool importSeriesData)
{
    // drop any existing series data tables, then clear both catalogs
    var sc = GetSeriesCatalog("isfolder = 0");
    var tableNames = m_server.TableNames();
    foreach (var item in sc)
    {
        //Server.TableExists(item.TableName);
        if(Array.IndexOf(tableNames, item.TableName )>=0)
            DropTable(item.TableName);
    }
    m_server.RunSqlCommand("delete from seriescatalog");
    m_server.RunSqlCommand("delete from sitecatalog");
    string dir = Path.GetDirectoryName(filename);
    sc = GetSeriesCatalog();
    CsvFile oldCatalog = new CsvFile(filename);
    //sc.Constraints.Add("pk_sdi", sc.Columns["id"], true);
    // mapping from legacy csv column names to the current schema
    // (e.g. "sitedatatypeid" in an old export becomes "id")
    string[] oldColumnNames = { "sitedatatypeid", "sitename", "source" };
    string[] newColumnName = { "id", "siteid", "iconname" };
    for (int i = 0; i < oldCatalog.Rows.Count; i++)
    {
        var newRow = sc.NewRow();
        for (int c = 0; c < sc.Columns.Count; c++)
        {
            string new_cn = sc.Columns[c].ColumnName;
            string old_cn = new_cn;
            int idx = oldCatalog.Columns.IndexOf(new_cn);
            if (idx < 0) // look for mapping
            {
                idx = Array.IndexOf(newColumnName, new_cn);
                if (idx >= 0)
                {
                    old_cn = oldColumnNames[idx];
                }
                else
                {// skip this column
                    continue;
                }
            }
            newRow[new_cn] = oldCatalog.Rows[i][old_cn];
        }
        sc.Rows.Add(newRow);
    }
    m_server.SaveTable(sc);
    sc = GetSeriesCatalog(); // re-read so row state reflects the saved catalog
    if (importSeriesData)
    {
        for (int i = 0; i < sc.Rows.Count; i++)
        {
            object o = sc.Rows[i]["TableName"];
            if (o != DBNull.Value && o.ToString().Trim().Length > 0)
            {
                // check if file exists (same as table name with .csv extension)
                string fn = Path.Combine(dir, Path.ChangeExtension(o.ToString().Trim(), ".csv"));
                if (File.Exists(fn))
                {
                    // read file into datatable.
                    CsvFile tbl = new CsvFile(fn);
                    if (tbl.Rows.Count == 0)
                        continue;
                    //tbl.TableName = GetUniqueTableName(Path.GetFileNameWithoutExtension(fn));
                    tbl.TableName = SafeTableName(Path.GetFileNameWithoutExtension(fn));
                    sc.Rows[i]["TableName"] = tbl.TableName;
                    // only 2- or 3-column tables are imported — presumably
                    // (datetime,value) or (datetime,value,flag); TODO confirm
                    if (tbl.Columns.Count == 2 || tbl.Columns.Count == 3)
                    {
                        // create/save
                        var sr = GetNewSeriesRow(false);
                        sr.ItemArray = sc.Rows[i].ItemArray;
                        ImportTimeSeriesTable(tbl, sr, DatabaseSaveOptions.Insert);
                    }
                    else
                    {
                        Logger.WriteLine("Skipped file '" + fn + "'");
                    }
                }
            }
        }
        m_server.SaveTable(sc);
    }
    // TO DO
    // import/export ScenarioTable.
    // import/export PiscesInfo table
    OnDatabaseChanged(new object[] { });
    //this.OnStructureChanged(new TreePathEventArgs(TreePath.Empty));
}
/// <summary>
/// Downloads the IDWR Water District 1 shifts page, converts it to csv,
/// compares against yesterday's csv, inserts changed shifts into postgres,
/// and e-mails a summary of any changes.
/// Usage: USFOShifts shift.csv oldshift.csv
/// </summary>
static void Main(string[] args)
{
    if( args.Length != 2)
    {
        Console.WriteLine("usage: USFOShifts shift.csv oldshift.csv");
        return;
    }
    List<string> recipients = new List<string>();
    string idwrFile = "shifts.html";
    string cleanFile = args[0];
    string oldFile = args[1];
    // stations checked for shift changes
    string[] cbtt = {"AFCI","BFCI","BMCI","CBCI","CRCI","ELCI","ENTI","GWCI","IDCI","LABI","LPPI",
                     "MIII","MLCI","MPCI","NMCI","OSCI","PLCI","RECI","RSDI","SMCI","SNDI","TCNI",
                     "TRCI","WACI"};
    //would store the old csv file in the attic and check it against yesterdays shifts
    if (File.Exists(cleanFile))
    {
        var str = File.ReadAllText(cleanFile);
        File.WriteAllText(oldFile, str);
        File.Delete(cleanFile);
    }
    // This is for testing we would get a new html each time we check for a new shift
    //if ( !File.Exists(idwrFile))
    Web.GetFile("http://www.waterdistrict1.com/SHIFTS.htm", idwrFile);
    string html = File.ReadAllText(idwrFile);
    Console.WriteLine("input html is " + html.Length + " chars");
    // html -> cleaned html -> csv -> shift-format csv
    html = Web.CleanHtml(html);
    File.WriteAllText("stage1.txt",html); // intermediate output kept for debugging
    html = ConvertHtmlTableToCsv(html);
    html = ConvertCSVToShiftFormat(html, cbtt);
    File.WriteAllText(cleanFile, html);
    Console.WriteLine("cleaned html is " + html.Length + " chars");
    Console.WriteLine(cleanFile);
    //Compare files and add shift into pisces
    var csvNew = new CsvFile(cleanFile, CsvFile.FieldTypes.AutoDetect);
    CsvFile csvOld;
    if (!File.Exists(oldFile))
    {
        // no previous file: seed it with just the header line so the compare works
        var tmp = new List<string>();
        var x = File.ReadAllLines(cleanFile);
        tmp.Add(x[0]);
        File.WriteAllLines(oldFile, tmp.ToArray());
    }
    csvOld = new CsvFile(oldFile, CsvFile.FieldTypes.AutoDetect);
    string emailMsg = "Updates have been made to the following shifts: ";
    for (int i = 0; i < cbtt.Length; i++)
    {
        // rows for this station sorted by date_measured; the last row is the most recent
        var tblNew = DataTableUtility.Select(csvNew, "cbtt='" + cbtt[i] + "'", "date_measured");
        var tblOld = DataTableUtility.Select(csvOld, "cbtt='" + cbtt[i] + "'", "date_measured");
        if(tblNew.Rows.Count > 0)
        {
            var shftNew = tblNew.Rows[tblNew.Rows.Count - 1]["shift"].ToString();
            var dateMeasured = tblNew.Rows[tblNew.Rows.Count - 1]["date_measured"].ToString();
            // discharge/stage are optional: null when not parseable
            double? discharge = null;
            var q = 0.0;
            if (double.TryParse(tblNew.Rows[tblNew.Rows.Count - 1]["discharge"].ToString(), out q))
                discharge = q;
            var gh1 = 0.0;
            double? gh = null;
            if (double.TryParse(tblNew.Rows[tblNew.Rows.Count - 1]["stage"].ToString(), out gh1))
                gh = gh1;
            if (tblOld.Rows.Count > 0)
            {
                var shftOld = tblOld.Rows[tblOld.Rows.Count - 1]["shift"].ToString();
                // only insert when the shift value actually changed
                if (shftNew != shftOld && shftNew != "")
                {
                    InsertShiftToPostgres(cbtt[i], "ch", Convert.ToDouble(shftNew), dateMeasured, discharge, gh);
                    emailMsg = emailMsg + cbtt[i] + " applied a shift of " + shftNew + ", ";
                }
            }
            else if (shftNew != "")
            {
                // station absent from the old file: treat its shift as new
                InsertShiftToPostgres(cbtt[i], "ch", Convert.ToDouble(shftNew), dateMeasured, discharge, gh);
                emailMsg = emailMsg + cbtt[i] + " applied a shift of " + shftNew + ", ";
            }
        }
    }
    if (emailMsg.Contains("applied"))
    {
        // check who needs to be included on email
        if (emailMsg.Contains("MIII") || emailMsg.Contains("MLCI") || emailMsg.Contains("TCNI"))
        {
            recipients.Add("*****@*****.**");
        }
        if (emailMsg.Contains("NMCI"))
        {
            recipients.Add("*****@*****.**");
        }
        if (emailMsg.Contains("SMCI"))
        {
            recipients.Add("*****@*****.**");
        }
        Console.WriteLine("found shifts. Sending email ");
        SendEmail("IDWR Shift Update", emailMsg, recipients);
    }
    else
    {
        Console.WriteLine("No shift changes found");
    }
}