/// <summary>
/// Given a MODSIM output column name (in the Access database),
/// returns which MODSIM table to read.
/// </summary>
/// <param name="columnName">output column name to look up</param>
/// <returns>name of the MODSIM output table containing the column</returns>
/// <exception cref="Exception">thrown when the column is not listed in OutputTablesInfo</exception>
private string ModsimTableName(string columnName)
{
    // Two hard-coded special cases that are not listed in OutputTablesInfo.
    if (columnName == "Hydro_State_Res")
    {
        return "RESOutput";
    }
    if (columnName == "Hydro_State_Dem")
    {
        return "DemOutput";
    }

    // Lazily cache the lookup table; it is read from the database at most once.
    if (m_outputTablesInfo == null)
    {
        m_outputTablesInfo = AccessDB.ReadTable(m_databaseName, "OutputTablesInfo");
    }

    string sql = "OutputName = '" + columnName + "'";
    DataTable tbl1 = DataTableUtility.Select(m_outputTablesInfo, sql, "");
    if (tbl1.Rows.Count == 0)
    {
        throw new Exception("Error: A column named '" + columnName
            + "' could not be found in the modsim output file: " + this.m_databaseName);
    }

    // "Object" column holds the owning table name.
    string tableName = tbl1.Rows[0]["Object"].ToString();
    Logger.WriteLine("tableName = " + tableName);
    return tableName;
}
/// <summary>
/// Extracts DateTime[] used for label positions.
/// </summary>
/// <param name="curveName">rule-curve name to look up</param>
/// <returns>list of dates using year 2000</returns>
public static DateTime[] GetVariableForecastLabelDates(string curveName)
{
    var dates = new List<DateTime>();
    var csv = new CsvFile(LookupFile("VariableForecastLevels"));
    var tbl = DataTableUtility.Select(csv, " RuleCurve = '" + curveName + "'", "");

    // Column 2 holds all label dates as one comma-separated string,
    // for example: 6/15,5/15,5/15,4/15,4/15, 3/15, 2/15, 1/15
    if (tbl.Rows.Count > 0)
    {
        var tokens = tbl.Rows[0][2].ToString().Split(',');
        foreach (var token in tokens)
        {
            var parts = token.Split('/');
            if (parts.Length != 2)
            {
                break; // stop at the first malformed entry
            }
            int month, day;
            if (int.TryParse(parts[0], out month) && int.TryParse(parts[1], out day))
            {
                dates.Add(new DateTime(2000, month, day));
            }
        }
    }
    return dates.ToArray();
}
/// <summary>
/// Reads the ControlPoints csv and returns the names of all enabled control points.
/// </summary>
/// <returns>Name column of every row where Enabled = true</returns>
public static string[] GetNames()
{
    DataTable controlPoints = new CsvFile(LookupFile("ControlPoints"));
    var enabled = DataTableUtility.Select(controlPoints, " Enabled = true", "");
    return DataTableUtility.Strings(enabled, "", "Name");
}
/// <summary>
/// Reads the ControlPoints worksheet and returns the names of all enabled control points.
/// </summary>
/// <returns>Name column of every row where Enabled = true</returns>
public static string[] GetNames()
{
    var controlPoints = ExcelUtility.Read(xlsFileName(), "ControlPoints");
    var enabled = DataTableUtility.Select(controlPoints, " Enabled = true", "");
    return DataTableUtility.Strings(enabled, "", "Name");
}
/// <summary>
/// Looks up control-point rows from the ControlPoints worksheet where the
/// given column matches the given value.
/// </summary>
/// <param name="text">value to match</param>
/// <param name="lookupColumnName">column to match against</param>
/// <returns>matching rows (possibly empty)</returns>
internal static System.Data.DataTable ControlPointTableFromName(string text, string lookupColumnName)
{
    var sheet = ExcelUtility.Read(xlsFileName(), "ControlPoints");
    string filter = lookupColumnName + " = '" + text + "'";
    return DataTableUtility.Select(sheet, filter, "");
}
/// <summary>
/// Looks up control-point rows from the ControlPoints csv where the
/// given column matches the given value. All fields are read as text.
/// </summary>
/// <param name="text">value to match</param>
/// <param name="lookupColumnName">column to match against</param>
/// <returns>matching rows (possibly empty)</returns>
internal static System.Data.DataTable ControlPointTableFromName(string text, string lookupColumnName)
{
    DataTable controlPoints = new CsvFile(LookupFile("ControlPoints"), CsvFile.FieldTypes.AllText);
    string filter = lookupColumnName + " = '" + text + "'";
    return DataTableUtility.Select(controlPoints, filter, "");
}
/// <summary>
/// Builds one formatted inventory line per pcode for the given cbtt.
/// Each line is: cbtt (padded to 12), pcode (padded to 9), then the
/// 'years' values of every matching row.
/// </summary>
/// <param name="cbtt">station identifier to look up</param>
/// <returns>one formatted line per distinct pcode</returns>
public static string[] LookupMonthlyInventory(string cbtt)
{
    List<string> rval = new List<string>();
    // All rows for this cbtt, ordered by cbtt,pcode.
    var tbl = DataTableUtility.Select(MonthlyInventory, "Cbtt='" + cbtt + "'", "cbtt,pcode");
    var pcodeList = DataTableUtility.StringList(
        DataTableUtility.SelectDistinct(tbl, "pcode"), "", "pcode");
    foreach (var pc in pcodeList)
    {
        string line = cbtt.PadRight(12) + " " + pc.PadRight(9);
        // tbl is already restricted to this cbtt above,
        // so only the pcode needs filtering here.
        var rows = tbl.Select("pcode='" + pc + "'");
        for (int i = 0; i < rows.Length; i++)
        {
            line += rows[i]["years"].ToString() + " ";
        }
        rval.Add(line);
    }
    return rval.ToArray();
}
/// <summary>
/// Reads the comma-separated forecast level values for the named rule curve
/// from the VariableForecastLevels worksheet.
/// </summary>
/// <param name="curveName">rule-curve name to look up</param>
/// <returns>parsed numeric levels; empty when the curve is not found</returns>
public static double[] GetVariableForecastLevels(string curveName)
{
    var levels = new List<double>();
    var sheet = ExcelUtility.Read(xlsFileName(), "VariableForecastLevels");
    var matches = DataTableUtility.Select(sheet, " RuleCurve = '" + curveName + "'", "");
    if (matches.Rows.Count > 0)
    {
        // Column 1 stores all levels as a single comma-separated string;
        // unparseable tokens are silently skipped.
        foreach (var token in matches.Rows[0][1].ToString().Split(','))
        {
            double value = 0;
            if (double.TryParse(token, out value))
            {
                levels.Add(value);
            }
        }
    }
    return levels.ToArray();
}
/// <summary>
/// USFOShifts entry point. Reads the IDWR water-district shifts HTML page,
/// converts it to csv, compares each site's latest shift against the
/// previous run's csv, inserts changed shifts into the database, and
/// emails a summary when anything changed.
/// Usage: USFOShifts shift.csv oldshift.csv
/// </summary>
static void Main(string[] args)
{
    if (args.Length != 2)
    {
        Console.WriteLine("usage: USFOShifts shift.csv oldshift.csv");
        return;
    }
    List<string> recipients = new List<string>();
    string idwrFile = "shifts.html";  // local copy of the IDWR shifts page
    string cleanFile = args[0];       // cleaned csv produced by this run
    string oldFile = args[1];         // previous run's csv (for comparison)
    string[] cbtt = File.ReadAllLines("site_list.txt"); // sites to check, one per line

    //would store the old csv file in the attic and check it against yesterdays shifts
    // Rotate: last run's "clean" csv becomes this run's "old" csv.
    if (File.Exists(cleanFile))
    {
        var str = File.ReadAllText(cleanFile);
        File.WriteAllText(oldFile, str);
        File.Delete(cleanFile);
    }
    // This is for testing we would get a new html each time we check for a new shift
    //if ( !File.Exists(idwrFile))
    // NOTE(review): minified source is ambiguous here; the existence guard
    // appears commented out so the page is fetched every run - confirm.
    Web.GetFile("http://www.waterdistrict1.com/SHIFTS.htm", idwrFile);
    string html = File.ReadAllText(idwrFile);
    Console.WriteLine("input html is " + html.Length + " chars");
    html = Web.CleanHtml(html);
    File.WriteAllText("stage1.txt", html); // intermediate debug output
    html = ConvertHtmlTableToCsv(html);
    html = ConvertCSVToShiftFormat(html, cbtt);
    File.WriteAllText(cleanFile, html);
    Console.WriteLine("cleaned html is " + html.Length + " chars");
    Console.WriteLine(cleanFile);

    //Compare files and add shift into pisces
    var csvNew = new CsvFile(cleanFile, CsvFile.FieldTypes.AutoDetect);
    CsvFile csvOld;
    if (!File.Exists(oldFile))
    {
        // No previous csv: create one containing only the header row so
        // every current shift counts as new.
        var tmp = new List<string>();
        var x = File.ReadAllLines(cleanFile);
        tmp.Add(x[0]);
        File.WriteAllLines(oldFile, tmp.ToArray());
    }
    csvOld = new CsvFile(oldFile, CsvFile.FieldTypes.AutoDetect);
    string emailMsg = "Updates have been made to the following shifts: ";
    for (int i = 0; i < cbtt.Length; i++)
    {
        Console.WriteLine("cbtt='" + cbtt[i] + "'");
        var tblNew = DataTableUtility.Select(csvNew, "cbtt='" + cbtt[i] + "'", "date_measured");
        var tblOld = DataTableUtility.Select(csvOld, "cbtt='" + cbtt[i] + "'", "date_measured");
        if (tblNew.Rows.Count > 0)
        {
            // Sorted by date_measured, so the last row is the latest measurement.
            var shftNew = tblNew.Rows[tblNew.Rows.Count - 1]["shift"].ToString();
            var dateMeasured = tblNew.Rows[tblNew.Rows.Count - 1]["date_measured"].ToString();
            double? discharge = null;
            var q = 0.0;
            if (double.TryParse(tblNew.Rows[tblNew.Rows.Count - 1]["discharge"].ToString(), out q))
            {
                discharge = q;
            }
            var gh1 = 0.0;
            double? gh = null;
            if (double.TryParse(tblNew.Rows[tblNew.Rows.Count - 1]["stage"].ToString(), out gh1))
            {
                gh = gh1;
            }
            if (tblOld.Rows.Count > 0)
            {
                var shftOld = tblOld.Rows[tblOld.Rows.Count - 1]["shift"].ToString();
                // Insert only when the shift changed and is non-empty.
                if (shftNew != shftOld && shftNew != "")
                {
                    InsertShiftToPostgres(cbtt[i], "ch", Convert.ToDouble(shftNew), dateMeasured, discharge, gh);
                    emailMsg = emailMsg + cbtt[i] + " applied a shift of " + shftNew + ", ";
                }
            }
            else if (shftNew != "")
            {
                // No history for this site: any non-empty shift is treated as new.
                InsertShiftToPostgres(cbtt[i], "ch", Convert.ToDouble(shftNew), dateMeasured, discharge, gh);
                emailMsg = emailMsg + cbtt[i] + " applied a shift of " + shftNew + ", ";
            }
        }
    }
    if (emailMsg.Contains("applied"))
    {
        // check who needs to be included on email
        if (emailMsg.Contains("MIII") || emailMsg.Contains("MLCI") || emailMsg.Contains("TCNI"))
        {
            recipients.Add("*****@*****.**");
        }
        if (emailMsg.Contains("NMCI"))
        {
            recipients.Add("*****@*****.**");
        }
        if (emailMsg.Contains("SMCI"))
        {
            recipients.Add("*****@*****.**");
        }
        Console.WriteLine("found shifts. Sending email ");
        SendEmail("IDWR Shift Update", emailMsg, recipients);
    }
    else
    {
        Console.WriteLine("No shift changes found");
    }
}
/// <summary>
/// Entry point: downloads NiceNet weather-station data pages from WRCC/DRI
/// for the sites listed in a csv config file (columns include dri_id and
/// cbtt), parses the multi-line column headers, converts each configured
/// parameter into hydromet pcodes with limit flags, and writes the most
/// recent hours of data to a hydromet instant-format output file. Dew
/// point (TP) is computed from air temperature (OBM) and humidity (TU)
/// rather than read from the page.
/// Required: --config=filename.csv --output=filename.txt
/// Optional: --cbtt=SITE (single site), --back=N hours (default 4).
/// </summary>
static void Main(string[] argList)
{
    //Logger.EnableLogger();

    //These could be populated from a config file
    // Parallel arrays: hydromet pcode, NiceNet column name, upper/lower
    // limits, and output units, all indexed together.
    string[] HydrometParameterCodes = new string[] { "SI", "OBM", "TU", "WS", "WD", "WG", "BP", "PC", "TP" };
    string[] NiceNetParameterNames = new string[] { "SolarRadKw/m2", "AveAirTempF", "AveRelHum%", "AveWindSpeedmph", "VectorWindDirdeg", "MaxWindGustmph", "BaroPressmb", "TotalPrecipin", "DewPointTemp" };
    double[] NiceNetParameterUpperLimits = new double[] { 120, 120, 101, 100, 360, 100, 32, 51, 100 };
    double[] NiceNetParameterLowerLimits = new double[] { 0, -50, 0, 0, 0, 0, 27, 0, -35 };
    //SI comes in as Kw/m2 and is converted to ly/hr
    string[] NiceNetParameterUnits = new string[] { "ly/hr", "deg F", "%", "mph", "deg", "mph", "mb", "in", "deg F" };

    // Build a pcode -> Parameter lookup from the parallel arrays above.
    var ParameterSet = new Dictionary<string, Parameter>();
    for (int i = 0; i < HydrometParameterCodes.Length; i++)
    {
        var tempParameter = new Parameter();
        tempParameter.UpperLimitValue = NiceNetParameterUpperLimits[i];
        tempParameter.LowerLimitValue = NiceNetParameterLowerLimits[i];
        tempParameter.UpperLimitFlag = "+";
        tempParameter.LowerLimitFlag = "-";
        tempParameter.Name = NiceNetParameterNames[i];
        tempParameter.Units = NiceNetParameterUnits[i];
        tempParameter.Code = HydrometParameterCodes[i];
        ParameterSet.Add(HydrometParameterCodes[i], tempParameter);
    }

    // Header tokens are separated by spaces and/or periods.
    var titleSeparator = new string[] { " ", "." };

    if (argList.Length == 0)
    {
        Usage();
        return;
    }
    Arguments args = new Arguments(argList);
    if (!args.Contains("config"))
    {
        Console.WriteLine("Error: --config=filename.csv is required");
        Usage();
        return;
    }
    if (!args.Contains("output"))
    {
        Console.WriteLine("Error: --output=filename.txt is required");
        Usage();
        return;
    }
    int hoursBack = 4; // only the last hoursBack hours are written to the output
    if (args.Contains("back"))
    {
        hoursBack = int.Parse(args["back"]);
    }
    //FileUtility.CleanTempPath();

    //Read config file and filter on cbtt if a particular one is specified.
    DataTable csv = new CsvFile(args["config"], CsvFile.FieldTypes.AllText);
    if (args.Contains("cbtt"))
    {
        Console.WriteLine("Filtering for cbtt = '" + args["cbtt"] + "'");
        csv = DataTableUtility.Select(csv, "cbtt='" + args["cbtt"] + "'", "");
    }
    var rows = csv.Select(); //Selects all rows of config file, excluding the header row
    Console.WriteLine("Processing data for " + rows.Length + " site(s)");
    // NOTE(review): bumParameters is never used below.
    Dictionary<string, string> bumParameters = new Dictionary<string, string>();
    List<String> bumSites = new List<string>(); // sites whose page failed to read/parse

    // Begin loop to read in data for each site.
    for (int i = 0; i < rows.Length; i++)
    {
        //Get site/cbtt from config file
        var site = rows[i]["dri_id"].ToString();
        var cbtt = rows[i]["cbtt"].ToString();
        var obm = new Series(); // air temperature, kept for dew point calculation
        var tu = new Series();  // humidity, kept for dew point calculation
        Console.WriteLine("Processing site " + site + "/" + cbtt);
        // example http://www.wrcc.dri.edu/cgi-bin/nclvLIST.pl
        string url = "http://www.wrcc.dri.edu/cgi-bin/" + site.ToLower() + "LIST.pl";
        //Catch missing web pages
        try
        {
            string[] dataPage = Web.GetPage(url);

            //Work through unwanted header rows of NiceNet data file.
            int j = 0;
            while (dataPage[j].Contains("<"))
            {
                j++;
            }

            //Get column header names
            //Revise this so that column order can change with out breaking the code. Might need to assume fixed width.
            var columnNames = dataPage[j].Split(titleSeparator, StringSplitOptions.RemoveEmptyEntries).ToList();
            j++;
            // Header names span several physical lines; concatenate the pieces
            // column-by-column until the "--" underline row is reached.
            while (!dataPage[j].Contains("--"))
            {
                var tempColumnNames = dataPage[j].Split(titleSeparator, StringSplitOptions.RemoveEmptyEntries).ToList();
                if (tempColumnNames.Contains("Year"))
                {
                    // The Year row is missing one token; pad so columns line up.
                    tempColumnNames.Insert(2, "");
                }
                for (int n = 0; n < tempColumnNames.Count; n++)
                {
                    columnNames[n] = columnNames[n] + tempColumnNames[n];
                }
                j++;
            }

            //Get year from header
            var lastHeaderRow = j - 1;
            var startDataRow = j + 1;
            var dataHeaderRow = dataPage[lastHeaderRow].Split(titleSeparator, StringSplitOptions.RemoveEmptyEntries).ToList();
            var year = Int32.Parse(dataHeaderRow[0]);

            //Process data for each parameter of current site and save to hydromet file
            //First two parameters are day and time
            foreach (var p in ParameterSet)
            {
                //Assign PCode and look for missing/unexpected parameters in the data file.
                //Skip dew point (TP). It is not in the input file. It is calculated later.
                if (p.Key == "TP")
                {
                    continue;
                }
                try
                {
                    var pCode = p.Key;
                    var dataIndex = columnNames.IndexOf(p.Value.Name);
                    var unitType = p.Value.Units;
                    var s = ProcessDataSeries(cbtt, year, unitType, pCode, dataIndex, dataPage, startDataRow);
                    s = ApplyFlags(s, p.Value);
                    if (pCode == "OBM")
                    {
                        obm = s;
                    }
                    if (pCode == "TU")
                    {
                        tu = s;
                    }
                    //Only load the last hoursBack hours of data
                    s.Trim(DateTime.Now.AddHours(-hoursBack), DateTime.Now.AddHours(2));
                    HydrometInstantSeries.WriteToHydrometFile(s, cbtt, pCode, "nicenet", args["output"], true);
                }
                catch
                {
                    // NOTE(review): p is a KeyValuePair, so this prints
                    // "[SI, ...]"; p.Key (or p.Value.Name) was likely intended.
                    Console.WriteLine("Parameter \"" + p + "\" is not recognized.");
                }
            }

            // Dew point is derived from the air temperature and humidity series.
            var tp = DewPointCalculation(obm, tu, cbtt);
            if (tp.Count > 0)
            {
                tp = ApplyFlags(tp, ParameterSet["TP"]);
                HydrometInstantSeries.WriteToHydrometFile(tp, cbtt, "TP", "nicenet", args["output"], true);
            }
            else
            {
                Console.WriteLine("Dew point temperature could not be calculated.");
            }
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
            bumSites.Add(site);
        }
    }
    if (bumSites.Count >= 1)
    {
        Console.WriteLine("The following sites were not found:");
        Console.WriteLine(String.Join("\n", bumSites.ToArray()));
    }
}
/// <summary>
/// Entry point: reads a csv config file (cbtt,inel_id,inel_code,hydromet_pcode)
/// and processes every day in the requested date range. When no date
/// arguments are given, only recent data for today is processed.
/// </summary>
static void Main(string[] argList)
{
    if (argList.Length == 0)
    {
        Usage();
        return;
    }

    DateTime t = DateTime.Now.Date;
    // defaults using only last 4 hours, unless dates are specified.
    bool recentDataOnly = true;
    Arguments args = new Arguments(argList);

    if (args.Contains("t"))
    {
        recentDataOnly = false;
        if (!DateTime.TryParse(args["t"], out t))
        {
            Console.WriteLine("Error: invalid date '" + args["t"] + "'");
            Usage();
            return;
        }
    }

    // Date range defaults to the single day t; --t1/--t2 override it.
    DateTime t1 = t;
    DateTime t2 = t;
    if (args.Contains("t1"))
    {
        recentDataOnly = false;
        if (!DateTime.TryParse(args["t1"], out t1))
        {
            Console.WriteLine("Error: invalid date t1 '" + args["t1"] + "'");
            Usage();
            return;
        }
    }
    if (args.Contains("t2"))
    {
        recentDataOnly = false;
        if (!DateTime.TryParse(args["t2"], out t2))
        {
            Console.WriteLine("Error: invalid date t2 '" + args["t2"] + "'");
            Usage();
            return;
        }
    }

    if (!args.Contains("config"))
    {
        Console.WriteLine("Error: --config=filename.csv is required");
        Usage();
        return;
    }
    if (!args.Contains("output"))
    {
        Console.WriteLine("Error: --output=filename.txt is required");
        Usage();
        return;
    }

    // read config file.
    // cbtt,inel_id,inel_code,hydromet_pcode
    DataTable csv = new CsvFile(args["config"], CsvFile.FieldTypes.AllText);
    if (args.Contains("cbtt")) // filter specific site
    {
        Console.WriteLine("Filtering for cbtt = '" + args["cbtt"] + "'");
        csv = DataTableUtility.Select(csv, "cbtt='" + args["cbtt"] + "'", "");
    }

    // Process each day in [t1, t2], inclusive.
    for (DateTime day = t1; day <= t2; day = day.AddDays(1).Date)
    {
        ProcessDate(day, args, csv, recentDataOnly);
    }
}
// Consider: use hydromet convention for time stamps for PC and SWE?
/// <summary>
/// Reads daily snotel data from NRCS web service and saves to a text file.
/// --output=snowtel.txt : output filename (required)
/// --t1=1-31-2013 : starting date: default is 30 days ago
/// --t2=1-31-2013 : ending date: default is today
/// --filter="cbtt='PVRO'" : filter snotel site list.
/// --debug : enable debugging
/// </summary>
/// <param name="argList">command line arguments (see options above)</param>
static void Main(string[] argList)
{
    //ServicePointManager.ServerCertificateValidationCallback += (o, certificate, chain, errors) => true;
    //ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls12;
    // System.Security.Cryptography.AesCryptoServiceProvider b = new System.Security.Cryptography.AesCryptoServiceProvider();
    string outputFileName = "";
    // for output file: false only until the first successful write,
    // so the first write truncates and all later writes append.
    bool appendToFile = false;
    Arguments args = new Arguments(argList);
    if (args.Contains("output"))
    {
        outputFileName = args["output"];
        Console.WriteLine("Saving to " + outputFileName);
    }
    else
    {
        Console.WriteLine("Usage: GetSnotel --output=filename [--debug] [--t1=1-31-2013] [--t2=1-31-2013] [--filter=\"cbtt='jkpi'\"]");
        return;
    }
    if (args.Contains("debug"))
    {
        Logger.EnableLogger();
        Reclamation.TimeSeries.Parser.SeriesExpressionParser.Debug = true;
    }
    string filter = "";
    if (args.Contains("filter"))
    {
        filter = args["filter"];
    }
    // Default window: the last 30 days.
    DateTime t1 = DateTime.Now.AddDays(-30);
    DateTime t2 = DateTime.Now;
    if (args.Contains("t1"))
    {
        t1 = DateTime.Parse(args["t1"]);
    }
    if (args.Contains("t2"))
    {
        t2 = DateTime.Parse(args["t2"]);
    }
    var tbl = NrcsSnotelSeries.SnotelSites;
    if (filter != "")
    {
        tbl = DataTableUtility.Select(tbl, filter, "");
        Logger.WriteLine("found " + tbl.Rows.Count + " with filter=" + filter);
    }
    for (int i = 0; i < tbl.Rows.Count; i++)
    {
        var cbtt = tbl.Rows[i]["cbtt"].ToString();
        if (cbtt.Trim() == "")
        {
            continue; // site has no cbtt id; skip it
        }
        Logger.WriteLine(cbtt + " ");
        // progress indicator (% of sites processed)
        double pct = (double)i / (double)tbl.Rows.Count * 100;
        Console.WriteLine(cbtt + " " + pct.ToString("F1") + "%");
        var triplet = SnotelSeries.GetTriplet(cbtt);
        // One series per snotel parameter, paired index-for-index with
        // the hydromet pcodes array below.
        Series pc = new SnotelSeries(triplet, SnotelParameterCodes.PREC);
        Series se = new SnotelSeries(triplet, SnotelParameterCodes.WTEQ);
        Series sd = new SnotelSeries(triplet, SnotelParameterCodes.SNWD);
        Series mm = new SnotelSeries(triplet, SnotelParameterCodes.TAVG);
        Series mx = new SnotelSeries(triplet, SnotelParameterCodes.TMAX);
        Series mn = new SnotelSeries(triplet, SnotelParameterCodes.TMIN);
        Series[] items = new Series[] { pc, se, sd, mm, mx, mn };
        String[] pcodes = new string[] { "pc", "se", "sd", "mm", "mx", "mn" };
        for (int p = 0; p < items.Length; p++)
        {
            var s = items[p];
            s.Read(t1, t2);
            if (s.Count == 0)
            {
                // add to message
                Console.WriteLine("No data found for " + cbtt + "/" + pcodes[p]);
                // Console.WriteLine(s.Messages.ToString());
            }
            else
            {
                //TimeSeriesRouting.RouteDaily(s, cbtt, pcodes[p], RouteOptions.Incoming);
                HydrometDailySeries.WriteToArcImportFile(s, cbtt, pcodes[p], outputFileName, appendToFile);
                if (!appendToFile)
                {
                    appendToFile = true; // append after the first time.
                }
            }
            System.Threading.Thread.Sleep(100); // small pause seems to be preventing timeouts?
        }
        // compute snow density (water equivalent divided by depth)
        if (se.Count > 0 && sd.Count > 0)
        {
            Series ss = se / sd;
            for (int j = 0; j < ss.Count; j++)
            {
                var pt = ss[j];
                // Force density to 0 with a clean flag wherever depth or
                // water equivalent is exactly zero (avoids divide-by-zero
                // artifacts in ss).
                if ((sd.IndexOf(pt.DateTime) >= 0 && sd[pt.DateTime].Value == 0)
                    || (se.IndexOf(pt.DateTime) >= 0 && se[pt.DateTime].Value == 0))
                {
                    pt.Value = 0;
                    pt.Flag = "";
                    ss[j] = pt;
                }
            }
            HydrometDailySeries.WriteToArcImportFile(ss, cbtt, "ss", outputFileName, appendToFile);
            if (!appendToFile)
            {
                appendToFile = true; // append after the first time.
            }
        }
    }
    Console.WriteLine((GC.GetTotalMemory(false) / 1024.0 / 1024.0).ToString("F3") + " Mb memory used");
}
/// <summary>
/// 30_year_avg entry point: builds a pisces database of 30-year (1981-2010)
/// daily averages for each station/pcode listed in a csv config file,
/// reading the raw daily data from a hydromet server.
/// Usage: 30_year_avg config.csv group|all output.db boise|yakima [hmet.txt]
/// </summary>
static void Main(string[] args)
{
    if (args.Length < 4 || args.Length > 5)
    {
        Console.WriteLine("Usage:");
        Console.WriteLine("30_year_avg creates 30 year average pisces database");
        Console.WriteLine("Usage: 30_year_avg config.csv group|all output.db boise|yakima [hmet.txt]");
        Console.WriteLine("config.csv example below:\n");
        Console.WriteLine("group,station,daily_pcode,title,ylabel");
        Console.WriteLine("Boise Payette,plei,qd,\"Payette River near Letha, ID\",Discharge - cfs");
        Console.WriteLine(" Boise Payette,emm,qd,\"Payette River near Emmett, ID\",Discharge - cfs");
        Console.WriteLine("\ngroup is used to filter specific parts of config file. enter all to disable filtering");
        Console.WriteLine("output.db is the name of a pisces database that will be created.");
        Console.WriteLine("boise|yakima specifiy which hydromet server to read data from");
        Console.WriteLine("hmet.txt is an optional output with hydromet daily format");
        return;
    }

    // Always start with a fresh database file.
    string fn = args[2];
    if (File.Exists(fn))
    {
        Console.WriteLine("Deleting existing database ");
        File.Delete(fn);
    }
    var svr = new SQLiteServer(fn);
    var db = new TimeSeriesDatabase(svr);

    // Server selection: anything other than "yakima" uses PN (boise).
    HydrometHost host = HydrometHost.PN;
    if (args[3] == "yakima")
    {
        host = HydrometHost.Yakima;
    }

    DataTable config = new CsvFile(args[0]);
    if (args[1] != "all")
    {
        // filter out specific group
        config = DataTableUtility.Select(config, "group = '" + args[1] + "'", "");
    }

    // Optional 5th argument: hydromet-format text output; start it fresh too.
    if (args.Length == 5 && File.Exists(args[4]))
    {
        Console.WriteLine("deleting " + args[4]);
        File.Delete(args[4]);
    }

    // A Guid guarantees the first row's group name differs from
    // prevFolderName, so a folder is always created on the first iteration.
    var prevFolderName = Guid.NewGuid().ToString();
    PiscesFolder folder = null;
    for (int x = 0; x < config.Rows.Count; x++)
    {
        var row = config.Rows[x];
        string folderName = row["group"].ToString();
        if (prevFolderName != folderName)
        {
            // Config rows are assumed sorted by group; each new group
            // starts a new database folder.
            prevFolderName = folderName;
            folder = db.AddFolder(folderName);
        }
        string CBTT = row["station"].ToString();
        string Pcode = row["daily_pcode"].ToString();
        Console.WriteLine(CBTT + " " + Pcode);
        Series s = new HydrometDailySeries(CBTT, Pcode, host);
        // Data ranges collected: water years 1981-2010.
        var t1 = new DateTime(1980, 10, 1);
        var t2 = new DateTime(2010, 9, 30);
        s.Read(t1, t2);
        // NOTE(review): these three return values are never used here;
        // presumably LabelAndSave30Year stores its result in db itself.
        var s7100 = LabelAndSave30Year(db, 7100, CBTT, Pcode, folder, host);
        var s8110 = LabelAndSave30Year(db, 8110, CBTT, Pcode, folder, host);
        var s6190 = LabelAndSave30Year(db, 6190, CBTT, Pcode, folder, host);
        //Creates thirty-year average from raw data and adds to database
        var avg = Reclamation.TimeSeries.Math.MultiYearDailyAverage(s, 10);
        avg.Name = "avg 1981-2010 " + CBTT + " " + Pcode;
        // NOTE(review): the empty "" gives a name like "avg_1981_2010pleiqd";
        // an "_" separator may have been intended - confirm before changing.
        avg.Table.TableName = "avg_1981_2010" + CBTT + "" + Pcode;
        db.AddSeries(avg, folder);
        avg = Reclamation.TimeSeries.Math.ShiftToYear(avg, 8109);
        if (args.Length == 5)
        {
            HydrometDailySeries.WriteToArcImportFile(avg, CBTT, Pcode, args[4], true);
        }
    }
}