/// <summary>
/// Reads irregular (day-file / instantaneous) data for the expanded query over [t1, t2].
/// When <paramref name="interval"/> is non-zero, placeholder rows (with null data values)
/// are inserted wherever consecutive readings are more than <paramref name="interval"/>
/// minutes apart, so the returned table has a row slot for every expected reading time.
/// </summary>
/// <param name="svr">Hydromet server to query.</param>
/// <param name="query">Query string; expanded via HydrometInfoUtility.ExpandQuery.</param>
/// <param name="t1">Begin date/time.</param>
/// <param name="t2">End date/time.</param>
/// <param name="back">Number of days back (passed through to Table).</param>
/// <param name="interval">Expected spacing in minutes between readings; 0 disables gap filling.</param>
/// <returns>DataTable with date/time in column 0; PrimaryKey is cleared before returning.</returns>
public static DataTable DayFilesTable(HydrometHost svr, string query, DateTime t1, DateTime t2, int back = 0, int interval = 0)
{
    query = HydrometInfoUtility.ExpandQuery(query, TimeInterval.Irregular);
    string cgiUrl = ReclamationURL.GetUrlToDataCgi(svr, TimeInterval.Irregular);
    var rval = Table(cgiUrl, query, t1, t2, back);

    if (interval != 0 && rval.Rows.Count > 0)
    { // put nulls in table as needed.
        // Primary key on the date column is required by DataRowCollection for keyed inserts.
        rval.PrimaryKey = new DataColumn[] { rval.Columns[0] };
        // 'next' is the earliest time we expect the following reading at.
        DateTime next = Convert.ToDateTime(rval.Rows[0][0]).AddMinutes(interval); // (fixed: removed stray empty statement ';;')
        for (int i = 1; i < rval.Rows.Count; i++)
        {
            DateTime t = Convert.ToDateTime(rval.Rows[i][0]);
            if (t > next)
            { // gap detected: insert a placeholder row at position i.
              // The original row shifts to i+1; the loop's i++ revisits it, so
              // multiple consecutive missing intervals each get their own row.
                var row = rval.NewRow();
                row[0] = next;
                next = next.AddMinutes(interval);
                rval.Rows.InsertAt(row, i);
            }
            else
            {
                next = t.AddMinutes(interval);
            }
        }
    }
    rval.PrimaryKey = null;
    return rval;
}
/// <summary>
/// Reads monthly (mpoll) data for the expanded query over [t1, t2],
/// logging the CGI URL used. Values are stamped at end of month.
/// </summary>
/// <param name="server">Hydromet server to query.</param>
/// <param name="query">Query string; expanded via HydrometInfoUtility.ExpandQuery.</param>
/// <param name="t1">Begin date.</param>
/// <param name="t2">End date.</param>
/// <returns>DataTable of monthly values.</returns>
public static DataTable MPollTable(HydrometHost server, string query, DateTime t1, DateTime t2)
{
    var expandedQuery = HydrometInfoUtility.ExpandQuery(query, TimeInterval.Monthly);
    var dataCgiUrl = ReclamationURL.GetUrlToDataCgi(server, TimeInterval.Monthly);
    Logger.WriteLine("url:" + dataCgiUrl);
    return Table(dataCgiUrl, expandedQuery, t1, t2, endOfMonth: true);
}
/// <summary>
/// Reads daily (archive) data for the expanded query over [t1, t2].
/// </summary>
/// <param name="server">Hydromet server to query.</param>
/// <param name="query">Query string; expanded via HydrometInfoUtility.ExpandQuery.</param>
/// <param name="t1">Begin date.</param>
/// <param name="t2">End date.</param>
/// <param name="back">Number of days back (passed through to Table).</param>
/// <returns>DataTable of daily values.</returns>
public static DataTable ArchiveTable(HydrometHost server, string query, DateTime t1, DateTime t2, int back = 0)
{
    var expandedQuery = HydrometInfoUtility.ExpandQuery(query, TimeInterval.Daily);
    var dataCgiUrl = ReclamationURL.GetUrlToDataCgi(server, TimeInterval.Daily);
    return Table(dataCgiUrl, expandedQuery, t1, t2, back);
}
/// <summary>
/// Downloads irregular (instantaneous) data from the data CGI for this
/// instance's cbtt/pcode over [t1, t2] via a GET request, parses the
/// response with Read(), and records the URL and record count in Messages.
/// </summary>
/// <param name="t1">Begin date.</param>
/// <param name="t2">End date.</param>
private void ReadFromWeb(DateTime t1, DateTime t2)
{
    // Build the full GET URL; the CGI expects "parameter=CBTT PCODE"
    // plus separate year/month/day fields for begin and end dates.
    string query = ReclamationURL.GetUrlToDataCgi(server, TimeSeries.TimeInterval.Irregular)
        + "?parameter=" + cbtt + " " + pcode
        + "&syer=" + t1.Year
        + "&smnth=" + t1.Month
        + "&sdy=" + t1.Day
        + "&eyer=" + t2.Year
        + "&emnth=" + t2.Month
        + "&edy=" + t2.Day;

    string[] data = Web.GetPage(query, HydrometInfoUtility.WebCaching);
    Read(data);

    Messages.Add(query);
    Messages.Add("Returned " + Count + " records ");
}
/// <summary>
/// Downloads daily data from the data CGI for this instance's cbtt/pcode over
/// [t1, t2] via a POST payload, then parses the "BEGIN DATA".."END DATA"
/// section of the response. Each data line is "date,value"; unparsable values
/// are recorded as missing. The request URL/payload and record count are
/// appended to Messages.
/// </summary>
/// <param name="t1">Begin date.</param>
/// <param name="t2">End date.</param>
private void ReadFromWeb(DateTime t1, DateTime t2)
{
    // Don't request into the future; years >= 6000 look like sentinel
    // "no end date" values and are left alone — TODO confirm.
    if (t2 >= DateTime.Now && t2.Year < 6000)
    {
        t2 = DateTime.Now.Date;
    }

    string payload = "parameter=" + cbtt + " " + pcode
        + "&syer=" + t1.Year.ToString()
        + "&smnth=" + t1.Month.ToString()
        + "&sdy=" + t1.Day.ToString()
        + "&eyer=" + t2.Year.ToString()
        + "&emnth=" + t2.Month.ToString()
        + "&edy=" + t2.Day.ToString()
        + "&format=2";
    string query = ReclamationURL.GetUrlToDataCgi(server, TimeSeries.TimeInterval.Daily);
    string[] data = Web.GetPage(query, payload, HydrometInfoUtility.WebCaching);
    Messages.Add(query + "&" + payload);

    TextFile tf = new TextFile(data);
    int idx1 = tf.IndexOf("BEGIN DATA");
    int idx2 = tf.IndexOf("END DATA");
    if (idx2 == -1) // could be error like on oct 1 (truncated footer); parse to end of response
    {
        idx2 = tf.Length - 1;
    }
    if (idx1 < 0 || idx2 < 0 || idx2 == idx1 + 1)
    {
        // BUG FIX: previously this only logged and fell through; with idx1 == -1
        // the loop below would start at i = 1 and try to parse response header
        // lines as data. Bail out instead.
        Logger.WriteLine("no data found");
        Messages.Add("Returned " + Count + " records ");
        return;
    }

    int errorCount = 0;
    for (int i = idx1 + 2; i < idx2; i++) // idx1 + 1 is the column-header line
    {
        string[] tokens = data[i].Split(',');
        if (tokens.Length != 2)
        {
            continue;
        }
        DateTime t;
        if (DateTime.TryParse(tokens[0], out t))
        {
            double result;
            if (double.TryParse(tokens[1], out result))
            {
                Add(t, result);
            }
            else
            {
                if (errorCount < 50) // cap log spam from badly formatted responses
                {
                    Logger.WriteLine("Error parsing " + data[i]);
                }
                errorCount++;
                AddMissing(t);
            }
        }
    }
    Messages.Add("Returned " + Count + " records ");
}