// Demonstrates adding series from three different providers (USGS web service,
// Hydromet, and a local CSV file) into per-month folders of a new
// TimeSeriesDatabase, then reading one back by name.
// NOTE(review): writes to a hard-coded c:\temp path and uses a ".pdb" extension
// for the SQLite database file -- confirm both are intended for this environment.
public void AddSeriesDirectly()
{
    Logger.EnableLogger();
    var filename = FileUtility.GetTempFileNameInDirectory(@"c:\temp\", ".pdb");
    if (File.Exists(filename))
    {
        File.Delete(filename); // start from a fresh database file
    }
    Console.WriteLine(filename);
    var server = new SQLiteServer(filename);
    var db = new TimeSeriesDatabase(server);
    // create a folder for each month (named January..December)
    for (int i = 1; i <= 12; i++)
    {
        var t = new DateTime(2015, i, 1);
        db.AddFolder("Months", t.ToString("MMMM"));
    }
    // Add USGS series (Boise River) to the January Folder
    Series s = new UsgsDailyValueSeries("13206000", UsgsDailyParameter.DailyMeanDischarge);
    s.SiteID = "13206000";
    var folder = db.GetOrCreateFolder("Months", "January");
    s.Read(DateTime.Parse("2015-01-01"), DateTime.Parse("2015-01-10"));
    db.AddSeries(s, folder);
    // Add Hydromet series to the February Folder
    s = new HydrometDailySeries("bhr", "af", HydrometHost.GreatPlains);
    s.Name = "gphyd_bhr_af";
    s.SiteID = "gphyd_bhr";
    var feb = db.GetOrCreateFolder("Months", "February");
    db.AddSeries(s, feb);
    // Add Csv file data to March Folder.
    s = new TextSeries(@"c:\temp\test_river.csv");
    s.Read();// read data. Use Read(t1,t2) to limit by dates
    s.SiteID = "test";
    s.Units = "cfs";
    s.Table.TableName = "test_river"; // table name needs to be unique
    db.AddSeries(s, db.GetOrCreateFolder("Months", "March"));
    // read the Hydromet series back out of the database by name
    s = db.GetSeriesFromName("gphyd_bhr_af");
    s.Read();
    Console.WriteLine(s.Count);
    db.Inventory(); // print database contents
}
/// <summary>
/// Reads a Hydromet text file, verifies a known value, round-trips the series
/// through WriteCsv, and verifies the same value survives the round trip.
/// </summary>
public void TextFileHydromet()
{
    string dir = TestData.DataPath + "\\";
    string inputFile = dir + "LuckyPeakWaterLevel.txt";
    Console.WriteLine("reading " + inputFile);
    TextSeries original = new TextSeries(inputFile);
    original.Read();
    Assert.IsTrue(original.Count > 0);
    DateTime checkDate = Convert.ToDateTime("10/13/2004");
    double value = original.Lookup(checkDate);
    Assert.IsTrue(System.Math.Abs(value - 2907.2) < 0.01, "expected 2907.2. got " + value);
    // write to a temporary csv file and read it back
    string tempFile = Path.GetTempFileName();
    original.WriteCsv(tempFile);
    TextSeries roundTrip = new TextSeries(tempFile);
    roundTrip.Read();
    Assert.IsTrue(original.Count == roundTrip.Count);
    value = roundTrip.Lookup(checkDate);
    Assert.IsTrue(System.Math.Abs(value - 2907.2) < 0.01, "expected 2907.2. got " + value);
    File.Delete(tempFile);
    Console.WriteLine("finished TextFileHydromet");
}
/// <summary>
/// Verifies Series.Delete(Selection) removes exactly the points inside the
/// date/value selection rectangle, and no others.
/// </summary>
public void DeleteSelectionInEL68D()
{
    // the two fixture points expected to fall inside the selection:
    //2005-02-02 06:53:52.331,98.4335632324219
    //2005-02-04 09:24:53.233,98.4335632324219
    string fn = Path.Combine(TestData.DataPath, "el68d_export.csv");
    TextSeries s = new TextSeries(fn);
    s.Read();
    Assert.AreEqual(s.Count, 1145, "Test Data has been modified....expected 1145 records. found " + s.Count);
    DateTime delete1 = Convert.ToDateTime("2005-02-02 06:53:52.331");
    DateTime delete2 = Convert.ToDateTime("2005-02-04 09:24:53.233");
    // sanity-check the fixture still contains both points at the expected indexes
    Assert.AreEqual(186, s.IndexOf(delete1), "test data has been modified could not find " + delete1.ToShortDateString() + " " + delete1.ToLongTimeString());
    Assert.AreEqual(490, s.IndexOf(delete2), "test data has been modified could not find " + delete2.ToShortDateString() + " " + delete2.ToLongTimeString());
    DateTime t1 = new DateTime(2005, 2, 2);
    DateTime t2 = new DateTime(2005, 2, 4, 10, 30, 0, 0);
    // date span covers both points; value range 30..200 includes ~98.4
    Selection sel = new Selection(t1, t2, 30, 200);
    s.Delete(sel);// should delete two records in selection.
    Assert.AreEqual(s.Count, 1143, "expected 1143 records. found " + s.Count);
    Assert.AreEqual(-1, s.IndexOf(delete1), "delete1 point was not deleted ");
    Assert.AreEqual(-1, s.IndexOf(delete2), "delete2 point was not deleted ");
}
/// <summary>
/// Lets the user pick a data file (Excel or CSV), loads it into a DataTable,
/// builds a series from it, and shows it in the grid and chart.
/// </summary>
private void buttonOpen_Click(object sender, EventArgs e)
{
    this.buttonSave.Enabled = false;
    if (openFileDialog1.ShowDialog() != DialogResult.OK)
        return; // user cancelled
    var fn = openFileDialog1.FileName;
    this.textBoxFilename.Text = fn;
    string ext = Path.GetExtension(fn);
    DataTable tbl = null;
    // case-insensitive: previously ".XLS"/".CSV" (uppercase) were silently missed
    if (ext.IndexOf("xls", StringComparison.OrdinalIgnoreCase) >= 0)
    {
        // xls or xlsx (Excel): read the first sheet into a DateTime/value table
        NpoiExcel xls = new NpoiExcel(fn);
        DataTable template = new DataTable("watertemp");
        template.Columns.Add("DateTime", typeof(DateTime));
        template.Columns.Add("val", typeof(double));
        tbl = xls.ReadDataTable(0, template, true);
    }
    else if (ext.IndexOf("csv", StringComparison.OrdinalIgnoreCase) >= 0)
    { // csv
        var s = new TextSeries(fn);
        s.Read();
        tbl = s.Table;
    }
    else
    {
        // previously an unrecognized extension fell through with tbl == null
        // and crashed inside CreateSeries; report it to the user instead.
        MessageBox.Show("Unsupported file type: " + ext);
        return;
    }
    m_series = CreateSeries(tbl);
    this.dataGridView1.DataSource = m_series.Table;
    this.timeSeriesTeeChartGraph1.Series.Clear();
    this.timeSeriesTeeChartGraph1.Series.Add(m_series);
    this.timeSeriesTeeChartGraph1.Draw(true);
    this.comboBoxPcode.SelectedIndex = -1;
}
/// <summary>
/// Creates a fresh SQLite time-series database with one alarm definition
/// ("above 5520" on pal/fb) plus a recipient, runs the alarm check against a
/// CSV fixture, and expects exactly one alarm in the queue.
/// </summary>
public void DatabaseAlarmTest()
{
    // create database with alarm def
    var fn = FileUtility.GetTempFileName(".pdb");
    SQLiteServer svr = new SQLiteServer(fn);
    TimeSeriesDatabase db = new TimeSeriesDatabase(svr);
    var ds = db.Alarms;
    ds.AddNewAlarmGroup("palisades");
    ds.alarm_definition.Addalarm_definitionRow(true, "palisades", "pal", "fb", "above 5520", "", 10);
    ds.SaveTable(ds.alarm_definition);
    ds.alarm_recipient.Addalarm_recipientRow("palisades", 4, "5272", "office", "*****@*****.**");
    ds.SaveTable(ds.alarm_recipient);
    TextSeries s = new TextSeries(Path.Combine(TestData.DataPath, "alarms", "pal_fb.csv"));
    //TO DO .. read flags
    s.Parameter = "fb";
    s.SiteID = "pal";
    s.Read();
    Assert.IsTrue(s.Count > 500); // fixture sanity check
    ds.Check(s); // evaluate alarm conditions against the series
    var queue = ds.GetAlarmQueue();
    Assert.AreEqual(1, queue.Count, "expected 1 alarm in the queue");
}
/// <summary>
/// Defines an "above 5520" alarm on pal/fb plus a recipient, runs the check
/// against a CSV fixture, and expects exactly one 'new' alarm in the queue.
/// NOTE(review): relies on a 'db' that is not created in this method --
/// presumably a fixture field initialized by test setup; confirm ordering.
/// </summary>
public void DatabaseAboveAlarmTest()
{
    // create database with alarm def
    var ds = db.Alarms;
    ds.AddNewAlarmGroup("palisades");
    ds.alarm_definition.Addalarm_definitionRow(true, "palisades", "pal", "fb", "above 5520", "", 10);
    ds.SaveTable(ds.alarm_definition);
    ds.alarm_recipient.Addalarm_recipientRow("palisades", 4, "5272", "office", "*****@*****.**");
    ds.SaveTable(ds.alarm_recipient);
    String file = Path.Combine(TestData.DataPath, "alarms", "pal_fb.csv");
    TextSeries s = new TextSeries(file);
    //TO DO .. read flags
    s.Parameter = "fb";
    s.SiteID = "pal";
    s.Read();
    Assert.IsTrue(s.Count > 500); // fixture sanity check
    ds.Check(s);
    var queue = ds.GetAlarmQueue();
    // exactly one 'new' alarm should exist for this list/site/parameter
    string sql = "list = 'palisades' AND siteid = 'pal' " + "AND parameter = 'fb' AND status = 'new'";
    Assert.IsTrue(queue.Select(sql).Length == 1);
}
/// <summary>
/// Deletes the middle half of a series, then applies a +PI offset to all
/// remaining points and verifies every value shifted by exactly PI
/// (i.e. Offset still works correctly after a deletion re-indexes the data).
/// </summary>
public void OffsetAfterDeletion()
{
    string fn = TestData.DataPath + "\\el68d_export.csv";
    TextSeries s = new TextSeries(fn);
    s.Read();
    int count = s.Count;
    Assert.AreEqual(s.Count, 1145, "Test Data has been modified....expected 1145 records. found " + s.Count);
    // delete middle half of data first.
    int idx = s.Count / 4;
    // value range -1000..1000 is wide enough to catch every point in the date span
    Selection sel = new Selection(s[idx].DateTime, s[s.Count - idx - 1].DateTime, -1000, 1000);
    Console.WriteLine("about to delete selection : " + sel.ToString());
    s.Delete(sel);
    Assert.IsTrue(count > s.Count, "some data should be missing ");
    double[] values = new double[s.Count];
    for (int i = 0; i < values.Length; i++)
    {
        values[i] = s[i].Value; // copy data before offset.
    }
    // select all data
    sel = new Selection(s[0].DateTime, s[s.Count - 1].DateTime, -1000, 1000);
    Reclamation.TimeSeries.Math.Offset(s, sel, System.Math.PI);
    for (int i = 0; i < values.Length; i++)
    {
        Assert.AreEqual(values[i] + System.Math.PI, s[i].Value, 0.000001, "offset failed");
    }
}
/// <summary>Checks that a daily-average CSV is auto-detected as a Daily interval.</summary>
public void DailyIntervalDetection()
{
    string inputFile = TestData.DataPath + @"\El686_2004DailyAverageStage.csv";
    var series = new TextSeries(inputFile);
    series.Read();
    Assert.AreEqual(TimeInterval.Daily, series.TimeInterval);
}
/// <summary>
/// Reads a text series from <paramref name="filename"/> and names its table
/// "{externalSiteID}_{scenarioName}" (lower-cased).
/// NOTE(review): an almost identical copy of this method exists elsewhere in
/// this source (differing only in return style) -- consider consolidating.
/// </summary>
/// <param name="scenarioName">scenario label appended to the table name</param>
/// <param name="filename">path of the text/CSV series file</param>
/// <param name="externalSiteID">site identifier prefixed to the table name</param>
/// <returns>the series with Provider and TableName set</returns>
private static Series ReadExternalSeriesData(string scenarioName, string filename, string externalSiteID)
{
    Series s = new TextSeries(filename);
    s.Read();
    s.Provider = "Series";
    s.Table.TableName = (externalSiteID +"_"+ scenarioName).ToLower();
    return s;
}
/// <summary>
/// Reads a text series from <paramref name="filename"/> and names its table
/// "{externalSiteID}_{scenarioName}" (lower-cased).
/// NOTE(review): an almost identical copy of this method exists elsewhere in
/// this source (differing only in return style) -- consider consolidating.
/// </summary>
/// <param name="scenarioName">scenario label appended to the table name</param>
/// <param name="filename">path of the text/CSV series file</param>
/// <param name="externalSiteID">site identifier prefixed to the table name</param>
/// <returns>the series with Provider and TableName set</returns>
private static Series ReadExternalSeriesData(string scenarioName, string filename, string externalSiteID)
{
    Series s = new TextSeries(filename);
    s.Read();
    s.Provider = "Series";
    s.Table.TableName = (externalSiteID + "_" + scenarioName).ToLower();
    return(s);
}
/// <summary>Smoke test: time-weighted daily average over a series with partial days.</summary>
public void PartialDay()
{
    string inputFile = Path.Combine(TestData.DataPath, "wilson.csv");
    TextSeries series = new TextSeries(inputFile);
    series.Read();
    Series dailyAverage = Math.TimeWeightedDailyAverage(series);
}
/// <summary>Parses an Excel-exported mm/dd/yyyy hh:mm AM/PM text file; expects 9 points.</summary>
public void ExcelCSV()
{
    string inputFile = Path.Combine(TestData.DataPath, "mmddyyyyhhmmAMPM.txt");
    Console.WriteLine("reading " + inputFile);
    var series = new TextSeries(inputFile);
    series.Read();
    Console.WriteLine("skipped the following\n" + series.Messages.ToString());
    Assert.AreEqual(9, series.Count);
}
/// <summary>Parses a two-digit-year mm/dd/yy CSV; expects 5 points.</summary>
public void mmDDyy()
{
    string inputFile = Path.Combine(TestData.DataPath, "mmddyy.csv");
    Console.WriteLine("reading " + inputFile);
    var series = new TextSeries(inputFile);
    series.Read();
    Console.WriteLine("skipped the following\n" + series.Messages.ToString());
    series.WriteToConsole();
    Assert.AreEqual(5, series.Count);
}
/// <summary>Parses a large CSV (filename containing spaces); expects 32000 points.</summary>
public void BelowDeadwoodDam()
{
    string inputFile = Path.Combine(TestData.DataPath, "below Deadwood Dam.csv");
    Console.WriteLine("reading " + inputFile);
    var series = new TextSeries(inputFile);
    series.Read();
    Console.WriteLine("skipped the following\n" + series.Messages.ToString());
    Assert.AreEqual(32000, series.Count);
}
/// <summary>Reads a digitized chart text file and looks up one known point.</summary>
public void TextFileDigitizedChart()
{
    string inputFile = TestData.DataPath + "\\el68d_DigitizedChart.txt";
    var series = new TextSeries(inputFile);
    series.Read();
    // the file contains the record: 1999/01/02 12:40:11, 4.969
    DateTime when = Convert.ToDateTime("1999/01/02 12:40:11");
    Assert.IsTrue(System.Math.Abs(series.Lookup(when) - 4.969) < 0.001);
}
/// <summary>
/// Determines the system state for date <paramref name="t"/> by comparing
/// total system contents (hydromet 'talsys'/'af') to a long-term average read
/// from talsys_afavg.csv: Wet at >= avg+15000, Dry at <= avg-15000, otherwise
/// Median. Returns Unknown when contents are missing for both t and t-1.
/// NOTE(review): uses DateTime.Now, so results near midnight depend on when
/// this runs -- confirm that is acceptable.
/// </summary>
internal static SystemState DetermineSystemState(DateTime t)
{
    if (t.Date == DateTime.Now.Date)
    {
        t = t.Date.AddDays(-1); // we dont' have daily value yet for today (use yesterday)
    }
    t = t.Date; // state is daily but we could be running previous day at 9:55 am
    var dir = GetPathToMinimumFlowFiles();
    // read avearge contents for three reservoirs
    var avg = new TextSeries(Path.Combine(dir, "talsys_afavg.csv"));
    avg.Read();
    var talsys_avg = new PeriodicSeries(avg.Table);
    var t1 = t.Date.AddDays(-1); // current last two days system contents
    var hmet = new HydrometDailySeries("talsys", "af");
    hmet.Read(t1, t);
    // determine state.
    Point talsys = hmet[t];
    if (talsys.IsMissing)
    {
        talsys = hmet[t.AddDays(-1).Date]; // try back one day
    }
    if (talsys.IsMissing)
    {
        return(SystemState.Unknown);
    }
    if (t.Month == 2 && t.Day == 29)// don't lookup 29th in periodic table
    {
        t = t.AddDays(-1);
    }
    double avg_af = talsys_avg.Interpolate(t);
    if (talsys.Value >= avg_af + 15000)
    {
        return(SystemState.Wet);
    }
    if (talsys.Value <= avg_af - 15000)
    {
        return(SystemState.Dry);
    }
    return(SystemState.Median);
}
/// <summary>
/// Imports multiple series using an excel control file.
/// The control file has one entry per row (series) and specifies the file
/// format, units, destination folder, and other details for each series.
/// Relative filenames are resolved against the database's directory.
/// </summary>
/// <param name="db">destination time-series database</param>
/// <param name="excelFilename">path of the excel control file</param>
/// <exception cref="NotSupportedException">thrown when a row's format column is not recognized</exception>
public static void Import(TimeSeriesDatabase db, string excelFilename)
{
    NpoiExcel xls = new NpoiExcel(excelFilename);
    var tbl = xls.ReadDataTable(0, true, true);
    for (int i = 0; i < tbl.Rows.Count; i++)
    {
        var row = tbl.Rows[i];
        var format = ReadString(row, "format");
        var units = ReadString(row, "units");
        var folderName = ReadString(row, "folder");
        var filename = ReadString(row, "filename");
        if (!Path.IsPathRooted(filename))
        {
            // resolve relative paths against the database location
            string dir = Path.GetDirectoryName(db.DataSource);
            filename = Path.Combine(dir, filename);
        }
        var siteID = ReadString(row, "siteid");
        var name = ReadString(row, "name");
        var sheetName = ReadString(row, "sheet");
        Series s;
        if (format == "csv" || format == "txt")
        {
            s = new TextSeries(filename);
            s.Read();
        }
        else if (format == "xls-daily-yearlysheets")
        {
            s = ImportMultiSheetDailySeriesExcel.ImportSpreadsheet(filename);
        }
        else
        {
            // previously an unrecognized format left s == null and crashed
            // below with a NullReferenceException; fail with a clear message.
            throw new NotSupportedException("unsupported format '" + format + "' in control file row " + (i + 1));
        }
        s.Units = units;
        s.Name = name;
        s.SiteID = siteID;
        s.Table.TableName = "ts_" + s.Name.ToLower();
        var folder = db.RootFolder;
        if (folderName != "")
        {
            folder = db.GetOrCreateFolder(folder.Name, folderName);
        }
        db.AddSeries(s, folder);
    }
}
/// <summary>Parses a Stevens data-logger file and spot-checks two values and the interval.</summary>
public void StevensLogger()
{
    string inputFile = TestData.DataPath + "\\StevensLogger.txt";
    var series = new TextSeries(inputFile);
    series.Read();
    Assert.AreEqual(13, series.Count);
    Assert.AreEqual(7.93, series["2007-12-31 22:40"].Value);
    Assert.AreEqual(1.51, series["2007-8-27 08:40"].Value);
    Assert.AreEqual(TimeInterval.Irregular, series.TimeInterval);
}
/// <summary>Weekly averaging of a daily series; checks the first timestamp and two values.</summary>
public void SevenDay()
{
    var daily = new TextSeries(Path.Combine(TestData.DataPath, "sevendayavg.csv"));
    daily.Read();
    Assert.AreEqual(2738, daily.Count);
    var weekly = Math.WeeklyAverageSimple(daily);
    weekly.WriteToConsole();
    Assert.AreEqual(DateTime.Parse("2004-02-12 23:59:59.9"), weekly[0].DateTime);
    Assert.AreEqual(2.17, weekly[0].Value, 0.01);
    Assert.AreEqual(101.32, weekly[1].Value, 0.01);
}
/// <summary>
/// Verifies Series.Copy is a deep copy: renaming the original afterwards
/// must not affect the copy's name.
/// </summary>
public void DeepCopy()
{
    var original = new TextSeries(Path.Combine(TestData.DataPath, "el68d_export.csv"));
    original.Read();
    original.Name = "First";
    Series copy = original.Copy();
    original.Name = "Second";
    Console.WriteLine("s.Name = " + original.Name);
    Console.WriteLine("s2.Name = " + copy.Name);
    Assert.IsTrue(copy.Name == "First");
    Assert.IsTrue(original.Name == "Second");
}
/// <summary>
/// 7DADMAX over a sparse instantaneous series: the first output day should
/// match the fixture's date but carry the Missing flag (insufficient data).
/// </summary>
public void SevenDayMovingInstantSparse()
{
    var s = new TextSeries(Path.Combine(TestData.DataPath, "temp_sparse.csv"));
    s.Read();
    Series s2 = Reclamation.TimeSeries.Math.SevenDADMAX(s);
    TextSeries expected = new TextSeries(Path.Combine(TestData.DataPath, "7dadmax.csv"));
    expected.Read();
    Assert.AreEqual(expected[0].DateTime.Date, s2[0].DateTime.Date);
    Assert.AreEqual(PointFlag.Missing, s2[0].Flag); // sparse input -> first day flagged missing
}
/* Sample of the CSV returned by the Idaho Power export service:
 * Data Set Export - Flow.DayMean@13087505 Milner Lwr Pwr Plant at Milner ...
 * Timestamp (UTC-07:00),Value (Cubic Feet Per Second),Grade Code,Approval Level,Interpolation Type,Comment
 * 2017-05-12 00:00:00,5260,0,,8,
 * 2017-05-13 00:00:00,5250,0,,8,
 * ...
 */
/// <summary>
/// Downloads data for dataset <paramref name="id"/> from the Idaho Power
/// export service covering [t1, t2], unzips it, and parses it as a text
/// series. The request end date is t2+1 day; the result is trimmed back to
/// [t1, t2] so extra rows from the padded request are dropped.
/// </summary>
private static Series ReadFromIdahoPower(string id, DateTime t1, DateTime t2)
{
    var url = "https://idastream.idahopower.com/Data/Export_Data/?dataset=" + id + "&date=" + t1.Date.ToString("yyyy-MM-dd") + "&endDate=" + t2.AddDays(1).ToString("yyyy-MM-dd") + "&exporttype=csv&type=csv";
    var fn = DownloadAndUnzip(url);
    TextSeries s = new TextSeries(fn);
    s.Read();
    s.Trim(t1, t2);
    return(s);
}
/// <summary>
/// Reads Feb 6 - Dec 31 2004 from the given series, strips -9999 missing
/// values, forces a Daily interval, then checks the first weekly average.
/// </summary>
private static void Weekly(TextSeries s)
{
    s.Read(DateTime.Parse("2/6/2004"), DateTime.Parse("12/31/2004"));
    // temporarily treat -9999 as the global missing-value sentinel while pruning;
    // MissingValueFlag is shared state, so it must be restored afterwards
    double mf = Point.MissingValueFlag;
    Point.MissingValueFlag = -9999;
    s.RemoveMissing();
    Point.MissingValueFlag = mf; // restore the original sentinel
    s.TimeInterval = TimeInterval.Daily;
    var weekly = Math.WeeklyAverageSimple(s);
    Assert.AreEqual(TimeInterval.Weekly, weekly.TimeInterval);
    Assert.AreEqual(DateTime.Parse("2/12/2004"), weekly[0].DateTime.Date);
    Assert.AreEqual(2.172, weekly[0].Value, 0.01);
}
/// <summary>
/// Compares SevenDADMAX output for an instantaneous series against a
/// precomputed fixture, point by point with a 0.001 tolerance.
/// </summary>
public void SevenDayMovingInstant()
{
    var s = new TextSeries(Path.Combine(TestData.DataPath, "457373.csv"));
    s.Read();
    Series s2 = Reclamation.TimeSeries.Math.SevenDADMAX(s);
    TextSeries expected = new TextSeries(Path.Combine(TestData.DataPath, "7dadmax.csv"));
    expected.Read();
    for (int i = 0; i < expected.Count; i++)
    {
        Assert.AreEqual(expected[i].Value, s2[i].Value, 0.001);
    }
}
/* Sample of the CSV returned by the Idaho Power export service:
 * Data Set Export - Flow.DayMean@13087505 Milner Lwr Pwr Plant at Milner ...
 * Timestamp (UTC-07:00),Value (Cubic Feet Per Second),Grade Code,Approval Level,Interpolation Type,Comment
 * 2017-05-12 00:00:00,5260,0,,8,
 * ...
 */
/// <summary>
/// Downloads recent data for dataset <paramref name="id"/> from the Idaho
/// Power export service, going back <paramref name="hoursBack"/> hours from
/// now through tomorrow, and trims the parsed series to that window.
/// NOTE(review): the URL uses date-only (yyyy-MM-dd) bounds while Trim uses
/// full timestamps, so the download is padded and then cut down -- confirm
/// the service interprets date-only bounds inclusively as assumed here.
/// </summary>
private static Series GetData(string id, int hoursBack)
{
    var t2 = DateTime.Now.AddDays(1);
    var t1 = DateTime.Now.AddHours(-hoursBack);
    var url = "https://idastream.idahopower.com/Data/Export_Data/?dataset=" + id + "&date=" + t1.ToString("yyyy-MM-dd") + "&endDate=" + t2.ToString("yyyy-MM-dd") + "&exporttype=csv&type=csv";
    var fn = DownloadAndUnzip(url);
    TextSeries s = new TextSeries(fn);
    s.Read();
    s.Trim(t1, t2);
    return(s);
}
/// <summary>
/// Computes the 7-day water-year residual of Heise unregulated flow and
/// checks it against a fixture by summing the point-wise differences.
/// </summary>
public void HeiseResidualTest()
{
    var unregulated = new TextSeries(Path.Combine(TestData.DataPath, "heii_qu.csv"));
    unregulated.Read();
    var residual = Math.WaterYearResidual(unregulated, 7);
    residual.WriteToConsole();
    var expected = new TextSeries(Path.Combine(TestData.DataPath, "heii_residual.csv"));
    expected.Read();
    var totalDifference = Math.Sum(residual - expected);
    Assert.IsTrue(System.Math.Abs(totalDifference) < 0.01);
}
/// <summary>
/// Annual sum over the Jan 1 - Jul 31 range for heii_qd.csv; verifies the
/// first year's timestamp and total.
/// </summary>
public void AnnualSumJanuaryThroughJuly()
{
    string fn = TestData.DataPath + "\\heii_qd.csv";
    TextSeries ts = new TextSeries(fn);
    ts.Read();
    Console.WriteLine(ts.Count);
    Assert.AreEqual(28905, ts.Count, fn + " has been modified");
    MonthDayRange rng = new MonthDayRange(1, 1, 7, 31);
    Series sum = Reclamation.TimeSeries.Math.AnnualSum(ts, rng, 10);
    sum.WriteToConsole();
    Assert.AreEqual(1928, sum[0].DateTime.Year);
    Assert.AreEqual(2293240, sum[0].Value);
    // smoke-test only: min/max results are computed but not asserted
    Series min = Math.AnnualMin(ts, rng, 10);
    Series max = Math.AnnualMax(ts, rng, 10);
}
/// <summary>Parses a yyyy/mm/dd-style file and checks all three timestamps and values.</summary>
private void yyyymmdd(string filename)
{
    DateTime[] expectedDates =
    {
        new DateTime(1999, 1, 1, 12, 45, 19, 0),
        new DateTime(1999, 1, 20, 13, 0, 14, 0),
        new DateTime(1999, 1, 20, 13, 0, 14, 1),
    };
    double[] expectedValues = { 5.056, 5.061, 5.062 };
    TextSeries series = new TextSeries(filename);
    series.Read();
    series.WriteToConsole();
    Assert.IsTrue(series.Count == expectedDates.Length, "should have 3 dates we have " + series.Count);
    for (int i = 0; i < expectedDates.Length; i++)
    {
        Assert.IsTrue(series[i].DateTime == expectedDates[i]);
        Assert.IsTrue(System.Math.Abs(series[i].Value - expectedValues[i]) < 0.000001);
    }
}
/// <summary>Parses a mm/dd/yyyy-style file and checks all four timestamps and values.</summary>
private void mmddyyyy(string filename)
{
    DateTime[] expectedDates =
    {
        new DateTime(2005, 2, 1, 13, 2, 0, 0),
        new DateTime(2005, 2, 1, 13, 2, 4, 0),
        new DateTime(2005, 3, 1, 13, 3, 0, 0),
        new DateTime(2005, 4, 1, 2, 0, 1, 0)
    };
    double[] expectedValues = { 3, 5, 4.1, 5 };
    TextSeries series = new TextSeries(filename);
    series.Read();
    series.WriteToConsole();
    Assert.IsTrue(series.Count == expectedDates.Length, "should have 4 dates we have " + series.Count);
    for (int i = 0; i < expectedDates.Length; i++)
    {
        Assert.IsTrue(series[i].DateTime == expectedDates[i]);
        Assert.IsTrue(System.Math.Abs(series[i].Value - expectedValues[i]) < 0.000001);
    }
}
/// <summary>
/// Alarm queue keyed by definition id: after defining an "above 5520" alarm
/// on pal/fb and checking the fixture series, exactly one alarm row should
/// be queued for that definition.
/// </summary>
public void DatabaseAboveAlarmTest()
{
    var db = GetNewDatabase();
    // create database with alarm def
    var ds = db.Alarms;
    ds.AddNewAlarmGroup("palisades");
    var def_id = ds.alarm_definition.Addalarm_definitionRow(true, "palisades", "pal", "fb", "above 5520", "").id;
    ds.SaveTable(ds.alarm_definition);
    // confirm the definition actually persisted to the server
    var test = db.Server.Table("alarm_definition");
    Console.WriteLine("alarm_definition has " + test.Rows.Count + " rows");
    ds.alarm_recipient.Addalarm_recipientRow("palisades", 4, "5272", "office", "*****@*****.**");
    ds.SaveTable(ds.alarm_recipient);
    String file = Path.Combine(TestData.DataPath, "alarms", "pal_fb.csv");
    TextSeries s = new TextSeries(file);
    //TO DO .. read flags
    s.Parameter = "fb";
    s.SiteID = "pal";
    s.Read();
    Console.WriteLine("pal/fb series count = " + s.Count);
    Assert.IsTrue(s.Count > 500); // fixture sanity check
    ds.Check(s);
    var queue = ds.GetAlarmQueue(def_id);
    Console.WriteLine(DataTableOutput.ToHTML(queue));
    Assert.AreEqual(1, queue.Rows.Count);
}
/* Reference output from the legacy CHECK program (MAX/MIN SUMMARY,
 * station RIR, parameter QD, 1963-2006) that this test's fixture and
 * expectations were derived from; abbreviated here:
 *
 *  YEAR   DATE    MAXIMUM    DATE    MINIMUM   MISS
 *  1963   FEB  3   700.00    AUG 30     6.10     0
 *  ...
 *  2006   MAY  2  1119.98    NOV 22     0.00    75
 *  Avg of 44 yrs  1010.95               6.88
 */
/// <summary>
/// Test annual sum and compare to check program
/// VOLAF option for heise unregulated flow:
/// sums Mar 12 - May 1 each year and compares 2001-2006 totals
/// (acre-feet) against CHECK-program values, within 3 units.
/// </summary>
public void Vol_AF_HEII()
{
    //heii_quAF.csv
    string fn = TestData.DataPath + @"\heii_quAF.csv";
    TextSeries ts = new TextSeries(fn);
    ts.Read();
    Assert.AreEqual(28916, ts.Count, "file has changed");
    MonthDayRange r = new MonthDayRange(3, 12, 5, 1);
    Series sum = Math.AnnualSum(ts, r, 10);
    // expected totals for water years 2001..2006, from the CHECK program
    double[] expected = { 380313.47, 424559.03, 476518.66, 522272.94, 451835.38, 702204.13 };
    for (int yr = 2001; yr <= 2006; yr++)
    {
        DateTime t1 = new DateTime(yr, 1, 1);
        DateTime t2 = new DateTime(yr, 12, 31);
        Series sYear = Math.Subset(sum, t1, t2);
        Assert.AreEqual(expected[yr - 2001], sYear[0].Value, 3);
    }
}
/// <summary>
/// Averages each April (Apr 1 - Apr 30) of the hrsi daily MODSIM series and
/// compares all 63 yearly averages (starting 1944) against expected values.
/// </summary>
public void AprilAverageHrsi()
{
    Series s = new TextSeries(Path.Combine(TestData.DataPath, "hrsiDailyModsim.csv"));
    s.Read();
    Assert.AreEqual(5418, s.Count,"reading file");
    // one expected April average per year, 1944 onward
    double[] expected ={ 11133.3, 8613.8, 14385.9, 10328.0, 9689.5, 12839.8, 10611.5, 13445.1,
        13052.1, 10152.5, 12536.3, 7573.6, 11985.8, 10534.4, 9936.5, 12047.7, 13240.2, 10389.1,
        13993.5, 10028.9, 9384.8, 12515.8, 14188.1, 7887.3, 9998.4, 13970.7, 7438.4, 10280.7,
        9050.2, 10602.3, 10659.1, 7180.2, 11815.6, 14941.5, 11761.7, 10830.9, 11825.9, 11968.4,
        9618.7, 9729.7, 10152.0, 14670.9, 12391.5, 16330.4, 14960.8, 14946.8, 16466.4, 10045.9,
        17344.9, 10593.5, 15629.1, 10727.6, 11894.0, 11303.9, 10471.4, 10555.0, 15624.5, 13681.5,
        14531.3, 10780.0, 13061.1, 11222.4, 14146.1};
    MonthDayRange range = new MonthDayRange(4, 1, 4, 30);
    Series s2 = Reclamation.TimeSeries.Math.AggregateAndSubset(StatisticalMethods.Average, s, range, 10);
    Assert.AreEqual(63, s2.Count);
    Assert.AreEqual(1944, s2[0].DateTime.Year);
    for (int i = 0; i < s2.Count; i++)
    {
        Assert.AreEqual(expected[i], s2[i].Value, 0.1);
    }
}