public void TextFileHydromet()
{
    string path = TestData.DataPath + "\\";
    string fn1 = path + "LuckyPeakWaterLevel.txt";
    Console.WriteLine("reading " + fn1);
    TextSeries s = new TextSeries(fn1);
    s.Read();
    Assert.IsTrue(s.Count > 0);

    DateTime t1 = Convert.ToDateTime("10/13/2004");
    double v = s.Lookup(t1);
    Assert.IsTrue(System.Math.Abs(v - 2907.2) < 0.01, "expected 2907.2. got " + v);

    // save to text file..
    string fn = Path.GetTempFileName();
    //fn = TestData.OutputPath+"\\"+fn;
    s.WriteCsv(fn);

    TextSeries s1 = new TextSeries(fn);
    s1.Read();
    Assert.IsTrue(s.Count == s1.Count);
    v = s1.Lookup(t1);
    Assert.IsTrue(System.Math.Abs(v - 2907.2) < 0.01, "expected 2907.2. got " + v);

    File.Delete(fn);
    Console.WriteLine("finished TextFileHydromet");
}
public void DeleteSelectionInEL68D()
{
    //2005-02-02 06:53:52.331,98.4335632324219
    //2005-02-04 09:24:53.233,98.4335632324219
    string fn = Path.Combine(TestData.DataPath, "el68d_export.csv");
    TextSeries s = new TextSeries(fn);
    s.Read();
    Assert.AreEqual(s.Count, 1145, "Test Data has been modified....expected 1145 records. found " + s.Count);

    DateTime delete1 = Convert.ToDateTime("2005-02-02 06:53:52.331");
    DateTime delete2 = Convert.ToDateTime("2005-02-04 09:24:53.233");
    Assert.AreEqual(186, s.IndexOf(delete1), "test data has been modified, could not find " + delete1.ToShortDateString() + " " + delete1.ToLongTimeString());
    Assert.AreEqual(490, s.IndexOf(delete2), "test data has been modified, could not find " + delete2.ToShortDateString() + " " + delete2.ToLongTimeString());

    DateTime t1 = new DateTime(2005, 2, 2);
    DateTime t2 = new DateTime(2005, 2, 4, 10, 30, 0, 0);
    Selection sel = new Selection(t1, t2, 30, 200);
    s.Delete(sel); // should delete two records in selection.

    Assert.AreEqual(s.Count, 1143, "expected 1143 records. found " + s.Count);
    Assert.AreEqual(-1, s.IndexOf(delete1), "delete1 point was not deleted ");
    Assert.AreEqual(-1, s.IndexOf(delete2), "delete2 point was not deleted ");
}
public void DatabaseAboveAlarmTest()
{
    // create database with alarm def
    var ds = db.Alarms;
    ds.AddNewAlarmGroup("palisades");
    ds.alarm_definition.Addalarm_definitionRow(true, "palisades", "pal", "fb", "above 5520", "", 10);
    ds.SaveTable(ds.alarm_definition);
    ds.alarm_recipient.Addalarm_recipientRow("palisades", 4, "5272", "office", "*****@*****.**");
    ds.SaveTable(ds.alarm_recipient);

    String file = Path.Combine(TestData.DataPath, "alarms", "pal_fb.csv");
    TextSeries s = new TextSeries(file); //TO DO .. read flags
    s.Parameter = "fb";
    s.SiteID = "pal";
    s.Read();
    Assert.IsTrue(s.Count > 500);
    ds.Check(s);

    var queue = ds.GetAlarmQueue();
    string sql = "list = 'palisades' AND siteid = 'pal' " +
                 "AND parameter = 'fb' AND status = 'new'";
    Assert.IsTrue(queue.Select(sql).Length == 1);
}
public void WeeklyAverageFromDaily()
{
    string fn = TestData.DataPath + @"\El686_2004DailyAverageStage.csv";
    TextSeries s = new TextSeries(fn);
    Weekly(s);
}
public void DatabaseAlarmTest()
{
    // create database with alarm def
    var fn = FileUtility.GetTempFileName(".pdb");
    SQLiteServer svr = new SQLiteServer(fn);
    TimeSeriesDatabase db = new TimeSeriesDatabase(svr);
    var ds = db.Alarms;
    ds.AddNewAlarmGroup("palisades");
    ds.alarm_definition.Addalarm_definitionRow(true, "palisades", "pal", "fb", "above 5520", "", 10);
    ds.SaveTable(ds.alarm_definition);
    ds.alarm_recipient.Addalarm_recipientRow("palisades", 4, "5272", "office", "*****@*****.**");
    ds.SaveTable(ds.alarm_recipient);

    TextSeries s = new TextSeries(Path.Combine(TestData.DataPath, "alarms", "pal_fb.csv")); //TO DO .. read flags
    s.Parameter = "fb";
    s.SiteID = "pal";
    s.Read();
    Assert.IsTrue(s.Count > 500);
    ds.Check(s);

    var queue = ds.GetAlarmQueue();
    Assert.AreEqual(1, queue.Count, "expected 1 alarm in the queue");
}
public void OffsetAfterDeletion()
{
    string fn = TestData.DataPath + "\\el68d_export.csv";
    TextSeries s = new TextSeries(fn);
    s.Read();
    int count = s.Count;
    Assert.AreEqual(s.Count, 1145, "Test Data has been modified....expected 1145 records. found " + s.Count);

    // delete middle half of data first.
    int idx = s.Count / 4;
    Selection sel = new Selection(s[idx].DateTime, s[s.Count - idx - 1].DateTime, -1000, 1000);
    Console.WriteLine("about to delete selection : " + sel.ToString());
    s.Delete(sel);
    Assert.IsTrue(count > s.Count, "some data should be missing ");

    double[] values = new double[s.Count];
    for (int i = 0; i < values.Length; i++)
    {
        values[i] = s[i].Value; // copy data before offset.
    }

    // select all data
    sel = new Selection(s[0].DateTime, s[s.Count - 1].DateTime, -1000, 1000);
    Reclamation.TimeSeries.Math.Offset(s, sel, System.Math.PI);

    for (int i = 0; i < values.Length; i++)
    {
        Assert.AreEqual(values[i] + System.Math.PI, s[i].Value, 0.000001, "offset failed");
    }
}
public void WeeklyAverageFromInstant()
{
    string fn = Path.Combine(TestData.DataPath, "El686_2004InstantaniousStage.csv");
    TextSeries s = new TextSeries(fn);
    Weekly(s);
}
public void SouthForkBoiseSeptember()
{
    Series s = TestData.SouthForkBoise;
    Assert.AreEqual(s.Count, 22346, " unexpected number of points");

    Series s2 = Reclamation.TimeSeries.Math.Subset(s, new int[] { 7 }); // july.
    Series e = Reclamation.TimeSeries.Math.Sort(s2, RankType.Weibul);
    Point pt = e[0];
    Assert.AreEqual(4030.0, pt.Value, 0.01);
    Assert.AreEqual(.05, pt.Percent, 0.01);
    Point min = e[e.Count - 1];
    Assert.AreEqual(99.95, min.Percent, 0.01);
    Assert.AreEqual(117.00, min.Value, 0.01);

    // again with exceedance method.
    Series s3 = new TextSeries(Path.Combine(TestData.DataPath, "SouthForkOfBoiseNearFeatherville.txt"));
    Series e2 = s3.Exceedance(TimeSeriesDatabase.MinDateTime, TimeSeriesDatabase.MaxDateTime,
                              new MonthDayRange(7, 1, 7, 31), RankType.Weibul);
    pt = e2[0];
    Assert.AreEqual(4030.0, pt.Value, 0.01);
    Assert.AreEqual(.05, pt.Percent, 0.01);
}
public void WeeklyAverageFromDaily()
{
    string fn = Path.Combine(TestData.DataPath, "El686_2004DailyAverageStage.csv");
    TextSeries s = new TextSeries(fn);
    Weekly(s);
}
public void WeeklyAverageFromInstant()
{
    string fn = TestData.DataPath + @"\El686_2004InstantaniousStage.csv";
    TextSeries s = new TextSeries(fn);
    Weekly(s);
}
private void buttonOpen_Click(object sender, EventArgs e)
{
    this.buttonSave.Enabled = false;
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        var fn = openFileDialog1.FileName;
        this.textBoxFilename.Text = fn;
        DataTable tbl = null;
        if (Path.GetExtension(fn).IndexOf("xls") >= 0)
        { // xls or xlsx (Excel)
            NpoiExcel xls = new NpoiExcel(fn);
            DataTable template = new DataTable("watertemp");
            template.Columns.Add("DateTime", typeof(DateTime));
            template.Columns.Add("val", typeof(double));
            tbl = xls.ReadDataTable(0, template, true);
        }
        else if (Path.GetExtension(fn).IndexOf("csv") >= 0)
        { // csv
            //tbl = new CsvFile(fn, CsvFile.FieldTypes.AllText);
            var s = new TextSeries(fn);
            s.Read();
            tbl = s.Table;
        }
        m_series = CreateSeries(tbl);
        this.dataGridView1.DataSource = m_series.Table;
        this.timeSeriesTeeChartGraph1.Series.Clear();
        this.timeSeriesTeeChartGraph1.Series.Add(m_series);
        this.timeSeriesTeeChartGraph1.Draw(true);
        this.comboBoxPcode.SelectedIndex = -1;
    }
}
private static Series ReadExternalSeriesData(string scenarioName, string filename, string externalSiteID)
{
    Series s = new TextSeries(filename);
    s.Read();
    s.Provider = "Series";
    s.Table.TableName = (externalSiteID + "_" + scenarioName).ToLower();
    return s;
}
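// Hypothetical caller sketch (not part of the original source): shows how
// ReadExternalSeriesData builds the table name from the site ID and scenario name.
// The file path, scenario name, and site ID below are assumptions for illustration.
private static void ReadExternalSeriesDataUsageSketch()
{
    Series external = ReadExternalSeriesData("baseline", @"c:\temp\external_flows.csv", "SITE1");
    Console.WriteLine(external.Count);           // number of points read from the csv file
    Console.WriteLine(external.Table.TableName); // "site1_baseline"
}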
public void DailyIntervalDetection()
{
    string fn = TestData.DataPath + @"\El686_2004DailyAverageStage.csv";
    TextSeries s = new TextSeries(fn);
    s.Read();
    Assert.AreEqual(TimeInterval.Daily, s.TimeInterval);
}
private void AddTextSeries(out Series s, out int c, out int sdi)
{
    s = TextSeries.ReadFromFile(textFileName);
    s.Units = "cfs";
    //s.Table.TableName = "ts"+;
    c = s.Count;
    Assert.IsTrue(s.Count > 0);
    sdi = db.AddSeries(s);
}
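// Minimal caller sketch (assumed, not from the source): AddTextSeries hands back the
// series, its point count, and the series id (sdi) assigned by db.AddSeries.
private void AddTextSeriesUsageSketch()
{
    Series s;
    int count, sdi;
    AddTextSeries(out s, out count, out sdi);
    Console.WriteLine("added series " + sdi + " with " + count + " points");
}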
public void AddSeriesDirectly()
{
    Logger.EnableLogger();
    var filename = FileUtility.GetTempFileNameInDirectory(@"c:\temp\", ".pdb");
    if (File.Exists(filename))
    {
        File.Delete(filename);
    }
    Console.WriteLine(filename);
    var server = new SQLiteServer(filename);
    var db = new TimeSeriesDatabase(server);

    // create a folder for each month
    for (int i = 1; i <= 12; i++)
    {
        var t = new DateTime(2015, i, 1);
        db.AddFolder("Months", t.ToString("MMMM"));
    }

    // Add USGS series (Boise River) to the January Folder
    Series s = new UsgsDailyValueSeries("13206000", UsgsDailyParameter.DailyMeanDischarge);
    s.SiteID = "13206000";
    var folder = db.GetOrCreateFolder("Months", "January");
    s.Read(DateTime.Parse("2015-01-01"), DateTime.Parse("2015-01-10"));
    db.AddSeries(s, folder);

    // Add Hydromet series to the February Folder
    s = new HydrometDailySeries("bhr", "af", HydrometHost.GreatPlains);
    s.Name = "gphyd_bhr_af";
    s.SiteID = "gphyd_bhr";
    var feb = db.GetOrCreateFolder("Months", "February");
    db.AddSeries(s, feb);

    // Add Csv file data to March Folder.
    s = new TextSeries(@"c:\temp\test_river.csv");
    s.Read(); // read data. Use Read(t1,t2) to limit by dates
    s.SiteID = "test";
    s.Units = "cfs";
    s.Table.TableName = "test_river"; // table name needs to be unique
    db.AddSeries(s, db.GetOrCreateFolder("Months", "March"));

    s = db.GetSeriesFromName("gphyd_bhr_af");
    s.Read();
    Console.WriteLine(s.Count);

    // Add CSV file
    db.Inventory();
}
public void PartialDay()
{
    string fn = Path.Combine(TestData.DataPath, "wilson.csv");
    // var s = new ExcelDataReaderSeries(fn, "wilson", "A", "B");
    var s = new TextSeries(fn);
    s.Read();
    Series avg = Math.TimeWeightedDailyAverage(s);
}
public void BelowDeadwoodDam()
{
    string fn = Path.Combine(TestData.DataPath, "below Deadwood Dam.csv");
    Console.WriteLine("reading " + fn);
    TextSeries s = new TextSeries(fn);
    s.Read();
    Console.WriteLine("skipped the following\n" + s.Messages.ToString());
    //s.WriteToConsole();
    Assert.AreEqual(32000, s.Count);
}
public void mmDDyy()
{
    string fn = Path.Combine(TestData.DataPath, "mmddyy.csv");
    Console.WriteLine("reading " + fn);
    TextSeries s = new TextSeries(fn);
    s.Read();
    Console.WriteLine("skipped the following\n" + s.Messages.ToString());
    s.WriteToConsole();
    Assert.AreEqual(5, s.Count);
}
public void TextFileDigitizedChart()
{
    string fn = TestData.DataPath + "\\el68d_DigitizedChart.txt";
    TextSeries s = new TextSeries(fn);
    s.Read();
    //s.Save(TestData.DataPath +"\\el68d_DigitizedChart2.txt");

    //1999/01/02 12:40:11, 4.969
    DateTime t1 = Convert.ToDateTime("1999/01/02 12:40:11");
    Assert.IsTrue(System.Math.Abs(s.Lookup(t1) - 4.969) < 0.001);
}
public void ExcelCSV()
{
    string fn = Path.Combine(TestData.DataPath, "mmddyyyyhhmmAMPM.txt");
    Console.WriteLine("reading " + fn);
    TextSeries s = new TextSeries(fn);
    s.Read();
    Console.WriteLine("skipped the following\n" + s.Messages.ToString());
    Assert.AreEqual(9, s.Count);
}
internal static SystemState DetermineSystemState(DateTime t)
{
    if (t.Date == DateTime.Now.Date)
    {
        t = t.Date.AddDays(-1); // we don't have a daily value yet for today (use yesterday)
    }
    t = t.Date; // state is daily but we could be running previous day at 9:55 am

    var dir = GetPathToMinimumFlowFiles();

    // read average contents for three reservoirs
    var avg = new TextSeries(Path.Combine(dir, "talsys_afavg.csv"));
    avg.Read();
    var talsys_avg = new PeriodicSeries(avg.Table);

    var t1 = t.Date.AddDays(-1);

    // current last two days system contents
    var hmet = new HydrometDailySeries("talsys", "af");
    hmet.Read(t1, t);

    // determine state.
    Point talsys = hmet[t];
    if (talsys.IsMissing)
    {
        talsys = hmet[t.AddDays(-1).Date]; // try back one day
    }
    if (talsys.IsMissing)
    {
        return SystemState.Unknown;
    }

    if (t.Month == 2 && t.Day == 29) // don't lookup 29th in periodic table
    {
        t = t.AddDays(-1);
    }

    double avg_af = talsys_avg.Interpolate(t);
    if (talsys.Value >= avg_af + 15000)
    {
        return SystemState.Wet;
    }
    if (talsys.Value <= avg_af - 15000)
    {
        return SystemState.Dry;
    }
    return SystemState.Median;
}
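// Minimal usage sketch (assumed caller, not from the original source): the method above
// classifies the day as Wet or Dry when the "talsys" contents (af) are more than 15,000 af
// above or below the interpolated average curve, Median otherwise, and Unknown when no
// recent daily value is available.
internal static void DetermineSystemStateUsageSketch()
{
    var state = DetermineSystemState(DateTime.Now);
    switch (state)
    {
        case SystemState.Wet:    Console.WriteLine("contents >= average + 15,000 af"); break;
        case SystemState.Dry:    Console.WriteLine("contents <= average - 15,000 af"); break;
        case SystemState.Median: Console.WriteLine("within 15,000 af of average");     break;
        default:                 Console.WriteLine("no recent daily value available"); break;
    }
}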
/// <summary>
/// Imports multiple series using an excel control file.
/// the control file has one entry per row (series)
/// and specifies file format and other details for
/// the series.
/// </summary>
/// <param name="db"></param>
/// <param name="excelFilename"></param>
public static void Import(TimeSeriesDatabase db, string excelFilename)
{
    NpoiExcel xls = new NpoiExcel(excelFilename);
    var tbl = xls.ReadDataTable(0, true, true);

    for (int i = 0; i < tbl.Rows.Count; i++)
    {
        var row = tbl.Rows[i];
        var format = ReadString(row, "format");
        var units = ReadString(row, "units");
        var folderName = ReadString(row, "folder");
        var filename = ReadString(row, "filename");
        if (!Path.IsPathRooted(filename))
        {
            string dir = Path.GetDirectoryName(db.DataSource);
            filename = Path.Combine(dir, filename);
        }
        var siteID = ReadString(row, "siteid");
        var name = ReadString(row, "name");
        var sheetName = ReadString(row, "sheet");

        Series s = null;
        if (format == "csv" || format == "txt")
        {
            s = new TextSeries(filename);
            s.Read();
        }
        // else if( format == "xls-monthly-wateryear")
        // {
        //     throw new NotImplementedException("oops the programmer forgot to finish up some work");
        // }
        else if (format == "xls-daily-yearlysheets")
        {
            s = ImportMultiSheetDailySeriesExcel.ImportSpreadsheet(filename);
        }

        s.Units = units;
        s.Name = name;
        s.SiteID = siteID;
        s.Table.TableName = "ts_" + s.Name.ToLower();

        var folder = db.RootFolder;
        if (folderName != "")
        {
            folder = db.GetOrCreateFolder(folder.Name, folderName);
        }
        db.AddSeries(s, folder);
    }
}
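// Assumed control-file layout, inferred from the ReadString column names above
// (format, units, folder, filename, siteid, name, sheet). The example rows and
// file names below are hypothetical, not from the source:
//
//   format                  | units | folder | filename       | siteid | name   | sheet
//   csv                     | cfs   | Rivers | boise_flow.csv | bois   | bois_q |
//   xls-daily-yearlysheets  | ft    | Lakes  | lucky_peak.xls | luc    | luc_fb |
//
// Minimal usage sketch (database and control-file paths are assumptions):
public static void ImportUsageSketch()
{
    var db = new TimeSeriesDatabase(new SQLiteServer(@"c:\temp\import_demo.pdb"));
    Import(db, @"c:\temp\import_control.xlsx");
}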
public void StevensLogger()
{
    string fn = TestData.DataPath + "\\StevensLogger.txt";
    TextSeries s = new TextSeries(fn);
    s.Read();
    Assert.AreEqual(13, s.Count);
    Assert.AreEqual(7.93, s["2007-12-31 22:40"].Value);
    Assert.AreEqual(1.51, s["2007-8-27 08:40"].Value);
    Assert.AreEqual(TimeInterval.Irregular, s.TimeInterval);
}
public void SevenDay()
{
    string fn = Path.Combine(TestData.DataPath, "sevendayavg.csv");
    // var s = new ExcelDataReaderSeries(fn, "Sheet1", "A", "B");
    var s = new TextSeries(fn);
    s.Read();
    Assert.AreEqual(2738, s.Count);

    var s7 = Math.WeeklyAverageSimple(s);
    s7.WriteToConsole();
    Assert.AreEqual(DateTime.Parse("2004-02-12 23:59:59.9"), s7[0].DateTime);
    Assert.AreEqual(2.17, s7[0].Value, 0.01);
    Assert.AreEqual(101.32, s7[1].Value, 0.01);
}
public void DeepCopy()
{
    string fn = Path.Combine(TestData.DataPath, "el68d_export.csv");
    TextSeries s = new TextSeries(fn);
    s.Read();
    s.Name = "First";
    Series s2 = s.Copy();
    s.Name = "Second";
    Console.WriteLine("s.Name = " + s.Name);
    Console.WriteLine("s2.Name = " + s2.Name);
    Assert.IsTrue(s2.Name == "First");
    Assert.IsTrue(s.Name == "Second");
}
/// <summary>
/// Import a text file into an existing series
/// </summary>
private void importTextFile_Click(object sender, EventArgs e)
{
    if (openTextFileDialog.ShowDialog() == DialogResult.OK)
    {
        Series s = TextSeries.ReadFromFile(openTextFileDialog.FileName);
        if (s.Count == 0)
        {
            MessageBox.Show("No data found in file:" + openTextFileDialog.FileName);
            return;
        }
        if (s.Count > 0)
        {
            DB.UpdateTimeSeriesTable(tree1.SelectedID, s, true);
        }
    }
}
public void SevenDayMovingInstantSparse()
{
    // string fn = Path.Combine(TestData.DataPath, "temp example 7 day max.xls");
    // var s = new ExcelDataReaderSeries(fn, "sparse", "C", "D");
    var s = new TextSeries(Path.Combine(TestData.DataPath, "temp_sparse.csv"));
    s.Read();
    Series s2 = Reclamation.TimeSeries.Math.SevenDADMAX(s);

    //Series expected = new ExcelDataReaderSeries(fn, "7dadmax", "A", "B");
    TextSeries expected = new TextSeries(Path.Combine(TestData.DataPath, "7dadmax.csv"));
    expected.Read();
    Assert.AreEqual(expected[0].DateTime.Date, s2[0].DateTime.Date);
    Assert.AreEqual(PointFlag.Missing, s2[0].Flag);
}
/// <summary>
/// Updates database if the original source file still exists and has
/// been modified.
/// </summary>
protected override void UpdateCore(DateTime t1, DateTime t2, bool minimal)
{
    Logger.WriteLine("Checking Excel series " + Name + " (" + ID + ") for updates");
    string dir = Path.GetDirectoryName(m_db.DataSource);
    if (TextSeries.CanUpdateFromFile(ConnectionString, dir))
    {
        Logger.WriteLine("Update: File has changed");
        SpreadsheetGearSeries g = SpreadsheetGearSeries.CreateFromConnectionString(ConnectionString, dir);
        g.Read();
        //m_db.Truncate(ID);
        ConnectionString = g.ConnectionString;
        ConnectionString = ConnectionStringUtility.MakeFileNameRelative(ConnectionString, m_db.DataSource);
        m_db.SaveProperties(this); // LastWriteTime probably changed
        m_db.SaveTimeSeriesTable(ID, g, DatabaseSaveOptions.DeleteAllExisting);
    }
}
/// <summary>
/// Creates a new series from a text file
/// </summary>
void AddTextFileClick(object sender, System.EventArgs e)
{
    if (openTextFileDialog.ShowDialog() == DialogResult.OK)
    {
        Series s = TextSeries.ReadFromFile(openTextFileDialog.FileName);
        if (s.Count == 0)
        {
            MessageBox.Show("No data found in file:" + openTextFileDialog.FileName);
            return;
        }
        if (s.Count > 0)
        {
            DB.AddSeries(s, CurrentFolder);
        }
    }
}
/*
 * Data Set Export - Flow.DayMean@13087505 Milner Lwr Pwr Plant at Milner - Range: 2017-05-12 00:00 - 2017-05-26 00:00 (UTC-07:00),,,,,
 * Data on this site may be provisional and subject to revision,,,,,
 * Timestamp (UTC-07:00),Value (Cubic Feet Per Second),Grade Code,Approval Level,Interpolation Type,Comment
 * 2017-05-12 00:00:00,5260,0,,8,
 * 2017-05-13 00:00:00,5250,0,,8,
 * 2017-05-14 00:00:00,5250,0,,8,
 * 2017-05-15 00:00:00,5260,0,,8,
 * 2017-05-16 00:00:00,5240,0,,8,
 * 2017-05-17 00:00:00,5240,0,,8,
 * 2017-05-18 00:00:00,5200,0,,8,
 * 2017-05-19 00:00:00,4290,0,,8,
 * 2017-05-20 00:00:00,2160,0,,8,
 * 2017-05-21 00:00:00,244,0,,8,
 * 2017-05-22 00:00:00,0,0,,8,
 * 2017-05-23 00:00:00,0,0,,8,
 * 2017-05-24 00:00:00,0,0,,8,
 * 2017-05-25 00:00:00,0,0,,8,
 * 2017-05-26 00:00:00,0,0,,8,
 */
private static Series ReadFromIdahoPower(string id, DateTime t1, DateTime t2)
{
    //var url = "https://idastream.idahopower.com/Data/Export_Data/?dataset=18942&date=2017-05-12&endDate=2017-05-26&exporttype=csv&type=csv";
    var url = "https://idastream.idahopower.com/Data/Export_Data/?dataset=" + id
        + "&date=" + t1.Date.ToString("yyyy-MM-dd")
        + "&endDate=" + t2.AddDays(1).ToString("yyyy-MM-dd")
        + "&exporttype=csv&type=csv";
    var fn = DownloadAndUnzip(url);
    TextSeries s = new TextSeries(fn);
    s.Read();
    s.Trim(t1, t2);
    return s;
}
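// Hypothetical usage sketch (not in the original source): request the Milner dataset
// shown in the sample export above for the same two-week window. The dataset id "18942"
// is taken from the commented-out example url; the date range is an assumption.
private static void ReadFromIdahoPowerUsageSketch()
{
    Series milner = ReadFromIdahoPower("18942",
        DateTime.Parse("2017-05-12"), DateTime.Parse("2017-05-26"));
    Console.WriteLine(milner.Count + " daily mean flow values (cfs)");
}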
public void AprilAverageHrsi()
{
    Series s = new TextSeries(Path.Combine(TestData.DataPath, "hrsiDailyModsim.csv"));
    s.Read();
    Assert.AreEqual(5418, s.Count, "reading file");

    double[] expected = {
        11133.3,  8613.8, 14385.9, 10328.0,  9689.5, 12839.8, 10611.5, 13445.1, 13052.1, 10152.5,
        12536.3,  7573.6, 11985.8, 10534.4,  9936.5, 12047.7, 13240.2, 10389.1, 13993.5, 10028.9,
         9384.8, 12515.8, 14188.1,  7887.3,  9998.4, 13970.7,  7438.4, 10280.7,  9050.2, 10602.3,
        10659.1,  7180.2, 11815.6, 14941.5, 11761.7, 10830.9, 11825.9, 11968.4,  9618.7,  9729.7,
        10152.0, 14670.9, 12391.5, 16330.4, 14960.8, 14946.8, 16466.4, 10045.9, 17344.9, 10593.5,
        15629.1, 10727.6, 11894.0, 11303.9, 10471.4, 10555.0, 15624.5, 13681.5, 14531.3, 10780.0,
        13061.1, 11222.4, 14146.1 };

    MonthDayRange range = new MonthDayRange(4, 1, 4, 30);
    Series s2 = Reclamation.TimeSeries.Math.AggregateAndSubset(StatisticalMethods.Average, s, range, 10);
    Assert.AreEqual(63, s2.Count);
    Assert.AreEqual(1944, s2[0].DateTime.Year);
    for (int i = 0; i < s2.Count; i++)
    {
        Assert.AreEqual(expected[i], s2[i].Value, 0.1);
    }
}
private static void Weekly(TextSeries s)
{
    s.Read(DateTime.Parse("2/6/2004"), DateTime.Parse("12/31/2004"));

    double mf = Point.MissingValueFlag;
    Point.MissingValueFlag = -9999;
    s.RemoveMissing();
    Point.MissingValueFlag = mf;

    s.TimeInterval = TimeInterval.Daily;
    var weekly = Math.WeeklyAverageSimple(s);
    Assert.AreEqual(TimeInterval.Weekly, weekly.TimeInterval);
    Assert.AreEqual(DateTime.Parse("2/12/2004"), weekly[0].DateTime.Date);
    Assert.AreEqual(2.172, weekly[0].Value, 0.01);
}