/// <summary>
/// Builds the moving-average view: for each selected series, optionally
/// plots the raw data plus a 24-hour and/or a 120-hour moving average.
/// </summary>
/// <returns>the populated explorer view</returns>
public override IExplorerView Run()
{
    Logger.WriteLine("MovingAverageAnalysis.Run()");
    SeriesList selected = Explorer.CreateSelectedSeries();
    ReadSeriesList(selected);
    view.Messages.Add(selected.MissingRecordsMessage);

    SeriesList plotted = new SeriesList();
    for (int i = 0; i < selected.Count; i++)
    {
        if (Explorer.PlotRaw)
        {
            plotted.Add(selected[i]);
        }
        if (Explorer.PlotMoving24HourAverage)
        {
            // note: MovingAvearge spelling is the library's API name
            plotted.Add(Math.MovingAvearge(selected[i], 24));
        }
        if (Explorer.PlotMoving120HourAverage)
        {
            plotted.Add(Math.MovingAvearge(selected[i], 120));
        }
    }

    view.Title = "Moving Average\n" + selected.Text.TitleText();
    view.SubTitle = selected.MissingRecordsMessage;
    view.SeriesList = plotted;
    view.DataTable = plotted.ToDataTable(true);
    return view;
}
/// <summary>
/// Creates a list of water-year based data, each year shifted to year 2000
/// so the traces can be overlaid for comparison.
/// </summary>
/// <param name="list">input series (NOTE: each series is Clear()ed and re-Read in place)</param>
/// <param name="years">water years to include</param>
/// <param name="avg30yr">when true also includes a 30 year average. If only 5 years are available a 5 year average is created</param>
/// <param name="beginningMonth">series starting month number</param>
/// <param name="alwaysShiftTo2000">when true, shift to year 2000 even for a single requested year</param>
/// <returns>a SeriesList of type WaterYears, one entry per (series, year) with data</returns>
public static SeriesList WaterYears(SeriesList list, int[] years, bool avg30yr, int beginningMonth, bool alwaysShiftTo2000 = false)
{
    SeriesList wySeries = new SeriesList();
    for (int j = 0; j < list.Count; j++)
    {
        for (int i = 0; i < years.Length; i++)
        {
            YearRange yr = new YearRange(years[i], beginningMonth);
            // re-read the underlying series for just this water-year window;
            // this mutates list[j] (Clear + Read) rather than copying it.
            Series s = list[j];
            s.Clear();
            s.Read(yr.DateTime1, yr.DateTime2);
            Logger.WriteLine("Read() " + yr.ToString() + " count = " + s.Count);
            foreach (string msg in s.Messages)
            {
                Logger.WriteLine(msg);
            }
            // skip years that came back empty or entirely missing
            if (s.Count > 0 && s.CountMissing() != s.Count)
            {
                // align this year's trace onto year 2000 so all years overlay
                Series s2 = TimeSeries.Math.ShiftToYear(s, 2000);
                // a single year with no averaging doesn't need shifting --
                // keep the original dates unless the caller forces the shift
                if (years.Length == 1 && !alwaysShiftTo2000 && !avg30yr)
                {
                    s2 = s;
                }
                if (list.HasMultipleSites)
                    s2.Appearance.LegendText = years[i].ToString() + " " + list[j].Name;
                else
                    s2.Appearance.LegendText = years[i].ToString();
                wySeries.Add(s2);
            }
            else
            {
                Logger.WriteLine("year :" + years[i] + "skipping series with no data " + s.Name + " " + s.Parameter);
            }
        }
        if (avg30yr)
        {
            // read the trailing 30 years and add a multi-year daily average trace
            list[j].Read(DateTime.Now.Date.AddYears(-30), DateTime.Now.Date);
            Series s30 = Math.MultiYearDailyAverage(list[j], beginningMonth);
            if (s30.Count > 0)
                wySeries.Add(s30);
        }
    }
    wySeries.Type = SeriesListType.WaterYears;
    if (wySeries.Count > 1)
    {
        // month/day only -- the (shifted) year is meaningless on the axis
        wySeries.DateFormat = "MM/dd";
    }
    return wySeries;
}
/// <summary>
/// Builds a summary-hydrograph view (exceedance levels plus optional
/// max/min/avg), optionally overlaying one specific year as its own trace.
/// </summary>
/// <returns>the populated explorer view</returns>
public override IExplorerView Run()
{
    Logger.WriteLine("SummaryHydrographAnalysis.Run()");
    SeriesList selected = Explorer.CreateSelectedSeries();
    ReadSeriesList(selected);

    if (Explorer.SelectedSeries.Length == 1 && Explorer.MergeSelected)
    {
        // merge single Year Traces.
        selected.RemoveMissing();
        var merged = selected.MergeYearlyScenarios();
        selected = new SeriesList();
        selected.Add(merged);
    }

    view.Messages.Add(selected.MissingRecordsMessage);
    string chartTitle = selected.Text.TitleText();
    string chartSubTitle = selected.MissingRecordsMessage;

    SeriesList graphList = new SeriesList();
    selected.RemoveMissing();
    if (Explorer.AlsoPlotYear && selected.Count == 1)
    {
        var range = new YearRange(Explorer.PlotYear, Explorer.BeginningMonth);
        DateTime begin = range.DateTime1;
        DateTime end = range.DateTime2;
        Series yearTrace = Math.Subset(selected[0], begin, end);
        yearTrace.Appearance.LegendText = range.Year.ToString();
        view.Messages.Add(range.Year.ToString() + " included as separate series ");
        graphList.Add(yearTrace);
        graphList.Add(selected.SummaryHydrograph(Explorer.ExceedanceLevels, begin,
            Explorer.PlotMax, Explorer.PlotMin, Explorer.PlotAvg, true));
    }
    else
    {
        var start = new DateTime(DateTime.Now.Year, Explorer.BeginningMonth, 1);
        graphList = selected.SummaryHydrograph(Explorer.ExceedanceLevels, start,
            Explorer.PlotMax, Explorer.PlotMin, Explorer.PlotAvg, true);
    }

    Explorer.WriteProgressMessage("drawing graph", 80);
    view.Title = chartTitle;
    view.SubTitle = chartSubTitle;
    view.SeriesList = graphList;
    view.DataTable = graphList.ToDataTable(true);
    return view;
}
/// <summary>
/// Runs the selected statistical aggregation (if any) over the selected
/// series and loads the result into the explorer view.
/// </summary>
/// <returns>the populated explorer view</returns>
public override IExplorerView Run()
{
    SeriesList selected = Explorer.CreateSelectedSeries();
    ReadSeriesList(selected);

    if (Explorer.SelectedSeries.Length == 1 && Explorer.MergeSelected)
    {
        // merge single Year Traces.
        var merged = selected.MergeYearlyScenarios();
        selected = new SeriesList();
        selected.Add(merged);
    }

    SeriesList result = selected;
    if (Explorer.StatisticalMethods != StatisticalMethods.None)
    {
        result = selected.AggregateAndSubset(Explorer.StatisticalMethods,
            Explorer.MonthDayRange, Explorer.BeginningMonth);
    }

    Logger.WriteLine("Drawing Graph");
    // a single monthly series gets a month-year axis format
    if (result.Count == 1 && result[0].TimeInterval == TimeInterval.Monthly)
    {
        result.DateFormat = "MMM-yyyy";
    }
    view.SeriesList = result;

    string chartTitle = selected.Text.TitleText();
    if (Explorer.SubtractFromBaseline)
        chartTitle = "Subtract Reference \n" + chartTitle;
    view.Title = chartTitle;
    view.SubTitle = selected.MissingRecordsMessage;
    //view.DataTable = myList.CompositeTable;
    return view;
}
/// <summary>
/// Creates one USGS real-time series per site for the given parameter.
/// </summary>
/// <param name="sites">USGS site identifiers</param>
/// <param name="parameter">real-time parameter to query</param>
/// <returns>a SeriesList with one UsgsRealTimeSeries per site</returns>
public static SeriesList GetSeries(string[] sites, UsgsRealTimeParameter parameter)
{
    var result = new SeriesList();
    for (int i = 0; i < sites.Length; i++)
    {
        result.Add(new UsgsRealTimeSeries(sites[i], parameter));
    }
    return result;
}
/// <summary>
/// Exercises a piecewise polynomial rating (4 stage ranges) for Lind Coulee
/// Wasteway 1: computes flow from daily-average stage and, separately,
/// a time-weighted daily average of flow computed from instantaneous stage,
/// then prints both side by side for visual comparison (no asserts).
/// </summary>
public void PiecewisePolynomialRatingEquationLindCouleeWasteway1()
{
    // Each equation covers one stage range; coefficients are in ascending
    // power order. The range text strings are descriptive labels only.
    PolynomialEquation eq1 = new PolynomialEquation(
        new double[]{0.0},-1.0, 1.86 ,"-1 < stage <= 1.86 ");
    PolynomialEquation eq2 = new PolynomialEquation(
        new double[]{-28.4314,15.2857},1.861, 2.00," 1.86 < stage <= 2.0");
    PolynomialEquation eq3 = new PolynomialEquation(
        new double[]{-0.3522,88.1421,-96.6995,31.4217,-2.3978},2.001, 6.00," 2.0 < stage <= 6.0 ");
    PolynomialEquation eq4 = new PolynomialEquation(
        new double[]{-769.4138,249.0490},6.001, 10.00," 6.0 < stage ");
    PolynomialEquation[] equationList = {eq1,eq2,eq3,eq4};

    Series s = TestData.LindCouleeWW1DailyAverageStage2004;
    Series instant = TestData.LindCouleeWW1InstantanousStage2004;

    DateTime t1 = new DateTime(2004,1,2);
    DateTime t2 = new DateTime(2004,12,18); // at 12:00 am.. will capture 17th..not 18 th

    // compute polynomial based on daily average stage.
    Series p = Math.Polynomial(s,equationList,t1,t2);

    // compute instantanious flow first
    Series p2 = Math.Polynomial(instant,equationList,t1,t2);
    // get average second
    Series avg = Math.TimeWeightedDailyAverage(p2);

    SeriesList list = new SeriesList();
    list.Add(s);
    list.Add(p);
    list.Add(avg);
    list.WriteToConsole();
    //p.WriteToConsole();
}
/// <summary>
/// Verifies that TimeWeightedDailyAverage of the 2004 instantaneous stage
/// matches the daily-average stage stored in the database, both in aggregate
/// (sum of differences near zero) and point-by-point (value and timestamp).
/// </summary>
public void LindCoulee2004()
{
    Series s = TestData.LindCouleeWW1InstantanousStage2004;
    //Point pt = Math.Calculator.AverageForDay(s,DateTime.Parse("2004-12-20"));
    Series avg = Math.TimeWeightedDailyAverage(s);
    // Console.WriteLine("avg");
    //avg.WriteToConsole();
    Console.WriteLine(avg[0].DateTime.ToString("yyyy-MM-dd HH:mm:ss.ffff"));
    Console.WriteLine("Math.Calculator.DailyAverage(s).Count = "+avg.Count);
    Series dbAverage = TestData.LindCouleeWW1DailyAverageStage2004;
    Console.WriteLine("TestData.LindCouleeWW1DailyAverageStage2004.Count = "+dbAverage.Count);

    // element-wise difference between computed and stored daily averages
    Series diff = avg - dbAverage;
    SeriesList list = new SeriesList();
    list.Add(avg);
    list.Add(dbAverage);
    list.Add(diff);
    list.WriteToConsole();
    Console.WriteLine("summing difference");
    double d = Math.Sum(diff);
    Assert.AreEqual(0,d,0.1); // actual is about 0.05
    Console.WriteLine("sum of differences = "+d);
    Console.WriteLine("sum of daily "+Math.Sum(avg));

    // database series has one extra (missing) leading value, hence Count-1
    Assert.AreEqual(dbAverage.Count-1,avg.Count);
    for(int i=0;i<avg.Count; i++)
    {
        // database has one (missing) value at beginning we skip that in comparison
        Assert.AreEqual(dbAverage[i+1].ToString(),avg[i].ToString());
        Assert.AreEqual(dbAverage[i+1].Value,avg[i].Value,0.0001);
        Assert.AreEqual(dbAverage[i+1].DateTime.Ticks , avg[i].DateTime.Ticks,"on line "+i);
    }
}
/// <summary>
/// Downloads an NWRFC ESP ensemble CSV for the node/day selected in the UI,
/// parses it into per-trace-year series (values scaled x1000), aggregates
/// each trace to daily, appends MIN/P10/P25/P50/P75/P90/MAX/AVG statistic
/// series, and refreshes the ESP drop-down.
/// Fixes: the HTTP response and reader are now disposed, and the wait
/// cursor is always reset even if the download or parse throws.
/// </summary>
private void buttonGetEspData_Click(object sender, EventArgs e)
{
    this.toolStripStatusLabel1.Text = "Downloading ESP data...";
    this.UseWaitCursor = true;
    try
    {
        // Build the URL from UI state: forecast type, node, and ESP day.
        string rfcURL = @"https://www.nwrfc.noaa.gov/chpsesp/ensemble/";
        if (this.radioButtonNFcast.Checked)
        {
            rfcURL += "natural/";
        }
        else
        {
            rfcURL += "watersupply/";
        }
        rfcURL += this.textBoxRfcNode.Text + ".ESPF" + this.comboBoxEspDay.SelectedItem.ToString() + ".csv";

        // Download the CSV; dispose response/reader (was leaked before).
        string rfcDataString;
        HttpWebRequest req = (HttpWebRequest)WebRequest.Create(rfcURL);
        using (HttpWebResponse resp = (HttpWebResponse)req.GetResponse())
        using (StreamReader sr = new StreamReader(resp.GetResponseStream()))
        {
            rfcDataString = sr.ReadToEnd();
        }
        string[] rfcDataRows = rfcDataString.Split('\n');
        //this.labelEspFile.Text = rfcDataRows[0].ToString().Replace("FILE:","");
        //this.labelEspUpdated.Text = rfcDataRows[1].ToString().Replace("ISSUED:", "");

        // Row 6 holds the header: date column followed by one trace-year per column.
        yearList = rfcDataRows[6].Split(',').ToList();
        int colCount = yearList.Count();

        // Build SeriesList container, one series per trace year.
        SeriesList sList = new SeriesList();
        for (int i = 1; i < colCount; i++)
        {
            Series s = new Series();
            s.Name = yearList[i].ToString();
            sList.Add(s);
        }

        // Populate Series List; data rows start at row 7, last row is skipped
        // (trailing newline). Values are scaled by 1000 (file units are in thousands).
        for (int i = 7; i < rfcDataRows.Count() - 1; i++)
        {
            var rowData = rfcDataRows[i].Split(',');
            var t = DateTime.Parse(rowData[0]);
            for (int j = 1; j < colCount; j++)
            {
                sList[j - 1].Add(t, Convert.ToDouble(rowData[j]) * 1000.0);
            }
        }

        // Aggregate Series List to Daily
        espList = new SeriesList();
        foreach (var series in sList)
        {
            var s = Reclamation.TimeSeries.Math.DailyAverage(series);
            s.Name = series.Name;
            espList.Add(s);
        }

        // Create statistical Series across traces, one value per day.
        var sMin = new Series("MIN");
        var sMax = new Series("MAX");
        var sAvg = new Series("AVG");
        var sP10 = new Series("P10");
        var sP25 = new Series("P25");
        var sP50 = new Series("P50");
        var sP75 = new Series("P75");
        var sP90 = new Series("P90");
        foreach (Point pt in espList[0])
        {
            DateTime ithT = pt.DateTime;
            List<double> ithVals = new List<double>();
            foreach (Series s in espList)
            {
                ithVals.Add(s[ithT].Value);
            }
            sMin.Add(ithT, ithVals.Min());
            sAvg.Add(ithT, ithVals.Average());
            sMax.Add(ithT, ithVals.Max());
            // Percentiles via floor-index on the sorted values.
            ithVals.Sort();
            sP10.Add(ithT, ithVals[Convert.ToInt32(System.Math.Floor(ithVals.Count * 0.10))]);
            sP25.Add(ithT, ithVals[Convert.ToInt32(System.Math.Floor(ithVals.Count * 0.25))]);
            sP50.Add(ithT, ithVals[Convert.ToInt32(System.Math.Floor(ithVals.Count * 0.50))]);
            sP75.Add(ithT, ithVals[Convert.ToInt32(System.Math.Floor(ithVals.Count * 0.75))]);
            sP90.Add(ithT, ithVals[Convert.ToInt32(System.Math.Floor(ithVals.Count * 0.90))]);
        }
        yearList.AddRange(new List<string> { "MIN", "P10", "P25", "P50", "P75", "P90", "MAX", "AVG" });
        espList.Add(new SeriesList() { sMin, sP10, sP25, sP50, sP75, sP90, sMax, sAvg });

        // Finalize
        FillEspDropDown();
        this.toolStripStatusLabel1.Text = "Downloaded "
            + rfcDataRows[0].ToString().Replace("FILE:", "")
            + ". Updated " + rfcDataRows[1].ToString().Replace("ISSUED:", "");
    }
    finally
    {
        // always restore the cursor, even on download/parse failure
        this.UseWaitCursor = false;
    }
}
/// <summary>
/// Runs trace analysis (exceedance curves and/or trace aggregation) over the
/// selected series. Requires at least 10 traces; otherwise the raw list is
/// shown with an explanatory message.
/// </summary>
/// <returns>the populated explorer view</returns>
public override IExplorerView Run()
{
    Logger.WriteLine("TraceAnalysis.Run()");
    SeriesList selected = Explorer.CreateSelectedSeries();
    ReadSeriesList(selected);
    string chartTitle = selected.Text.TitleText();
    string chartSubTitle = selected.MissingRecordsMessage;

    // [JR] don't perform trace analysis if trace count < 10...
    if (selected.Count < 10)
    {
        view.Messages.Add("Trace exceedance analysis is not available if trace count < 10");
        view.Title = chartTitle;
        view.SubTitle = chartSubTitle;
        view.SeriesList = selected;
        view.DataTable = selected.ToDataTable(true);
        return view;
    }

    // This seems to be common between all the analysis options
    if (Explorer.SelectedSeries.Length == 1 && Explorer.MergeSelected)
    {
        // merge single Year Traces.
        selected.RemoveMissing();
        var merged = selected.MergeYearlyScenarios();
        selected = new SeriesList();
        selected.Add(merged);
    }
    view.Messages.Add(selected.MissingRecordsMessage);
    selected.RemoveMissing();

    // Initialize the output container
    SeriesList analysisResult = new SeriesList();

    // Get exceedance curves
    if (Explorer.traceExceedanceAnalysis)
    {
        analysisResult = getTraceExceedances(selected, Explorer.ExceedanceLevels,
            Explorer.AlsoPlotTrace, Explorer.PlotTrace, Explorer.PlotMinTrace,
            Explorer.PlotAvgTrace, Explorer.PlotMaxTrace);
    }

    // Get aggregated values (calendar-year, water-year, or custom range)
    if (Explorer.traceAggregationAnalysis)
    {
        string sumType =
            Explorer.sumCYRadio ? "CY"
            : Explorer.sumWYRadio ? "WY"
            : Explorer.sumCustomRangeRadio ? "XX"
            : "";
        analysisResult = getTraceSums(selected, sumType);
    }

    // [JR] Add other analysis/report building options here...
    Explorer.WriteProgressMessage("drawing graph", 80);
    view.Title = chartTitle;
    view.SubTitle = chartSubTitle;
    view.SeriesList = analysisResult;
    view.DataTable = analysisResult.ToDataTable(true);
    return view;
}
/// <summary>
/// Converts DMS3 formatted data for the 'dayflag.exe' program into a SeriesList.
/// Each series table is named instant_cbtt_pcode, for example instant_jck_fb.
/// Input is fixed-column text: date (cols 0-13, "yyyyMMMdd HHmm"), cbtt
/// (15-22), pcode (24-32), value (34-43), flag code (56-58) -- lines shorter
/// than 59 characters are skipped.
/// </summary>
/// <param name="tf">text file containing DMS3 rows; the first row is a header</param>
/// <returns>one Series per distinct cbtt/pcode pair found</returns>
public static SeriesList HydrometDMS3DataToSeriesList(TextFile tf)
{
    var rval = new SeriesList();
    for (int i = 1; i < tf.Length; i++) // skip first row (header)
    {
        // too short to contain the flag-code columns -- not a data row
        if (tf[i].Length < 59)
        {
            Console.WriteLine("Skipping invalid line: " + tf[i]);
            continue;
        }
        var strDate = tf[i].Substring(0, 14);
        DateTime t;
        if (!DateTime.TryParseExact(strDate, "yyyyMMMdd HHmm", new CultureInfo("en-US"),
            System.Globalization.DateTimeStyles.None, out t))
        {
            Console.WriteLine("Bad Date, Skipping line: " + tf[i]);
            continue;
        }
        // fixed-column fields (offsets per the DMS3 layout)
        string cbtt = tf[i].Substring(15, 8).Trim();
        string pcode = tf[i].Substring(24, 9).Trim();
        string strValue = tf[i].Substring(34, 10);
        string strFlagCode = tf[i].Substring(56, 3);
        double val = 0;
        if (!double.TryParse(strValue, out val))
        {
            Console.WriteLine("Error parsing double " + strValue);
            continue;
        }
        // series are keyed by lower-cased table name instant_cbtt_pcode;
        // reuse the existing series for this site/parameter if present
        string name = "instant_" + cbtt + "_" + pcode;
        name = name.ToLower();
        var idx = rval.IndexOfTableName(name);
        Series s;
        if (idx >= 0)
        {
            s = rval[idx];
        }
        else
        {
            s = new Series();
            s.SiteID = cbtt;
            s.Parameter = pcode;
            s.Name = cbtt + "_" + pcode;
            s.Name = s.Name.ToLower();
            s.Table.TableName = name;
            rval.Add(s);
        }
        string flag = DayFiles.FlagFromCode(strFlagCode);
        // drop duplicate timestamps (first occurrence wins)
        if (s.IndexOf(t) < 0)
        {
            s.Add(t, val, flag);
        }
        else
        {
            Logger.WriteLine(s.SiteID + ":" + s.Parameter + "skipped duplicate datetime " + t.ToString());
        }
    }
    return (rval);
}
/// <summary>
/// Estimates a daily series from a monthly series by scaling a summary
/// hydrograph of the historical daily record: for each month, find which
/// exceedance-level trace best matches the monthly volume, then scale that
/// trace's daily values by the (monthly volume / trace monthly sum) ratio.
/// Months absent from the monthly series are filled with missing values.
/// NOTE(review): an apparently identical ConvertToDaily exists elsewhere in
/// this codebase -- candidate for consolidation.
/// </summary>
/// <returns>daily series with flags recording the exceedance level used</returns>
internal Series ConvertToDaily()
{
    Series estimatedDaily = new Series();
    estimatedDaily.HasFlags = true;
    estimatedDaily.TimeInterval = TimeInterval.Daily;
    // Either treat gaps in the daily record as zero flow, or drop them.
    if (FillMissingWithZero)
    {
        daily = Math.FillMissingWithZero(daily, daily.MinDateTime, daily.MaxDateTime);
    }
    else
    {
        daily.RemoveMissing();
    }
    //daily.RemoveMissing();
    //int[] levels = {5,10,20,30,40,50,60,70,80,90,95};
    //int[] levels = {10,20,30,40,50,60,70,80,90};
    // Exceedance levels: just the median, or 5..95 in steps of 2.
    List<int> levels = new List<int>();
    if (MedianOnly)
    {
        levels.Add(50);
    }
    else
    {
        for (int i = 5; i <= 95; i += 2)
        {
            levels.Add(i);
        }
    }
    // One summary-hydrograph trace per exceedance level (year 2000-aligned
    // starting Jan 2008 per the start date argument).
    var sHydrograph = Math.SummaryHydrograph(daily, levels.ToArray(),
        new DateTime(2008, 1, 1), false, false, false, false);//, false);
    var summaryHydrographTable = sHydrograph.ToDataTable(true);
    // Rename table columns to the exceedance percentage for later lookup.
    for (int i = 1; i < summaryHydrographTable.Columns.Count; i++)
    {
        summaryHydrographTable.Columns[i].ColumnName = levels[i - 1].ToString();
    }
    //DataTableOutput.Write(summaryHydrographTable, @"c:\temp\junk.csv", false);
    // Monthly sums of each exceedance trace, used to pick the closest trace.
    SeriesList monthlySum = new SeriesList();
    for (int i = 0; i < sHydrograph.Count; i++)
    {
        Series sum = Math.MonthlyValues(sHydrograph[i], Math.Sum);
        sum.Name = levels[i].ToString();
        monthlySum.Add(sum);
    }
    var monthlyExceedanceSums = monthlySum.ToDataTable(true);
    if (monthlySum.Count == 1 && levels.Count == 1)
    {
        monthlyExceedanceSums.Columns[1].ColumnName = levels[0].ToString();
    }
    var monthlyTable = monthly.Table; // NOTE(review): unused local
    DateTime t = monthly.MinDateTime;
    DateTime t2 = monthly.MaxDateTime;
    // extend end date to the last day of its month
    t2 = new DateTime(t2.Year, t2.Month, DateTime.DaysInMonth(t2.Year, t2.Month));
    while (t < t2)
    {
        var tm = new DateTime(t.Year, t.Month, 1);
        if (monthly.IndexOf(tm) < 0)
        {
            // no monthly value for this month -> daily value is missing
            estimatedDaily.AddMissing(t);
        }
        else
        {
            double mv = monthly[tm].Value;
            // 1.98347 acre-feet per cfs-day: converts the monthly volume
            // (presumably acre-feet -- TODO confirm units) to cfs-days.
            double mvcfsdays = mv / 1.98347;
            double exceedanceValue = 0;
            int exceedancePercent = LookupExceedance(monthlyExceedanceSums, t, mvcfsdays, out exceedanceValue);
            // scale factor between actual monthly volume and the chosen trace's
            double ratio = 0;
            if (exceedanceValue != 0)
            {
                ratio = mvcfsdays / exceedanceValue;
            }
            else
            {
                ratio = 0;
            }
            double shcfs = LookupSummaryHydrograph(summaryHydrographTable, t, exceedancePercent);
            estimatedDaily.Add(t, shcfs * ratio, "scaled with " + exceedancePercent + "%");
        }
        t = t.AddDays(1);
    }
    VerifyWithMonthlyVolume(monthly, estimatedDaily);
    // SmoothSpikes(monthly, daily, estimatedDaily);
    return (estimatedDaily);
}
/// <summary>
/// Builds a summary-hydrograph view, optionally overlaying one or more
/// specific years as their own traces. When several years are requested,
/// each extra year's dates are shifted onto the first year so all traces
/// share one axis; a fresh (mostly empty) summary hydrograph is added after
/// every trace except the last, which gets the full exceedance hydrograph.
/// </summary>
/// <returns>the populated explorer view</returns>
public override IExplorerView Run()
{
    Logger.WriteLine("SummaryHydrographAnalysis.Run()");
    SeriesList list = Explorer.CreateSelectedSeries();
    ReadSeriesList(list);
    if (Explorer.SelectedSeries.Length == 1 && Explorer.MergeSelected)
    {
        // merge single Year Traces.
        list.RemoveMissing();
        var s = list.MergeYearlyScenarios();
        list = new SeriesList();
        list.Add(s);
    }
    view.Messages.Add(list.MissingRecordsMessage);
    string title = list.Text.TitleText();
    string subTitle = list.MissingRecordsMessage;
    SeriesList myList = new SeriesList();
    list.RemoveMissing();
    if (Explorer.AlsoPlotYear && list.Count == 1)
    {
        int[] yearsToPlot = Explorer.PlotYear;
        int xtraYearCount = 0;
        // axis anchor: the first plotted year's range (set on first iteration)
        DateTime tSumHyd1 = DateTime.Now;
        DateTime tSumHyd2 = DateTime.Now;
        foreach (var year in yearsToPlot)
        {
            YearRange yearRng = new YearRange(year, Explorer.BeginningMonth);
            DateTime t1 = yearRng.DateTime1;
            DateTime t2 = yearRng.DateTime2;
            Series s = Math.Subset(list[0], t1, t2);
            if (xtraYearCount == 0)//first series
            {
                s.Appearance.LegendText = yearRng.Year.ToString();
                view.Messages.Add(yearRng.Year.ToString() + " included as separate series ");
                myList.Add(s);
                if (yearsToPlot.Length == 1)
                {
                    // single year: full exceedance hydrograph right away
                    myList.Add(list.SummaryHydrograph(Explorer.ExceedanceLevels, t1,
                        Explorer.PlotMax, Explorer.PlotMin, Explorer.PlotAvg, true));
                }
                else
                {
                    // more years follow: placeholder hydrograph with no levels
                    myList.Add(list.SummaryHydrograph(new int[] { }, t1, false, false, false, true));
                }
                // remember the first year's window; later years shift onto it
                tSumHyd1 = t1;
                tSumHyd2 = t2;
            }
            else//every series
            {
                // re-date this year's points onto the first year's axis
                Series sDummy = new Series();
                foreach (Point pt in s)
                {
                    if (!(pt.DateTime.Month == 2 && pt.DateTime.Day == 29)) //sigh... leap days...
                    {
                        sDummy.Add(pt.DateTime.AddYears(tSumHyd1.Year - t1.Year), pt.Value);
                    }
                }
                sDummy.TimeInterval = s.TimeInterval;
                sDummy.Name = s.Name;
                sDummy.Units = s.Units;
                sDummy.Parameter = s.Parameter;
                sDummy.Appearance.LegendText = yearRng.Year.ToString();;
                view.Messages.Add(yearRng.Year.ToString() + " included as separate series ");
                myList.Add(sDummy);
                if (xtraYearCount == yearsToPlot.Length - 1)//last series
                {
                    myList.Add(list.SummaryHydrograph(Explorer.ExceedanceLevels, tSumHyd1,
                        Explorer.PlotMax, Explorer.PlotMin, Explorer.PlotAvg, true));
                }
                else
                {
                    myList.Add(list.SummaryHydrograph(new int[] { }, tSumHyd1, false, false, false, true));
                }
            }
            xtraYearCount++;
        }
    }
    else
    {
        DateTime t = new DateTime(DateTime.Now.Year, Explorer.BeginningMonth, 1);
        myList = list.SummaryHydrograph(Explorer.ExceedanceLevels, t,
            Explorer.PlotMax, Explorer.PlotMin, Explorer.PlotAvg, true);//,true);
    }
    Explorer.WriteProgressMessage("drawing graph", 80);
    view.Title = title;
    view.SubTitle = subTitle;
    view.SeriesList = myList;
    view.DataTable = myList.ToDataTable(true);
    //view.Draw();
    return (view);
}
/// <summary>
/// Reads each requested daily series from hydromet (per-series date offset
/// and day count, or through endDate when no count was given) and writes the
/// results to RiverWare files.
/// Fix: the missing-data warning checked hasCount[0] instead of hasCount[i],
/// so only the first series' flag governed every series' warning.
/// </summary>
private void ReadFromHydromet()
{
    SeriesList list = new SeriesList();
    for (int i = 0; i < cbtt.Length; i++)
    {
        HydrometDailySeries s = new HydrometDailySeries(cbtt[i], pcode[i], this.server);
        DateTime t1 = startDate.AddDays(daysOffset[i]); // usually negative..
        DateTime t2 = t1.AddDays(dayCount[i] - 1);
        if (!hasCount[i])
        {
            // no explicit count: read through the configured end date
            t2 = endDate;
        }
        if (dayCount[i] < 1 && hasCount[i])
        {
            Console.WriteLine("Warning: The number of days requested was " + dayCount[i] + " from hydromet");
        }
        s.Read(t1, t2);
        // was: hasCount[0] -- check this series' own count flag
        if (s.Count < dayCount[i] && hasCount[i])
        {
            Console.WriteLine("Warning: the requested hydromet data is missing.");
        }
        list.Add(s);
    }
    WriteToRiverwareFiles(list);
}
/// <summary>
/// Estimates a daily series from a monthly series by scaling a summary
/// hydrograph of the historical daily record: for each month, find which
/// exceedance-level trace best matches the monthly volume, then scale that
/// trace's daily values by the (monthly volume / trace monthly sum) ratio.
/// Months absent from the monthly series are filled with missing values.
/// NOTE(review): an apparently identical ConvertToDaily exists elsewhere in
/// this codebase -- candidate for consolidation.
/// </summary>
/// <returns>daily series with flags recording the exceedance level used</returns>
internal Series ConvertToDaily()
{
    Series estimatedDaily = new Series();
    estimatedDaily.HasFlags = true;
    estimatedDaily.TimeInterval = TimeInterval.Daily;
    // Either treat gaps in the daily record as zero flow, or drop them.
    if (FillMissingWithZero)
    {
        daily = Math.FillMissingWithZero(daily, daily.MinDateTime, daily.MaxDateTime);
    }
    else
    {
        daily.RemoveMissing();
    }
    //daily.RemoveMissing();
    //int[] levels = {5,10,20,30,40,50,60,70,80,90,95};
    //int[] levels = {10,20,30,40,50,60,70,80,90};
    // Exceedance levels: just the median, or 5..95 in steps of 2.
    List<int> levels = new List<int>();
    if (MedianOnly)
    {
        levels.Add(50);
    }
    else
    {
        for (int i = 5; i <= 95; i += 2)
        {
            levels.Add(i);
        }
    }
    // One summary-hydrograph trace per exceedance level.
    var sHydrograph = Math.SummaryHydrograph(daily, levels.ToArray(),
        new DateTime(2008, 1, 1), false, false, false, false);//, false);
    var summaryHydrographTable = sHydrograph.ToDataTable(true);
    // Rename table columns to the exceedance percentage for later lookup.
    for (int i = 1; i < summaryHydrographTable.Columns.Count; i++)
    {
        summaryHydrographTable.Columns[i].ColumnName = levels[i - 1].ToString();
    }
    //DataTableOutput.Write(summaryHydrographTable, @"c:\temp\junk.csv", false);
    // Monthly sums of each exceedance trace, used to pick the closest trace.
    SeriesList monthlySum = new SeriesList();
    for (int i = 0; i < sHydrograph.Count; i++)
    {
        Series sum = Math.MonthlyValues(sHydrograph[i], Math.Sum);
        sum.Name = levels[i].ToString();
        monthlySum.Add(sum);
    }
    var monthlyExceedanceSums = monthlySum.ToDataTable(true);
    if (monthlySum.Count == 1 && levels.Count == 1)
        monthlyExceedanceSums.Columns[1].ColumnName = levels[0].ToString();
    var monthlyTable = monthly.Table; // NOTE(review): unused local
    DateTime t = monthly.MinDateTime;
    DateTime t2 = monthly.MaxDateTime;
    // extend end date to the last day of its month
    t2 = new DateTime(t2.Year, t2.Month, DateTime.DaysInMonth(t2.Year, t2.Month));
    while (t < t2)
    {
        var tm = new DateTime(t.Year, t.Month, 1);
        if (monthly.IndexOf(tm) < 0)
        {
            // no monthly value for this month -> daily value is missing
            estimatedDaily.AddMissing(t);
        }
        else
        {
            double mv = monthly[tm].Value;
            // 1.98347 acre-feet per cfs-day: converts the monthly volume
            // (presumably acre-feet -- TODO confirm units) to cfs-days.
            double mvcfsdays = mv / 1.98347;
            double exceedanceValue = 0;
            int exceedancePercent = LookupExceedance(monthlyExceedanceSums, t, mvcfsdays, out exceedanceValue);
            // scale factor between actual monthly volume and the chosen trace's
            double ratio = 0;
            if (exceedanceValue != 0)
                ratio = mvcfsdays / exceedanceValue;
            else
                ratio = 0;
            double shcfs = LookupSummaryHydrograph(summaryHydrographTable, t, exceedancePercent);
            estimatedDaily.Add(t, shcfs * ratio,"scaled with "+exceedancePercent+"%");
        }
        t = t.AddDays(1);
    }
    VerifyWithMonthlyVolume(monthly, estimatedDaily);
    // SmoothSpikes(monthly, daily, estimatedDaily);
    return estimatedDaily;
}
/// <summary>
/// Builds a SeriesList for every site/parameter combination on the command
/// line. Series whose table exists in the catalog are loaded from the
/// database; the rest are returned empty, with the expected table name set.
/// </summary>
/// <param name="input">parsed command-line input (site and parameter lists)</param>
/// <param name="interval">time interval used for table naming and new series</param>
/// <returns>one Series per site/parameter combination</returns>
private SeriesList CreateSeriesList(CommandLineInput input, TimeInterval interval)
{
    // cross product of sites x parameters -> candidate time-series names
    var candidates = new List<TimeSeriesName>();
    foreach (var site in input.SiteList)
    {
        foreach (var parameter in input.Parameters)
        {
            string intervalName = TimeSeriesName.GetTimeIntervalForTableName(interval);
            candidates.Add(new TimeSeriesName(site + "_" + parameter, intervalName));
        }
    }

    // one catalog query for all candidate tables
    var tableNames = candidates.Select(n => n.GetTableName()).ToArray();
    var catalog = m_db.GetSeriesCatalog("tablename in ('" + String.Join("','", tableNames) + "')");

    var result = new SeriesList();
    foreach (var candidate in candidates)
    {
        Series series = new Series();
        series.TimeInterval = interval;
        bool inCatalog = catalog.Select("tablename = '" + candidate.GetTableName() + "'").Length == 1;
        if (inCatalog)
        {
            series = m_db.GetSeriesFromTableName(candidate.GetTableName());
        }
        series.Table.TableName = candidate.GetTableName();
        result.Add(series);
    }
    return result;
}
/// <summary>
/// Opens the configured Pisces SQLite database, reads each named series over
/// the configured date range, and writes the results to RiverWare files.
/// Throws when any requested series name is not found in the database.
/// </summary>
private void ReadFromPisces()
{
    Logger.WriteLine("opening " + m_dbName);
    SQLiteServer server = new SQLiteServer(m_dbName);
    TimeSeriesDatabase database = new TimeSeriesDatabase(server);

    SeriesList results = new SeriesList();
    for (int i = 0; i < m_seriesName.Count; i++)
    {
        Logger.WriteLine("looking for series '" + m_seriesName[i] + "'");
        var series = database.GetSeriesFromName(m_seriesName[i]);
        if (series == null)
        {
            // fail loudly: a missing series means bad configuration
            throw new Exception("unable to find series '" + m_seriesName[i]
                + "' in pisces database '" + m_dbName + "'");
        }
        series.Read(m_t1, m_t2);
        results.Add(series);
    }
    WriteToRiverwareFiles(results);
}
/// <summary>
/// MLR Interpolation Report.
/// Fills missing values in sList[0] using multiple linear regression against
/// every combination of the remaining series (via MathNet.Numerics), then,
/// per missing date, keeps the estimate from the best-correlated model that
/// meets the fit tolerance. Also builds a text report of every fitted model.
/// Look for '[JR]' in this method to find the code regions that could use a
/// fix or more testing...
/// WARNING: destructively edits the series in sList (rows with any missing
/// value or an unselected month are removed); fills are computed from copies.
/// </summary>
/// <param name="sList">sList[0] is the series to fill; sList[1..] are estimators</param>
/// <param name="months">month numbers to use in the regression (and, optionally, to fill)</param>
/// <param name="fitTolerance">minimum correlation coefficient an estimate must reach to be kept</param>
/// <param name="fillSelectedMonths">when true, only fill missing dates whose month is in <paramref name="months"/></param>
/// <returns>report lines plus the filled series</returns>
public static MultipleLinearRegressionResults MlrInterpolation(SeriesList sList, int[] months, double fitTolerance, bool fillSelectedMonths = false)
{
    // KT if there is not enough data (for example only 1 pont ) need to ignore that data set?
    MultipleLinearRegressionResults rval = new MultipleLinearRegressionResults();

    // Populate SeriesLists -- keep untouched copies for evaluating the fill
    // equations later (the originals get rows deleted below).
    var sListFill = new SeriesList();
    foreach (var item in sList)
    {
        sListFill.Add(item.Copy());
    }

    // Get dates to be filled with interpolated values
    var missing = sList[0].GetMissing();
    if (fillSelectedMonths) //overwrites the 'missing' variable with another Series that only contains the selected dates in the input
    {
        Series missingSubset = new Series();
        foreach (var row in missing)
        {
            if (months.Contains(row.DateTime.Month))
            {
                missingSubset.Add(row);
            }
        }
        missing = missingSubset;
    }

    // Delete common dates where at least 1 data point is missing for any of
    // the input series. This is done because the MLR routine does not support
    // missing data: missing data causes data misalignments and throws off the
    // regression. This section also deletes data for months that are not
    // tagged in the input.
    for (int i = sList[0].Count - 1; i >= 0; i--) //start from the bottom of the list to bypass indexing problems
    {
        for (int j = 0; j < sList.Count; j++)
        {
            Point jthPt = sList[j][i];
            if (jthPt.IsMissing || !months.Contains(jthPt.DateTime.Month))
            {
                for (int k = 0; k < sList.Count; k++) //delete this date from all Series in the list
                {
                    sList[k].RemoveAt(i);
                }
                break;
            }
        }
    }

    // Initialize MLR report and populate header
    List<string> mlrOut = new List<string>();
    mlrOut.Add("");
    mlrOut.Add("MLR Output\t\t\t\t\tRun Date: " + DateTime.Now);
    mlrOut.Add("Estimated Series: " + sList[0].Name);
    var sEstimators = "";
    for (int i = 1; i < sList.Count; i++)
    {
        sEstimators = sEstimators + sList[i].Name + ", ";
    }
    mlrOut.Add("Estimator Series: " + sEstimators.Remove(sEstimators.Length - 2));
    mlrOut.Add("Regression Date Range: " + sList[0].MinDateTime + " - " + sList[0].MaxDateTime);
    var monEstimators = "";
    foreach (var item in months)
    {
        monEstimators = monEstimators + item + ", ";
    }
    mlrOut.Add("Months Used: " + monEstimators.Remove(monEstimators.Length - 2));
    mlrOut.Add("");
    mlrOut.Add("====================================================================================");

    // Initialize output SeriesList
    var sOutList = new SeriesList();

    // Loop through each SeriesList combination for MLR: every subset of the
    // estimator series, from size 1 up to all of them.
    for (int k = 1; k <= sList.Count - 1; k++)
    {
        AllPossibleCombination combinationData = new AllPossibleCombination(sList.Count - 1, k); //uses StackOverflow Class for combinations
        var combinationList = combinationData.GetCombinations();

        // Loop through each combination in the list and run MLR
        foreach (var combo in combinationList)
        {
            // Build MLR method inputs.
            // xData is the different Series values that will be used to generate
            // the MLR equation, all index > 0 in the SeriesList (matrix format).
            // yData is the target Series values, index = 0 of the SeriesList
            // (vector format).
            double[][] xData = new double[sList[0].Count][];
            double[] yData = new double[sList[0].Count];
            // Loop through the dates to populate the xData and the yData
            for (int i = 0; i < sList[0].Count; i++)
            {
                var jthRow = new List<double>();
                // Loop through each Series in SeriesList
                for (int j = 0; j < combo.Count(); j++)
                {
                    jthRow.Add(sList[combo[j]][i].Value);
                }
                xData[i] = jthRow.ToArray();
                yData[i] = sList[0][i].Value;
            }

            // MLR via Math.Net.Numerics
            double[] mlrCoeffs = MathNet.Numerics.LinearRegression.MultipleRegression.QR(xData, yData, true); //this is more stable than the method below
            //double[] p2 = MathNet.Numerics.Fit.MultiDim(xData, yData, true); //this method is faster but less stable

            // Evaluate fit
            Series sModeled = sList[0].Clone();
            // Equations are of the form y = x1(s1) + x2(s2) + ... + xN; the loop
            // handles the inner part of the equation if it exists x2(s2) + ...
            // while the lines before and after the loop handle the first and
            // last terms x1(s1) and xN respectively.
            sModeled = sList[combo[0]] * mlrCoeffs[1];
            for (int i = 2; i < mlrCoeffs.Count(); i++)
            {
                sModeled = sModeled + sList[combo[i - 1]] * mlrCoeffs[i];
            } //magic number -1 is used so the correct corresponding Series is used with the correct mlr-coefficient
            sModeled = sModeled + mlrCoeffs[0];
            var rVal = MathNet.Numerics.GoodnessOfFit.R(sModeled.Values, sList[0].Values); //this is the statistic reported by the FORTRAN code
            var rSqd = MathNet.Numerics.GoodnessOfFit.RSquared(sModeled.Values, sList[0].Values); //this is the R-squared for model fit

            // Fill missing dates and generate a SeriesList for final Series output
            var sOut = new Series(); //initialize Series to be added to output SeriesList
            foreach (var fillT in missing)
            {
                double fillVal;
                try
                {
                    // This evaluates the equation generated during the MLR
                    // estimation. Same equation-code format as above.
                    fillVal = sListFill[combo[0]][fillT.DateTime].Value * mlrCoeffs[1];
                    for (int i = 2; i < mlrCoeffs.Count(); i++)
                    {
                        fillVal = fillVal + sListFill[combo[i - 1]][fillT.DateTime].Value * mlrCoeffs[i];
                    }
                    fillVal = fillVal + mlrCoeffs[0];
                    if (fillVal < 0.0)
                    {
                        sOut.Add(fillT.DateTime, Point.MissingValueFlag, "NoDataForInterpolation");
                    }
                    else
                    {
                        sOut.Add(fillT.DateTime, fillVal, rVal.ToString("F05"));
                    } //[JR] this assigns the R value as the flag, can be switched to R-Squared...
                }
                catch
                {
                    // estimator had no value at this date -> no estimate
                    sOut.Add(fillT.DateTime, Point.MissingValueFlag, "NoDataForInterpolation");
                }
            }
            // Add the output Series to a SeriesList
            sOutList.Add(sOut);

            // Populate report
            mlrOut.Add("");
            string equationString = "MLR Equation: " + sList[0].Name + " = ";
            for (int ithCoeff = 1; ithCoeff < mlrCoeffs.Count(); ithCoeff++)
            {
                equationString = equationString + mlrCoeffs[ithCoeff].ToString("F04") + "(" + sList[combo[ithCoeff - 1]].Name + ") + ";
            }
            equationString = equationString + mlrCoeffs[0].ToString("F04");
            mlrOut.Add(equationString);
            mlrOut.Add("Correlation Coefficient = " + rVal.ToString("F04"));
            mlrOut.Add("R-Squared Coefficient = " + rSqd.ToString("F04"));
            mlrOut.Add("MLR Estimates: ");
            foreach (var item in sOut)
            {
                mlrOut.Add("\t\t" + item.ToString(true));
            }
            mlrOut.Add("");
            mlrOut.Add("------------------------------------------------------------------------------------");
        }
    }

    // Generate MLR report
    //TextFile tf = new TextFile(mlrOut.ToArray());
    //var fn = FileUtility.GetTempFileName(".txt");
    //tf.SaveAs(fn);
    //System.Diagnostics.Process.Start(fn);
    rval.Report = mlrOut.ToArray();

    // Generate output Series
    var sOutFinal = sListFill[0].Copy();
    // Remove the Points to be filled in the original input Series
    for (int i = missing.Count - 1; i >= 0; i--)
    {
        sOutFinal.RemoveAt(sOutFinal.IndexOf(missing[i].DateTime));
    }

    // Find the best fit out of all the estimated values.
    // Loops through the dates
    foreach (var sRow in sOutList[0])
    {
        DateTime estT = sRow.DateTime;
        List<double> flagItems = new List<double>(); //container for flag values
        List<double> valItems = new List<double>(); //container for estimated values
        // Loops through each estimate
        for (int i = 0; i < sOutList.Count; i++)
        {
            Point estPt = sOutList[i][estT];
            valItems.Add(estPt.Value);
            if (estPt.Value < 0.0) //add 0 correlation value if the estimated value < 0, [JR] this prevents the use of this routine to estimate negative values...
            {
                flagItems.Add(0.0);
            }
            else
            {
                // flag holds the model's correlation coefficient (set above)
                flagItems.Add(Convert.ToDouble(estPt.Flag));
            }
        }
        var maxFit = flagItems.Max();
        var bestFitVal = valItems[flagItems.IndexOf(maxFit)];
        if (maxFit >= fitTolerance) //add the value if it exceeds the specified tolerance
        {
            sOutFinal.Add(estT, bestFitVal, "E");
        }
        else //add missing since there is no acceptable estimate to fill this missing value
        {
            sOutFinal.AddMissing(estT);
        }
    }

    //return sOutFinal;
    rval.EstimatedSeries = sOutFinal;
    return (rval);
}
/// <summary>
/// Verifies that hydromet-backed series auto-update through the database
/// layer, and that SimpleMathSeries (add/subtract) computed over them
/// produces the expected combined values. Expected constants (515150, 817782)
/// are the known jck/pal acre-feet contents for 1980-10-01.
/// NOTE(review): depends on a reachable hydromet server and a writable db fixture.
/// </summary>
public void HydrometAutoUpdate()
{
    var t1 = new DateTime(1980, 10, 1);
    var t2 = new DateTime(1980, 10, 2);
    Series s1 = new Reclamation.TimeSeries.Hydromet.HydrometDailySeries("jck", "af");
    s1.Read(t1, t2);
    int sdi1 = db.AddSeries(s1);
    Series s2 = new Reclamation.TimeSeries.Hydromet.HydrometDailySeries("pal", "af");
    s2.Read(t1, t2);
    int sdi2 = db.AddSeries(s2);
    // round-trip both series through the database
    s1 = db.GetSeries(sdi1);
    s2 = db.GetSeries(sdi2);
    HydrometInfoUtility.AutoUpdate = true;
    t2 = t2.AddHours(24);// reservoir contents are stored at midnight
    Console.WriteLine(t2);
    s1.Read(t1, t2);
    s2.Read(t1, t2);
    s1.WriteToConsole();
    Assert.AreEqual(515150.0, s1[0].Value);
    Assert.AreEqual(817782.0, s2[0].Value);
    Assert.AreEqual(3, s1.Count);
    Assert.AreEqual(3, s2.Count);
    // computed series: s1+s2 and s1-s2, also stored and read back
    SeriesList sl = new SeriesList();
    sl.Add(s1);
    sl.Add(s2);
    SimpleMathSeries c1 = new SimpleMathSeries("",sl,new MathOperation[]{ MathOperation.Add});
    SimpleMathSeries c2 = new SimpleMathSeries("",sl, new MathOperation[] {MathOperation.Subtract});
    int sdi3 = db.AddSeries(c1);
    int sdi4 = db.AddSeries(c2);
    Series s3 = db.GetSeries(sdi3);
    Series s4 = db.GetSeries(sdi4);
    s3.Read(t1, t2);
    s4.Read(t1, t2);
    Assert.AreEqual(515150.0 + 817782.0, s3[0].Value);
    Assert.AreEqual(515150.0 - 817782.0, s4[0].Value);
}
/// <summary>
/// Update Selected Series or folders.
/// Enabled only when Series OR folders are selected, not both at the same time.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void toolStripMenuUpdate_Click(object sender, EventArgs e)
{
    int folderCount = tree1.SelectedFolders.Length;
    if (folderCount == 0)
    {
        // No folder selected: update the individually selected series.
        ProcessSelectedSeries(SeriesProcess.Update, tree1.GetSelectedSeries());
    }
    else if (folderCount == 1)
    {
        // A single folder: update every series found beneath it.
        var folderContents = new SeriesList();
        foreach (Series s in tree1.GetSeriesRecursive())
        {
            folderContents.Add(s);
        }
        ProcessSelectedSeries(SeriesProcess.Update, folderContents.ToArray());
    }
    else
    {
        MessageBox.Show("Please select a single folder to update.");
    }
}
/// <summary>
/// Runs calculations for the selected series, or for every calculation
/// series (those with a non-empty expression) inside a single selected
/// folder, then redraws based on the tree selection.
/// </summary>
private void CalculateClick(object sender, EventArgs e)
{
    int folderCount = tree1.SelectedFolders.Length;
    if (folderCount == 0)
    {
        ProcessSelectedSeries(SeriesProcess.Calculate, tree1.GetSelectedSeries());
    }
    else if (folderCount == 1)
    {
        // only perform calculations on calculation series with a valid expression
        var calcSeries = new SeriesList();
        foreach (Series s in tree1.GetSeriesRecursive())
        {
            if (s.Expression != "")
            {
                calcSeries.Add(s);
            }
        }
        if (calcSeries.Count == 0)
        {
            MessageBox.Show("No Calculation Series found in folder.");
            ClearDisplay();
            return;
        }
        ProcessSelectedSeries(SeriesProcess.Calculate, calcSeries.ToArray());
    }
    else
    {
        MessageBox.Show("Please select a single folder to calculate.");
        ClearDisplay();
        return;
    }
    //tree1_SelectionChanged(this, EventArgs.Empty);
    DrawBasedOnTreeSelection();
}
/// <summary>
/// Runs MLR interpolation over the given series (the first series is the
/// one being estimated) using all twelve months, and returns the filled series.
/// </summary>
/// <param name="fitTolerance">minimum correlation required to accept an estimate</param>
/// <param name="s">series; s[0] is the target, the rest are estimators</param>
public static Series MLRInterpolationPisces(double fitTolerance, params Series[] s)
{
    var inputs = new SeriesList();
    foreach (Series series in s)
    {
        inputs.Add(series);
    }
    // Use every calendar month in the regression.
    var allMonths = new int[12];
    for (int m = 1; m <= 12; m++)
    {
        allMonths[m - 1] = m;
    }
    var results = Reclamation.TimeSeries.Estimation.MultipleLinearRegression.MlrInterpolation(inputs, allMonths, fitTolerance);
    return results.EstimatedSeries;
}
/// <summary>
/// Runs a classification training scenario: one-hot encodes the class labels,
/// trains the network over several iterations, averages the per-epoch
/// error/accuracy curves, builds a confusion matrix from the test set,
/// and adds the resulting curves to the plots.
/// </summary>
/// <param name="td">training parameters (learning rate, momentum, epochs, iteration count)</param>
/// <param name="data">training set; DesiredOutput holds a class label (1..3) at (0,0) on entry</param>
/// <param name="testData">validation set; same label convention as <paramref name="data"/></param>
private void ClassificationScenario(TrainingParameters td, List<TrainingElement> data, List<TrainingElement> testData)
{
    // Translate class numbers to network-intelligible one-hot matrices.
    // (Previously this switch was duplicated verbatim for both lists.)
    EncodeDesiredOutputsAsOneHot(data);
    EncodeDesiredOutputsAsOneHot(testData);

    Network.Gatherer = new ClassificationGatherer(testData, data);
    int iterations = td.Iterat;

    // Per-epoch accumulators, averaged over all iterations below.
    List<double> errorsAverage = new List<double>(new double[td.Epochs]);
    List<double> testerErrorAverage = new List<double>(new double[td.Epochs]);
    List<double> accuracyAveragesV = new List<double>(new double[td.Epochs]);
    List<double> accuracyAveragesT = new List<double>(new double[td.Epochs]);

    for (int i = 0; i < iterations; i++)
    {
        Network.Train(td.LearningRate, td.Epochs, td.Momentum, data);
        for (int j = 0; j < Network.Errors.Count; j++)
        {
            errorsAverage[j] += Network.Errors[j];
            testerErrorAverage[j] += ((ClassificationGatherer)Network.Gatherer).TestErrors[j];
            accuracyAveragesV[j] += ((ClassificationGatherer)Network.Gatherer).AccuracyListV[j];
            accuracyAveragesT[j] += ((ClassificationGatherer)Network.Gatherer).AccuracyListT[j];
        }
    }

    // Calculate average properties.
    for (int j = 0; j < Network.Errors.Count; j++)
    {
        errorsAverage[j] /= iterations;
        testerErrorAverage[j] /= iterations;
        accuracyAveragesV[j] /= iterations;
        accuracyAveragesT[j] /= iterations;
    }

    // Convert the averaged curves into plottable points (x = epoch, 1-based).
    IList<DataPoint> points = new List<DataPoint>();
    IList<DataPoint> pointsTestError = new List<DataPoint>();
    IList<DataPoint> pointsAccuracyV = new List<DataPoint>();
    IList<DataPoint> pointsAccuracyT = new List<DataPoint>();
    for (int i = 0; i < Network.Errors.Count; i++)
    {
        points.Add(new DataPoint(i + 1, errorsAverage[i]));
        pointsTestError.Add(new DataPoint(i + 1, testerErrorAverage[i]));
        pointsAccuracyT.Add(new DataPoint(i + 1, accuracyAveragesT[i]));
        pointsAccuracyV.Add(new DataPoint(i + 1, accuracyAveragesV[i]));
    }

    // Build the error (confusion) matrix from the test set.
    List<Tuple<int, int>> DesiredAndActualOutputs = new List<Tuple<int, int>>();
    foreach (var telem in testData)
    {
        var desired = ((ClassificationGatherer)Network.Gatherer).ConvertMatrixToClass(telem.DesiredOutput);
        var gotten = ((ClassificationGatherer)Network.Gatherer).ConvertMatrixToClass(Network.ForwardPropagation(telem.Input));
        DesiredAndActualOutputs.Add(new Tuple<int, int>(desired, gotten));
    }
    int[,] ErrorMatrix = new int[3, 3];
    foreach (var pair in DesiredAndActualOutputs)
    {
        // rows = desired class, columns = predicted class (1-based labels).
        ErrorMatrix[pair.Item1 - 1, pair.Item2 - 1]++;
    }
    WindowDataGrid wdg = new WindowDataGrid(ErrorMatrix);
    wdg.Show();

    SeriesList.Add(new OxyPlot.Wpf.LineSeries { ItemsSource = points, Title = $"Training error Learning rate: {td.LearningRate}, momentum: {td.Momentum}, hidden neuron count: {Network.Layers[0].NeuronCount}" });
    SeriesList.Add(new OxyPlot.Wpf.LineSeries { ItemsSource = pointsTestError, Title = $"Validation error Learning rate: {td.LearningRate}, momentum: {td.Momentum}, hidden neuron count: {Network.Layers[0].NeuronCount}" });
    SeriesList2.Add(new OxyPlot.Wpf.LineSeries { ItemsSource = pointsAccuracyT, Title = $"Training set accuracy Learning rate: {td.LearningRate}, momentum: {td.Momentum}, hidden neuron count: {Network.Layers[0].NeuronCount}" });
    SeriesList2.Add(new OxyPlot.Wpf.LineSeries { ItemsSource = pointsAccuracyV, Title = $"Validation set accuracy Learning rate: {td.LearningRate}, momentum: {td.Momentum}, hidden neuron count: {Network.Layers[0].NeuronCount}" });
}

/// <summary>
/// Replaces each element's desired output holding a class label (1, 2 or 3)
/// at position (0,0) with the matching 3x1 one-hot column vector.
/// Any other label value is left unchanged, matching the original switch.
/// </summary>
private static void EncodeDesiredOutputsAsOneHot(List<TrainingElement> elements)
{
    for (int i = 0; i < elements.Count; i++)
    {
        var output = elements[i].DesiredOutput;
        switch (output.At(0, 0))
        {
            case 1:
                output = Matrix<double>.Build.DenseOfArray(new double[,] { { 1 }, { 0 }, { 0 } });
                break;
            case 2:
                output = Matrix<double>.Build.DenseOfArray(new double[,] { { 0 }, { 1 }, { 0 } });
                break;
            case 3:
                output = Matrix<double>.Build.DenseOfArray(new double[,] { { 0 }, { 0 }, { 1 } });
                break;
        }
        elements[i].DesiredOutput = output;
    }
}
/// <summary>
/// Computes the rule curve in terms of 'required' storage by
/// subtracting flood-control space from the total available space.
/// </summary>
/// <param name="t1">start of period</param>
/// <param name="t2">end of period</param>
/// <param name="totalSpace">total available space</param>
/// <returns>single-entry list holding the required-storage curve</returns>
public SeriesList CalculateFixedRuleCurve(DateTime t1, DateTime t2, double totalSpace)
{
    Series space = CreateRuleLine(0, t1, t2);
    // required storage = total space minus rule-curve space, floored at zero
    Series required = Max(-space + totalSpace, 0);
    var curve = new SeriesList();
    curve.Add(required);
    return curve;
}
/// <summary>
/// Builds a DataTable of daily water-year data for the site/parameter query
/// typed into comboBoxInputs ("cbtt pcode"). Returns an empty table when the
/// interval is not daily or the query does not resolve to exactly one
/// cbtt/pcode pair.
/// </summary>
/// <returns>water-year aligned data table, or an empty DataTable</returns>
private DataTable GetTimeSeries()
{
    // NOTE: removed an unused `svr` local and a large block of dead
    // commented-out Dayfiles/MPoll code that was its only consumer.
    string query = comboBoxInputs.Text.Trim();
    if (m_db == TimeInterval.Daily)
    {
        // A bare cbtt is expanded with all of its archive parameters.
        if (CbttOnly(query))
        {
            string[] pcodes = HydrometInfoUtility.ArchiveParameters(query);
            if (pcodes.Length > 0)
            {
                query = query + " " + String.Join(",", pcodes);
            }
        }
        string[] tokens = query.Split(' ');
        if (tokens.Length != 2)
        {
            return new DataTable();
        }
        string cbtt = tokens[0];
        string pcode = tokens[1];
        Series s = new HydrometDailySeries(cbtt, pcode, HydrometInfoUtility.HydrometServerFromPreferences());
        var sl = new SeriesList();
        sl.Add(s);
        // Water years begin in October; calendar years in January.
        int beginningMonth = 1;
        if (checkBoxWaterYear.Checked)
        {
            beginningMonth = 10;
        }
        var wyList = PiscesAnalysis.WaterYears(sl, this.yearSelector1.SelectedYears, false, beginningMonth, true);
        if (checkBoxCelsius.Checked)
        {
            // Convert any Fahrenheit series to Celsius when requested.
            for (int i = 0; i < wyList.Count; i++)
            {
                s = wyList[i];
                if (s.Units.ToLower() == "degrees f")
                {
                    Reclamation.TimeSeries.Math.ConvertUnits(s, "degrees C");
                }
            }
        }
        // remove months outside selected range
        var list = FilterBySelectedRange(this.monthRangePicker1.MonthDayRange, wyList);
        return list.ToDataTable(true);
    }
    return new DataTable();
}
/// <summary>
/// Finds all series referenced by variables in the expression.
/// </summary>
/// <param name="expresion">expression text to scan for variable names</param>
/// <returns>list of the series the expression's variables resolve to</returns>
private SeriesList SeriesInExpression(string expresion)
{
    var referenced = new SeriesList();
    foreach (string alias in m_VariableParser.GetAllVariables(expresion))
    {
        var resolved = VariableResolver.Lookup(alias, defaultTimeInterval);
        if (resolved.IsSeries)
        {
            referenced.Add(resolved.Series);
        }
    }
    return referenced;
}
/// <summary>
/// Computes variable (forecast-dependent) rule curves as 'required'
/// storage: one curve per forecast level, scaled by the given percent and
/// subtracted from the total available space (floored at zero).
/// </summary>
/// <param name="t1">start of period</param>
/// <param name="t2">end of period</param>
/// <param name="totalSpace">total available space</param>
/// <param name="percent">scale factor applied to each rule line</param>
/// <returns>one required-storage curve per forecast level; empty for fixed fill</returns>
public SeriesList CalculateVariableRuleCurves(DateTime t1, DateTime t2, double totalSpace, double percent)
{
    var curves = new SeriesList();
    // Fixed-fill curves have no forecast-dependent component.
    if (m_fillType == FillType.Fixed)
    {
        return curves;
    }
    var levels = FcPlotDataSet.GetVariableForecastLevels(curveName);
    foreach (var level in levels)
    {
        Series scaled = CreateRuleLine(level, t1, t2) * percent;
        Series required = Max(-scaled + totalSpace, 0);
        curves.Add(required);
    }
    return curves;
}
/// <summary>
/// Refreshes the water-year graph: resolves the cbtt/pcode selection (single
/// site or comma-separated "cbtt pcode" list), reads each series, builds
/// per-water-year traces with optional deltas/statistics/GP-average/M-poll
/// overlays, assigns one line color per year, and redraws the chart.
/// </summary>
private void buttonRefresh_Click(object sender, EventArgs e)
{
    try
    {
        timeSeriesGraph1.AnnotationOnMouseMove = checkBoxAnnotate.Checked;
        Cursor = Cursors.WaitCursor;
        Application.DoEvents();
        string pcodeOrig = DeterminePcode();
        timeSeriesGraph1.Clear();
        string cbttOrig = comboBoxCbtt.Text.Trim();
        string cbtt = cbttOrig, pcode = pcodeOrig;
        // Each entry is "cbtt_pcode" for one site/parameter to plot.
        var seriesList = new List<string>();
        if ((cbttOrig.Trim() == "" || pcodeOrig.Trim() == "") && textBoxMultiple.Text == "")
        {
            return; // nothing selected
        }
        else
        {
            if (!checkBoxUseList.Checked)
            {
                // Single-site mode: remember the last-used cbtt/pcode.
                UserPreference.Save("Snowgg->cbtt", cbttOrig);
                UserPreference.Save("Snowgg->pcode", comboBoxPcode.Text.Trim());
                seriesList.Add(cbttOrig + "_" + pcodeOrig);
            }
            else
            {
                // List mode: parse comma-separated "cbtt pcode" pairs; malformed
                // entries (not exactly two tokens) are silently skipped.
                var seriesItems = textBoxMultiple.Text.Split(',');
                foreach (string item in seriesItems)
                {
                    if (item.Trim().Split(' ').Length == 2)
                    {
                        seriesList.Add(item.Trim().Split(' ')[0] + "_" + item.Trim().Split(' ')[1]);
                    }
                }
            }
        }
        int[] waterYears = this.yearSelector1.SelectedYears;
        SeriesList finalSeriesCollection = new SeriesList();
        foreach (string series in seriesList)
        {
            // The combo boxes are overwritten per-series here and restored
            // after the loop (see the assignments just before GraphSettings).
            cbtt = series.Split('_')[0];
            comboBoxCbtt.Text = cbtt;
            pcode = series.Split('_')[1];
            comboBoxPcode.Text = pcode;
            var server = HydrometInfoUtility.HydrometServerFromPreferences();
            var range = monthRangePicker1.MonthDayRange;
            Series s;
            if (this.checkBoxUseInstant.Checked)
            {
                s = new HydrometInstantSeries(cbtt, pcode, server);
            }
            else
            {
                s = new HydrometDailySeries(cbtt, pcode, server);
            }
            var sl = new SeriesList();
            sl.Add(s);
            // get wy data (calendar years begin month 1, water years month 10)
            var wyList = new SeriesList();
            if (cySelected)
            {
                wyList = PiscesAnalysis.WaterYears(sl, waterYears, false, 1, true);
            }
            else
            {
                wyList = PiscesAnalysis.WaterYears(sl, waterYears, false, 10, true);
            }
            foreach (Series item in wyList)
            {
                item.Name = cbtt + " " + pcode;
                // remove missing data points (rows whose value is 998877)
                var missingItems = item.Table.Select("value = 998877");
                foreach (var row in missingItems)
                {
                    item.RemoveAt(item.IndexOf(Convert.ToDateTime(row.ItemArray[0])));
                }
            }
            // apply deltas and add stats if toggled
            wyList = ApplyDeltas(wyList, waterYears);
            AddStatistics(wyList);
            if (checkBoxGP.Checked)
            {
                GPAverage(cbtt, server, range, wyList);
            }
            var mp = ReadMpollData(pcode, cbtt);
            mp.RemoveMissing();
            if (mp.Count > 0)
            {
                wyList.Add(mp);
            }
            // remove months outside selected range
            var list = FilterBySelectedRange(range, wyList);
            finalSeriesCollection.Add(list);
        }
        // Set series line colors: one palette color per year (identified by
        // legend text), skipping statistic traces (%/avg/max/min).
        var uniqueSeriesNames = new List<string>();
        var uniqueSeriesColors = new List<string>();
        int colorCounter = 0;
        foreach (var item in finalSeriesCollection)
        {
            // set line color by year which is identified in the legendtext field
            if (!uniqueSeriesNames.Contains(item.Appearance.LegendText) && !item.Appearance.LegendText.Contains("%") && !item.Appearance.LegendText.Contains("avg") && !item.Appearance.LegendText.Contains("max") && !item.Appearance.LegendText.Contains("min"))
            {
                uniqueSeriesNames.Add(item.Appearance.LegendText);//.Name);
                uniqueSeriesColors.Add(snowGgColors[colorCounter]);
                colorCounter = (colorCounter + 1) % snowGgColors.Count; // wrap when palette is exhausted
            }
        }
        foreach (var item in finalSeriesCollection)
        {
            try
            {
                int colIdx = uniqueSeriesNames.IndexOf(item.Appearance.LegendText);//.Name);
                item.Appearance.Color = uniqueSeriesColors[colIdx];
            }
            catch
            {
                // best-effort: series without an assigned palette color fall back to black
                item.Appearance.Color = "Black";
            }
        }
        this.timeSeriesGraph1.AnalysisType = AnalysisType.WaterYears;
        this.timeSeriesGraph1.Series = finalSeriesCollection;
        if (seriesList.Count == 1)
        {
            this.timeSeriesGraph1.Title = HydrometInfoUtility.LookupSiteDescription(cbtt) + " Elevation:" + HydrometInfoUtility.LookupElevation(cbtt);
        }
        //timeSeriesGraph1.GraphSettings = GetGraphSettings();
        this.timeSeriesGraph1.Draw(true);
        // restore the combo boxes clobbered inside the per-series loop above
        comboBoxCbtt.Text = cbttOrig;
        comboBoxPcode.Text = pcodeOrig;
        timeSeriesGraph1.GraphSettings = GetGraphSettings();
    }
    finally
    {
        Cursor = Cursors.Default;
    }
}
/// <summary>
/// Creates a list of water year based data all aligned to year 2000
/// to allow comparison.
/// </summary>
/// <param name="list">input series; NOTE: each series is cleared and re-read in place</param>
/// <param name="years">water years to include</param>
/// <param name="avg30yr">when true also includes a 30 year average trace</param>
/// <param name="beginningMonth">month number the year starts on (10 = water year, 1 = calendar year)</param>
/// <param name="alwaysShiftTo2000">when false and exactly one year is requested (with no 30-year average), the data keeps its original dates instead of being shifted to 2000</param>
/// <param name="startOf30YearAvearge">optional start of the 30-year averaging window; defaults to 30 years before today</param>
/// <returns>per-year series, each labeled with its year (and site name when the list spans multiple sites)</returns>
public static SeriesList WaterYears(SeriesList list, int[] years, bool avg30yr, int beginningMonth, bool alwaysShiftTo2000, DateTime? startOf30YearAvearge = null)
{
    SeriesList wySeries = new SeriesList();
    for (int j = 0; j < list.Count; j++)
    {
        for (int i = 0; i < years.Length; i++)
        {
            YearRange yr = new YearRange(years[i], beginningMonth);
            // NOTE: this clears and re-reads the caller's series in place.
            Series s = list[j];
            s.Clear();
            s.Read(yr.DateTime1, yr.DateTime2);
            Logger.WriteLine("Read() " + yr.ToString() + " count = " + s.Count);
            foreach (string msg in s.Messages)
            {
                Logger.WriteLine(msg);
            }
            // Skip years with no data at all (empty or entirely missing).
            if (s.Count > 0 && s.CountMissing() != s.Count)
            {
                // Align this year's data onto year 2000 so different years overlay.
                Series s2 = TimeSeries.Math.ShiftToYear(s, 2000);
                if (years.Length == 1 && !alwaysShiftTo2000 && !avg30yr)
                {
                    s2 = s; // single-year request: keep original dates
                }
                if (list.HasMultipleSites)
                {
                    s2.Appearance.LegendText = years[i].ToString() + " " + list[j].Name;
                }
                else
                {
                    s2.Appearance.LegendText = years[i].ToString();
                }
                wySeries.Add(s2);
            }
            else
            {
                Logger.WriteLine("year :" + years[i] + "skipping series with no data " + s.Name + " " + s.Parameter);
            }
        }
        if (avg30yr)
        {
            // 30-year daily average, starting either at the supplied date
            // or 30 years before today.
            DateTime start = DateTime.Now.Date.AddYears(-30);
            if (startOf30YearAvearge.HasValue)
            {
                start = startOf30YearAvearge.Value;
            }
            DateTime end = start.AddYears(30);
            list[j].Read(start, end);
            Series s30 = Math.MultiYearDailyAverage(list[j], beginningMonth);
            if (s30.Count > 0)
            {
                wySeries.Add(s30);
            }
        }
    }
    wySeries.Type = SeriesListType.WaterYears;
    if (wySeries.Count > 1)
    {
        wySeries.DateFormat = "MM/dd"; // month/day only, since years are aligned
    }
    return (wySeries);
}
/// <summary>
/// Appends optional statistic traces (max, min, average, percentiles) for the
/// current site/parameter to the water-year list, computed over the
/// WY1..WY2 period entered in the text boxes.
/// </summary>
/// <param name="wyList">list the summary-hydrograph traces are appended to</param>
private void AddStatistics(SeriesList wyList)
{
    bool anyStats = checkBoxMax.Checked || checkBoxMin.Checked || checkBoxAvg.Checked || checkBoxPctls.Checked;
    if (!anyStats)
    {
        return;
    }
    // Fall back to 1990-2011 when the year boxes do not parse.
    // BUGFIX: int.TryParse zeroes its out argument on failure, so the old
    // code (`int y1 = 1990; int.TryParse(..., out y1);`) silently replaced
    // the intended defaults with year 0. Apply defaults only on failure.
    int y1, y2;
    if (!int.TryParse(this.textBoxWY1.Text, out y1))
    {
        y1 = 1990;
    }
    if (!int.TryParse(this.textBoxWY2.Text, out y2))
    {
        y2 = 2011;
    }
    int[] pctls = new int[] { };
    if (checkBoxPctls.Checked)
    {
        try
        {
            string values = textBoxPctls.Text;
            string[] tokens = values.Split(',');
            pctls = Array.ConvertAll<string, int>(tokens, int.Parse);
        }
        catch
        {
            // Unparsable percentile list: use a standard 10/50/90 spread.
            pctls = new int[] { 10, 50, 90 };
        }
    }
    // Calendar years run Jan-Dec; water years run Oct(y-1) through Sep(y).
    DateTime t1, t2;
    if (cySelected)
    {
        t1 = new DateTime(y1, 1, 1);
        t2 = new DateTime(y2, 12, 31);
    }
    else
    {
        t1 = new DateTime(y1 - 1, 10, 1);
        t2 = new DateTime(y2, 9, 30);
    }
    var server = HydrometInfoUtility.HydrometServerFromPreferences();
    Series s;
    if (this.checkBoxUseInstant.Checked)
    {
        s = new HydrometInstantSeries(comboBoxCbtt.Text.Trim(), DeterminePcode(), server);
    }
    else
    {
        s = new HydrometDailySeries(comboBoxCbtt.Text.Trim(), DeterminePcode(), server);
    }
    s.Read(t1, t2);
    s.RemoveMissing();
    s.Appearance.LegendText = "";
    // Anchor the summary hydrograph to year 2000 (the alignment year used
    // by the water-year traces).
    YearRange yr;
    if (cySelected)
    {
        yr = new YearRange(2000, 1);
    }
    else
    {
        yr = new YearRange(2000, 10);
    }
    var list = Math.SummaryHydrograph(s, pctls, yr.DateTime1, checkBoxMax.Checked, checkBoxMin.Checked, checkBoxAvg.Checked, false);
    wyList.Add(list);
}
/// <summary>
/// Build a SeriesList with the trace exceedances.
/// </summary>
/// <param name="sListIn">input traces (one series per scenario)</param>
/// <param name="excLevels">exceedance percentages to plot (e.g. 10, 50, 90)</param>
/// <param name="xtraTraceCheck">when true also append the trace named by <paramref name="xtraTrace"/></param>
/// <param name="xtraTrace">scenario name of the extra reference trace</param>
/// <param name="plotMinTrace">append the minimum trace</param>
/// <param name="plotAvgTrace">append the average trace</param>
/// <param name="plotMaxTrace">append the maximum trace</param>
/// <returns>exceedance/min/max/avg/reference series</returns>
private SeriesList getTraceExceedances(SeriesList sListIn, int[] excLevels, bool xtraTraceCheck, string xtraTrace, bool plotMinTrace, bool plotAvgTrace, bool plotMaxTrace)
{
    SeriesList traceAnalysisList = new SeriesList();
    // Define the index numbers from the serieslist wrt the selected exceedance level
    List<int> sExcIdxs = new List<int>();
    foreach (var item in excLevels)
    {
        var sNew = new Series();
        sNew.TimeInterval = sListIn[0].TimeInterval;
        sNew.Units = sListIn[0].Units;
        sNew.ScenarioName = item + "%Exceedance";
        traceAnalysisList.Add(sNew);
        // Index into the per-timestep ascending-sorted values; the rounding
        // direction differs above/below the 50% level.
        // NOTE(review): excIdx can reach sListIn.Count for very small levels —
        // confirm excLevels are constrained to a safe range upstream.
        int excIdx;
        if (item > 50)
        {
            excIdx = Convert.ToInt16(System.Math.Ceiling(sListIn.Count * (100.0 - Convert.ToDouble(item)) / 100.0));
        }
        else
        {
            excIdx = Convert.ToInt16(System.Math.Floor(sListIn.Count * (100.0 - Convert.ToDouble(item)) / 100.0));
        }
        sExcIdxs.Add(excIdx);
    }
    // Add min trace if selected
    if (plotMinTrace)
    {
        var sNew = new Series();
        sNew.TimeInterval = sListIn[0].TimeInterval;
        sNew.Units = sListIn[0].Units;
        sNew.ScenarioName = "Min";
        traceAnalysisList.Add(sNew);
        sExcIdxs.Add(0); // smallest sorted value
    }
    // Add max trace if selected
    if (plotMaxTrace)
    {
        var sNew = new Series();
        sNew.TimeInterval = sListIn[0].TimeInterval;
        sNew.Units = sListIn[0].Units;
        sNew.ScenarioName = "Max";
        traceAnalysisList.Add(sNew);
        sExcIdxs.Add(sListIn.Count - 1); // largest sorted value
    }
    // Define average trace container
    var sAvg = new Series();
    sAvg.TimeInterval = sListIn[0].TimeInterval;
    sAvg.Units = sListIn[0].Units;
    sAvg.ScenarioName = "Avg";
    // Populate the output serieslist with the exceedance curves
    var dTab = sListIn.ToDataTable(true);
    for (int i = 0; i < dTab.Rows.Count; i++)
    {
        var dRow = dTab.Rows[i];
        DateTime t = DateTime.Parse(dRow[0].ToString());
        var values = dRow.ItemArray;
        // Put the ith timestep values in a C# List and sort by ascending
        var valList = new List<double>();
        var valSum = 0.0;
        for (int j = 1; j < values.Length; j++)
        {
            valList.Add(Convert.ToDouble(values[j].ToString()));
            valSum += Convert.ToDouble(values[j].ToString());
        }
        valList.Sort();
        // Grab the index corresponding to the selected exceedance level and populate the output serieslist
        for (int j = 0; j < sExcIdxs.Count; j++)
        {
            traceAnalysisList[j].Add(t, valList[sExcIdxs[j]], "interpolated");
        }
        // Populate the average trace series
        if (plotAvgTrace)
        {
            sAvg.Add(t, valSum / valList.Count, "interpolated");
        }
    }
    // Add average trace if selected
    if (plotAvgTrace)
    {
        traceAnalysisList.Add(sAvg);
    }
    // Add an extra reference trace if defined
    if (xtraTraceCheck)
    {
        // BUGFIX: validate the selection BEFORE querying the scenario table.
        // The old code indexed Select(...)[0] first, so an empty xtraTrace
        // threw IndexOutOfRangeException and this guard was unreachable.
        if (xtraTrace == "")
        {
            throw new Exception("Select an additional trace that is between 1 and the total number of traces");
        }
        // xtraTrace contains the run name "Name"
        var scenarioTable = Explorer.Database.GetSelectedScenarios();
        var selectedScenarioRow = scenarioTable.Select("[Name] = '" + xtraTrace + "'")[0];
        int selectedIdx = scenarioTable.Rows.IndexOf(selectedScenarioRow);
        traceAnalysisList.Add(sListIn[selectedIdx]);
    }
    return traceAnalysisList;
}
/// <summary>
/// MLR Interpolation Report.
/// Fills missing values in sList[0] using multiple linear regression against
/// every combination of the remaining (estimator) series, keeping per date
/// the estimate with the best correlation that meets the tolerance.
/// Look for '[JR]' in this method to find the code regions that could use a fix or more testing...
/// </summary>
/// <param name="sList">series list; sList[0] is the series to fill, the rest are estimators. NOTE: modified in place — rows with missing data or out-of-month dates are removed.</param>
/// <param name="months">months to include in the regression</param>
/// <param name="fitTolerance">minimum correlation (R) required to accept an estimate</param>
/// <param name="fillSelectedMonths">when true only fill missing dates whose month is in <paramref name="months"/></param>
/// <returns>results holding the text report and the estimated series</returns>
public static MultipleLinearRegressionResults MlrInterpolation(SeriesList sList, int[] months, double fitTolerance, bool fillSelectedMonths = false)
{
    // KT if there is not enough data (for example only 1 pont ) need to ignore that data set?
    MultipleLinearRegressionResults rval = new MultipleLinearRegressionResults();
    // Populate SeriesLists: keep an untouched copy for evaluating the fill equations later.
    var sListFill = new SeriesList();
    foreach (var item in sList)
    {
        sListFill.Add(item.Copy());
    }
    // Get dates to be filled with interpolated values
    var missing = sList[0].GetMissing();
    if (fillSelectedMonths) //overwrites the 'missing' variable with another Series that only contains the selected dates in the input
    {
        Series missingSubset = new Series();
        foreach (var row in missing)
        {
            if (months.Contains(row.DateTime.Month))
            {
                missingSubset.Add(row);
            }
        }
        missing = missingSubset;
    }
    // Delete common dates where at least 1 data point is missing for any of the input series.
    // This is done because the MLR routine does not support missing data. Missing data causes
    // data misalignments and throws off the regression... This section also deletes data for
    // months that are not tagged in the input.
    for (int i = sList[0].Count - 1; i >= 0; i--) //start from the bottom of the list to bypass indexing problems
    {
        for (int j = 0; j < sList.Count; j++)
        {
            Point jthPt = sList[j][i];
            if (jthPt.IsMissing || !months.Contains(jthPt.DateTime.Month))
            {
                for (int k = 0; k < sList.Count; k++) //delete this date from all Series in the list
                {
                    sList[k].RemoveAt(i);
                }
                break;
            }
        }
    }
    // Initialize MLR report and populate header
    List<string> mlrOut = new List<string>();
    mlrOut.Add("");
    mlrOut.Add("MLR Output\t\t\t\t\tRun Date: " + DateTime.Now);
    mlrOut.Add("Estimated Series: " + sList[0].Name);
    var sEstimators = "";
    for (int i = 1; i < sList.Count; i++)
    {
        sEstimators = sEstimators + sList[i].Name + ", ";
    }
    mlrOut.Add("Estimator Series: " + sEstimators.Remove(sEstimators.Length - 2));
    mlrOut.Add("Regression Date Range: " + sList[0].MinDateTime + " - " + sList[0].MaxDateTime);
    var monEstimators = "";
    foreach (var item in months)
    {
        monEstimators = monEstimators + item + ", ";
    }
    mlrOut.Add("Months Used: " + monEstimators.Remove(monEstimators.Length - 2));
    mlrOut.Add("");
    mlrOut.Add("====================================================================================");
    // Initialize output SeriesList
    var sOutList = new SeriesList();
    // Loop through each SeriesList combination for MLR (k estimators at a time, k = 1..all)
    for (int k = 1; k <= sList.Count - 1; k++)
    {
        AllPossibleCombination combinationData = new AllPossibleCombination(sList.Count - 1, k); //uses StackOverflow Class for combinations
        var combinationList = combinationData.GetCombinations();
        // Loop through each combination in the list and run MLR
        foreach (var combo in combinationList)
        {
            // Build MLR method inputs
            // xData is the different Series values that will be used to generate the MLR equation, all index > 0 in the SeriesList. Matrix format
            // yData is the target Series values that is the target for MLR, index = 0 of the SeriesList. Vector format
            double[][] xData = new double[sList[0].Count][];
            double[] yData = new double[sList[0].Count];
            // Loop through the dates to populate the xData and the yData
            for (int i = 0; i < sList[0].Count; i++)
            {
                var jthRow = new List<double>();
                // Loop through each Series in SeriesList
                for (int j = 0; j < combo.Count(); j++)
                {
                    jthRow.Add(sList[combo[j]][i].Value);
                }
                xData[i] = jthRow.ToArray();
                yData[i] = sList[0][i].Value;
            }
            // MLR via Math.Net.Numerics
            double[] mlrCoeffs = MathNet.Numerics.LinearRegression.MultipleRegression.QR(xData, yData, true); //this is more stable than the method below
            //double[] p2 = MathNet.Numerics.Fit.MultiDim(xData, yData, true); //this method is faster but less stable
            // Evaluate fit
            Series sModeled = sList[0].Clone();
            // Equations are of the form y = x1(s1) + x2(s2) + ... + xN the loop handles the inner part of the equation if it exists x2(s2) + ...
            // while the lines before and after the loop handles the first and last terms x1(s1) and xN respectively
            sModeled = sList[combo[0]] * mlrCoeffs[1];
            for (int i = 2; i < mlrCoeffs.Count(); i++)
            {
                sModeled = sModeled + sList[combo[i - 1]] * mlrCoeffs[i];
            } //magic number -1 is used so the correct corresponding Series is used with the correct mlr-coefficient
            sModeled = sModeled + mlrCoeffs[0];
            var rVal = MathNet.Numerics.GoodnessOfFit.R(sModeled.Values, sList[0].Values); //this is the statistic reported by the FORTRAN code
            var rSqd = MathNet.Numerics.GoodnessOfFit.RSquared(sModeled.Values, sList[0].Values); //this is the R-squared for model fit
            // Fill missing dates and generate a SeriesList for final Series output
            var sOut = new Series(); //initialize Series to be added to output SeriesList
            foreach (var fillT in missing)
            {
                double fillVal;
                try
                {
                    // This evaluates the equation generated during the MLR estimation. Same equation-code format as above
                    fillVal = sListFill[combo[0]][fillT.DateTime].Value * mlrCoeffs[1];
                    for (int i = 2; i < mlrCoeffs.Count(); i++)
                    {
                        fillVal = fillVal + sListFill[combo[i - 1]][fillT.DateTime].Value * mlrCoeffs[i];
                    }
                    fillVal = fillVal + mlrCoeffs[0];
                    if (fillVal < 0.0)
                    {
                        // negative estimates are treated as not interpolatable
                        sOut.Add(fillT.DateTime, Point.MissingValueFlag, "NoDataForInterpolation");
                    }
                    else
                    {
                        sOut.Add(fillT.DateTime, fillVal, rVal.ToString("F05")); //[JR] this assigns the R value as the flag, can be switched to R-Squared...
                    }
                }
                catch
                {
                    // estimator has no value at this date: mark as not interpolatable
                    sOut.Add(fillT.DateTime, Point.MissingValueFlag, "NoDataForInterpolation");
                }
            }
            // Add the output Series to a SeriesList
            sOutList.Add(sOut);
            // Populate report
            mlrOut.Add("");
            string equationString = "MLR Equation: " + sList[0].Name + " = ";
            for (int ithCoeff = 1; ithCoeff < mlrCoeffs.Count(); ithCoeff++)
            {
                equationString = equationString + mlrCoeffs[ithCoeff].ToString("F04") + "(" + sList[combo[ithCoeff - 1]].Name + ") + ";
            }
            equationString = equationString + mlrCoeffs[0].ToString("F04");
            mlrOut.Add(equationString);
            mlrOut.Add("Correlation Coefficient = " + rVal.ToString("F04"));
            mlrOut.Add("R-Squared Coefficient = " + rSqd.ToString("F04"));
            mlrOut.Add("MLR Estimates: ");
            foreach (var item in sOut)
            {
                mlrOut.Add("\t\t" + item.ToString(true));
            }
            mlrOut.Add("");
            mlrOut.Add("------------------------------------------------------------------------------------");
        }
    }
    // Generate MLR report
    //TextFile tf = new TextFile(mlrOut.ToArray());
    //var fn = FileUtility.GetTempFileName(".txt");
    //tf.SaveAs(fn);
    //System.Diagnostics.Process.Start(fn);
    rval.Report = mlrOut.ToArray();
    // Generate output Series
    var sOutFinal = sListFill[0].Copy();
    // Remove the Points to be filled in the original input Series
    for (int i = missing.Count - 1; i >= 0; i--)
    {
        sOutFinal.RemoveAt(sOutFinal.IndexOf(missing[i].DateTime));
    }
    // Find the best fit out of all the estimated values
    // Loops through the dates
    foreach (var sRow in sOutList[0])
    {
        DateTime estT = sRow.DateTime;
        List<double> flagItems = new List<double>(); //container for flag values
        List<double> valItems = new List<double>(); //container for estimated values
        // Loops through each estimate
        for (int i = 0; i < sOutList.Count; i++)
        {
            Point estPt = sOutList[i][estT];
            valItems.Add(estPt.Value);
            if (estPt.Value < 0.0) //add 0 correlation value if the estimated value < 0, [JR] this prevents the use of this routine to estimate negative values...
            {
                flagItems.Add(0.0);
            }
            else
            {
                flagItems.Add(Convert.ToDouble(estPt.Flag));
            }
        }
        var maxFit = flagItems.Max();
        var bestFitVal = valItems[flagItems.IndexOf(maxFit)];
        if (maxFit >= fitTolerance) //add the value if it exceeds the specified tolerance
        {
            sOutFinal.Add(estT, bestFitVal, "E");
        }
        else //add missing since there is no acceptable estimate to fill this missing value
        {
            sOutFinal.AddMissing(estT);
        }
    }
    //return sOutFinal;
    rval.EstimatedSeries = sOutFinal;
    return rval;
}
/// <summary>
/// Reads the requested daily Hydromet series over the selected water years
/// and builds whichever monthly summaries are checked (sum, average, change,
/// max, min, first/end of month, in CFS and/or acre-feet), then draws them.
/// </summary>
private void buttonGo_Click(object sender, EventArgs e)
{
    Cursor = Cursors.WaitCursor;
    Application.DoEvents();
    try
    {
        int startWaterYear = Convert.ToInt32(textBoxYear.Text);
        int endWaterYear = Convert.ToInt32(textBoxEndYear.Text);
        var daily = Reclamation.TimeSeries.Math.HydrometDaily(textBoxCbtt.Text, textBoxPcode.Text);
        // Water years run from October 1 of the prior calendar year to September 30.
        daily.Read(new DateTime(startWaterYear - 1, 10, 1), new DateTime(endWaterYear, 9, 30));

        var output = new SeriesList();
        if (checkBoxTotal.Checked)
        {
            var monthlySum = Reclamation.TimeSeries.Math.MonthlySum(daily);
            monthlySum.Units = "CFS";
            output.Add(monthlySum);
        }
        if (checkBoxTotalAF.Checked)
        {
            // 1.98347 converts daily CFS values to acre-feet.
            var acreFeet = daily.Copy();
            Reclamation.TimeSeries.Math.Multiply(acreFeet, 1.98347);
            var monthlySum = Reclamation.TimeSeries.Math.MonthlySum(acreFeet);
            monthlySum.Units = "Acre-Feet";
            output.Add(monthlySum);
        }
        if (checkBoxAverage.Checked)
        {
            var monthlyAvg = Reclamation.TimeSeries.Math.MonthlyAverage(daily);
            monthlyAvg.Units = "CFS";
            output.Add(monthlyAvg);
        }
        if (checkBoxAverageAF.Checked)
        {
            var acreFeet = daily.Copy();
            Reclamation.TimeSeries.Math.Multiply(acreFeet, 1.98347);
            var monthlyAvg = Reclamation.TimeSeries.Math.MonthlyAverage(acreFeet);
            monthlyAvg.Units = "Acre-Feet";
            output.Add(monthlyAvg);
        }
        if (checkBoxChange.Checked)
        {
            // Monthly change = end-of-month value minus start-of-month value.
            var startOfMonth = Reclamation.TimeSeries.Math.StartOfMonth(daily);
            var endOfMonth = Reclamation.TimeSeries.Math.EndOfMonth(daily);
            output.Add(endOfMonth - startOfMonth);
        }
        if (checkBoxMax.Checked)
        {
            output.Add(Reclamation.TimeSeries.Math.MonthlyMax(daily));
        }
        if (checkBoxMin.Checked)
        {
            output.Add(Reclamation.TimeSeries.Math.MonthlyMin(daily));
        }
        if (checkBoxFirstMonth.Checked)
        {
            output.Add(Reclamation.TimeSeries.Math.StartOfMonth(daily));
        }
        if (checkBoxEndMonth.Checked)
        {
            output.Add(Reclamation.TimeSeries.Math.EndOfMonth(daily));
        }
        view.SeriesList = output;
        view.Draw();
    }
    finally
    {
        Cursor = Cursors.Default;
    }
}
/// <summary>
/// ComputeTargets uses a rule curve, forecast, and historical average
/// to project flood target levels through the forecast period.
/// </summary>
/// <param name="pt">flood control point describing the station</param>
/// <param name="waterYear">water year of interest</param>
/// <param name="start">point whose date anchors the target computation</param>
/// <param name="optionalPercents">additional percent-of-average targets to compute</param>
/// <param name="dashed">draw style passed through to GetTargets</param>
/// <param name="forecastOverride">when true use <paramref name="forecastValIn"/> instead of the latest published forecast</param>
/// <param name="forecastValIn">manual forecast value (parsed as a double when overriding)</param>
/// <returns>list of target series; empty when no forecast is available</returns>
public static SeriesList ComputeTargets(FloodControlPoint pt, int waterYear, Point start, int[] optionalPercents, bool dashed, bool forecastOverride, string forecastValIn)
{
    string cbtt = pt.StationFC;
    SeriesList targets = new SeriesList();
    Series forecast = new Series();
    int forecastMonth;
    double forecastValue;
    if (forecastOverride)
    {
        // Manual override: take the caller's value, dated to the current month.
        forecastMonth = DateTime.Now.Month;
        forecastValue = Convert.ToDouble(forecastValIn);
    }
    else
    {
        // calculate forecast of most recent month
        forecast = GetLatestForecast(cbtt, waterYear);
        forecastMonth = MonthOfLastForecast(forecast);
        if (forecastMonth == 0)
        {
            // if no forecast cannot compute target
            return targets;
        }
        // value of forecast to use for percent of average
        forecastValue = forecast[forecastMonth - 1].Value;
    }
    if (cbtt.ToLower() == "hgh")
    {
        // hgh: targets are based on the 30-year average runoff volume over
        // the May through September period.
        var avg30yrQU = Get30YearAverageSeries(pt.DailyStationQU, "qu", 5);
        var periodStart = new DateTime(start.DateTime.Year, 5, 1);
        var periodEnd = new DateTime(start.DateTime.Year, 9, 30);
        double historicalAverageResidual = SumResidual(avg30yrQU, periodStart, periodEnd);
        double percent = forecastValue / historicalAverageResidual;
        var target = HGHTarget(pt, forecastValue, start.DateTime, periodStart);
        target.Name = "Forecast " + (100 * percent).ToString("F0") + "% " + (forecastValue / 1000.0).ToString("F0");
        targets.Add(target);
        foreach (var pct in optionalPercents)
        {
            var scaledForecast = historicalAverageResidual * pct / 100.0;
            target = HGHTarget(pt, scaledForecast, start.DateTime, periodStart);
            target.Name = "Target (" + pct.ToString("F0") + "%) " + (scaledForecast / 1000.0).ToString("F0");
            targets.Add(target);
        }
    }
    else
    {
        targets.Add(GetTargets(pt, waterYear, start, optionalPercents, forecastMonth, forecastValue, dashed));
    }
    return targets;
}
/// <summary>
/// Builds a SeriesList for the collection: series whose table exists in the
/// catalog are loaded from the database, the rest remain empty placeholders
/// (still carrying the interval and table name).
/// </summary>
private SeriesList CreateSeriesList()
{
    var interval = m_formatter.Interval;
    TimeSeriesName[] names = GetTimeSeriesName(m_collection, interval);
    var tableNames = names.Select(n => n.GetTableName()).ToArray();
    // One catalog query for all tables instead of one query per series.
    var catalog = db.GetSeriesCatalog("tablename in ('" + String.Join("','", tableNames) + "')");
    var result = new SeriesList();
    foreach (var name in names)
    {
        string tableName = name.GetTableName();
        Series s = new Series();
        s.TimeInterval = interval;
        if (catalog.Select("tablename = '" + tableName + "'").Length == 1)
        {
            s = db.GetSeriesFromTableName(tableName);
        }
        s.Table.TableName = tableName;
        result.Add(s);
    }
    return result;
}
/// <summary>
/// Builds a SeriesList of annual sums, one aggregated series per input trace.
/// </summary>
/// <param name="sListIn">input traces</param>
/// <param name="aggType">"CY" (calendar year), "WY" (water year, Oct-Sep),
/// or "XX" (the Explorer's custom month/day range)</param>
/// <returns>aggregated series list, preserving each input's interval and units</returns>
private SeriesList getTraceSums(SeriesList sListIn, string aggType)
{
    SeriesList traceAnalysisList = new SeriesList();
    foreach (var s in sListIn)
    {
        Series sNew;
        switch (aggType)
        {
            case "CY": // calendar year: Jan 1 - Dec 31
                sNew = Reclamation.TimeSeries.Math.AnnualSum(s, new MonthDayRange(1, 1, 12, 31), 1);
                break;
            case "WY": // water year: Oct 1 - Sep 30
                sNew = Reclamation.TimeSeries.Math.AnnualSum(s, new MonthDayRange(10, 1, 9, 30), 10);
                break;
            case "XX": // custom range chosen in the explorer UI
                sNew = Reclamation.TimeSeries.Math.AnnualSum(s, Explorer.MonthDayRange, Explorer.MonthDayRange.Month1);
                break;
            default:   // unrecognized aggregation: emit an empty series (matches original behavior)
                sNew = new Series();
                view.Messages.Add("");
                break;
        }
        sNew.TimeInterval = s.TimeInterval;
        sNew.Units = s.Units;
        traceAnalysisList.Add(sNew);
    }
    return traceAnalysisList;
}
/// <summary>
/// Converts daily fixed-width data formatted for the 'arcimport.exe' program
/// into a SeriesList. Each series is named daily_cbtt_pcode (e.g. daily_jck_fb).
/// Malformed lines (bad date, short line, unparsable value) are skipped with a
/// console message; duplicate datetimes within a series are logged and ignored.
/// </summary>
/// <param name="tf">input text file; the first row is a header and is skipped</param>
/// <returns>list of daily series keyed by table name daily_{cbtt}_{pcode}</returns>
public static SeriesList HydrometDailyDataToSeriesList(TextFile tf)
{
    var rval = new SeriesList();
    for (int i = 1; i < tf.Length; i++) // skip first row (header)
    {
        var line = tf[i];
        var fmt = "MM/dd/yyyy";
        /*
         * fixed-width layout (columns: date 0-9, cbtt 11-22, pcode 24-32, value 34+):
         * 07/28/2016 CKVY         MN        998877.00    40.91
         */
        if (line.Length < fmt.Length)
        {
            Console.WriteLine("Line too short, skipping: " + line);
            continue;
        }
        var strDate = line.Substring(0, fmt.Length);
        DateTime t;
        // exact parse; MM/dd/yyyy is culture-neutral so the invariant culture is safe here
        if (!DateTime.TryParseExact(strDate, fmt, CultureInfo.InvariantCulture, DateTimeStyles.None, out t))
        {
            Console.WriteLine("Bad Date, Skipping line: " + line);
            continue;
        }
        // guard the fixed-width fields so a truncated line cannot throw
        // ArgumentOutOfRangeException from Substring
        if (line.Length < 35)
        {
            Console.WriteLine("Line too short, skipping: " + line);
            continue;
        }
        string cbtt = line.Substring(11, 12).Trim().ToLower();
        string pcode = line.Substring(24, 9).Trim().ToLower();
        // tolerate value fields shorter than 10 characters at end of line
        string strValue = line.Substring(34, System.Math.Min(10, line.Length - 34));
        double val = 0;
        // invariant culture: the file format is machine-generated and must parse
        // identically regardless of the host machine's regional settings (CA1305)
        if (!double.TryParse(strValue, NumberStyles.Float, CultureInfo.InvariantCulture, out val))
        {
            Console.WriteLine("Error parsing double " + strValue);
            continue;
        }
        string name = "daily_" + cbtt + "_" + pcode;
        name = name.ToLower();
        var idx = rval.IndexOfTableName(name);
        Series s;
        if (idx >= 0)
        {
            // existing series for this site/parameter: append to it
            s = rval[idx];
        }
        else
        {
            // first value for this site/parameter: create the series
            s = new Series();
            s.TimeInterval = TimeInterval.Daily;
            s.SiteID = cbtt;
            s.Parameter = pcode;
            s.Name = cbtt + "_" + pcode;
            s.Name = s.Name.ToLower();
            s.Table.TableName = name;
            rval.Add(s);
        }
        string flag = "";
        if (s.IndexOf(t) < 0)
        {
            s.Add(t, val, flag);
        }
        else
        {
            Logger.WriteLine(s.SiteID + ":" + s.Parameter + "skipped duplicate datetime " + t.ToString());
        }
    }
    return (rval);
}
/// <summary>
/// Runs trace analysis on the currently-selected series: builds exceedance
/// curves and/or annual aggregations depending on the Explorer settings and
/// returns the populated view. Requires at least 10 traces.
/// </summary>
public override IExplorerView Run()
{
    Logger.WriteLine("TraceAnalysis.Run()");
    SeriesList list = Explorer.CreateSelectedSeries();
    ReadSeriesList(list);
    string title = list.Text.TitleText();
    string subTitle = list.MissingRecordsMessage;
    // [JR] don't perform trace analysis if trace count < 10...
    // fall back to showing the raw selection instead
    if (list.Count < 10)
    {
        view.Messages.Add("Trace exceedance analysis is not available if trace count < 10");
        view.Title = title;
        view.SubTitle = subTitle;
        view.SeriesList = list;
        view.DataTable = list.ToDataTable(true);
        return view;
    }
    // This seems to be common between all the analysis options
    if (Explorer.SelectedSeries.Length == 1 && Explorer.MergeSelected)
    {
        // merge single Year Traces.
        list.RemoveMissing();
        var s = list.MergeYearlyScenarios();
        list = new SeriesList();
        list.Add(s);
    }
    view.Messages.Add(list.MissingRecordsMessage);
    list.RemoveMissing();
    // Initialize the output container
    SeriesList traceAnalysisList = new SeriesList();
    // Get exceedance curves
    if (Explorer.traceExceedanceAnalysis)
    {
        traceAnalysisList = getTraceExceedances(list, Explorer.ExceedanceLevels,
            Explorer.AlsoPlotTrace, Explorer.PlotTrace,
            Explorer.PlotMinTrace, Explorer.PlotAvgTrace, Explorer.PlotMaxTrace);
    }
    // Get aggregated values
    // NOTE(review): when both analyses are checked, aggregation overwrites the
    // exceedance result — presumably the UI makes the options mutually exclusive; confirm
    if (Explorer.traceAggregationAnalysis)
    {
        // map the selected radio button to the aggregation code used by getTraceSums
        string sumType = "";
        if (Explorer.sumCYRadio)
        {
            sumType = "CY";
        }
        else if (Explorer.sumWYRadio)
        {
            sumType = "WY";
        }
        else if (Explorer.sumCustomRangeRadio)
        {
            sumType = "XX";
        }
        else
        {
        }
        traceAnalysisList = getTraceSums(list, sumType);
    }
    // [JR] Add other analysis/report building options here...
    Explorer.WriteProgressMessage("drawing graph", 80);
    view.Title = title;
    view.SubTitle = subTitle;
    view.SeriesList = traceAnalysisList;
    view.DataTable = traceAnalysisList.ToDataTable(true);
    //view.Draw();
    return view;
}
/// <summary>
/// Appends a series to the underlying list.
/// </summary>
/// <param name="s">series to append</param>
public void Add(Series s) => seriesList.Add(s);
/// <summary>
/// Reads a comma-separated TextFile into a SeriesList of monthly series,
/// one series per site/parameter pair, named monthly_{siteid}_{parameter}.
/// Invalid lines are skipped with a console message; duplicate datetimes
/// within a series are logged and ignored.
/// </summary>
/// <param name="tf">input file; the first row is a header and is skipped</param>
/// <returns>list of monthly series</returns>
internal static SeriesList FileToSeriesList(TextFile tf)
{
    SeriesList rval = new SeriesList();
    /*
     * cbtt,pc,Year,month,value,flag,oldValue,oldFlag
     * ARK, PM,2018,JAN,1.00,M,5.36,M
     */
    for (int i = 1; i < tf.Length; i++) // skip first row (header)
    {
        var tokens = tf[i].Split(',');
        if (tokens.Length != 8)
        {
            Console.WriteLine("Skipping invalid line: " + tf[i]);
            continue;
        }
        // NOTE(review): tokens are lowercased but not trimmed (" PM" -> " pm");
        // presumably downstream lookups tolerate this — confirm before changing
        var siteid = tokens[0].ToLower();
        var parameter = tokens[1].ToLower();
        DateTime t;
        if (!ParseDate(tokens[2], tokens[3], out t))
        {
            Console.WriteLine("Error Parsing date " + tf[i]);
            continue;
        }
        double val = 0;
        // invariant culture: the file is machine-generated and must parse the
        // same regardless of the host machine's regional settings (CA1305)
        if (!double.TryParse(tokens[4], NumberStyles.Float, CultureInfo.InvariantCulture, out val))
        {
            Console.WriteLine("Error Parsing value: " + tokens[4]);
            continue;
        }
        var flag = tokens[5];
        string name = "monthly_" + siteid + "_" + parameter;
        var idx = rval.IndexOfTableName(name);
        Series s;
        if (idx >= 0)
        {
            // existing series for this site/parameter: append to it
            s = rval[idx];
        }
        else
        {
            // first value for this site/parameter: create the series
            s = new Series();
            s.TimeInterval = TimeInterval.Monthly;
            s.SiteID = siteid;
            s.Parameter = parameter;
            s.Name = siteid + "_" + parameter;
            s.Name = s.Name.ToLower();
            s.Table.TableName = name;
            rval.Add(s);
        }
        if (s.IndexOf(t) < 0)
        {
            s.Add(t, val, flag);
        }
        else
        {
            Logger.WriteLine(s.SiteID + ":" + s.Parameter + "skipped duplicate datetime " + t.ToString());
        }
    }
    return (rval);
}
/// <summary>
/// Builds a SeriesList of trace exceedance curves, with optional min/avg/max
/// traces and an optional extra reference trace selected by scenario name.
/// </summary>
/// <param name="sListIn">input traces (all assumed to share interval/units)</param>
/// <param name="excLevels">exceedance percentages (e.g. 10, 50, 90)</param>
/// <param name="xtraTraceCheck">when true, append the trace named by xtraTrace</param>
/// <param name="xtraTrace">scenario name of the extra reference trace</param>
/// <param name="plotMinTrace">include the minimum trace</param>
/// <param name="plotAvgTrace">include the average trace</param>
/// <param name="plotMaxTrace">include the maximum trace</param>
/// <returns>series list of exceedance/statistic curves</returns>
private SeriesList getTraceExceedances(SeriesList sListIn, int[] excLevels, bool xtraTraceCheck, string xtraTrace, bool plotMinTrace, bool plotAvgTrace, bool plotMaxTrace)
{
    SeriesList traceAnalysisList = new SeriesList();
    // Index into the per-timestep ascending-sorted values for each output curve
    List<int> sExcIdxs = new List<int>();
    foreach (var item in excLevels)
    {
        var sNew = new Series();
        sNew.TimeInterval = sListIn[0].TimeInterval;
        sNew.Units = sListIn[0].Units;
        sNew.ScenarioName = item + "%Exceedance";
        traceAnalysisList.Add(sNew);
        int excIdx;
        // round conservatively: ceiling above the median, floor at or below it
        if (item > 50)
        {
            excIdx = Convert.ToInt16(System.Math.Ceiling(sListIn.Count * (100.0 - Convert.ToDouble(item)) / 100.0));
        }
        else
        {
            excIdx = Convert.ToInt16(System.Math.Floor(sListIn.Count * (100.0 - Convert.ToDouble(item)) / 100.0));
        }
        // clamp into [0, Count-1]; levels at/near 0% previously produced an
        // index equal to Count and crashed with ArgumentOutOfRangeException
        excIdx = System.Math.Max(0, System.Math.Min(excIdx, sListIn.Count - 1));
        sExcIdxs.Add(excIdx);
    }
    // Add min trace if selected
    if (plotMinTrace)
    {
        var sNew = new Series();
        sNew.TimeInterval = sListIn[0].TimeInterval;
        sNew.Units = sListIn[0].Units;
        sNew.ScenarioName = "Min";
        traceAnalysisList.Add(sNew);
        sExcIdxs.Add(0);
    }
    // Add max trace if selected
    if (plotMaxTrace)
    {
        var sNew = new Series();
        sNew.TimeInterval = sListIn[0].TimeInterval;
        sNew.Units = sListIn[0].Units;
        sNew.ScenarioName = "Max";
        traceAnalysisList.Add(sNew);
        sExcIdxs.Add(sListIn.Count - 1);
    }
    // Define average trace container
    var sAvg = new Series();
    sAvg.TimeInterval = sListIn[0].TimeInterval;
    sAvg.Units = sListIn[0].Units;
    sAvg.ScenarioName = "Avg";
    // Populate the output serieslist with the exceedance curves.
    // Each table row is one timestep; column 0 is the datetime, the rest are trace values.
    var dTab = sListIn.ToDataTable(true);
    for (int i = 0; i < dTab.Rows.Count; i++)
    {
        var dRow = dTab.Rows[i];
        DateTime t = DateTime.Parse(dRow[0].ToString());
        var values = dRow.ItemArray;
        // Put the ith timestep values in a C# List and sort by ascending
        var valList = new List<double>();
        var valSum = 0.0;
        for (int j = 1; j < values.Length; j++)
        {
            // convert once; the original converted each cell twice
            double v = Convert.ToDouble(values[j].ToString());
            valList.Add(v);
            valSum += v;
        }
        valList.Sort();
        // Grab the index corresponding to each selected exceedance level
        for (int j = 0; j < sExcIdxs.Count; j++)
        {
            traceAnalysisList[j].Add(t, valList[sExcIdxs[j]], "interpolated");
        }
        // Populate the average trace series
        if (plotAvgTrace)
        {
            sAvg.Add(t, valSum / valList.Count, "interpolated");
        }
    }
    // Add average trace if selected
    if (plotAvgTrace)
    {
        traceAnalysisList.Add(sAvg);
    }
    // Add an extra reference trace if defined
    if (xtraTraceCheck)
    {
        // Validate BEFORE querying: the original indexed Select(...)[0] first,
        // which threw IndexOutOfRangeException before this check could run.
        if (xtraTrace == "")
        {
            throw new Exception("Select an additional trace that is between 1 and the total number of traces");
        }
        //xtraTrace contains the run name "Name"
        var scenarioTable = Explorer.Database.GetSelectedScenarios();
        var matches = scenarioTable.Select("[Name] = '" + xtraTrace + "'");
        if (matches.Length == 0)
        {
            throw new Exception("Selected trace '" + xtraTrace + "' was not found in the scenario table");
        }
        int selectedIdx = scenarioTable.Rows.IndexOf(matches[0]);
        traceAnalysisList.Add(sListIn[selectedIdx]);
    }
    return traceAnalysisList;
}
/// <summary>
/// Refresh button handler: reads the selected station/parameter, builds
/// water-year-aligned series for the chosen years, optionally adds GP-average
/// and mpoll data, filters to the selected month range, and redraws the graph.
/// The wait cursor is always restored in the finally block.
/// </summary>
private void buttonRefresh_Click(object sender, EventArgs e)
{
    try
    {
        timeSeriesGraph1.AnnotationOnMouseMove = checkBoxAnnotate.Checked;
        Cursor = Cursors.WaitCursor;
        Application.DoEvents();
        string pcode = DeterminePcode();
        timeSeriesGraph1.Clear();
        string cbtt = comboBoxCbtt.Text.Trim();
        // nothing to do without both a station id and a parameter code
        if (cbtt.Trim() == "" || pcode.Trim() == "")
        {
            return;
        }
        // remember the selection for the next session
        UserPreference.Save("Snowgg->cbtt", cbtt);
        UserPreference.Save("Snowgg->pcode", comboBoxPcode.Text.Trim());
        int[] waterYears = this.yearSelector1.SelectedYears;
        var server = HydrometInfoUtility.HydrometServerFromPreferences();
        var range = monthRangePicker1.MonthDayRange;
        Series s = new HydrometDailySeries(cbtt, pcode, server);
        var sl = new SeriesList();
        sl.Add(s);
        // align all selected water years to year 2000 (beginning month 10) for overlay comparison
        var wyList = PiscesAnalysis.WaterYears(sl, waterYears, false, 10, true);
        AddStatistics(wyList);
        if (checkBoxGP.Checked)
        {
            GPAverage(cbtt, server, range, wyList);
        }
        var mp = ReadMpollData(pcode, cbtt);
        mp.RemoveMissing();
        // only overlay mpoll data when it actually has values
        if (mp.Count > 0)
        {
            wyList.Add(mp);
        }
        // remove months outside selected range
        var list = FilterBySelectedRange(range, wyList);
        this.timeSeriesGraph1.AnalysisType = AnalysisType.WaterYears;
        this.timeSeriesGraph1.Series = list;
        this.timeSeriesGraph1.Title = HydrometInfoUtility.LookupSiteDescription(cbtt)
            + " Elevation:" + HydrometInfoUtility.LookupElevation(cbtt);
        this.timeSeriesGraph1.Draw(true);
        timeSeriesGraph1.GraphSettings = GetGraphSettings();
    }
    finally
    {
        // restore the default cursor even if the refresh throws
        Cursor = Cursors.Default;
    }
}