/// <summary>
/// Builds the union query for the series list over [t1, t2] and executes it,
/// returning the result as a DataTable named "tbl".
/// </summary>
private DataTable Read(SeriesList list, DateTime t1, DateTime t2)
{
    string query = CreateSQL(list, t1, t2);
    return db.Server.Table("tbl", query);
}
/// <summary>
/// Builds a UNION ALL query over every series table in the list that exists,
/// selecting tablename, datetime, value and flag — optionally constrained to
/// [t1, t2] — ordered by datetime then tablename.
/// </summary>
/// <param name="list">series whose table names are queried</param>
/// <param name="t1">range start; MinDateTime means unbounded</param>
/// <param name="t2">range end; MaxDateTime means unbounded</param>
/// <returns>the combined SQL text</returns>
private string CreateSQL(SeriesList list, DateTime t1, DateTime t2)
{
    var selects = new List<string>();
    for (int i = 0; i < list.Count; i++)
    {
        string tableName = list[i].Table.TableName;
        if (!db.Server.TableExists(tableName))
        {
            continue; // skip tables that are not present in the database
        }
        string select = "SELECT '" + tableName + "' as tablename, datetime,value,flag FROM " + tableName;
        // only add a WHERE clause when the caller actually narrowed the range
        if (t1 != TimeSeriesDatabase.MinDateTime || t2 != TimeSeriesDatabase.MaxDateTime)
        {
            select += " WHERE datetime >= " + db.Server.PortableDateString(t1, TimeSeriesDatabase.dateTimeFormat)
                    + " AND " + " datetime <= " + db.Server.PortableDateString(t2, TimeSeriesDatabase.dateTimeFormat);
        }
        selects.Add(select);
    }
    // Joining after the loop (instead of appending a separator per-iteration)
    // fixes the dangling "UNION ALL" the original emitted when the LAST table
    // in the list did not exist, which produced invalid SQL.
    var sql = String.Join(" UNION ALL \n", selects);
    sql += " \norder by datetime,tablename ";
    return (sql);
}
/// <summary>
/// Writes the series list as simple text between BEGIN DATA / END DATA
/// markers, reading the data in ~30-day chunks to limit memory use.
/// </summary>
/// <param name="list">series to print</param>
/// <param name="interval">interval passed to the header writer</param>
/// <param name="t1">start of range</param>
/// <param name="t2">end of range</param>
private void WriteCsv(SeriesList list, TimeInterval interval, DateTime t1, DateTime t2)
{
    Console.WriteLine("BEGIN DATA");
    WriteSeriesHeader(list, interval);
    int maxDaysInMemory = 30; // chunk size: days of data held in memory at once
    var t = t1;
    // removed the unused Performance local and stale scratch comments
    while (t < t2)
    {
        var t3 = t.AddDays(maxDaysInMemory).EndOfDay();
        if (t3 > t2)
        {
            t3 = t2;
        }
        var tbl = Read(list, t, t3);
        PrintDataTable(list, tbl);
        t = t3.NextDay();
    }
    Console.WriteLine("END DATA");
}
/// <summary>
/// Opens the pisces SQLite database, reads every configured series name over
/// [m_t1, m_t2], and writes the results to riverware files. Throws when any
/// requested series name is not found.
/// </summary>
private void ReadFromPisces()
{
    Logger.WriteLine("opening " + m_dbName);
    SQLiteServer svr = new SQLiteServer(m_dbName);
    TimeSeriesDatabase db = new TimeSeriesDatabase(svr);
    SeriesList list = new SeriesList();
    for (int i = 0; i < m_seriesName.Count; i++)
    {
        string name = m_seriesName[i];
        Logger.WriteLine("looking for series '" + name + "'");
        var s = db.GetSeriesFromName(name);
        if (s == null)
        {
            throw new Exception("unable to find series '" + name + "' in pisces database '" + m_dbName + "'");
        }
        s.Read(m_t1, m_t2);
        list.Add(s);
    }
    WriteToRiverwareFiles(list);
}
/// <summary>
/// Writes each series to its riverware text file. Missing points are written
/// as NaN and reported to the console; empty series produce no file content
/// (the previous file is still deleted).
/// </summary>
/// <param name="list">series aligned with the filename/cbtt/pcode/slot_offset arrays</param>
private void WriteToRiverwareFiles(SeriesList list)
{
    for (int i = 0; i < list.Count; i++)
    {
        Series s = list[i];
        File.Delete(filename[i]);
        if (s.Count <= 0)
        {
            continue; // nothing to write for an empty series
        }
        // using guarantees the writer is flushed/closed even if a write throws
        // (the original leaked the StreamWriter on exception)
        using (StreamWriter sw = new StreamWriter(filename[i]))
        {
            sw.WriteLine("# this data was imported from Hydromet " + DateTime.Now.ToString());
            sw.WriteLine("# " + cbtt[i] + " " + pcode[i]);
            sw.WriteLine("start_date: " + s[0].DateTime.AddDays(slot_offset[i]).ToString("yyyy-MM-dd") + " 24:00");
            for (int j = 0; j < s.Count; j++)
            {
                if (s[j].IsMissing)
                {
                    Console.WriteLine(cbtt[i] + " " + pcode[i] + " Error: missing data " + s[j].ToString());
                    sw.WriteLine("NaN");
                }
                else
                {
                    sw.WriteLine(s[j].Value);
                }
            }
        }
    }
}
/// <summary>
/// Builds the selected series list (optionally merging yearly traces into a
/// single series) and hands it to the view with a derived title/subtitle.
/// </summary>
public override IExplorerView Run()
{
    Logger.WriteLine("TimeSeriesAnalysis.Run()");
    SeriesList list = Explorer.CreateSelectedSeries();
    ReadSeriesList(list);
    bool mergeTraces = Explorer.SelectedSeries.Length == 1 && Explorer.MergeSelected;
    if (mergeTraces)
    {
        // collapse single-year traces into one continuous series
        var merged = list.MergeYearlyScenarios();
        list = new SeriesList();
        list.Add(merged);
    }
    view.SeriesList = list;
    string title = list.Text.TitleText();
    if (Explorer.SubtractFromBaseline)
    {
        title = "Subtract Reference \n" + title;
    }
    view.Title = title;
    view.SubTitle = list.MissingRecordsMessage;
    return view;
}
/// <summary>
/// Plots each series against its exceedance percent on a numeric x-axis,
/// then formats the axes and refreshes the chart.
/// </summary>
public void DrawSorted(SeriesList list, string title, string subTitle, string xAxisTitle)
{
    Clear();
    if (list.Count == 0)
    {
        return;
    }
    foreach (Series s in list)
    {
        var points = new PointPairList();
        foreach (var pt in s)
        {
            points.Add(pt.Percent, pt.Value);
        }
        var curveColor = Default.GetSeriesColor(pane.CurveList.Count);
        LineItem curve = pane.AddCurve(s.Appearance.LegendText, points, curveColor);
        curve.Symbol.IsVisible = false;
        curve.Line.Width = Default.GetSeriesWidth(pane.CurveList.Count);
    }
    pane.Title.Text = title + "\n" + subTitle;
    pane.XAxis.Title.Text = xAxisTitle;
    FormatBottomAxisNumeric();
    pane.XAxis.Scale.Format = "";
    pane.XAxis.Scale.MajorStep = 5;
    FormatYAxisStandard();
    SetPaneVisible(true);
    LabelYaxis(list);
    RefreshChart(chart1);
}
/// <summary>
/// Builds a SeriesList for the collection's time-series names: entries with
/// exactly one catalog match come from the database; all others are empty
/// placeholder series. Every entry's table name is set either way.
/// </summary>
private SeriesList CreateSeriesList()
{
    var interval = m_formatter.Interval;
    TimeSeriesName[] names = GetTimeSeriesName(m_collection, interval);
    var tableNames = names.Select(n => n.GetTableName()).ToArray();
    var sc = db.GetSeriesCatalog("tablename in ('" + String.Join("','", tableNames) + "')");
    var result = new SeriesList();
    foreach (var tn in names)
    {
        string tableName = tn.GetTableName();
        Series s = new Series();
        s.TimeInterval = interval;
        // use the cataloged series only when exactly one catalog row matches
        if (sc.Select("tablename = '" + tableName + "'").Length == 1)
        {
            s = db.GetSeriesFromTableName(tableName);
        }
        s.Table.TableName = tableName;
        result.Add(s);
    }
    return result;
}
/// <summary>
/// Appends a new series built from the supplied data points to the chart's
/// series collection.
/// </summary>
/// <param name="data">points for the new series</param>
public void AddSeries(List<DataPoint> data)
{
    // the large block of commented-out SQL/data-loading code that used to
    // live here was dead and has been removed
    SeriesList.Add(new Series() { Items = data });
}
/// <summary>
/// Print the daily data from the Series List as a fixed-width console table:
/// a header row of dates, an underline row, then one row per series with
/// values formatted to two decimals.
/// </summary>
/// <param name="list">Series list</param>
private static void PrintDaily(SeriesList list)
{
    var tbl = list.ToDataTable(false);
    var header = "\nStation Parameter ";
    var underline = "========= ========== ========= ========= ========= ========= =========";
    foreach (DataRow row in tbl.Rows)
    {
        DateTime t = Convert.ToDateTime(row[0]);
        header += t.ToString("ddd MMMdd");
        header += " ";
    }
    Console.WriteLine(header);
    Console.WriteLine(underline);
    foreach (var item in list)
    {
        string line = item.SiteID.PadRight(10) + " " + item.Parameter.PadRight(11) + " ";
        foreach (var pt in item)
        {
            line += pt.Value.ToString("F2").PadLeft(10);
        }
        Console.WriteLine(line);
    }
}
/// <summary>
/// When the deltas checkbox is checked and more than one water-year series is
/// present, returns a new list containing the first (baseline) series plus,
/// for each later series, a running delta-difference series relative to the
/// baseline (negative running values are clamped to 0 in the output series);
/// otherwise returns the input list unchanged.
/// </summary>
/// <param name="wyList">water-year series; element 0 is the baseline</param>
/// <param name="waterYears">years used to name the delta series (parallel to wyList)</param>
private SeriesList ApplyDeltas(SeriesList wyList, int[] waterYears)
{
    int sCount = wyList.Count;
    if (checkBoxDeltas.Checked && sCount > 1)
    {
        var deltaList = new SeriesList();
        Series s1 = wyList[0];
        s1.RemoveMissing();
        deltaList.Add(s1);
        for (int sIdx = 1; sIdx < sCount; sIdx++)
        {
            var ithS = wyList[sIdx];
            var deltaS = new Series();
            var deltaNoZeroS = new Series();
            // index of the most recently added point in deltaS; incremented
            // every loop pass regardless of which branch ran
            int deltaCounter = 0;
            // NOTE: ptIdx starts at 1, so the first point of ithS is never examined
            for (int ptIdx = 1; ptIdx < ithS.Count; ptIdx++)
            {
                if (ithS[ptIdx].DateTime < s1.MaxDateTime)
                {
                    // before the baseline's last date: placeholder NaN values
                    deltaS.Add(ithS[ptIdx].DateTime, double.NaN);
                    deltaNoZeroS.Add(ithS[ptIdx].DateTime, double.NaN);
                }
                else if (ithS[ptIdx].DateTime == s1.MaxDateTime)
                {
                    // seed the delta series with the baseline's value at its last date
                    deltaS.Add(s1[ithS[ptIdx].DateTime]);
                    deltaNoZeroS.Add(s1[ithS[ptIdx].DateTime]);
                }
                else if (ithS[ptIdx].DateTime > s1.MaxDateTime)
                {
                    // Filter out projected zeros but keep the running negatives in terms of calculating the evolving
                    // delta-differenced values
                    var calcVal = deltaS[deltaCounter - 1].Value + ithS[ptIdx].Value - ithS[ptIdx - 1].Value;
                    deltaS.Add(ithS[ptIdx].DateTime, calcVal);
                    if (calcVal < 0)
                    {
                        deltaNoZeroS.Add(ithS[ptIdx].DateTime, 0);
                    }
                    else
                    {
                        deltaNoZeroS.Add(ithS[ptIdx].DateTime, calcVal);
                    }
                }
                deltaCounter++;
            }
            deltaNoZeroS.Units = s1.Units;
            deltaNoZeroS.Name = waterYears[sIdx].ToString("F0") + " deltas";
            // [JR] Displays original data in addition to delta curves
            //wyList.Add(deltaS);
            // [JR] Only shows delta curves
            deltaList.Add(deltaNoZeroS);
        }
        return (deltaList);
    }
    else
    {
        return (wyList);
    }
}
/// <summary>
/// Builds a SeriesList for the requested year/month. For the sentinel year
/// 9999 with average data requested, each configured "cbtt pcode" entry
/// yields a single-point monthly series holding its 30-year average;
/// otherwise the configured list is created and read normally.
/// </summary>
private SeriesList CreateAndRead(int year, int month, bool getAverageData = false)
{
    if (year != 9999 || !getAverageData)
    {
        var seriesList = CreateSeriesList(getAverageData);
        seriesList.Read(year, month);
        return seriesList;
    }
    var rval = new SeriesList();
    for (int i = 0; i < cbttPodes.Count; i++)
    {
        var tokens = cbttPodes[i].Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
        var cbtt = tokens[0].Trim();
        var pcode = tokens[1].Trim();
        var d = HydrometMonthlySeries.AverageValue30Year(cbtt, pcode, month, month);
        var s = new Series("", TimeInterval.Monthly);
        s.Add(new DateTime(year, 1, 1), d);
        rval.Add(s);
    }
    return rval;
}
/// <summary>
/// Multiplies every series in the list by the estimation scale factor in place.
/// </summary>
private void Scale(SeriesList d, double estimationScaleFactor)
{
    foreach (var series in d)
    {
        Reclamation.TimeSeries.Math.Multiply(series, estimationScaleFactor);
    }
}
/// <summary>
/// Build a SeriesList with custom annual aggregation.
/// </summary>
/// <param name="sListIn">input traces</param>
/// <param name="aggType">"CY" (calendar year), "WY" (water year), or "XX" (explorer's custom range)</param>
/// <returns>one annually summed series per input trace</returns>
private SeriesList getTraceSums(SeriesList sListIn, string aggType)
{
    SeriesList traceAnalysisList = new SeriesList();
    foreach (var s in sListIn)
    {
        var sNew = new Series();
        switch (aggType)
        {
            case "CY":
                sNew = Reclamation.TimeSeries.Math.AnnualSum(s, new MonthDayRange(1, 1, 12, 31), 1);
                break;
            case "WY":
                sNew = Reclamation.TimeSeries.Math.AnnualSum(s, new MonthDayRange(10, 1, 9, 30), 10);
                break;
            case "XX":
                sNew = Reclamation.TimeSeries.Math.AnnualSum(s, Explorer.MonthDayRange, Explorer.MonthDayRange.Month1);
                break;
            default:
                view.Messages.Add("");
                break;
        }
        sNew.TimeInterval = s.TimeInterval;
        sNew.Units = s.Units;
        traceAnalysisList.Add(sNew);
    }
    return traceAnalysisList;
}
/// <summary>
/// Runs the correlation analysis. Requires exactly two selected series;
/// otherwise reports the problem and returns an empty view. The two series
/// are read, stripped of missing values, subset to the explorer's month/day
/// range and aggregated before being handed to the view.
/// </summary>
public override IExplorerView Run()
{
    SeriesList list = Explorer.CreateSelectedSeries();
    if (list.Count != 2)
    {
        string msg = "Correlation Graph requires exactly two series, there are " + list.Count + " series selected";
        view.Messages.Add(msg);
        Logger.WriteLine(msg);
        view.SeriesList.Clear();
        view.DataTable = new DataTable();
        return view;
    }
    ReadSeriesList(list);
    string title = list.Text.TitleText();
    string subTitle = list.MissingRecordsMessage;
    list.RemoveMissing();
    var subset = list.Subset(Explorer.MonthDayRange);
    subset = subset.AggregateAndSubset(Explorer.StatisticalMethods, Explorer.MonthDayRange, Explorer.BeginningMonth);
    view.Title = title;
    view.SubTitle = subTitle;
    view.SeriesList = subset;
    return view;
}
/// <summary>
/// Builds a UNION ALL query over the series tables starting at startIndex.
/// A table that does not exist contributes an always-empty (where 0=1)
/// placeholder select so the union's column layout is preserved.
/// </summary>
private string BuildUnionSQL(SeriesList list, DateTime t1, DateTime t2, int startIndex)
{
    var sb = new StringBuilder();
    int lastIndex = list.Count - 1;
    for (int i = startIndex; i < list.Count; i++)
    {
        var tableName = list[i].Table.TableName;
        if (db.Server.TableExists(tableName))
        {
            sb.Append(" \nSELECT '" + tableName + "' as tablename, datetime,value,flag FROM " + tableName);
            sb.Append(DateWhereClause(t1, t2));
        }
        else
        {
            // placeholder keeps the UNION shape when the table is absent
            sb.Append(" \nSELECT '" + tableName + "' as tablename , current_timestamp as datetime, -998877.0 as value, '' as flag where 0=1 ");
        }
        if (i != lastIndex)
        {
            sb.Append(" UNION ALL \n");
        }
    }
    return sb.ToString();
}
/// <summary>
/// Routes a list of Series as a group.
/// hydromet cbtt is copied from list[i].SiteName
/// hydromet pcode is copied from list[i].Parameter
/// </summary>
/// <param name="list">series to route</param>
/// <param name="name">identity used as part of filename</param>
/// <param name="route">route outgoing, incoming, or both</param>
public static void RouteDaily(SeriesList list, string name, RouteOptions route = RouteOptions.Both)
{
    if (list.Count == 0)
        return;
    bool sendOutgoing = route == RouteOptions.Both || route == RouteOptions.Outgoing;
    bool sendIncoming = route == RouteOptions.Both || route == RouteOptions.Incoming;
    if (sendOutgoing)
    {
        // the whole group goes into one arc-import file
        string fileName = GetOutgoingFileName("daily", name, "all");
        Console.WriteLine("saving daily outgoing to:" + fileName);
        HydrometDailySeries.WriteToArcImportFile(list, fileName);
    }
    if (sendIncoming)
    {
        // incoming routing writes one csv per series
        foreach (var s in list)
        {
            string fileName = GetIncommingFileName("daily", s.SiteID, s.Parameter);
            Console.WriteLine("saving daily incoming to:" + fileName);
            s.WriteCsv(fileName, true);
        }
    }
}
/// <summary>
/// Adds each rule curve in the list to the TeeChart as a green labeled line
/// (dashed when requested) and makes visible a single mark, labeled with the
/// series name, at the point whose month/day matches the corresponding entry
/// in labelDates.
/// </summary>
/// <param name="ruleCurves">curves to draw; parallel to labelDates</param>
/// <param name="labelDates">per-curve date whose month/day selects the labeled point</param>
/// <param name="dashed">draw the lines with a dashed pen</param>
private void AddRuleCurves(SeriesList ruleCurves, DateTime[] labelDates, bool dashed)
{
    for (int i = 0; i < ruleCurves.Count; i++)
    {
        var s = ruleCurves[i];
        var ts = new Steema.TeeChart.Styles.Line(tChart1.Chart);
        // marks start visible but individual items default hidden; only the
        // chosen index is switched on below
        ts.Marks.Visible = true;
        ts.Color = System.Drawing.Color.Green;
        ts.Marks.Style = Steema.TeeChart.Styles.MarksStyles.Label;
        ts.Marks.Arrow.Visible = false;
        ts.Marks.ArrowLength = 0;
        ts.Legend.Visible = false;
        if (dashed)
        {
            ts.LinePen.Style = System.Drawing.Drawing2D.DashStyle.Dash;
        }
        ts.Title = s.Name;
        ReadIntoTChart(s, ts);
        //int idx = FindLabelIndex(s);
        int idx = FindLabelIndex(s, labelDates[i].Month, labelDates[i].Day);
        // NOTE(review): index 0 and the last mark are excluded by this guard —
        // confirm whether the curve's end points are intentionally never labeled
        if (idx > 0 && idx < ts.Marks.Items.Count - 1)
        {
            ts[idx].Label = s.Name;
            //ts.Marks.Items[idx].Text = s.Name;
            ts.Marks.Items[idx].Visible = true;
        }
    }
}
/// <summary>
/// Writes the hydromet text header, a BEGIN DATA marker, and an upper-cased
/// column-header line of site/parameter codes (with a TIME column for
/// irregular/hourly data).
/// </summary>
public override void WriteSeriesHeader(SeriesList list)
{
    WriteLine(HydrometWebUtility.HydrometHeader());
    WriteLine("BEGIN DATA");
    string headLine = "DATE ";
    if (m_interval == TimeInterval.Irregular || m_interval == TimeInterval.Hourly)
    {
        headLine = "DATE TIME ";
    }
    foreach (var item in list)
    {
        var tsName = new TimeSeriesName(item.Table.TableName);
        // daily output uses narrower (4-char) station/pcode columns than the
        // other intervals (8-char), mirroring the legacy fprintf formatting
        if (m_interval == TimeInterval.Daily)
        {
            headLine += delim + " " + tsName.siteid.PadRight(4) + " " + tsName.pcode.PadRight(4);
        }
        else
        {
            headLine += delim + " " + tsName.siteid.PadRight(8) + "" + tsName.pcode.PadRight(8);
        }
    }
    WriteLine(headLine.ToUpper());
}
/// <summary>
/// Builds raw and/or 24-hour and 120-hour moving-average series for each
/// selected series and hands the combined list to the view.
/// </summary>
public override IExplorerView Run()
{
    Logger.WriteLine("MovingAverageAnalysis.Run()");
    SeriesList list = Explorer.CreateSelectedSeries();
    ReadSeriesList(list);
    view.Messages.Add(list.MissingRecordsMessage);
    var output = new SeriesList();
    foreach (var s in list)
    {
        if (Explorer.PlotRaw)
            output.Add(s);
        if (Explorer.PlotMoving24HourAverage)
            output.Add(Math.MovingAvearge(s, 24));
        if (Explorer.PlotMoving120HourAverage)
            output.Add(Math.MovingAvearge(s, 120));
    }
    view.Title = "Moving Average\n" + list.Text.TitleText();
    view.SubTitle = list.MissingRecordsMessage;
    view.SeriesList = output;
    view.DataTable = output.ToDataTable(true);
    return view;
}
/// <summary>
/// prints csv table in this format.
/// DateTime,current year, previous, average
/// 10/1/2017, 123.34, 69.0, 77.7
/// 10/2/2017, 120.0, 67.0, 77.3
/// </summary>
private string PrintAnalysis(string siteID, string parameter)
{
    int currentWY = DateTime.Now.Date.WaterYear();
    var years = new List<int> { currentWY, currentWY - 1 };
    DateTime startOf30YearAvearge = HydrometDataUtility.T1Thirty;
    var x = new SeriesList();
    var s = db.GetSeriesFromTableName("daily_" + siteID + "_" + parameter);
    if (s == null)
    {
        return "Error: no data found: " + siteID + "/" + parameter;
    }
    x.Add(s);
    var result = PiscesAnalysis.WaterYears(x, years.ToArray(), true, 10, true, startOf30YearAvearge);
    var tbl = result.ToDataTable(true);
    var sb = new StringBuilder();
    sb.Append("DateTime,Current Year,Previous Year,Average");
    foreach (DataRow o in tbl.Rows)
    {
        sb.AppendLine();
        var str = ((DateTime)o[0]).ToString("yyyy/MM/dd");
        sb.Append(str + "," + o[1].ToString() + "," + o[2].ToString() + "," + o[3].ToString());
    }
    return sb.ToString();
}
/// <summary>
/// Handles the Calculate click: calculates the selected series, or every
/// series under a single selected folder; multiple folders are rejected.
/// </summary>
private void CalculateClick(object sender, EventArgs e)
{
    int folderCount = tree1.SelectedFolders.Length;
    if (folderCount == 0)
    {
        Series[] selected = tree1.GetSelectedSeries();
        ProcessSelectedSeries(SeriesProcess.Calculate, selected);
    }
    else if (folderCount == 1)
    {
        var list = new SeriesList();
        foreach (Series s in tree1.GetSeriesRecursive())
        {
            list.Add(s);
        }
        ProcessSelectedSeries(SeriesProcess.Calculate, list.ToArray());
    }
    else
    {
        MessageBox.Show("Please select a single folder to calculate.");
        ClearDisplay();
        return;
    }
    DrawBasedOnTreeSelection();
}
/// <summary>Series-title extraction menu item click event handler.</summary>
/// <param name="sender">event source object</param>
/// <param name="e">event information</param>
private void SeriesTitleMenuItem_Click(object sender, EventArgs e)
{
    // start from the title of the first selected item
    string seriesName = this.books.SeriesItems[this.SeriesListView.SelectedIndices[0]].Title;
    SeriesList selectedList = (SeriesList)this.SeriesListView.SelectedIndices.Cast<int>()
        .Select(i => this.books.SeriesItems[i]).ToList();
    // narrow to the leading title fragment shared by all selected items
    seriesName = selectedList.GetBeginWithMatchTitle(seriesName);
    if (seriesName == string.Empty)
    {
        MessageBox.Show(Resources.ErrorSearchTitleError, Application.ProductName,
            MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }
    // confirm with the user before applying the extracted title
    string name = Resources.InfoDetailListIsSetTitle.FormatWith(seriesName);
    DialogResult result = MessageBox.Show(this, name, Application.ProductName, MessageBoxButtons.YesNo);
    if (result != DialogResult.Yes)
    {
        return;
    }
    // apply the title to every searched item and refresh the view
    this.Books.SearchedItems.ToList().ForEach(m => m.ReplaceTitle(seriesName));
    this.Books.Refresh();
}
/// <summary>
/// Plots each series against its exceedance percent as a red curve on a
/// linear x-axis, then refreshes the chart.
/// </summary>
public void DrawSorted(SeriesList list, string title, string subTitle, string xAxisTitle)
{
    Clear();
    foreach (Series s in list)
    {
        var points = new PointPairList();
        foreach (var pt in s)
        {
            points.Add(pt.Percent, pt.Value);
        }
        LineItem curve = pane.AddCurve(s.Appearance.LegendText, points, Color.Red);
        curve.Symbol.Fill.Type = FillType.None;
    }
    pane.XAxis.Title.Text = xAxisTitle;
    pane.XAxis.Type = AxisType.Linear;
    pane.YAxis.Scale.Mag = 0;
    pane.YAxis.Scale.Format = "#,#";
    pane.XAxis.Scale.Format = "";
    LabelYaxis(list);
    chart1.AxisChange();
    chart1.Refresh();
}
/// <summary>
/// Writes the queried series as text: header, data read and printed in
/// chunks (sized by interval so memory stays bounded), then trailer.
/// </summary>
private void WriteSeries(SeriesList list)
{
    m_formatter.WriteSeriesHeader(list);
    int daysStored = 30;
    if (m_formatter.Interval == TimeInterval.Daily)
    {
        daysStored = 3650; // 10 years
    }
    if (m_formatter.Interval == TimeInterval.Monthly)
    {
        daysStored = 36500;
    }
    var timeRange = new TimeRange(start, end);
    foreach (TimeRange chunk in timeRange.Split(daysStored))
    {
        var tbl = Read(list, chunk.StartDate, chunk.EndDate, m_formatter.Interval, m_formatter.OrderByDate);
        m_formatter.PrintDataTable(list, tbl);
    }
    m_formatter.WriteSeriesTrailer();
}
/// <summary>
/// Reads hydromet daily data for each "cbtt pcode" pair in the comma
/// separated query over the given date range. Series that are empty after
/// removing missing values are skipped.
/// </summary>
private static SeriesList ReadHydrometOptionalData(int wy, string query, DateRange range)
{
    var rval = new SeriesList();
    foreach (var pair in query.Split(','))
    {
        var trimmed = pair.TrimStart().TrimEnd();
        string cbtt = trimmed.Split(' ')[0];
        string pcode = trimmed.Split(' ')[1];
        Series hm = new HydrometDailySeries(cbtt, pcode);
        hm.Name = Convert.ToString(wy) + " " + cbtt.ToUpper() + " " + pcode.ToUpper();
        hm.Read(range.DateTime1, range.DateTime2);
        hm.RemoveMissing();
        if (hm.Count > 0)
        {
            rval.Add(hm);
        }
    }
    return rval;
}
/// <summary>
/// Opens the Add Series window and appends any series it raises through
/// SeriesAdded to the bound SeriesList.
/// </summary>
private void AddSeries_Click(object sender, RoutedEventArgs e)
{
    var addSeriesWindow = new AddSeriesWindow();
    // lambda parameter renamed from 'e' so it no longer shadows the handler's
    // own 'e' parameter (CS0136 on older compilers, and confusing regardless)
    addSeriesWindow.SeriesAdded += (_, args) => SeriesList.Add(args.Entity);
    addSeriesWindow.Show();
}
/// <summary>
/// Plots every series against its exceedance percent (one colored curve per
/// series), formats the numeric x-axis, and refreshes the chart.
/// </summary>
public void DrawSorted(SeriesList list, string title, string subTitle, string xAxisTitle)
{
    Clear();
    if (list.Count == 0)
    {
        return;
    }
    foreach (Series item in list)
    {
        var pairList = new PointPairList();
        foreach (var point in item)
        {
            pairList.Add(point.Percent, point.Value);
        }
        var lineColor = Default.GetSeriesColor(pane.CurveList.Count);
        LineItem line = pane.AddCurve(item.Appearance.LegendText, pairList, lineColor);
        line.Symbol.IsVisible = false;
        line.Line.Width = Default.GetSeriesWidth(pane.CurveList.Count);
    }
    pane.Title.Text = title + "\n" + subTitle;
    pane.XAxis.Title.Text = xAxisTitle;
    FormatBottomAxisNumeric();
    pane.XAxis.Scale.Format = "";
    pane.XAxis.Scale.MajorStep = 5;
    FormatYAxisStandard();
    SetPaneVisible(true);
    LabelYaxis(list);
    RefreshChart(chart1);
}
/// <summary>
/// Builds the selected series (optionally merging yearly traces and applying
/// statistical aggregation) and hands the result to the graph view.
/// </summary>
public override IExplorerView Run()
{
    SeriesList list = Explorer.CreateSelectedSeries();
    ReadSeriesList(list);
    if (Explorer.SelectedSeries.Length == 1 && Explorer.MergeSelected)
    {
        // collapse single-year traces into one continuous series
        var merged = list.MergeYearlyScenarios();
        list = new SeriesList();
        list.Add(merged);
    }
    SeriesList display = list;
    if (Explorer.StatisticalMethods != StatisticalMethods.None)
    {
        display = list.AggregateAndSubset(Explorer.StatisticalMethods, Explorer.MonthDayRange, Explorer.BeginningMonth);
    }
    Logger.WriteLine("Drawing Graph");
    if (display.Count == 1 && display[0].TimeInterval == TimeInterval.Monthly)
    {
        display.DateFormat = "MMM-yyyy";
    }
    view.SeriesList = display;
    string title = list.Text.TitleText();
    if (Explorer.SubtractFromBaseline)
    {
        title = "Subtract Reference \n" + title;
    }
    view.Title = title;
    view.SubTitle = list.MissingRecordsMessage;
    return view;
}
/// <summary>
/// Builds 24-hour / 120-hour moving-average series (and optionally the raw
/// series) for each selected series and passes them to the view.
/// </summary>
public override IExplorerView Run()
{
    Logger.WriteLine("MovingAverageAnalysis.Run()");
    SeriesList selected = Explorer.CreateSelectedSeries();
    ReadSeriesList(selected);
    view.Messages.Add(selected.MissingRecordsMessage);
    var plotted = new SeriesList();
    for (int i = 0; i < selected.Count; i++)
    {
        var s = selected[i];
        if (Explorer.PlotRaw)
        {
            plotted.Add(s);
        }
        if (Explorer.PlotMoving24HourAverage)
        {
            plotted.Add(Math.MovingAvearge(s, 24));
        }
        if (Explorer.PlotMoving120HourAverage)
        {
            plotted.Add(Math.MovingAvearge(s, 120));
        }
    }
    view.Title = "Moving Average\n" + selected.Text.TitleText();
    view.SubTitle = selected.MissingRecordsMessage;
    view.SeriesList = plotted;
    view.DataTable = plotted.ToDataTable(true);
    return (view);
}
/// <summary>
/// Estimates missing data by computing the percent
/// of normal of surrounding data (by month).
/// if all stations are missing use the average
/// </summary>
/// <param name="list">group of series; calculation series are excluded from estimation</param>
/// <param name="forecastDate">only points dated at or before the end of this month are estimated</param>
internal static void EstimateMissingByGroup(SeriesList list, DateTime forecastDate)
{
    // filter series list to remove calculation Series
    var subList = RemoveCalculationSeries(list);
    if (subList.Count == 0)
    {
        return;
    }
    // iterates dates by the first series' point index; assumes every series
    // in subList is aligned on the same dates — TODO confirm with callers
    for (int dateIndex = 0; dateIndex < subList[0].Count; dateIndex++)
    {
        double percentOfNormal = ComputePercentOfNormal(subList, dateIndex);
        foreach (var s in subList)
        {
            string cbtt = s.ConnectionStringToken("cbtt");
            string pcode = s.ConnectionStringToken("pcode");
            if (s[dateIndex].IsMissing && s[dateIndex].DateTime <= forecastDate.EndOfMonth())
            {
                // estimate = long-term monthly average scaled by the group's
                // percent of normal at this date
                var pt = HydrometMonthlySeries.ReadAverageValue(cbtt, pcode, s[dateIndex].DateTime);
                if (!pt.IsMissing)
                {
                    pt.Value = pt.Value * percentOfNormal;
                    s[dateIndex] = pt;
                    Console.WriteLine("using estimated data " + cbtt + " " + pcode + " " + s[dateIndex].DateTime.ToString("yyyy MMM"));
                }
            }
        }
    }
}
/// <summary>
/// Shows a graph window for the accumulated series, then clears the list so
/// the next run starts fresh.
/// </summary>
private void Button_Click_ShowGraph(object sender, RoutedEventArgs e)
{
    var graphWindow = new GraphWindow(GraphName.Text, "epoch", "error", SeriesList);
    graphWindow.Show();
    SeriesList.Clear();
}
/// <summary>
/// prints csv table in this format.
/// DateTime,current year, previous, average
/// 10/1/2017, 123.34, 69.0, 77.7
/// 10/2/2017, 120.0, 67.0, 77.3
/// </summary>
private void PrintAnalysis(TimeRange r, string siteID, string parameter)
{
    Console.Write("Content-type: text/csv\n\n");
    int currentWY = DateTime.Now.Date.WaterYear();
    var years = new List<int> { currentWY, currentWY - 1 };
    DateTime startOf30YearAvearge = HydrometDataUtility.T1Thirty;
    var x = new SeriesList();
    Series s = new HydrometDailySeries(siteID, parameter, HydrometHost.PNLinux);
    x.Add(s);
    var result = PiscesAnalysis.WaterYears(x, years.ToArray(), true, 10, true, startOf30YearAvearge);
    var tbl = result.ToDataTable(true);
    Console.WriteLine("DateTime,Current Year,Previous Year,Average");
    foreach (DataRow o in tbl.Rows)
    {
        var str = ((DateTime)o[0]).ToString("yyyy/MM/dd");
        Console.WriteLine(str + "," + o[1].ToString() + "," + o[2].ToString() + "," + o[3].ToString());
    }
}
/// <summary>
/// Builds a SeriesList from the parallel cbtt/pcode/interval arrays,
/// creating a monthly, daily, or instant hydromet series per entry depending
/// on its interval code.
/// </summary>
/// <param name="server">hydromet server the series will read from</param>
public SeriesList CreateSeries(HydrometHost server)
{
    SeriesList rval = new SeriesList();
    for (int i = 0; i < Count; i++)
    {
        // "mpoll"/"m" -> monthly
        if (m_interval[i] == "mpoll" || m_interval[i] == "m")
        {
            rval.Add(new HydrometMonthlySeries(m_cbtt[i], m_pcode[i], server));
        }
        // "daily"/"arc"/"d" -> daily
        if (m_interval[i] == "daily" || m_interval[i] == "arc" || m_interval[i] == "d")
        {
            rval.Add(new HydrometDailySeries(m_cbtt[i], m_pcode[i], server));
        }
        // NOTE(review): "day" mapping to an *instant* series looks suspicious
        // given "daily"/"d" above create daily series — confirm the intended
        // alias before changing
        if (m_interval[i] == "day" || m_interval[i] == "instant" || m_interval[i] == "i")
        {
            rval.Add(new HydrometInstantSeries(m_cbtt[i], m_pcode[i], server));
        }
    }
    return (rval);
}
/// <summary>
/// Print DataTable composed of tablename,datetime,value[,flag]
/// in wiski ZRXP format.
/// </summary>
/// <param name="list">unused here; part of the formatter interface</param>
/// <param name="table">rows assumed grouped by tablename — TODO confirm ordering</param>
public override void PrintDataTable(SeriesList list, System.Data.DataTable table)
{
    // the flag column is optional; a 3-column table yields empty flags
    bool hasFlagCoumn = table.Columns.Count == 4;
    string prevTableName = "";
    for (int i = 0; i < table.Rows.Count; i++)
    {
        var row = table.Rows[i];
        var tableName = row[0].ToString();
        var flag = "";
        if (hasFlagCoumn)
            flag = FormatFlag(row[3]);
        // emit a new ZRXP block header whenever the series (tablename) changes
        if (tableName != prevTableName)
        {
            string seriesID = tableName.ToUpper().Replace("INSTANT_", "");
            seriesID = seriesID.Replace("DAILY", "");
            WriteLine("\n#REXCHANGE" + seriesID + "|*|RTIMELVLhigh-resolution|*|RINVAL-777|*|");
            prevTableName = tableName;
        }
        string line = FormatDate(row[1]);
        // -777 is the invalid-value sentinel declared via RINVAL in the header
        if (!GoodFlag(flag))
            line += " -777 ";
        else
            line += " " + FormatNumber(row[2]);
        WriteLine(line);
    }
}
/// <summary>
/// Creates a SeriesList containing one USGS real-time series per site for
/// the given parameter.
/// </summary>
public static SeriesList GetSeries(string[] sites, UsgsRealTimeParameter parameter)
{
    var rval = new SeriesList();
    for (int i = 0; i < sites.Length; i++)
    {
        rval.Add(new UsgsRealTimeSeries(sites[i], parameter));
    }
    return rval;
}
/// <summary>
/// Creates a list of water year based data all aligned to year 2000
/// to allow comparison.
/// </summary>
/// <param name="list">input series</param>
/// <param name="years">water years</param>
/// <param name="avg30yr">when true also includes 30 year average. If only 5 years are avaliable a 5 year average is created</param>
/// <param name="beginningMonth">series starting month number</param>
/// <param name="alwaysShiftTo2000">force the year-2000 shift even for a single requested year</param>
/// <returns>per-year series (usually shifted to 2000) plus optional multi-year average</returns>
public static SeriesList WaterYears(SeriesList list, int[] years, bool avg30yr, int beginningMonth, bool alwaysShiftTo2000 = false)
{
    SeriesList wySeries = new SeriesList();
    for (int j = 0; j < list.Count; j++)
    {
        for (int i = 0; i < years.Length; i++)
        {
            YearRange yr = new YearRange(years[i], beginningMonth);
            Series s = list[j];
            // the same underlying series object is cleared and re-read per year
            s.Clear();
            s.Read(yr.DateTime1, yr.DateTime2);
            Logger.WriteLine("Read() " + yr.ToString() + " count = " + s.Count);
            foreach (string msg in s.Messages)
            {
                Logger.WriteLine(msg);
            }
            if (s.Count > 0 && s.CountMissing() != s.Count)
            {
                // shift to year 2000 so different years overlay on one axis
                Series s2 = TimeSeries.Math.ShiftToYear(s, 2000);
                if (years.Length == 1 && !alwaysShiftTo2000 && !avg30yr)
                {
                    // single-year request with no average: keep original dates
                    s2 = s;
                }
                if (list.HasMultipleSites)
                    s2.Appearance.LegendText = years[i].ToString() + " " + list[j].Name;
                else
                    s2.Appearance.LegendText = years[i].ToString();
                wySeries.Add(s2);
            }
            else
            {
                Logger.WriteLine("year :" + years[i] + "skipping series with no data " + s.Name + " " + s.Parameter);
            }
        }
        if (avg30yr)
        {
            // read the trailing 30 years and append the multi-year daily average
            list[j].Read(DateTime.Now.Date.AddYears(-30), DateTime.Now.Date);
            Series s30 = Math.MultiYearDailyAverage(list[j], beginningMonth);
            if (s30.Count > 0)
                wySeries.Add(s30);
        }
    }
    wySeries.Type = SeriesListType.WaterYears;
    if (wySeries.Count > 1)
    {
        wySeries.DateFormat = "MM/dd";
    }
    return wySeries;
}
/// <summary>
/// Runs the summary hydrograph analysis: builds exceedance/min/max/avg
/// summary curves for the selected series, optionally overlaying one
/// specific year as its own series.
/// </summary>
public override IExplorerView Run()
{
    Logger.WriteLine("SummaryHydrographAnalysis.Run()");
    SeriesList list = Explorer.CreateSelectedSeries();
    ReadSeriesList(list);
    if (Explorer.SelectedSeries.Length == 1 && Explorer.MergeSelected)
    {
        // merge single Year Traces.
        list.RemoveMissing();
        var s = list.MergeYearlyScenarios();
        list = new SeriesList();
        list.Add(s);
    }
    view.Messages.Add(list.MissingRecordsMessage);
    string title = list.Text.TitleText();
    string subTitle = list.MissingRecordsMessage;
    SeriesList myList = new SeriesList();
    list.RemoveMissing();
    if (Explorer.AlsoPlotYear && list.Count == 1)
    {
        // overlay the chosen plot year alongside the summary curves
        YearRange yearRng = new YearRange(Explorer.PlotYear, Explorer.BeginningMonth);
        DateTime t1 = yearRng.DateTime1;
        DateTime t2 = yearRng.DateTime2;
        Series s = Math.Subset(list[0], t1, t2);
        s.Appearance.LegendText = yearRng.Year.ToString();
        view.Messages.Add(yearRng.Year.ToString() + " included as separate series ");
        myList.Add(s);
        myList.Add(list.SummaryHydrograph(Explorer.ExceedanceLevels, t1, Explorer.PlotMax, Explorer.PlotMin, Explorer.PlotAvg, true));
    }
    else
    {
        // summary aligned to the first day of the beginning month this year
        DateTime t = new DateTime(DateTime.Now.Year, Explorer.BeginningMonth, 1);
        myList = list.SummaryHydrograph(Explorer.ExceedanceLevels, t, Explorer.PlotMax, Explorer.PlotMin, Explorer.PlotAvg, true);
    }
    Explorer.WriteProgressMessage("drawing graph", 80);
    view.Title = title;
    view.SubTitle = subTitle;
    view.SeriesList = myList;
    view.DataTable = myList.ToDataTable(true);
    return view;
}
/// <summary>
/// Writes an HTML PRE opener followed by a comma separated header line of
/// siteid_pcode column names derived from each series' table name.
/// </summary>
public override void WriteSeriesHeader(SeriesList list)
{
    WriteLine("<PRE>");
    var header = new StringBuilder("DateTime");
    foreach (var item in list)
    {
        var tsName = new TimeSeriesName(item.Table.TableName);
        header.Append("," + tsName.siteid + "_" + tsName.pcode);
    }
    WriteLine(header.ToString());
}
/// <summary>
/// Writes the opening HTML table tag and, when headers are enabled, a header
/// row: DateTime plus one siteid_pcode column (and an optional flag column)
/// per series.
/// </summary>
public override void WriteSeriesHeader(SeriesList list)
{
    WriteLine("<table border=1>");
    if (!m_printHeader)
    {
        return;
    }
    WriteLine("<tr>");
    WriteLine("<th>DateTime</th>");
    foreach (var item in list)
    {
        var tsName = new TimeSeriesName(item.Table.TableName);
        WriteLine("<th>" + tsName.siteid + "_" + tsName.pcode + "</th>");
        if (PrintFlags)
        {
            WriteLine("<th>flag</th>");
        }
    }
    WriteLine("</tr>");
}
/// <summary>
/// Print DataTable composed of tablename,datetime,value[,flag]
/// with columns for each tablename.
/// </summary>
/// <param name="list">series whose table names define the output columns</param>
/// <param name="table">rows ordered by date, one row per (tablename, datetime)</param>
public virtual void PrintDataTable(SeriesList list, DataTable table)
{
    var t0 = "";
    if (table.Rows.Count > 0)
        t0 = FormatDate(table.Rows[0][1]);
    var vals = new string[list.Count];
    var flags = new string[list.Count];
    // map tablename -> output column index
    var dict = new Dictionary<string, int>();
    for (int i = 0; i < list.Count; i++)
    {
        dict.Add(list[i].Table.TableName, i);
    }
    string t = "";
    bool printThisRow = false;
    for (int i = 0; i < table.Rows.Count; i++)
    {
        var row = table.Rows[i];
        t = FormatDate(row[1]);
        // a new date means the previous date's row is complete: flush it and
        // reset the per-column accumulators
        if (t != t0)
        {
            if (printThisRow)
                PrintRow(t0, vals, flags);
            vals = new string[list.Count];
            flags = new string[list.Count];
            t0 = t;
        }
        vals[dict[row[0].ToString()]] = FormatNumber(row[2]);
        // NOTE(review): row[3] is read unconditionally — unlike the ZRXP
        // formatter there is no column-count guard here; confirm the flag
        // column is always present in tables passed to this method
        flags[dict[row[0].ToString()]] = FormatFlag(row[3]);
        DateTime date = Convert.ToDateTime(row[1]);
        bool topOfHour = date.Minute == 0;
        // when HourlyOnly is set, only rows at the top of the hour are printed
        printThisRow = HourlyOnly == false || (HourlyOnly && topOfHour);
    }
    // flush the final accumulated row
    if (printThisRow)
        PrintRow(t, vals, flags);
}
/// <summary>
/// Exercises the piecewise polynomial rating for Lind Coulee Wasteway 1:
/// computes flow from daily-average stage, and separately computes
/// instantaneous flow then its time-weighted daily average, writing all
/// three series to the console for comparison.
/// </summary>
public void PiecewisePolynomialRatingEquationLindCouleeWasteway1()
{
    // rating segments; each label string describes the stage interval covered
    PolynomialEquation eq1 = new PolynomialEquation(
        new double[] { 0.0 }, -1.0, 1.86, "-1 < stage <= 1.86 ");
    PolynomialEquation eq2 = new PolynomialEquation(
        new double[] { -28.4314, 15.2857 }, 1.861, 2.00, " 1.86 < stage <= 2.0");
    PolynomialEquation eq3 = new PolynomialEquation(
        new double[] { -0.3522, 88.1421, -96.6995, 31.4217, -2.3978 }, 2.001, 6.00, " 2.0 < stage <= 6.0 ");
    PolynomialEquation eq4 = new PolynomialEquation(
        new double[] { -769.4138, 249.0490 }, 6.001, 10.00, " 6.0 < stage ");
    PolynomialEquation[] equationList = { eq1, eq2, eq3, eq4 };
    Series s = TestData.LindCouleeWW1DailyAverageStage2004;
    Series instant = TestData.LindCouleeWW1InstantanousStage2004;
    DateTime t1 = new DateTime(2004, 1, 2);
    DateTime t2 = new DateTime(2004, 12, 18); // at 12:00 am.. will capture 17th..not 18 th
    // compute polynomial based on daily average stage.
    Series p = Math.Polynomial(s, equationList, t1, t2);
    // compute instantanious flow first
    Series p2 = Math.Polynomial(instant, equationList, t1, t2);
    // get average second
    Series avg = Math.TimeWeightedDailyAverage(p2);
    SeriesList list = new SeriesList();
    list.Add(s);
    list.Add(p);
    list.Add(avg);
    list.WriteToConsole();
    //p.WriteToConsole();
}
/// <summary>
/// Runs trace analysis: requires at least 10 traces, then produces either
/// exceedance curves or annual-sum aggregations (driven by the explorer's
/// radio selections) and hands the result to the view.
/// </summary>
public override IExplorerView Run()
{
    Logger.WriteLine("TraceAnalysis.Run()");
    SeriesList list = Explorer.CreateSelectedSeries();
    ReadSeriesList(list);
    string title = list.Text.TitleText();
    string subTitle = list.MissingRecordsMessage;
    // [JR] don't perform trace analysis if trace count < 10...
    if (list.Count < 10)
    {
        view.Messages.Add("Trace exceedance analysis is not available if trace count < 10");
        view.Title = title;
        view.SubTitle = subTitle;
        view.SeriesList = list;
        view.DataTable = list.ToDataTable(true);
        return view;
    }
    // This seems to be common between all the analysis options
    if (Explorer.SelectedSeries.Length == 1 && Explorer.MergeSelected)
    {
        // merge single Year Traces.
        list.RemoveMissing();
        var s = list.MergeYearlyScenarios();
        list = new SeriesList();
        list.Add(s);
    }
    view.Messages.Add(list.MissingRecordsMessage);
    list.RemoveMissing();
    // Initialize the output container
    SeriesList traceAnalysisList = new SeriesList();
    // Get exceedance curves
    if (Explorer.traceExceedanceAnalysis)
    {
        traceAnalysisList = getTraceExceedances(list, Explorer.ExceedanceLevels, Explorer.AlsoPlotTrace,
            Explorer.PlotTrace, Explorer.PlotMinTrace, Explorer.PlotAvgTrace, Explorer.PlotMaxTrace);
    }
    // Get aggregated values
    if (Explorer.traceAggregationAnalysis)
    {
        // map the radio selection to the aggregation code used by getTraceSums;
        // no radio selected leaves sumType empty (getTraceSums handles it)
        string sumType = "";
        if (Explorer.sumCYRadio)
        {
            sumType = "CY";
        }
        else if (Explorer.sumWYRadio)
        {
            sumType = "WY";
        }
        else if (Explorer.sumCustomRangeRadio)
        {
            sumType = "XX";
        }
        else
        {
        }
        traceAnalysisList = getTraceSums(list, sumType);
    }
    // [JR] Add other analysis/report building options here...
    Explorer.WriteProgressMessage("drawing graph", 80);
    view.Title = title;
    view.SubTitle = subTitle;
    view.SeriesList = traceAnalysisList;
    view.DataTable = traceAnalysisList.ToDataTable(true);
    return view;
}
/// <summary>
/// Detects the format of an incoming data file, converts it to a SeriesList,
/// and imports it into the database. Successfully imported files are moved to
/// an 'attic' subdirectory; files that raise an exception are moved to an
/// 'error' subdirectory; unrecognized formats are skipped in place.
/// </summary>
/// <param name="routing">routing options handed to the TimeSeriesImporter</param>
/// <param name="fileName">path of the file to process</param>
private void ProcessFile(RouteOptions routing, string fileName)
{
    string importTag = "import"; // used to make friendly export filename
    try
    {
        TextFile tf = new TextFile(fileName);
        SeriesList sl = new SeriesList();
        if (HydrometInstantSeries.IsValidDMS3(tf))
        {
            importTag = "decodes";
            sl = HydrometInstantSeries.HydrometDMS3DataToSeriesList(tf);
        }
        else if( HydrometDailySeries.IsValidArchiveFile(tf))
        {
            importTag = "htools";
            sl = HydrometDailySeries.HydrometDailyDataToSeriesList(tf);
        }
        else if (LoggerNetFile.IsValidFile(tf))
        {
            LoggerNetFile lf = new LoggerNetFile(tf);
            // only accept LoggerNet files from recognized sites
            if (lf.IsValid && Array.IndexOf(validSites, lf.SiteName) >= 0)
            {
                importTag = lf.SiteName;
                sl = lf.ToSeries(validPcodes);
            }
        }
        //else if (DecodesRawFile.IsValidFile(tf))
        //{
        //    DecodesRawFile df = new DecodesRawFile(tf);
        //    importTag = "raw";
        //    sl = df.ToSeries();
        //}
        else
        {
            Logger.WriteLine("skipped Unknown File Format: " + fileName);
            return;
        }
        m_importer = new TimeSeriesImporter(m_db, routing, m_saveOption);
        Console.WriteLine("Found " + sl.Count + " series in " + fileName);
        foreach (var item in sl)
        {
            Logger.WriteLine(item.Table.TableName);
        }
        if (sl.Count > 0)
        {
            m_importer.Import(sl, m_computeDependencies, m_computeDailyDependencies, importTag);
            // archive the processed file so it is not imported again
            FileUtility.MoveToSubDirectory(Path.GetDirectoryName(fileName), "attic", fileName);
        }
    }
    catch (Exception ex)
    {
        // any failure quarantines the file rather than blocking the batch
        Logger.WriteLine("Error:" + ex.Message);
        Console.WriteLine("Error: skipping file, will move to error subdirectory " + fileName);
        FileUtility.MoveToSubDirectory(Path.GetDirectoryName(fileName), "error", fileName);
    }
}
/// <summary>
/// Draws the given series as water-year traces: clears the chart, creates
/// one empty curve per series, fills each curve, then formats and refreshes.
/// </summary>
public void DrawWaterYears(SeriesList list, string title, string subTitle, bool multiLeftAxis = false)
{
    Clear();
    if (list.Count == 0)
        return;

    CreateSeries(list, title, subTitle, true, multiLeftAxis);

    for (int idx = 0; idx < list.Count; idx++)
    {
        FillTimeSeries(list[idx], chart1.GraphPane.CurveList[idx]);
    }

    FormatBottomAxisDate(false);
    FormatYAxisStandard();
    SetPaneVisible(true);
    LabelYaxis(list);
    RefreshChart(chart1);
}
/// <summary>
/// Creates basic graph with empty series: sets the pane title and adds one
/// empty curve per series in the list (data is filled in later).
/// </summary>
/// <param name="list">series whose names seed the empty curves</param>
/// <param name="title">main chart title</param>
/// <param name="subTitle">second title line</param>
/// <param name="undoZoom">forwarded to Clear()</param>
/// <param name="multiLeftAxis">not referenced by this method's visible body</param>
private void CreateSeries(SeriesList list, string title, string subTitle, bool undoZoom, bool multiLeftAxis)
{
    Clear(undoZoom);
    pane.Title.Text = title + "\n" + subTitle;
    for (int i = 0; i < list.Count; i++)
    {
        // declare the curve inside the loop; the previous code allocated a
        // throwaway LineItem("") before the loop that was always overwritten
        LineItem series = CreateSeries(list.Text.Text[i]);
        pane.CurveList.Add(series);
    }
}
/// <summary>
/// Create a single datatable by reading from multiple tables
/// in one round-trip to the server.
/// </summary>
/// <param name="list">series whose tables are queried</param>
/// <param name="t1">start of the date range</param>
/// <param name="t2">end of the date range</param>
/// <param name="interval">time interval of the series</param>
/// <param name="orderByDate">true to order by datetime first, false by tablename first</param>
/// <returns>combined rows, or an empty table when there is nothing to query</returns>
private DataTable Read(SeriesList list, DateTime t1, DateTime t2, TimeInterval interval, bool orderByDate = true)
{
    string sql = CreateSQL(list, t1, t2, interval, orderByDate);
    if (sql == "")
    {
        // empty series list produces no SQL -> empty result
        return new DataTable();
    }
    DataTable rval = db.Server.Table("tbl", sql);
    return rval;
}
/// <summary>
/// MLR Interpolation Report.
/// Fills missing values in sList[0] by multiple linear regression against
/// every combination of the estimator series sList[1..n]; for each missing
/// date the estimate with the highest correlation that meets fitTolerance
/// is kept, otherwise the date stays missing.
/// Look for '[JR]' in this method to find the code regions that could use a fix or more testing...
/// </summary>
/// <param name="sList">index 0 is the series to fill; indices 1..n are the estimator series</param>
/// <param name="months">months (1-12) whose data participate in the regression</param>
/// <param name="fitTolerance">minimum correlation coefficient required to accept an estimate</param>
/// <param name="fillSelectedMonths">when true, only fill missing dates whose month is in <paramref name="months"/></param>
/// <returns>report text plus the estimated series</returns>
public static MultipleLinearRegressionResults MlrInterpolation(SeriesList sList, int[] months, double fitTolerance, bool fillSelectedMonths = false)
{
    // KT if there is not enough data (for example only 1 pont ) need to ignore that data set?
    MultipleLinearRegressionResults rval = new MultipleLinearRegressionResults();

    // Populate SeriesLists: keep an untouched copy for evaluating the fill
    // equations later (sList itself gets rows deleted below)
    var sListFill = new SeriesList();
    foreach (var item in sList)
    {
        sListFill.Add(item.Copy());
    }

    // Get dates to be filled with interpolated values
    var missing = sList[0].GetMissing();
    if (fillSelectedMonths) //overwrites the 'missing' variable with another Series that only contains the selected dates in the input
    {
        Series missingSubset = new Series();
        foreach (var row in missing)
        {
            if (months.Contains(row.DateTime.Month))
            {
                missingSubset.Add(row);
            }
        }
        missing = missingSubset;
    }

    // Delete common dates where at least 1 data point is missing for any of the input series
    // This is done because the MLR routine does not support missing data. Missing data causes
    // data misalignments and throws off the regression... This section also deletes data for
    // months that are not tagged in the input
    for (int i = sList[0].Count - 1; i >= 0; i--) //start from the bottom of the list to bypass indexing problems
    {
        for (int j = 0; j < sList.Count; j++)
        {
            Point jthPt = sList[j][i];
            if (jthPt.IsMissing || !months.Contains(jthPt.DateTime.Month))
            {
                for (int k = 0; k < sList.Count; k++) //delete this date from all Series in the list
                {
                    sList[k].RemoveAt(i);
                }
                break;
            }
        }
    }

    // Initialize MLR report and populate header
    List<string> mlrOut = new List<string>();
    mlrOut.Add("");
    mlrOut.Add("MLR Output\t\t\t\t\tRun Date: " + DateTime.Now);
    mlrOut.Add("Estimated Series: " + sList[0].Name);
    var sEstimators = "";
    for (int i = 1; i < sList.Count; i++)
    {
        sEstimators = sEstimators + sList[i].Name + ", ";
    }
    mlrOut.Add("Estimator Series: " + sEstimators.Remove(sEstimators.Length - 2));
    mlrOut.Add("Regression Date Range: " + sList[0].MinDateTime + " - " + sList[0].MaxDateTime);
    var monEstimators = "";
    foreach (var item in months)
    {
        monEstimators = monEstimators + item + ", ";
    }
    mlrOut.Add("Months Used: " + monEstimators.Remove(monEstimators.Length - 2));
    mlrOut.Add("");
    mlrOut.Add("====================================================================================");

    // Initialize output SeriesList (one candidate estimate series per combination)
    var sOutList = new SeriesList();

    // Loop through each SeriesList combination for MLR
    // (all combinations of k estimators, for k = 1 .. number of estimators)
    for (int k = 1; k <= sList.Count - 1; k++)
    {
        AllPossibleCombination combinationData = new AllPossibleCombination(sList.Count - 1, k); //uses StackOverflow Class for combinations
        var combinationList = combinationData.GetCombinations();

        // Loop through each combination in the list and run MLR
        foreach (var combo in combinationList)
        {
            // Build MLR method inputs
            // xData is the different Series values that will be used to generate the MLR equation, all index > 0 in the SeriesList. Matrix format
            // yData is the target Series values that is the target for MLR, index = 0 of the SeriesList. Vector format
            double[][] xData = new double[sList[0].Count][];
            double[] yData = new double[sList[0].Count];
            // Loop through the dates to populate the xData and the yData
            for (int i = 0; i < sList[0].Count; i++)
            {
                var jthRow = new List<double>();
                // Loop through each Series in SeriesList
                for (int j = 0; j < combo.Count(); j++)
                {
                    jthRow.Add(sList[combo[j]][i].Value);
                }
                xData[i] = jthRow.ToArray();
                yData[i] = sList[0][i].Value;
            }

            // MLR via Math.Net.Numerics
            double[] mlrCoeffs = MathNet.Numerics.LinearRegression.MultipleRegression.QR(xData, yData, true); //this is more stable than the method below
            //double[] p2 = MathNet.Numerics.Fit.MultiDim(xData, yData, true); //this method is faster but less stable

            // Evaluate fit
            Series sModeled = sList[0].Clone();
            // Equations are of the form y = x1(s1) + x2(s2) + ... + xN the loop handles the inner part of the equation if it exists x2(s2) + ...
            // while the lines before and after the loop handles the first and last terms x1(s1) and xN respectively
            sModeled = sList[combo[0]] * mlrCoeffs[1];
            for (int i = 2; i < mlrCoeffs.Count(); i++)
            {
                sModeled = sModeled + sList[combo[i - 1]] * mlrCoeffs[i];
            } //magic number -1 is used so the correct corresponding Series is used with the correct mlr-coefficient
            sModeled = sModeled + mlrCoeffs[0];
            var rVal = MathNet.Numerics.GoodnessOfFit.R(sModeled.Values, sList[0].Values); //this is the statistic reported by the FORTRAN code
            var rSqd = MathNet.Numerics.GoodnessOfFit.RSquared(sModeled.Values, sList[0].Values); //this is the R-squared for model fit

            // Fill missing dates and generate a SeriesList for final Series output
            var sOut = new Series(); //initialize Series to be added to output SeriesList
            foreach (var fillT in missing)
            {
                double fillVal;
                try
                {
                    // This evaluates the equation generated during the MLR estimation. Same equation-code format as above
                    fillVal = sListFill[combo[0]][fillT.DateTime].Value * mlrCoeffs[1];
                    for (int i = 2; i < mlrCoeffs.Count(); i++)
                    {
                        fillVal = fillVal + sListFill[combo[i - 1]][fillT.DateTime].Value * mlrCoeffs[i];
                    }
                    fillVal = fillVal + mlrCoeffs[0];
                    if (fillVal < 0.0)
                    {
                        sOut.Add(fillT.DateTime, Point.MissingValueFlag, "NoDataForInterpolation");
                    }
                    else
                    {
                        sOut.Add(fillT.DateTime, fillVal, rVal.ToString("F05"));
                    } //[JR] this assigns the R value as the flag, can be switched to R-Squared...
                }
                catch
                {
                    // estimator has no value at this date -> cannot estimate here
                    sOut.Add(fillT.DateTime, Point.MissingValueFlag, "NoDataForInterpolation");
                }
            }

            // Add the output Series to a SeriesList
            sOutList.Add(sOut);

            // Populate report
            mlrOut.Add("");
            string equationString = "MLR Equation: " + sList[0].Name + " = ";
            for (int ithCoeff = 1; ithCoeff < mlrCoeffs.Count(); ithCoeff++)
            {
                equationString = equationString + mlrCoeffs[ithCoeff].ToString("F04") + "(" + sList[combo[ithCoeff - 1]].Name + ") + ";
            }
            equationString = equationString + mlrCoeffs[0].ToString("F04");
            mlrOut.Add(equationString);
            mlrOut.Add("Correlation Coefficient = " + rVal.ToString("F04"));
            mlrOut.Add("R-Squared Coefficient = " + rSqd.ToString("F04"));
            mlrOut.Add("MLR Estimates: ");
            foreach (var item in sOut)
            {
                mlrOut.Add("\t\t" + item.ToString(true));
            }
            mlrOut.Add("");
            mlrOut.Add("------------------------------------------------------------------------------------");
        }
    }

    // Generate MLR report
    //TextFile tf = new TextFile(mlrOut.ToArray());
    //var fn = FileUtility.GetTempFileName(".txt");
    //tf.SaveAs(fn);
    //System.Diagnostics.Process.Start(fn);
    rval.Report = mlrOut.ToArray();

    // Generate output Series
    var sOutFinal = sListFill[0].Copy();
    // Remove the Points to be filled in the original input Series
    for (int i = missing.Count - 1; i >= 0; i--)
    {
        sOutFinal.RemoveAt(sOutFinal.IndexOf(missing[i].DateTime));
    }

    // Find the best fit out of all the estimated values
    // Loops through the dates
    foreach (var sRow in sOutList[0])
    {
        DateTime estT = sRow.DateTime;
        List<double> flagItems = new List<double>(); //container for flag values
        List<double> valItems = new List<double>(); //container for estiamted values
        // Loops through each estimate
        for (int i = 0; i < sOutList.Count; i++)
        {
            Point estPt = sOutList[i][estT];
            valItems.Add(estPt.Value);
            if (estPt.Value < 0.0) //add 0 correlation value if the estimated value < 0, [JR] this prevents the use of this routine to estimate negative values...
            {
                flagItems.Add(0.0);
            }
            else
            {
                // flag holds the correlation coefficient written above
                flagItems.Add(Convert.ToDouble(estPt.Flag));
            }
        }
        var maxFit = flagItems.Max();
        var bestFitVal = valItems[flagItems.IndexOf(maxFit)];
        if (maxFit >= fitTolerance) //add the value if it exceeds the specified tolerance
        {
            sOutFinal.Add(estT, bestFitVal, "E");
        }
        else //add missing since there is no acceptable estimate to fill this missing value
        {
            sOutFinal.AddMissing(estT);
        }
    }

    //return sOutFinal;
    rval.EstimatedSeries = sOutFinal;
    return rval;
}
/// <summary>
/// Builds the UNION ALL portion of the combined query: one SELECT per series
/// table starting at startIndex. Tables that do not exist contribute a
/// zero-row SELECT (where 0=1) so the column layout stays consistent.
/// </summary>
private string BuildUnionSQL(SeriesList list, DateTime t1, DateTime t2, int startIndex)
{
    string sql = "";
    for (int i = startIndex; i < list.Count; i++)
    {
        string tableName = list[i].Table.TableName;
        string select;
        if (db.Server.TableExists(tableName))
        {
            select = " \nSELECT '" + tableName + "' as tablename, datetime,value,flag FROM " + tableName
                   + DateWhereClause(t1, t2);
        }
        else
        {
            // placeholder: returns no rows but preserves the schema
            select = " \nSELECT '" + tableName + "' as tablename , current_timestamp as datetime, -998877.0 as value, '' as flag where 0=1 ";
        }
        sql += select;

        bool isLast = (i == list.Count - 1);
        if (!isLast)
            sql += " UNION ALL \n";
    }
    return sql;
}
/// <summary>
/// Sums the given series element-wise, treating missing values as zero over
/// the combined date range of all inputs.
/// </summary>
/// <param name="items">series to sum; may be empty</param>
/// <returns>the sum, or an empty Series when no input is given</returns>
public static Series SumSetMissingToZero(Series[] items)
{
    var rval = new Series();
    // guard: with no inputs there is no date range to fill; the previous code
    // fell through and queried Min/MaxDateTime of an empty SeriesList
    if (items.Length == 0)
        return rval;

    rval = items[0].Copy();
    SeriesList list = new SeriesList();
    list.AddRange(items);
    rval = Math.FillMissingWithZero(rval, list.MinDateTime, list.MaxDateTime);
    for (int i = 1; i < items.Length; i++)
    {
        rval = rval + Math.FillMissingWithZero(items[i], list.MinDateTime, list.MaxDateTime);
    }
    return rval;
}
/// <summary>
/// Pisces wrapper for MLR interpolation: fills missing values in the first
/// series using the remaining series as estimators, considering all 12 months.
/// </summary>
/// <param name="fitTolerance">minimum correlation required to accept an estimate</param>
/// <param name="s">first series is the target; the rest are estimators</param>
public static Series MLRInterpolationPisces(double fitTolerance, params Series[] s)
{
    SeriesList inputs = new SeriesList();
    inputs.AddRange(s);
    int[] allMonths = new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 };
    var result = Reclamation.TimeSeries.Estimation.MultipleLinearRegression.MlrInterpolation(inputs, allMonths, fitTolerance);
    return result.EstimatedSeries;
}
/// <summary>
/// Verifies TimeWeightedDailyAverage against pre-computed daily averages
/// stored for Lind Coulee WW1 (2004): the summed difference must be near
/// zero and each computed day must match the stored day point-for-point.
/// </summary>
public void LindCoulee2004()
{
    Series s = TestData.LindCouleeWW1InstantanousStage2004;
    //Point pt = Math.Calculator.AverageForDay(s,DateTime.Parse("2004-12-20"));
    Series avg = Math.TimeWeightedDailyAverage(s);
    // Console.WriteLine("avg");
    //avg.WriteToConsole();
    Console.WriteLine(avg[0].DateTime.ToString("yyyy-MM-dd HH:mm:ss.ffff"));
    Console.WriteLine("Math.Calculator.DailyAverage(s).Count = " + avg.Count);
    Series dbAverage = TestData.LindCouleeWW1DailyAverageStage2004;
    Console.WriteLine("TestData.LindCouleeWW1DailyAverageStage2004.Count = " + dbAverage.Count);
    Series diff = avg - dbAverage;
    SeriesList list = new SeriesList();
    list.Add(avg);
    list.Add(dbAverage);
    list.Add(diff);
    list.WriteToConsole();
    Console.WriteLine("summing difference");
    double d = Math.Sum(diff);
    Assert.AreEqual(0, d, 0.1); // actual is about 0.05
    Console.WriteLine("sum of differences = " + d);
    Console.WriteLine("sum of daily " + Math.Sum(avg));
    // computed series has one fewer point than the stored daily series
    Assert.AreEqual(dbAverage.Count - 1, avg.Count);
    for (int i = 0; i < avg.Count; i++)
    {
        // database has one (missing) value at beginning we skip that in comparison
        Assert.AreEqual(dbAverage[i + 1].ToString(), avg[i].ToString());
        Assert.AreEqual(dbAverage[i + 1].Value, avg[i].Value, 0.0001);
        Assert.AreEqual(dbAverage[i + 1].DateTime.Ticks, avg[i].DateTime.Ticks, "on line " + i);
    }
}
/// <summary>
/// Builds a SeriesList for the requested time series names: each series is
/// loaded from the database when its table appears exactly once in the
/// catalog, otherwise an empty placeholder series is used. The table name
/// is set on every entry either way.
/// </summary>
private SeriesList CreateSeriesList()
{
    TimeInterval interval = m_formatter.Interval;
    TimeSeriesName[] names = GetTimeSeriesName(m_collection, interval);

    var tableNames = names.Select(n => n.GetTableName()).ToArray();
    var catalog = db.GetSeriesCatalog("tablename in ('" + String.Join("','", tableNames) + "')");

    SeriesList rval = new SeriesList();
    foreach (var tsName in names)
    {
        string tableName = tsName.GetTableName();
        Series s = new Series();
        s.TimeInterval = interval;
        if (catalog.Select("tablename = '" + tableName + "'").Length == 1)
        {
            s = db.GetSeriesFromTableName(tableName);
        }
        s.Table.TableName = tableName;
        rval.Add(s);
    }
    return rval;
}
/// <summary>
/// Gets the queried series and generates simple text output, reading the
/// data in date-range chunks to limit memory usage; chunk size grows with
/// coarser intervals.
/// </summary>
/// <param name="list">series to print</param>
private void WriteSeries(SeriesList list)
{
    m_formatter.WriteSeriesHeader(list);

    var interval = m_formatter.Interval;

    // days per chunk: larger windows for coarser intervals
    int chunkDays;
    switch (interval)
    {
        case TimeInterval.Daily:
            chunkDays = 3650; // 10 years
            break;
        case TimeInterval.Monthly:
            chunkDays = 36500;
            break;
        default:
            chunkDays = 30;
            break;
    }

    TimeRange fullRange = new TimeRange(start, end);
    foreach (TimeRange chunk in fullRange.Split(chunkDays))
    {
        var tbl = Read(list, chunk.StartDate, chunk.EndDate, interval, m_formatter.OrderByDate);
        m_formatter.PrintDataTable(list, tbl);
    }
    m_formatter.WriteSeriesTrailer();
}
/// <summary>
/// Build a SeriesList with custom aggregation
/// </summary>
/// <param name="sListIn">input traces</param>
/// <param name="aggType">"CY" calendar year, "WY" water year, "XX" custom range from the Explorer</param>
/// <returns>annually aggregated traces</returns>
private SeriesList getTraceSums(SeriesList sListIn, string aggType)
{
    SeriesList traceAnalysisList = new SeriesList();
    foreach (var s in sListIn)
    {
        var sNew = new Series();
        switch (aggType)
        {
            case "CY":
                sNew = Reclamation.TimeSeries.Math.AnnualSum(s, new MonthDayRange(1, 1, 12, 31), 1);
                break;
            case "WY":
                sNew = Reclamation.TimeSeries.Math.AnnualSum(s, new MonthDayRange(10, 1, 9, 30), 10);
                break;
            case "XX":
                sNew = Reclamation.TimeSeries.Math.AnnualSum(s, Explorer.MonthDayRange, Explorer.MonthDayRange.Month1);
                break;
            default:
                view.Messages.Add("");
                break;
        }
        sNew.TimeInterval = s.TimeInterval;
        sNew.Units = s.Units;
        traceAnalysisList.Add(sNew);
    }
    return traceAnalysisList;
}
/// <summary>
/// Build a SeriesList with the trace exceedances: one series per requested
/// exceedance level, plus optional min/avg/max traces and an optional extra
/// reference trace selected by scenario name.
/// </summary>
/// <param name="sListIn">input traces (one per scenario)</param>
/// <param name="excLevels">exceedance percentages (e.g. 10, 50, 90)</param>
/// <param name="xtraTraceCheck">when true, append the trace named by xtraTrace</param>
/// <param name="xtraTrace">scenario name of the extra reference trace</param>
/// <param name="plotMinTrace">append a per-timestep minimum trace</param>
/// <param name="plotAvgTrace">append a per-timestep average trace</param>
/// <param name="plotMaxTrace">append a per-timestep maximum trace</param>
private SeriesList getTraceExceedances(SeriesList sListIn, int[] excLevels, bool xtraTraceCheck, string xtraTrace, bool plotMinTrace, bool plotAvgTrace, bool plotMaxTrace)
{
    SeriesList traceAnalysisList = new SeriesList();
    // Define the index numbers from the serieslist wrt the selected exceedance level
    List<int> sExcIdxs = new List<int>();
    foreach (var item in excLevels)
    {
        var sNew = new Series();
        sNew.TimeInterval = sListIn[0].TimeInterval;
        sNew.Units = sListIn[0].Units;
        sNew.ScenarioName = item + "%Exceedance";
        traceAnalysisList.Add(sNew);
        int excIdx;
        // map the exceedance percentage to a rank in the sorted per-timestep
        // values: ceiling above the 50% level, floor otherwise
        if (item > 50)
        {
            excIdx = Convert.ToInt16(System.Math.Ceiling(sListIn.Count * (100.0 - Convert.ToDouble(item)) / 100.0));
        }
        else
        {
            excIdx = Convert.ToInt16(System.Math.Floor(sListIn.Count * (100.0 - Convert.ToDouble(item)) / 100.0));
        }
        sExcIdxs.Add(excIdx);
    }
    // Add min trace if selected (rank 0 of the sorted values)
    if (plotMinTrace)
    {
        var sNew = new Series();
        sNew.TimeInterval = sListIn[0].TimeInterval;
        sNew.Units = sListIn[0].Units;
        sNew.ScenarioName = "Min";
        traceAnalysisList.Add(sNew);
        sExcIdxs.Add(0);
    }
    // Add max trace if selected (highest rank of the sorted values)
    if (plotMaxTrace)
    {
        var sNew = new Series();
        sNew.TimeInterval = sListIn[0].TimeInterval;
        sNew.Units = sListIn[0].Units;
        sNew.ScenarioName = "Max";
        traceAnalysisList.Add(sNew);
        sExcIdxs.Add(sListIn.Count - 1);
    }
    // Define average trace container
    var sAvg = new Series();
    sAvg.TimeInterval = sListIn[0].TimeInterval;
    sAvg.Units = sListIn[0].Units;
    sAvg.ScenarioName = "Avg";
    // Populate the output serieslist with the exceddance curves
    var dTab = sListIn.ToDataTable(true);
    for (int i = 0; i < dTab.Rows.Count; i++)
    {
        var dRow = dTab.Rows[i];
        DateTime t = DateTime.Parse(dRow[0].ToString());
        var values = dRow.ItemArray;
        // Put the ith timestep values in a C# List and sort by ascending
        var valList = new List<double>();
        var valSum = 0.0;
        for (int j = 1; j < values.Length; j++)
        {
            valList.Add(Convert.ToDouble(values[j].ToString()));
            valSum += Convert.ToDouble(values[j].ToString());
        }
        valList.Sort();
        // Grab the index corresponding to the selected exceedance level and populate the output serieslist
        for (int j = 0; j < sExcIdxs.Count; j++)
        {
            traceAnalysisList[j].Add(t, valList[sExcIdxs[j]], "interpolated");
        }
        // Populate the average trace series
        if (plotAvgTrace)
        {
            sAvg.Add(t, valSum / valList.Count, "interpolated");
        }
    }
    // Add average trace if selected
    if (plotAvgTrace)
    {
        traceAnalysisList.Add(sAvg);
    }
    // Add an extra reference trace if defined
    if (xtraTraceCheck)
    {
        //xtraTrace contains the run name "Name"
        // NOTE(review): the lookup happens before the empty-name check, so an
        // empty xtraTrace that matches no scenario would throw on [0] here
        // before reaching the friendlier exception below — confirm intended
        var scenarioTable = Explorer.Database.GetSelectedScenarios();
        var selectedScenarioRow = scenarioTable.Select("[Name] = '" + xtraTrace + "'")[0];
        int selectedIdx = scenarioTable.Rows.IndexOf(selectedScenarioRow); //scenariosTable.Rows.IndexOf(
        if (xtraTrace == "")
        {
            throw new Exception("Select an additional trace that is between 1 and the total number of traces");
        }
        else
        {
            traceAnalysisList.Add(sListIn[selectedIdx]);
        }
    }
    return traceAnalysisList;
}
/// <summary>
/// Sets the Y-axis title to the comma-separated list of the list's unique units.
/// </summary>
private void LabelYaxis(SeriesList list)
{
    var units = list.Text.UniqueUnits;
    pane.YAxis.Title.Text = String.Join(", ", units);
}
/// <summary>
/// Writes the header portion of the output for the given series;
/// implemented by each concrete formatter.
/// </summary>
/// <param name="list">series whose metadata seeds the header</param>
public abstract void WriteSeriesHeader(SeriesList list);
/*
 * ***************** NOTE: example of the generated SQL ******************
 * SELECT 'daily_karl_test' as tablename,a.datetime, value,flag
 *   FROM ( Select datetime from generate_series
 *          ( '2016-07-23'::timestamp
 *          , '2016-08-03 23:59:59.996'::timestamp
 *          , '1 day'::interval) datetime ) a
 *   left join daily_karl_test b on a.datetime = b.datetime
 *  WHERE a.datetime >= '2016-07-23 00:00:00.000' AND a.datetime <= '2016-08-03 23:59:59.996'
 * UNION ALL
 * SELECT 'daily_hrmo_etos' as tablename, datetime,value,flag FROM daily_hrmo_etos
 *  WHERE datetime >= '2016-07-23 00:00:00.000' AND datetime <= '2016-08-03 23:59:59.996'
 *  order by datetime,tablename
 */
/// <summary>
/// Create a SQL command that performs UNION of multiple series
/// so that can be queried in one round-trip to the server.
/// For daily data the first table is joined against a generated date series
/// so every date in the range is enumerated.
/// </summary>
/// <param name="list">series whose tables are combined</param>
/// <param name="t1">start of the date range</param>
/// <param name="t2">end of the date range</param>
/// <param name="interval">time interval of the series</param>
/// <param name="orderByDate">true to order by datetime first, false by tablename first</param>
/// <returns>the combined SQL, or "" when the list is empty</returns>
private string CreateSQL(SeriesList list, DateTime t1, DateTime t2, TimeInterval interval, bool orderByDate = true)
{
    if (list.Count == 0)
        return "";

    string firstTable = list[0].Table.TableName;
    Logger.WriteLine("CreateSQL");
    Logger.WriteLine("list of " + list.Count + " series");

    int startIndex = 0;
    string sql = "";
    if (interval == TimeInterval.Daily)
    {
        // take care of first table with join to enumerate all dates in range
        startIndex = 1;
        sql = JoinFirstTableWithDatesBetween(t1, t2, firstTable);
        if (list.Count > 1)
            sql += "\n UNION ALL \n";
    }

    sql += BuildUnionSQL(list, t1, t2, startIndex);
    sql += orderByDate ? " \norder by datetime,tablename " : " \norder by tablename,datetime ";
    return sql;
}