/// <summary>
/// Recomputes every calculation (of the same time interval) that depends on
/// the given series, in dependency order, over the series' own time range.
/// </summary>
/// <param name="s">series whose dependent calculations should be updated</param>
/// <returns>the dependent calculation series that produced at least one value</returns>
private SeriesList ComputeDependenciesSameInterval(Series s)
{
    int level = 0;
    SeriesList rval = new SeriesList();
    var calcList = GetDependentsRecursive(s.Table.TableName, s.TimeInterval, ref level);
    if (calcList.Count > 0)
    {
        Logger.WriteLine("Found " + calcList.Count + " " + s.TimeInterval + " calculations to update ");
    }
    TimeSeriesDependency d = new TimeSeriesDependency(calcList);
    var sorted = d.Sort();
    foreach (var item in sorted)
    {
        // BUG FIX: was 'item as CalculationSeries' followed by an unconditional
        // dereference — a non-CalculationSeries entry would surface as a
        // NullReferenceException. A direct cast fails fast with a clear
        // InvalidCastException instead.
        var cs = (CalculationSeries)item;
        cs.Calculate(s.MinDateTime, s.MaxDateTime);
        if (cs.Count > 0)
        {
            rval.Add(cs);
        }
    }
    return rval;
}
/// <summary>
/// Calculates monthly values for all configured calculation series,
/// in dependency order, writing results to the outgoing daily file.
/// </summary>
/// <param name="t1">start of the computation period</param>
/// <param name="t2">end of the computation period</param>
/// <param name="errorFileName">optional file that receives calculation status/error details</param>
/// <returns>the calculation series in the order they were computed</returns>
/// <exception cref="InvalidOperationException">when this instance is not monthly interval</exception>
public CalculationSeries[] ComputeMonthlyValues(DateTime t1, DateTime t2, string errorFileName = "")
{
    if (m_interval != TimeInterval.Monthly)
    {
        // BUG FIX: message previously said "daily interval", contradicting the
        // Monthly check above. Also use InvalidOperationException (a subclass
        // of Exception, so existing catch blocks still work).
        throw new InvalidOperationException("ComputeMonthlyValues must be used with monthly interval. interval =" + m_interval);
    }
    Performance p = new Performance();
    HydrometInstantSeries.Cache = new HydrometDataCache(); // clear out and make new cache.
    string dailyFileName = GetDailyOutgoingFileName(m_propertyFilter);
    // BUG FIX: message previously said "daily values" — this routine computes monthly values.
    Console.WriteLine("Computing monthly values for " + m_calculationList.Count + " series");
    TimeSeriesDependency td = new TimeSeriesDependency(m_calculationList);
    var sorted = td.Sort();
    foreach (var s in sorted)
    {
        if (s.Enabled == 0)
        {
            Console.WriteLine("Skipping disabled calculation: " + s.Name);
            continue; // calculations turned off
        }
        // TODO(review): consider moving this adjustment inside s.Calculate().
        var t1a = s.AdjustStartingDateFromProperties(t1, t2);
        if (m_db.Parser.VariableResolver is HydrometVariableResolver)
        {
            CacheAllParametersForSite(s, t1a, t2); // 50% performance boost.
        }
        Console.Write(s.Table.TableName + " = " + s.Expression);
        s.Calculate(t1a, t2); // saves to local time series database.
        LogStatusOfCalculation(errorFileName, s);
        WriteToHydrometDailyFile(dailyFileName, s);
    }
    if (s_appendToFile) // might not have any results
    {
        Console.WriteLine("Results Saved to " + dailyFileName);
    }
    p.Report(); // 185 seconds
    return sorted;
}
/// <summary>
/// Runs the queued daily calculations in dependency order and adds any
/// series that produced values to the routing list.
/// </summary>
/// <param name="importSeries">imported series used to derive the daily time range</param>
/// <param name="calculationQueue">calculations to perform</param>
/// <param name="routingList">receives calculations that produced at least one value</param>
private static void PerformDailyComputations(SeriesList importSeries, List<CalculationSeries> calculationQueue, SeriesList routingList)
{
    // Order the queued calculations so dependencies run before dependents.
    var dependencies = new TimeSeriesDependency(calculationQueue);
    TimeRange range;
    bool haveValidRange = TryGetDailyTimeRange(importSeries, out range, DateTime.Now);
    if (!haveValidRange)
    {
        Console.WriteLine(" time range indicates don't perform calculation.");
        Console.WriteLine(" Current Time:" + DateTime.Now.ToString());
        Console.WriteLine(" Default time range :" + range.StartDate.ToString() + " " + range.EndDate.ToString());
    }
    foreach (CalculationSeries calc in dependencies.Sort())
    {
        Console.Write(">>> " + calc.Table.TableName + ": " + calc.Expression);
        if (!haveValidRange)
        {
            Console.WriteLine("Skipping because there is not a valid time range.");
            continue; // announce the skip for every queued calculation
        }
        calc.Calculate(range.StartDate, range.EndDate);
        if (calc.Count <= 0)
        {
            continue; // nothing computed; nothing to route
        }
        routingList.Add(calc);
        if (calc.CountMissing() > 0)
        {
            Console.WriteLine(" Missing " + calc.CountMissing() + " records");
        }
        else
        {
            Console.WriteLine(" OK");
        }
    }
}
/// <summary>
/// Imports time series data:
/// 1) set flags
/// 2) active alarms (TO DO)
/// 3) compute dependent data (same interval)
/// 4) compute daily data when encountering midnight values
/// Finally routes imported/computed series (instant and daily) onward.
/// </summary>
/// <param name="inputSeriesList">series to flag, import, and route</param>
/// <param name="computeDependencies">when true, recompute same-interval dependents of each imported series</param>
/// <param name="computeDailyEachMidnight">when true, queue daily calculations triggered by midnight values</param>
/// <param name="importTag">tag passed through to the routing calls</param>
public void Import(SeriesList inputSeriesList, bool computeDependencies = false, bool computeDailyEachMidnight = false, string importTag = "data")
{
    var calculationQueue = new SeriesList();
    var routingList = new SeriesList();
    foreach (var s in inputSeriesList)
    {
        // set flags.
        Logger.WriteLine("Checking Flags ");
        m_db.Quality.SetFlags(s); // to do, log/email flagged data
        // To Do.. check for alarms..
        m_db.ImportSeriesUsingTableName(s, "");
        routingList.Add(s);
        if (computeDependencies)
        {
            // same-interval dependents also get routed with the raw imports
            var z = ComputeDependenciesSameInterval(s);
            routingList.AddRange(z);
        }
        if (computeDailyEachMidnight)
        {
            // queue daily calculations (deduplicated by table name)
            var x = GetDailyCalculationsIfMidnight(s);
            foreach (var item in x)
            {
                if (!calculationQueue.ContainsTableName(item))
                {
                    calculationQueue.Add(item);
                }
            }
        }
    }
    if (calculationQueue.Count > 0)
    {
        // do Actual Computations now. (in proper order...)
        var list = new List<CalculationSeries>();
        foreach (Series item in calculationQueue)
        {
            list.Add(item as CalculationSeries);
        }
        TimeSeriesDependency td = new TimeSeriesDependency(list);
        var sortedCalculations = td.Sort();
        foreach (CalculationSeries cs in sortedCalculations)
        {
            Console.Write(">>> " + cs.Table.TableName + ": " + cs.Expression);
            //var cs = item as CalculationSeries;
            var t1 = inputSeriesList.MinDateTime.Date;
            var t2 = inputSeriesList.MaxDateTime;
            if (t1.Date == t2.AddDays(-1).Date) // spans midnight, compute yesterday.
            {
                // NOTE(review): this collapses the range to the single day t1
                // (t2 = t1.Date), i.e. only yesterday's daily value is computed
                // when the input crosses midnight. Looks intentional per the
                // comment above, but confirm t2 was not meant to be t2.Date.
                t1 = t1.Date;
                t2 = t1.Date;
            }
            cs.Calculate(t1, t2);
            if (cs.Count > 0)
            {
                routingList.Add(cs);
                if (cs.CountMissing() > 0)
                {
                    Console.WriteLine(" Missing " + cs.CountMissing() + " records");
                }
                else
                {
                    Console.WriteLine(" OK");
                }
            }
        }
    }
    SeriesList instantRoute = new SeriesList();
    SeriesList dailyRoute = new SeriesList();
    // route data to other locations.
    foreach (var item in routingList)
    {
        // parameter/site are derived from the table name before routing
        TimeSeriesName tn = new TimeSeriesName(item.Table.TableName);
        item.Parameter = tn.pcode;
        item.SiteID = tn.siteid;
        if (item.TimeInterval == TimeInterval.Irregular)
        {
            instantRoute.Add(item);
        }
        if (item.TimeInterval == TimeInterval.Daily)
        {
            dailyRoute.Add(item);
        }
        // NOTE(review): other intervals (e.g. monthly) fall through unrouted — confirm intended.
    }
    Console.WriteLine("Routing data");
    TimeSeriesRouting.RouteInstant(instantRoute, importTag, m_routing);
    TimeSeriesRouting.RouteDaily(dailyRoute, importTag, m_routing);
}
/// <summary>
/// Calculates a group of daily values.
/// </summary>
/// <param name="t1">start of the computation period</param>
/// <param name="t2">end of the computation period</param>
/// <param name="compareToHydromet">when true, compare each result to hydromet data</param>
/// <param name="errorFileName">file receiving one-line error summaries</param>
/// <param name="detailFileName">file receiving detailed calculation messages</param>
/// <param name="simulate">simulate calculations, don't actually do it.</param>
/// <returns>the calculation series in the order they were computed</returns>
public CalculationSeries[] ComputeDailyValues(DateTime t1, DateTime t2, bool compareToHydromet = false, string errorFileName = "", string detailFileName = "", bool simulate = false)
{
    Performance p = new Performance();
    HydrometInstantSeries.Cache = new HydrometDataCache(); // clear out and make new cache.
    string dailyFileName = GetDailyFileName(m_propertyFilter);
    bool appendToFile = false; // first write creates the output file; later writes append.
    Console.WriteLine("Computing daily values for " + m_dependencyList.Count + " series");
    TimeSeriesDependency td = new TimeSeriesDependency(m_dependencyList);
    var sorted = td.Sort();
    foreach (var s in sorted)
    {
        if (!s.Enabled)
        {
            continue; // calculations turned off
        }
        string originalExpression = s.Expression;
        // compute Values
        if (m_db.Parser.VariableResolver is HydrometVariableResolver)
        {
            CacheAllParametersForSite(s, t1, t2); // 50% performance boost.
        }
        Console.Write(s.Table.TableName + " = " + s.Expression);
        if (simulate)
        {
            Console.WriteLine("skipping calc");
            continue;
        }
        s.Calculate(t1, t2); // Calculate() also saves to local time series database.
        if (s.Count == 0 || s.CountMissing() > 0)
        {
            // failure path: one-line summary to the error file, full messages to the detail file.
            File.AppendAllText(errorFileName, "Error: " + s.Table.TableName + " = " + s.Expression + "\n");
            string msg = "\nDetails: " + s.Table.TableName + " = " + s.Expression + "\n";
            foreach (var x in s.Messages)
            {
                msg += "\n" + x;
            }
            Console.WriteLine(msg);
            File.AppendAllText(detailFileName, msg);
        }
        else
        {
            // File.AppendAllText(errorFileName, " OK. ");
            // BUG FIX: the literal previously contained a raw line break,
            // which is not valid inside a regular C# string literal.
            Console.WriteLine(" OK. ");
        }
        if (compareToHydromet)
        {
            CompareToHydromet(s);
        }
        // restore the expression — presumably Calculate() can rewrite it; confirm.
        s.Expression = originalExpression;
        TimeSeriesName n = new TimeSeriesName(s.Table.TableName);
        HydrometDailySeries.WriteToArcImportFile(s, n.siteid, n.pcode, dailyFileName, appendToFile);
        appendToFile = true; // append after the first time (was a redundant if-guard).
    }
    if (appendToFile) // might not have any results
    {
        Console.WriteLine("Results Saved to " + dailyFileName);
    }
    p.Report(); // 185 seconds
    return sorted;
}