/// <summary>
/// Verifies that quality-limit flags are applied on import: values outside
/// the configured [-50, 120] range for '*_OB' tables get '-' or '+' flags.
/// </summary>
public void TestSQLite()
{
    var tempFile = FileUtility.GetTempFileName(".pdb");
    var server = new SQLiteServer(tempFile);
    var database = new TimeSeriesDatabase(server);

    // Quality-limit rule: high = 120, low = -50, applied to tables matching '*_OB'.
    server.RunSqlCommand("insert into quality_limit values ('*_OB',120,-50,null); ");

    var series = new Series();
    series.Table.TableName = "karl_ob";
    series.Add("1-1-2013", 100);  // within limits -> no flag
    series.Add("1-2-2013", -51);  // below low limit -> '-'
    series.Add("1-3-2013", 100);  // within limits -> no flag
    series.Add("1-4-2013", 150);  // above high limit -> '+'

    var importer = new TimeSeriesImporter(database);
    importer.Import(series);

    series = database.GetSeriesFromTableName("karl_ob");
    series.Read();
    Console.WriteLine("has flags = " + series.HasFlags);
    series.WriteToConsole(true);

    Assert.AreEqual("", series["1-1-2013"].Flag);
    Assert.AreEqual("-", series["1-2-2013"].Flag);
    Assert.AreEqual("", series["1-3-2013"].Flag);
    Assert.AreEqual("+", series["1-4-2013"].Flag);
}
/// <summary>
/// Imports forebay elevation data and verifies the dependent reservoir-contents
/// (acre-feet) calculation series is computed from a file rating table.
/// </summary>
public void ReservoirContentsWithDatabase()
{
    Logger.EnableLogger();
    var tempFile = FileUtility.GetTempFileName(".pdb");
    System.IO.File.Delete(tempFile);
    var server = new SQLiteServer(tempFile);
    var database = new TimeSeriesDatabase(server, Reclamation.TimeSeries.Parser.LookupOption.TableName, false);

    // Calculation series: acre-feet derived from forebay elevation via rating table.
    var contents = new CalculationSeries("instant_karl_af");
    var ratingPath = Path.Combine(Path.Combine(Globals.TestDataPath, "rating_tables"), "karl_af.txt");
    contents.Expression = "FileRatingTable(instant_karl_fb,\"" + ratingPath + "\")";
    contents.TimeInterval = TimeInterval.Irregular;
    database.AddSeries(contents);

    var forebay = new Series("instant_karl_fb");
    forebay.TimeInterval = TimeInterval.Irregular;
    database.AddSeries(forebay);
    forebay.Add("1-1-2013", 1);
    forebay.Add("1-2-2013", 2);
    forebay.Add("1-3-2013", 3);
    forebay.Add("1-4-2013", 4);

    var importer = new TimeSeriesImporter(database);
    importer.Import(forebay, computeDependencies: true);// this should force a calculation...

    var result = database.GetSeriesFromTableName("instant_karl_af");
    Assert.NotNull(result, "Series not created");
    result.Read();
    Assert.AreEqual(4, result.Count);
    Assert.AreEqual(300, result[2].Value);
}
/// <summary>
/// A LoggerNet file whose data ends at 23:15 should yield a valid daily range
/// covering the full previous day when evaluated shortly after midnight.
/// </summary>
public void MALI_DailyAverageMidnight()
{
    var fileName = Path.Combine(TestData.DataPath, "MALI_2016_08_16_2315.dat");
    var loggerFile = new LoggerNetFile(fileName);
    var seriesList = loggerFile.ToSeries();

    // Reference time: 10 minutes past midnight on the following day.
    var referenceTime = new DateTime(2016, 8, 17, 0, 10, 0);
    TimeRange range;
    bool valid = TimeSeriesImporter.TryGetDailyTimeRange(seriesList, out range, referenceTime);

    Assert.IsTrue(valid);
    Assert.AreEqual(DateTime.Parse("2016-08-16"), range.StartDate);
    Assert.AreEqual(DateTime.Parse("2016-08-16").EndOfDay(), range.EndDate);
}
/// <summary>
/// A file containing only today's (still-incomplete) data should not produce
/// a valid daily time range for daily calculations.
/// </summary>
public void MALI_TodayFilesNoCalc()
{
    var fileName = CreateFutureFile();

    // Rewrite the file's dates so all of its data appears to be from today.
    var contents = File.ReadAllText(fileName).Replace("2016-08-16", DateTime.Now.ToString("yyyy-MM-dd"));
    File.WriteAllText(fileName, contents);

    var loggerFile = new LoggerNetFile(fileName);
    var seriesList = loggerFile.ToSeries();

    TimeRange range;
    bool valid = TimeSeriesImporter.TryGetDailyTimeRange(seriesList, out range, DateTime.Now);
    Assert.IsFalse(valid);
}
/// <summary>
/// A single data point exactly at midnight should map to a daily range
/// covering the preceding day.
/// </summary>
public void MidnightSinglePointRange()
{
    var midnight = DateTime.Parse("2017-03-10");
    var series = new Series();
    series.Add(midnight, 12.0);

    var seriesList = new SeriesList();
    seriesList.Add(series);

    TimeRange range;
    bool valid = TimeSeriesImporter.TryGetDailyTimeRange(seriesList, out range, midnight);

    Assert.IsTrue(valid);
    Assert.IsTrue(range.StartDate == midnight.AddDays(-1));
    Assert.IsTrue(range.EndDate == midnight.AddDays(-1).EndOfDay());
}
/// <summary>
/// Importing forebay elevation data with computeDependencies should trigger
/// the FileRatingTable calculation and populate the acre-feet series.
/// </summary>
public void ReservoirContentsWithDatabase()
{
    Logger.EnableLogger();
    var dbFile = FileUtility.GetTempFileName(".pdb");
    System.IO.File.Delete(dbFile);
    var server = new SQLiteServer(dbFile);
    var db = new TimeSeriesDatabase(server, Reclamation.TimeSeries.Parser.LookupOption.TableName);

    // Register the calculation series before its input exists.
    var calc = new CalculationSeries("instant_karl_af");
    var tablePath = Path.Combine(Path.Combine(Globals.TestDataPath, "rating_tables"), "karl_af.txt");
    calc.Expression = "FileRatingTable(instant_karl_fb,\"" + tablePath + "\")";
    calc.TimeInterval = TimeInterval.Irregular;
    db.AddSeries(calc);

    var elevation = new Series("instant_karl_fb");
    elevation.TimeInterval = TimeInterval.Irregular;
    db.AddSeries(elevation);
    elevation.Add("1-1-2013", 1);
    elevation.Add("1-2-2013", 2);
    elevation.Add("1-3-2013", 3);
    elevation.Add("1-4-2013", 4);

    var importer = new TimeSeriesImporter(db);
    importer.Import(elevation, computeDependencies: true);// this should force a calculation...

    var acreFeet = db.GetSeriesFromTableName("instant_karl_af");
    Assert.NotNull(acreFeet, "Series not created");
    acreFeet.Read();
    Assert.AreEqual(4, acreFeet.Count);
    Assert.AreEqual(300, acreFeet[2].Value);
}
/// <summary>
/// Verifies a two-gate discharge calculation: total flow is the sum of two
/// FileLookupInterpolate2D lookups (forebay x gate opening) against one table.
/// Expected values come from Hydromet for the same inputs.
/// </summary>
public void Interpolate2DWithDatabase()
{
    Logger.EnableLogger();
    var dbFile = FileUtility.GetTempFileName(".pdb");
    File.Delete(dbFile);
    var server = new SQLiteServer(dbFile);
    var db = new TimeSeriesDatabase(server, false);

    var flow = new CalculationSeries("rir_q");
    var tablePath = Path.Combine(Path.Combine(TestData.DataPath, "rating_tables"), "rir_q.txt");
    flow.Expression = "FileLookupInterpolate2D(rir_fb, rir_ra, \"" + tablePath + "\")"
                    + " + FileLookupInterpolate2D(rir_fb, rir_rb, \"" + tablePath + "\")";
    flow.TimeInterval = TimeInterval.Irregular;
    db.AddSeries(flow);

    // Forebay elevation input.
    var forebay = new Series("rir_fb");
    forebay.TimeInterval = TimeInterval.Irregular;
    db.AddSeries(forebay);
    forebay.Add("6-1-2011", 5110.99);
    forebay.Add("6-2-2011", 5111.31);
    forebay.Add("6-3-2011", 5111.71);
    forebay.Add("6-4-2011", 5112.09);

    // Gate A opening.
    var gateA = new Series("rir_ra");
    gateA.TimeInterval = TimeInterval.Irregular;
    gateA.Add("6-1-2011", 2.1);
    gateA.Add("6-2-2011", 1.29);
    gateA.Add("6-3-2011", 1.29);
    gateA.Add("6-4-2011", 1.29);
    db.AddSeries(gateA);

    // Gate B opening.
    var gateB = new Series("rir_rb");
    gateB.TimeInterval = TimeInterval.Irregular;
    gateB.Add("6-1-2011", 2.1);
    gateB.Add("6-2-2011", 1.28);
    gateB.Add("6-3-2011", 1.28);
    gateB.Add("6-4-2011", 1.28);
    db.AddSeries(gateB);

    var importer = new TimeSeriesImporter(db);
    importer.Import(forebay, computeDependencies: true);// this should force a calculation...

    var result = db.GetSeriesFromTableName("rir_q");
    Assert.NotNull(result, "Series not created");
    result.Read();
    /*
     * Flows from Hydromet
     * 6-1-2011, 1009.87
     * 6-2-2011, 602.24
     * 6-3-2011, 603.32
     * 6-4-2011, 604.34
     */
    Assert.AreEqual(4, result.Count);
    Assert.AreEqual(1009.87, System.Math.Round(result[0].Value, 2));
    Assert.AreEqual(603.32, System.Math.Round(result[2].Value, 2));
}
/// <summary>
/// Imports a single incoming data file: detects its format (DMS3, LoggerNet,
/// or DECODES raw), converts it to a SeriesList, imports the data, then moves
/// the file to an 'attic' subdirectory on success or 'error' on failure.
/// Unrecognized formats and too-new files are skipped in place.
/// </summary>
/// <param name="routing">routing options handed to the TimeSeriesImporter</param>
/// <param name="fi">the file to process</param>
private void ProcessFile(RouteOptions routing, FileSystemInfo fi)
{
    var fn = fi.FullName;
    // Computed once; used by both the success ('attic') and failure ('error') moves.
    string dir = System.IO.Path.GetDirectoryName(fn);

    // Skip files that may still be in the process of being written.
    if (fi.CreationTime.AddSeconds(2) > DateTime.Now)
    {
        Console.WriteLine(" skipping file newer than 2 seconds ago " + fn + " " + fi.CreationTime);
        return;
    }

    string importTag = "import"; // used to make friendly export filename
    try
    {
        TextFile tf = new TextFile(fn);
        SeriesList sl = new SeriesList();
        if (HydrometInstantSeries.IsValidDMS3(tf))
        {
            importTag = "decodes";
            sl = HydrometInstantSeries.HydrometDMS3DataToSeriesList(tf);
        }
        else if (LoggerNetFile.IsValidFile(tf))
        {
            LoggerNetFile lf = new LoggerNetFile(tf);
            // Only import data for known sites.
            if (lf.IsValid && Array.IndexOf(validSites, lf.SiteName) >= 0)
            {
                importTag = lf.SiteName;
                sl = lf.ToSeries(validPcodes);
            }
        }
        else if (DecodesRawFile.IsValidFile(tf))
        {
            DecodesRawFile df = new DecodesRawFile(tf);
            importTag = "raw";
            sl = df.ToSeries();
        }
        else
        {
            Console.WriteLine("skipped Unknown File Format: " + fn);
            return;
        }

        m_importer = new TimeSeriesImporter(m_db, routing);
        Console.WriteLine("Found " + sl.Count + " series in " + fn);
        foreach (var item in sl)
        {
            Logger.WriteLine(item.Table.TableName);
        }

        if (sl.Count > 0)
        {
            m_importer.Import(sl, m_computeDependencies, m_computeDailyOnMidnight, importTag);
            FileUtility.MoveToSubDirectory(dir, "attic", fn);
        }
    }
    catch (Exception ex)
    {
        // Best-effort: log and quarantine the file rather than abort the batch.
        Logger.WriteLine("Error:" + ex.Message);
        Console.WriteLine("Error: skipping file, will move to error subdirectory " + fn);
        FileUtility.MoveToSubDirectory(dir, "error", fn);
    }
}
/// <summary>
/// Total discharge is the sum of two 2-D rating-table interpolations
/// (forebay elevation x gate opening, one lookup per gate). Expected
/// values were taken from Hydromet for identical inputs.
/// </summary>
public void Interpolate2DWithDatabase()
{
    Logger.EnableLogger();
    var tempFile = FileUtility.GetTempFileName(".pdb");
    File.Delete(tempFile);
    var server = new SQLiteServer(tempFile);
    var database = new TimeSeriesDatabase(server);

    var discharge = new CalculationSeries("rir_q");
    var ratingPath = Path.Combine(Path.Combine(Globals.TestDataPath, "rating_tables"), "rir_q.txt");
    discharge.Expression = "FileLookupInterpolate2D(rir_fb, rir_ra, \"" + ratingPath + "\")"
                         + " + FileLookupInterpolate2D(rir_fb, rir_rb, \"" + ratingPath + "\")";
    discharge.TimeInterval = TimeInterval.Irregular;
    database.AddSeries(discharge);

    // Forebay elevation input.
    var forebay = new Series("rir_fb");
    forebay.TimeInterval = TimeInterval.Irregular;
    database.AddSeries(forebay);
    forebay.Add("6-1-2011", 5110.99);
    forebay.Add("6-2-2011", 5111.31);
    forebay.Add("6-3-2011", 5111.71);
    forebay.Add("6-4-2011", 5112.09);

    // Gate A opening.
    var gateA = new Series("rir_ra");
    gateA.TimeInterval = TimeInterval.Irregular;
    gateA.Add("6-1-2011", 2.1);
    gateA.Add("6-2-2011", 1.29);
    gateA.Add("6-3-2011", 1.29);
    gateA.Add("6-4-2011", 1.29);
    database.AddSeries(gateA);

    // Gate B opening.
    var gateB = new Series("rir_rb");
    gateB.TimeInterval = TimeInterval.Irregular;
    gateB.Add("6-1-2011", 2.1);
    gateB.Add("6-2-2011", 1.28);
    gateB.Add("6-3-2011", 1.28);
    gateB.Add("6-4-2011", 1.28);
    database.AddSeries(gateB);

    var importer = new TimeSeriesImporter(database);
    importer.Import(forebay, computeDependencies: true);// this should force a calculation...

    var result = database.GetSeriesFromTableName("rir_q");
    Assert.NotNull(result, "Series not created");
    result.Read();
    /*
     * Flows from Hydromet
     * 6-1-2011, 1009.87
     * 6-2-2011, 602.24
     * 6-3-2011, 603.32
     * 6-4-2011, 604.34
     */
    Assert.AreEqual(4, result.Count);
    Assert.AreEqual(1009.87, System.Math.Round(result[0].Value, 2));
    Assert.AreEqual(603.32, System.Math.Round(result[2].Value, 2));
}
/// <summary>
/// Imports a single incoming data file by name: detects its format (DMS3,
/// htools daily archive, or LoggerNet), converts it to a SeriesList, imports
/// the data, then moves the file to an 'attic' subdirectory on success or
/// 'error' on failure. Unrecognized formats are skipped in place.
/// </summary>
/// <param name="routing">routing options handed to the TimeSeriesImporter</param>
/// <param name="fileName">full path of the file to process</param>
private void ProcessFile(RouteOptions routing, string fileName)
{
    // Computed once; used by both the success ('attic') and failure ('error') moves.
    string dir = Path.GetDirectoryName(fileName);
    string importTag = "import"; // used to make friendly export filename
    try
    {
        TextFile tf = new TextFile(fileName);
        SeriesList sl = new SeriesList();
        if (HydrometInstantSeries.IsValidDMS3(tf))
        {
            importTag = "decodes";
            sl = HydrometInstantSeries.HydrometDMS3DataToSeriesList(tf);
        }
        else if (HydrometDailySeries.IsValidArchiveFile(tf))
        {
            importTag = "htools";
            sl = HydrometDailySeries.HydrometDailyDataToSeriesList(tf);
        }
        else if (LoggerNetFile.IsValidFile(tf))
        {
            LoggerNetFile lf = new LoggerNetFile(tf);
            // Only import data for known sites.
            if (lf.IsValid && Array.IndexOf(validSites, lf.SiteName) >= 0)
            {
                importTag = lf.SiteName;
                sl = lf.ToSeries(validPcodes);
            }
        }
        else
        {
            Logger.WriteLine("skipped Unknown File Format: " + fileName);
            return;
        }

        m_importer = new TimeSeriesImporter(m_db, routing, m_saveOption);
        Console.WriteLine("Found " + sl.Count + " series in " + fileName);
        foreach (var item in sl)
        {
            Logger.WriteLine(item.Table.TableName);
        }

        if (sl.Count > 0)
        {
            m_importer.Import(sl, m_computeDependencies, m_computeDailyDependencies, importTag);
            FileUtility.MoveToSubDirectory(dir, "attic", fileName);
        }
    }
    catch (Exception ex)
    {
        // Best-effort: log and quarantine the file rather than abort the batch.
        Logger.WriteLine("Error:" + ex.Message);
        Console.WriteLine("Error: skipping file, will move to error subdirectory " + fileName);
        FileUtility.MoveToSubDirectory(dir, "error", fileName);
    }
}