/// <summary>
/// Test fixture setup: creates a fresh SQLite-backed TimeSeriesDatabase at
/// C:\temp\factory.pdb, copies the text/excel input files into tmpDir
/// (re-pointing the filename fields at the copies), and seeds series for the
/// export tests.
/// </summary>
public TimeSeriesDatabaseTest()
{
    if (!Directory.Exists(tmpDir))
    {
        Directory.CreateDirectory(tmpDir);
    }
    string fn = Path.Combine(@"C:\temp", "factory.pdb");
    // Bug fix: a stray FileUtility.GetTempFileNameInDirectory(@"C:\temp\", ".pdb")
    // call was removed here -- its return value was discarded, so it only
    // littered an unused temp file on every fixture construction.
    SQLiteServer.CreateNewDatabase(fn);
    SQLiteServer svr = new SQLiteServer(fn);
    db = new TimeSeriesDatabase(svr, false);
    string dataPath = TestData.DataPath;

    // Copy each test input into tmpDir so tests never modify the originals.
    File.Copy(Path.Combine(dataPath, textFileName), Path.Combine(tmpDir, textFileName), true);
    textFileName = Path.Combine(tmpDir, textFileName);
    File.Copy(Path.Combine(dataPath, excelFileName), Path.Combine(tmpDir, excelFileName), true);
    excelFileName = Path.Combine(tmpDir, excelFileName);
    File.Copy(Path.Combine(dataPath, updatedExcelFileName), Path.Combine(tmpDir, updatedExcelFileName), true);
    updatedExcelFileName = Path.Combine(tmpDir, updatedExcelFileName);

    // Add some data for export tests (outputs of these calls are not needed here).
    Series s;
    int c;
    int sdi;
    AddExcelSeries(out s, out c, out sdi);
    AddTextSeries(out s, out c, out sdi);
}
/// <summary>
/// Builds a CalculationSeries from an existing catalog row, wiring up the
/// owning database and its expression parser before initializing.
/// </summary>
public CalculationSeries(TimeSeriesDatabase db, TimeSeriesDatabaseDataSet.SeriesCatalogRow sr)
    : base(db, sr)
{
    m_db = db;
    m_parser = db.Parser;
    Init();
}
/// <summary>
/// Design sketch for site-level metadata: creates a temp database, registers a
/// site ("BOII"), attaches a series to it, then exercises the SiteInfo view,
/// including editing a catalog row through that view and saving it.
/// </summary>
public void SiteInfoDesignTest()
{
    var fn = FileUtility.GetTempFileName(".pdb");
    SQLiteServer svr = new SQLiteServer(fn);
    var db = new TimeSeriesDatabase(svr, false);
    var siteCatalog = db.GetSiteCatalog();
    siteCatalog.AddsitecatalogRow("BOII", "Boise station", "ID");
    svr.SaveTable(siteCatalog);
    var sc = db.GetSeriesCatalog();
    var s = new Series();
    s.SiteID = "BOII";
    // parent the new series under the first root object in the catalog
    sc.AddSeriesCatalogRow(s, sc.NextID(), db.GetRootObjects()[0].ID);
    sc.Save();
    var si = db.SiteInfo("BOII");
    bool idaho = si.state == "ID"; //idaho
    var timezone = si.timezone;
    TimeSeriesDatabaseDataSet.SeriesCatalogDataTable d = si.SeriesList();
    TimeSeriesDatabaseDataSet.SeriesCatalogRow row = d[0];
    // mutate through the SiteInfo view, save, and print to confirm persistence
    row.Parameter = "Asce ET #5";
    d.Save();
    Console.WriteLine(si.SeriesList()[0].Parameter);
    //var goodStats = (si.Parameters()[0].statistic == "Avg");
}
/// <summary>
/// Used to create ModsimSeries from TimeSeriesDatabase.
/// Resolves the MODSIM .xy file from the connection string, prefers the
/// .accdb OUTPUT database over the legacy .mdb one, and reads the xy-file
/// version from the first line of the .xy file.
/// </summary>
public ModsimSeries(TimeSeriesDatabase db, Reclamation.TimeSeries.TimeSeriesDatabaseDataSet.SeriesCatalogRow sr)
    : base(db, sr)
{
    m_defaultUnits = Units;
    m_xyFilename = ConnectionStringUtility.GetFileName(ConnectionString, db.DataSource);
    m_mdbFilename = Path.ChangeExtension(m_xyFilename, null) + "OUTPUT.mdb";
    m_accdbFilename = Path.ChangeExtension(m_xyFilename, null) + "OUTPUT.accdb";
    if (File.Exists(m_accdbFilename))
    {
        m_databaseName = m_accdbFilename;
    }
    else
        m_databaseName = m_mdbFilename;
    if (File.Exists(m_xyFilename))
    {
        // Bug fix: wrap the reader in a using block -- the original leaked the
        // StreamReader if ReadLine/Split/Version construction threw.
        using (StreamReader srr = File.OpenText(m_xyFilename))
        {
            string line1 = srr.ReadLine();
            // first line is assumed to be "<token> <version>" -- TODO confirm format
            string[] line1Parts = line1.Split(' ');
            m_xyFileVersion = new Version(line1Parts[1]);
        }
    }
    else
    {
        Logger.WriteLine("Error: File missing " + m_xyFilename);
    }
    ScenarioName = Path.GetFileNameWithoutExtension(m_xyFilename);
    ExternalDataSource = true;
    modsimName = ConnectionStringToken("ModsimName");
    timeSeriesName = ConnectionStringToken("TimeSeriesName");
    ReadOnly = true;
}
/// <summary>
/// Builds a monthly "&lt;name&gt;_Forecast" series from a VIC model series and
/// stores it in a per-period/scenario SQLite database (file "&lt;period&gt;&lt;scenario&gt;.pdb").
/// Only months January..June get forecast points.
/// </summary>
private static void AddForecastSeries(string period, string scenario, TimeSeriesDatabase dbVic, string name, int thruMonth, string vicName)
{
    string fn = period + scenario + ".pdb";
    SQLiteServer svr = new SQLiteServer(fn);
    TimeSeriesDatabase db = new TimeSeriesDatabase(svr);
    // VIC series are looked up by the concatenated name "<vicName><period><scenario>"
    Series sVic = dbVic.GetSeriesFromName(vicName + period + scenario);
    sVic.Read();
    Series s = new Series(name + "_Forecast");
    s.TimeInterval = TimeInterval.Monthly;
    s.TimeSeriesDatabase = db;
    s.Units = "acre-feet";
    //initial model data start date and value
    // 1.98347 is presumably acre-feet per cfs-day (value * factor * days-in-month
    // converts a monthly-average flow to a monthly volume) -- TODO confirm units of sVic
    s.Add(sVic[0].DateTime, sVic[0].Value * 1.98347 * sVic[0].DateTime.EndOfMonth().Day);
    for (int i = 0; i < sVic.Count; i++)
    {
        int month = sVic[i].DateTime.Month;
        if (month <= 6) // forecasts only issued January through June
        {
            Point pt = new Point();
            pt.DateTime = sVic[i].DateTime;
            pt.Value = SumThruMonthToAcreFt(sVic, pt.DateTime, thruMonth);
            s.Add(pt);
        }
    }
    db.AddSeries(s);
    SetSeriesDatesToBeginningOfMonth(s);
    ConsolePrintSeriesNameAndCount(s);
}
/// <summary>
/// Verifies that importing forebay data triggers the dependent rating-table
/// calculation: a CalculationSeries rating instant_karl_fb into instant_karl_af
/// should be computed automatically when fb data is imported with
/// computeDependencies:true.
/// </summary>
public void ReservoirContentsWithDatabase()
{
    Logger.EnableLogger();
    var fn = FileUtility.GetTempFileName(".pdb");
    System.IO.File.Delete(fn);
    SQLiteServer svr = new SQLiteServer(fn);
    var db = new TimeSeriesDatabase(svr, Reclamation.TimeSeries.Parser.LookupOption.TableName, false);
    var c = new CalculationSeries("instant_karl_af");
    var path = Path.Combine(Globals.TestDataPath, "rating_tables");
    path = Path.Combine(path, "karl_af.txt");
    c.Expression = "FileRatingTable(instant_karl_fb,\"" + path + "\")";
    c.TimeInterval = TimeInterval.Irregular;
    db.AddSeries(c);
    var fb = new Series("instant_karl_fb");
    fb.TimeInterval = TimeInterval.Irregular;
    db.AddSeries(fb);
    fb.Add("1-1-2013", 1);
    fb.Add("1-2-2013", 2);
    fb.Add("1-3-2013", 3);
    fb.Add("1-4-2013", 4);
    TimeSeriesImporter ti = new TimeSeriesImporter(db);
    ti.Import(fb, computeDependencies: true);// this should force a calculation...
    var af = db.GetSeriesFromTableName("instant_karl_af");
    Assert.NotNull(af, "Series not created");
    af.Read();
    Assert.AreEqual(4, af.Count);
    // third forebay value (3) rates to 300 per karl_af.txt
    Assert.AreEqual(300, af[2].Value);
}
/// <summary>
/// Opens the pisces (SQLite) database m_dbName, reads each series in
/// m_seriesName over [m_t1, m_t2], and writes the results to Riverware files.
/// </summary>
/// <exception cref="InvalidOperationException">a requested series is not in the database</exception>
private void ReadFromPisces()
{
    Logger.WriteLine("opening " + m_dbName);
    SQLiteServer svr = new SQLiteServer(m_dbName);
    TimeSeriesDatabase db = new TimeSeriesDatabase(svr);
    SeriesList list = new SeriesList();
    for (int i = 0; i < m_seriesName.Count; i++)
    {
        Logger.WriteLine("looking for series '" + m_seriesName[i] + "'");
        var s = db.GetSeriesFromName(m_seriesName[i]);
        if (s == null)
        {
            // More specific than the bare Exception the original threw; any
            // existing catch (Exception) handlers still catch this subtype.
            throw new InvalidOperationException("unable to find series '" + m_seriesName[i]
                + "' in pisces database '" + m_dbName + "'");
        }
        s.Read(m_t1, m_t2);
        list.Add(s);
    }
    WriteToRiverwareFiles(list);
}
/// <summary>
/// Regression test: a CalculationSeries ("add_test" = pal_af + pal_af) added to
/// the database should compute and persist data that survives a fresh read-back.
/// </summary>
public void AddBug()
{
    var start = new DateTime(2013, 1, 1);
    var end = start.AddDays(365);
    var dbFile = FileUtility.GetTempFileNameInDirectory(@"c:\temp\", ".pdb");
    SQLiteServer server = new SQLiteServer(dbFile);
    TimeSeriesDatabase db = new Reclamation.TimeSeries.TimeSeriesDatabase(server, false);

    // source data: daily acre-feet at PAL from Hydromet
    var source = new HydrometDailySeries("pal", "af");
    source.Name = "pal_af";
    source.Read(start, end);
    db.AddSeries(source);

    // calculated series that doubles the source
    var calc = new CalculationSeries("add_test");
    calc.Expression = "pal_af + pal_af";
    db.AddSeries(calc);

    calc = db.GetSeriesFromName("add_test") as CalculationSeries;
    calc.Calculate(start, end);

    // re-fetch and read back to prove the computed data was persisted
    calc = db.GetSeriesFromName("add_test") as CalculationSeries;
    calc.Read();
    Assert.IsTrue(calc.Count > 0);
}
/// <summary>
/// Entry point: imports USACE data for the sites listed in a csv catalog.
/// args[0] = site_list.csv, args[1] = "hourly"|"daily",
/// optional args[2] = a .pdb file for a comparison pisces database.
/// </summary>
static void Main(string[] args)
{
    Logger.EnableLogger();
    if (args.Length < 2 || args.Length > 3)
    {
        // Bug fix: usage text had typos ("houly", "dump.db" vs the actual
        // "dump.pdb" shown on the usage line).
        Console.WriteLine("Usage: GetUsace site_list.csv hourly|daily [dump.pdb] ");
        Console.WriteLine("Where: site_list.csv is a catalog of sites to import");
        Console.WriteLine("       hourly or daily data");
        Console.WriteLine("       dump.pdb creates a test pisces database for comparison to hydromet");
        return;
    }
    FileUtility.CleanTempPath();
    CsvFile csv = new CsvFile(args[0]);
    //interval,filename,cbtt,pcode,header1,header2,header3,header4,header5
    //instant,gcl_h.dat,GCL,FB,Forebay,(ft),,,
    //instant,gcl_h.dat,GCL,TW,Tailwatr,(ft),,,
    //instant,gcl_h.dat,GCL,QE,Generatn,Flow,(kcfs),,
    TimeSeriesDatabase db = null;
    if (args.Length == 3)
    {
        SQLiteServer svr = new SQLiteServer(args[2]);
        db = new TimeSeriesDatabase(svr);
    }
    var rows = csv.Select("interval = '" + args[1] + "'");
    var interval = TimeInterval.Daily;
    if (args[1] == "hourly")
        interval = TimeInterval.Hourly;
    Console.WriteLine("Processing " + rows.Length + " parameters");
    for (int i = 0; i < rows.Length; i++)
    {
        var url = rows[i]["url"].ToString();
        var cbtt = rows[i]["cbtt"].ToString();
        var pcode = rows[i]["pcode"].ToString();
        string[] headers = GetHeaders(rows[i]);
        var soffset = rows[i]["offset"].ToString();
        double offset = 0;
        if (soffset.Trim() != "")
        {
            // csv data is machine-generated; parse culture-invariantly so a
            // comma-decimal locale cannot change the result.
            offset = double.Parse(soffset, System.Globalization.CultureInfo.InvariantCulture);
        }
        var s = ProcessFile(url, interval, cbtt, pcode, offset, true, headers);
        if (db != null)
        {
            SaveToDatabase(args, db, cbtt, pcode, s);
        }
    }
}
/// <summary>
/// Constructs a SqlSeries bound to an existing series (identified by its
/// series-data id) and hydrates its properties from the database catalog.
/// </summary>
public SqlSeries(TimeSeriesDatabase db, int sdi)
{
    m_db = db;
    m_sdi = sdi;
    // set properties from the database
    db.UpdateSeriesProperties(this, sdi);
}
/// <summary>Runs the shared database test suite against a local MySQL server.</summary>
public void TestMySQL()
{
    var server = MySqlServer.GetMySqlServer("localhost", "timeseries");
    BasicDatabaseTest(new TimeSeriesDatabase(server, false));
}
/// <summary>
/// Fixture setup: points the shared db field at a fresh temp SQLite database
/// configured for table-name series lookups.
/// </summary>
public TestMidnightCalculation()
{
    FileUtility.CleanTempPath();
    var dbFile = FileUtility.GetTempFileName(".pdb");
    Console.WriteLine(dbFile);
    db = new TimeSeriesDatabase(new SQLiteServer(dbFile),
        Reclamation.TimeSeries.Parser.LookupOption.TableName, false);
}
/// <summary>
/// Quality-limit checker. Lazily loads the quality_limit table into m_limit on
/// first construction and reuses it afterwards (m_limit appears to be shared
/// across instances; NOTE(review): the lazy init is not thread-safe -- confirm
/// single-threaded use).
/// </summary>
public Quality( TimeSeriesDatabase db)
{
    m_db = db;
    if (m_limit == null)
    {
        m_limit = new TimeSeriesDatabaseDataSet.quality_limitDataTable();
        m_db.Server.FillTable(m_limit);
    }
}
/// <summary>
/// Creates a FileImporter for the given database. Optional pcode/site
/// whitelists are read from the ValidLoggerNetPcodes / ValidLoggerNetSites
/// app settings when present.
/// </summary>
public FileImporter(Reclamation.TimeSeries.TimeSeriesDatabase db, DatabaseSaveOptions saveOption = DatabaseSaveOptions.UpdateExisting)
{
    m_db = db;
    m_saveOption = saveOption;

    var pcodeSetting = ConfigurationManager.AppSettings["ValidLoggerNetPcodes"];
    if (pcodeSetting != null)
    {
        validPcodes = pcodeSetting.Split(',');
    }

    var siteSetting = ConfigurationManager.AppSettings["ValidLoggerNetSites"];
    if (siteSetting != null)
    {
        // strip whitespace/newlines so multi-line config entries parse cleanly
        validSites = siteSetting.Replace(" ", "").Replace("\r\n", "").Split(',');
    }
}
/// <summary>
/// Creates a FileImporter for the given database, reading optional pcode/site
/// whitelists from app settings.
/// </summary>
public FileImporter(Reclamation.TimeSeries.TimeSeriesDatabase db)
{
    m_db = db;
    string pcodes = ConfigurationManager.AppSettings["ValidLoggerNetPcodes"];
    string sites = ConfigurationManager.AppSettings["ValidLoggerNetSites"];
    if (pcodes != null)
    {
        validPcodes = pcodes.Split(',');
    }
    if (sites != null)
    {
        // normalize away spaces/newlines before splitting
        validSites = sites.Replace(" ", "").Replace("\r\n", "").Split(',');
    }
}
/// <summary>
/// Read-only daily series backed by an URGWOM Excel workbook; sheet/column
/// locations come from the connection string.
/// </summary>
public UrgwomSeries(TimeSeriesDatabase db, TimeSeriesDatabaseDataSet.SeriesCatalogRow sr)
    : base(db, sr)
{
    // fixed characteristics of this provider
    TimeInterval = TimeSeries.TimeInterval.Daily;
    ExternalDataSource = true;
    ReadOnly = true;
    Provider = "UrgwomSeries";
    ScenarioName = "ScenarioName";

    // workbook location and layout from the connection string
    xlsFilename = ConnectionStringUtility.GetFileName(ConnectionString, m_db.DataSource);
    sheetName = ConnectionStringToken("SheetName");
    dateColumn = ConnectionStringToken("DateColumn");
    valueColumn = ConnectionStringToken("ValueColumn");
}
/// <summary>
/// Reads a MODSIM .xy model plus its OUTPUT database (.accdb preferred, else
/// .mdb) and builds a pisces folder tree (reservoirs, demands, river links,
/// totals) under <paramref name="root"/>, saving the resulting catalog rows.
/// NOTE(review): relies heavily on static fields (mi, s_db, sdi, seriesCatalog,
/// m_db, dir, ...) shared with the *Tree() helpers -- not reentrant.
/// </summary>
/// <exception cref="FileNotFoundException">the .xy file or MODSIM output database is missing</exception>
public static void CreatePiscesTree(string fileName, PiscesFolder root, TimeSeriesDatabase db)
{
    mi = new Model();
    s_db = db;
    sdi = db.NextSDI();
    studyFolderID = sdi;
    int parentID = root.ID;
    seriesCatalog = new TimeSeriesDatabaseDataSet.SeriesCatalogDataTable();
    if (File.Exists(fileName))
    {
        XYFileReader.Read(mi, fileName);
        m_xyFilename = Path.GetFileNameWithoutExtension(fileName);
    }
    else
    {
        throw new FileNotFoundException("Modsim xy file is not found " + fileName);
    }
    // MODSIM output db lives next to the xy file as "<name>OUTPUT.mdb/.accdb"
    string mdbJetName = Path.Combine(Path.GetDirectoryName(fileName), m_xyFilename + "OUTPUT.mdb");
    string mdbAceName = Path.Combine(Path.GetDirectoryName(fileName), m_xyFilename + "OUTPUT.accdb");
    if (File.Exists(mdbAceName))
    {
        m_databaseName = mdbAceName;
    }
    else
        m_databaseName = mdbJetName;
    if (File.Exists(m_databaseName))
    {
        m_db = new AccessDB(m_databaseName);
        dir = Path.GetDirectoryName(Path.GetFullPath(m_databaseName));
        //AddNewRow(sdi,parentID,true, "", mi.name, "");
        // study folder row, then the per-category subtrees
        AddNewRow(sdi, parentID, true, "", Path.GetFileNameWithoutExtension(fileName), "");
        ReservoirsTree();
        DemandsTree();
        RiverLinksTree();
        TotalsTree();
    }
    else
    {
        throw new FileNotFoundException(" MODSIM output not found " + m_databaseName);
    }
    //DataTableOutput.Write(seriesCatalog, @"C:\temp\a.csv",false);
    db.Server.SaveTable(seriesCatalog);
    db.RefreshFolder(root);
}
/// <summary>
/// End-to-end import test: imports a decodes file for site "lapo", rates gage
/// height (gh) to flow (q) through a temp rating table, and verifies that the
/// configured quality limits produce the expected '+'/'-' flags on both the
/// raw gh series and the computed q series.
/// </summary>
public void ImportDecodesAndProcessWithFlagLimits()
{
    Logger.EnableLogger();
    FileUtility.CleanTempPath();
    var fn1 = FileUtility.GetTempFileName(".pdb");
    Console.WriteLine(fn1);
    var svr = new SQLiteServer(fn1);
    var db = new TimeSeriesDatabase(svr, Reclamation.TimeSeries.Parser.LookupOption.TableName, false);
    var tmpDir = CopyTestDecodesFileToTempDirectory("decodes_lapo.txt");
    // rating table: gh values 3.50..5.54 map to flows 1..10
    var rtlapo = CreateTempRatingTable("lapo.csv",
        new double[] { 3.50, 3.54, 3.55, 5.54 },
        new double[] { 1, 2, 3, 10 });
    // set limits gh: low=3.53, high 3.6, rate of change/hour 1
    Quality q = new Quality(db);
    q.SaveLimits("instant_lapo_gh", 3.6, 3.53, 1.0);
    q.SaveLimits("instant_lapo_q", 5, 1.1, 0);
    var site = db.GetSiteCatalog();
    site.AddsitecatalogRow("lapo", "", "OR");
    db.Server.SaveTable(site);
    // calculated flow series; %site% is substituted from SiteID
    var c = new CalculationSeries("instant_lapo_q");
    c.SiteID = "lapo";
    c.Expression = "FileRatingTable(%site%_gh,\"" + rtlapo + "\")";
    db.AddSeries(c);
    //SeriesExpressionParser.Debug = true;
    FileImporter import = new FileImporter(db);
    import.Import(tmpDir, RouteOptions.None, computeDependencies: true, searchPattern: "*.txt");
    db.Inventory();
    // gage-height flags: '+' above high limit, '-' below low limit
    var s = db.GetSeriesFromTableName("instant_lapo_gh");
    var expectedFlags = new string[] { "", "", "", "+", "", "", "", "-" };
    for (int i = 0; i < s.Count; i++)
    {
        Assert.AreEqual(expectedFlags[i], s[i].Flag, " flag not expected ");
    }
    s = db.GetSeriesFromTableName("instant_lapo_q");
    s.Read();
    Assert.IsTrue(s.Count > 0, "No flow data computed lapo");
    s.WriteToConsole(true);
    // computed flows should be: 2 2 2 10 2 2 1
    expectedFlags = new string[] { "", "", "", "+", "", "", "", "-" }; //q>=1 and q<= 5
    for (int i = 0; i < s.Count; i++)
    {
        Assert.AreEqual(expectedFlags[i], s[i].Flag.Trim(), " Flag check on Flow (Q) ");
    }
    SeriesExpressionParser.Debug = false;
}
/// <summary>
/// Imports a decodes file whose parameter name contains an underscore and
/// runs the database inventory; log output is captured via Logger_OnLogEvent.
/// </summary>
public void ImportParameterWithUnderscore()
{
    Logger.OnLogEvent += Logger_OnLogEvent;
    var dbFile = FileUtility.GetTempFileName(".pdb");
    Console.WriteLine(dbFile);
    var database = new TimeSeriesDatabase(new SQLiteServer(dbFile),
        Reclamation.TimeSeries.Parser.LookupOption.TableName, false);
    var importDir = TestRatingTableDependency.CopyTestDecodesFileToTempDirectory("instant_20150708152901.txt");
    var importer = new FileImporter(database);
    importer.Import(importDir, RouteOptions.None, computeDependencies: true, searchPattern: "*.txt");
    database.Inventory();
    //Assert.IsFalse(anyErrors);
}
/// <summary>
/// Runs the shared database test suite against PostgreSQL, dropping every
/// existing table first so the test starts from an empty schema.
/// </summary>
public void TestPostgresql()
{
    // using database nunit owned by user running the test
    Logger.EnableLogger();
    var server = TestPostgreSQL.GetPGServer() as PostgreSQL;
    foreach (var tableName in server.TableNames())
    {
        server.RunSqlCommand("drop table \"" + tableName + "\"");
        Console.WriteLine(tableName);
    }
    BasicDatabaseTest(new TimeSeriesDatabase(server, false));
}
/// <summary>
/// Imports daily data from Hydromet into TimeSeriesDatabase, reading the
/// archive in query blocks over [t1, t2] and saving each block.
/// </summary>
/// <param name="db">target database; also supplies the series to query</param>
private static void ImportHydrometDaily(TimeSeriesDatabase db, DateTime t1, DateTime t2, string filter, string propertyFilter)
{
    Performance perf = new Performance();
    Console.WriteLine("ImportHydrometDaily");
    int blockNumber = 1;
    foreach (string query in GetBlockOfQueries(db, TimeInterval.Daily, filter, propertyFilter))
    {
        var table = HydrometDataUtility.ArchiveTable(HydrometHost.PN, query, t1, t2, 0);
        Console.WriteLine("Block " + blockNumber + " has " + table.Rows.Count + " rows ");
        Console.WriteLine(query);
        SaveTableToSeries(db, table, TimeInterval.Daily);
        blockNumber++;
    }
    perf.Report("Finished importing daily data"); // 15 seconds
}
/// <summary>
/// Walks the forecast configuration table and adds one forecast series per row
/// (rows with an empty VicName are skipped).
/// </summary>
private static void AddForecastsToPiscesDatabase(TimeSeriesDatabase dbVic, DataTable forecasts, string period, string scenario)
{
    foreach (DataRow row in forecasts.Rows)
    {
        string vicName = row["VicName"].ToString().Trim();
        if (string.IsNullOrEmpty(vicName))
        {
            continue; // no VIC source series configured for this row
        }
        string modsimNode = row["ModsimNode"].ToString().Trim();
        int thruMonth = Convert.ToInt32(row["ThruMonth"].ToString().Trim());
        AddForecastSeries(period, scenario, dbVic, modsimNode, thruMonth, vicName);
    }
}
/// <summary>
/// Read-only series backed by a CR10X logger file; the file name (resolved
/// relative to the database location when not rooted), interval, and column
/// number come from the connection string.
/// </summary>
public Cr10xSeries(TimeSeriesDatabase db, Reclamation.TimeSeries.TimeSeriesDatabaseDataSet.SeriesCatalogRow sr)
    : base(db, sr)
{
    ExternalDataSource = true;
    ReadOnly = true;
    m_filename = ConnectionStringUtility.GetToken(ConnectionString, "FileName", "");
    if (!Path.IsPathRooted(m_filename))
    {
        // relative paths are resolved against the database's directory
        string baseDir = Path.GetDirectoryName(m_db.DataSource);
        m_filename = Path.Combine(baseDir, m_filename);
    }
    m_interval = ConnectionStringToken("Interval");
    m_columnNumber = Convert.ToInt32(ConnectionStringUtility.GetToken(ConnectionString, "ColumnNumber", ""));
    InitTimeSeries(null, this.Units, this.TimeInterval, this.ReadOnly, false, true);
    Appearance.LegendText = Name;
}
/// <summary>
/// Yields comma-separated "siteid pcode" query blocks of up to
/// <paramref name="blockSize"/> entries for the series matching the given
/// interval/filter. Quality parameters are skipped when
/// <paramref name="ignoreQuality"/> is true (the default).
/// </summary>
public static IEnumerable<String> GetBlockOfQueries(TimeSeriesDatabase db, TimeInterval interval, string filter, string propertyFilter = "", int blockSize = 75, bool ignoreQuality = true)
{
    var rval = new List<string>();
    foreach (Series s in db.GetSeries(interval, filter, propertyFilter).ToArray())
    {
        TimeSeriesName tn = new TimeSeriesName(s.Table.TableName);
        //rval.Add(s.SiteID + " " + s.Parameter);
        // Fix: the ignoreQuality argument was previously accepted but ignored;
        // honoring it is backward compatible since the default is true.
        if (ignoreQuality && Array.IndexOf(s_quality_parameters, tn.pcode.ToLower()) >= 0)
            continue; // skip quality parameters
        rval.Add(tn.siteid + " " + tn.pcode);
        if (rval.Count >= blockSize)
        {
            yield return String.Join(",", rval.ToArray());
            rval.Clear();
        }
    }
    // Bug fix: the original yielded unconditionally here, emitting an empty
    // query string when the series count was an exact multiple of blockSize
    // (or when nothing matched at all).
    if (rval.Count > 0)
        yield return String.Join(",", rval.ToArray());
}
/// <summary>
/// Imports instant (15-minute) data from Hydromet into TimeSeriesDatabase,
/// splitting [start, end] into 30-day windows and reading each window in
/// query blocks.
/// </summary>
/// <param name="db">target database; also supplies the series to query</param>
private static void ImportHydrometInstant(TimeSeriesDatabase db, DateTime start, DateTime end, string filter, string propertyFilter)
{
    // TO DO.. the outer loop of Date ranges (t,t3) could
    // be generated as a separate task.
    Console.WriteLine("ImportHydrometInstant");
    TimeRange timeRange = new TimeRange(start, end);
    foreach (TimeRange window in timeRange.Split(30))
    {
        int blockNumber = 1;
        foreach (string query in GetBlockOfQueries(db, TimeInterval.Irregular, filter, propertyFilter))
        {
            Console.WriteLine("Reading " + window.StartDate + " to " + window.EndDate);
            var table = HydrometDataUtility.DayFilesTable(HydrometHost.PN, query, window.StartDate, window.EndDate, 0);
            Console.WriteLine("Block " + blockNumber + " has " + table.Rows.Count + " rows ");
            Console.WriteLine(query);
            SaveTableToSeries(db, table, TimeInterval.Irregular);
            blockNumber++;
        }
    }
    Console.WriteLine("Finished importing 15-minute data");
}
/// <summary>
/// Shared smoke test run against any backing server: adds a Hydromet daily
/// series, reads a 20-day window (21 inclusive days), then extends the window
/// by a day to force an auto-update and checks the count and period of record.
/// NOTE(review): depends on live Hydromet data being present for jck/af.
/// </summary>
public static void BasicDatabaseTest(TimeSeriesDatabase db)
{
    Assert.IsTrue(db.GetSeriesCatalog().Rows.Count == 1, " initial catalog should have root");
    Reclamation.TimeSeries.Hydromet.HydrometInfoUtility.AutoUpdate = true;
    DateTime t2 = DateTime.Now.Date.AddDays(-10);
    DateTime t1 = DateTime.Now.Date.AddDays(-30);
    Series s = new HydrometDailySeries("jck", "af");
    int id = db.AddSeries(s);
    s = db.GetSeries(id);
    s.Read(t1, t2);
    s.WriteToConsole();
    // 30..10 days ago inclusive = 21 daily values
    Assert.AreEqual(21, s.Count);
    s.Read(t1, DateTime.Now.Date.AddDays(-9)); //force auto update.(HydrometDaily supports this)
    // check if auto update worked.
    Assert.AreEqual(22, s.Count);
    Assert.AreEqual(2, db.GetSeriesCatalog().Rows.Count, "Catalog row count");
    var por = s.GetPeriodOfRecord();
    Assert.AreEqual(22, por.Count, "period of record");
}
/// <summary>
/// Regression test: after adding a CalculationSeries ("add_test" = pal_af +
/// pal_af) and calculating it, re-reading the series from the database must
/// return the persisted values.
/// </summary>
public void AddBug()
{
    DateTime windowStart = new DateTime(2013, 1, 1);
    DateTime windowEnd = windowStart.AddDays(365);
    string piscesFile = FileUtility.GetTempFileNameInDirectory(@"c:\temp\", ".pdb");
    TimeSeriesDatabase db = new Reclamation.TimeSeries.TimeSeriesDatabase(new SQLiteServer(piscesFile), false);

    // load one year of daily acre-feet for PAL as the calculation input
    var palAf = new HydrometDailySeries("pal", "af");
    palAf.Name = "pal_af";
    palAf.Read(windowStart, windowEnd);
    db.AddSeries(palAf);

    var addTest = new CalculationSeries("add_test");
    addTest.Expression = "pal_af + pal_af";
    db.AddSeries(addTest);

    addTest = db.GetSeriesFromName("add_test") as CalculationSeries;
    addTest.Calculate(windowStart, windowEnd);

    // fetch a fresh instance and read back: data must have been saved
    addTest = db.GetSeriesFromName("add_test") as CalculationSeries;
    addTest.Read();
    Assert.IsTrue(addTest.Count > 0);
}
/// <summary>
/// Creates a monthly CalculationSeries for a MODSIM node and computes it.
/// Gain nodes clamp to non-negative (Max(expr, 0)); Negative nodes take the
/// magnitude of the negative part (Abs(Min(expr, 0))).
/// NOTE(review): for any other ModsimType value the Expression is never set --
/// confirm Gain/Negative are the only types routed here.
/// </summary>
private static void AddSeries(string period, string scenario, TimeSeriesDatabase db, VariableResolver vr, string name, string equation, ModsimType mType)
{
    // "nan" is used in the config as an explicit skip marker
    if (name.Trim().ToLower() == "nan")
    {
        return;
    }
    if (name.Trim() == "")
    {
        Console.WriteLine("--- WARNING modsim Node Name is missing.  Type = " + mType.ToString());
        return;
    }
    CalculationSeries cs = new CalculationSeries(name);
    cs.TimeInterval = TimeInterval.Monthly;
    if (mType == ModsimType.Gain)
    {
        cs.Expression = string.Format("Max({0}, 0)", equation);
    }
    if (mType == ModsimType.Negative)
    {
        cs.Expression = string.Format("Abs(Min({0}, 0))", equation);
    }
    cs.TimeSeriesDatabase = db;
    cs.Parser.VariableResolver = vr;
    cs.Units = "cfs";
    db.AddSeries(cs); // add series before calculate to get an id assigned.
    cs.Calculate(); // Calculate also saves the data.
    SetSeriesDatesToBeginningOfMonth(cs);
    ConsolePrintSeriesNameAndCount(cs);
}
/// <summary>
/// Verifies FileLookupInterpolate2D inside a CalculationSeries: rir_q is the
/// sum of two 2-D table lookups (forebay vs. gate openings ra/rb); importing
/// forebay data with computeDependencies:true must trigger the calculation,
/// and results are checked against known Hydromet flows.
/// </summary>
public void Interpolate2DWithDatabase()
{
    Logger.EnableLogger();
    var fn = FileUtility.GetTempFileName(".pdb");
    File.Delete(fn);
    SQLiteServer svr = new SQLiteServer(fn);
    var db = new TimeSeriesDatabase(svr, false);
    var c = new CalculationSeries("rir_q");
    var path = Path.Combine(TestData.DataPath, "rating_tables");
    path = Path.Combine(path, "rir_q.txt");
    // total flow = gate A lookup + gate B lookup, both 2-D interpolations
    c.Expression = "FileLookupInterpolate2D(rir_fb, rir_ra, \"" + path + "\")"
        + " + FileLookupInterpolate2D(rir_fb, rir_rb, \"" + path + "\")";
    c.TimeInterval = TimeInterval.Irregular;
    db.AddSeries(c);
    var fb = new Series("rir_fb");
    fb.TimeInterval = TimeInterval.Irregular;
    db.AddSeries(fb);
    fb.Add("6-1-2011", 5110.99);
    fb.Add("6-2-2011", 5111.31);
    fb.Add("6-3-2011", 5111.71);
    fb.Add("6-4-2011", 5112.09);
    var ra = new Series("rir_ra");
    ra.TimeInterval = TimeInterval.Irregular;
    ra.Add("6-1-2011", 2.1);
    ra.Add("6-2-2011", 1.29);
    ra.Add("6-3-2011", 1.29);
    ra.Add("6-4-2011", 1.29);
    db.AddSeries(ra);
    var rb = new Series("rir_rb");
    rb.TimeInterval = TimeInterval.Irregular;
    rb.Add("6-1-2011", 2.1);
    rb.Add("6-2-2011", 1.28);
    rb.Add("6-3-2011", 1.28);
    rb.Add("6-4-2011", 1.28);
    db.AddSeries(rb);
    TimeSeriesImporter ti = new TimeSeriesImporter(db);
    ti.Import(fb, computeDependencies: true);// this should force a calculation...
    var q = db.GetSeriesFromTableName("rir_q");
    Assert.NotNull(q, "Series not created");
    q.Read();
    /*
     * Flows from Hydromet
     * 6-1-2011, 1009.87
     * 6-2-2011, 602.24
     * 6-3-2011, 603.32
     * 6-4-2011, 604.34
     */
    Assert.AreEqual(4, q.Count);
    Assert.AreEqual(1009.87, System.Math.Round(q[0].Value, 2));
    Assert.AreEqual(603.32, System.Math.Round(q[2].Value, 2));
}
/// <summary>
/// Loads the entire siteproperties table from the given database into this
/// DataTable instance.
/// </summary>
public sitepropertiesDataTable(TimeSeriesDatabase db) : base()
{
    m_db = db;
    db.Server.FillTable(this, "Select * from siteproperties");
    this.TableName = "siteproperties";
}
/// <summary>
/// Prepares a transfer helper bound to the given database, caching its site
/// properties table.
/// </summary>
public TimeSeriesTransfer(TimeSeriesDatabase db)
{
    m_db = db;
    m_siteproperty = db.GetSiteProperties();
}
/// <summary>Creates a JSON site exporter bound to the given database.</summary>
public JSONSites(TimeSeriesDatabase db)
{
    this.db = db;
}
// ENTRY POINT FOR PROCESSING
/// <summary>
/// Builds a fresh pisces database at <paramref name="piscesFile"/> and fills it
/// with Rogue River BiOP data over [t1, t2]: raw instant series, hourly
/// aggregates, and ramping-rate/flow check series for EMI, BASO, BCTO, GILO
/// and ANTO.
/// </summary>
static void ProcessRogueBiOP(DateTime t1, DateTime t2, string piscesFile)
{
    // Toggle to read flagged data
    HydrometInstantSeries.KeepFlaggedData = true;

    // Create pisces database to store data (start from a clean file)
    if (File.Exists(piscesFile))
        File.Delete(piscesFile);
    var DB = new SQLiteServer(piscesFile);
    var pDB = new TimeSeriesDatabase(DB);

    // PROCESS INSTANT DATA
    PiscesFolder rFldr = pDB.AddFolder("RawData");
    Console.Write("Processing Instant Series... ");
    var emiQ = GetInstantSeries("EMI", "Q", t1, t2, pDB, rFldr);
    var emiQC = GetInstantSeries("EMI", "QC", t1, t2, pDB, rFldr);
    var basoQ = GetInstantSeries("BASO", "Q", t1, t2, pDB, rFldr);
    var taloQC = GetInstantSeries("TALO", "QC", t1, t2, pDB, rFldr);
    var bctoQ = GetInstantSeries("BCTO", "Q", t1, t2, pDB, rFldr);
    var phxoQC = GetInstantSeries("PHXO", "QC", t1, t2, pDB, rFldr);
    var giloQ = GetInstantSeries("GILO", "Q", t1, t2, pDB, rFldr);
    var giloGH = GetInstantSeries("GILO", "GH", t1, t2, pDB, rFldr);
    var dicoQC = GetInstantSeries("DICO", "QC", t1, t2, pDB, rFldr);
    var slboQC = GetInstantSeries("SLBO", "QC", t1, t2, pDB, rFldr);
    var antoQ = GetInstantSeries("ANTO", "Q", t1, t2, pDB, rFldr);
    var antoQC = GetInstantSeries("ANTO", "QC", t1, t2, pDB, rFldr);
    var antoGH = GetInstantSeries("ANTO", "GH", t1, t2, pDB, rFldr);
    Console.WriteLine("Done importing instant data!");

    // PROCESS HOURLY DATA
    PiscesFolder dFldr = pDB.AddFolder("HourlyData");
    Console.WriteLine("");
    // Fix: this string literal was split across physical lines in the original
    // (an invalid string constant, likely from an editing accident); rejoined.
    Console.Write("Processing Hourly Series... ");
    var emiQ_h = ProcessHourlySeries(emiQ, "EMI_Q", pDB, dFldr);
    var emiQC_h = ProcessHourlySeries(emiQC, "EMI_QC", pDB, dFldr);
    var basoQ_h = ProcessHourlySeries(basoQ, "BASO_Q", pDB, dFldr);
    var taloQC_h = ProcessHourlySeries(taloQC, "TALO_QC", pDB, dFldr);
    var bctoQ_h = ProcessHourlySeries(bctoQ, "BCTO_Q", pDB, dFldr);
    var phxoQC_h = ProcessHourlySeries(phxoQC, "PHXO_QC", pDB, dFldr);
    var giloQ_h = ProcessHourlySeries(giloQ, "GILO_Q", pDB, dFldr);
    var giloGH_h = ProcessHourlySeries(giloGH, "GILO_GH", pDB, dFldr);
    var dicoQC_h = ProcessHourlySeries(dicoQC, "DICO_QC", pDB, dFldr);
    var slboQC_h = ProcessHourlySeries(slboQC, "SLBO_QC", pDB, dFldr);
    var antoQ_h = ProcessHourlySeries(antoQ, "ANTO_Q", pDB, dFldr);
    var antoQC_h = ProcessHourlySeries(antoQC, "ANTO_QC", pDB, dFldr);
    var antoGH_h = ProcessHourlySeries(antoGH, "ANTO_GH", pDB, dFldr);
    Console.WriteLine("Done computing hourly data!");

    // CHECK BIOP STUFF
    Console.WriteLine("");
    Console.WriteLine("Data Processing: Checking Ramping Rates and Flows...");
    Console.WriteLine("");
    PiscesFolder ckFldr = pDB.AddFolder("RampingRateChecks");

    Console.WriteLine("Checking EMI flows");
    Series EMIHourlyDownRamp = CheckEMIHourlyDownRampingRate(emiQ_h);
    EMIHourlyDownRamp = CheckSourceSeries(emiQ_h, EMIHourlyDownRamp);
    pDB.AddSeries(EMIHourlyDownRamp, ckFldr);
    Series EMIDailyDownRamp = CheckEMIDailyDownRampingRate(emiQ_h);
    EMIDailyDownRamp = CheckSourceSeries(emiQ_h, EMIDailyDownRamp);
    pDB.AddSeries(EMIDailyDownRamp, ckFldr);
    Series EMIHourlyUpRamp = CheckEMIUpRampingRate(emiQ_h);
    EMIHourlyUpRamp = CheckSourceSeries(emiQ_h, EMIHourlyUpRamp);
    pDB.AddSeries(EMIHourlyUpRamp, ckFldr);

    Console.WriteLine("Checking BASO flows");
    Series BASOHourlyDownRamp = CheckBASODownRampingRate(basoQ_h, taloQC_h);
    BASOHourlyDownRamp = CheckSourceSeries(basoQ_h, BASOHourlyDownRamp);
    BASOHourlyDownRamp = CheckSourceSeries(taloQC_h, BASOHourlyDownRamp);
    pDB.AddSeries(BASOHourlyDownRamp, ckFldr);

    Console.WriteLine("Checking BCTO flows");
    Series BCTOHourlyDownRamp = CheckBCTODownRampingRate(bctoQ_h, phxoQC_h);
    BCTOHourlyDownRamp = CheckSourceSeries(bctoQ_h, BCTOHourlyDownRamp);
    BCTOHourlyDownRamp = CheckSourceSeries(phxoQC_h, BCTOHourlyDownRamp);
    pDB.AddSeries(BCTOHourlyDownRamp, ckFldr);

    Console.WriteLine("Checking GILO flows and gage height");
    Series GILOUpRamp = new Series();
    Series GILODownRamp = new Series();
    CheckGILOFlowRampingRate(giloQ_h, slboQC_h, dicoQC_h, out GILODownRamp, out GILOUpRamp);
    GILODownRamp = CheckSourceSeries(giloQ_h, GILODownRamp);
    GILODownRamp = CheckSourceSeries(slboQC_h, GILODownRamp);
    GILODownRamp = CheckSourceSeries(dicoQC_h, GILODownRamp);
    GILOUpRamp = CheckSourceSeries(giloQ_h, GILOUpRamp);
    GILOUpRamp = CheckSourceSeries(slboQC_h, GILOUpRamp);
    GILOUpRamp = CheckSourceSeries(dicoQC_h, GILOUpRamp);
    pDB.AddSeries(GILOUpRamp, ckFldr);
    pDB.AddSeries(GILODownRamp, ckFldr);
    Series GILOGageUpRamp = CheckGILOGageRampingRate(giloGH_h, slboQC_h, dicoQC_h);
    GILOGageUpRamp = CheckSourceSeries(giloGH_h, GILOGageUpRamp);
    GILOGageUpRamp = CheckSourceSeries(slboQC_h, GILOGageUpRamp);
    GILOGageUpRamp = CheckSourceSeries(dicoQC_h, GILOGageUpRamp);
    pDB.AddSeries(GILOGageUpRamp, ckFldr);

    Console.WriteLine("Checking ANTO flows and gage height");
    Series ANTOUpRamp = new Series();
    Series ANTODownRamp = new Series();
    CheckANTOFlowRampingRate(antoQ_h, antoQC_h, out ANTODownRamp, out ANTOUpRamp);
    ANTODownRamp = CheckSourceSeries(antoQ_h, ANTODownRamp);
    ANTODownRamp = CheckSourceSeries(antoQC_h, ANTODownRamp);
    ANTOUpRamp = CheckSourceSeries(antoQ_h, ANTOUpRamp);
    ANTOUpRamp = CheckSourceSeries(antoQC_h, ANTOUpRamp);
    pDB.AddSeries(ANTOUpRamp, ckFldr);
    pDB.AddSeries(ANTODownRamp, ckFldr);
    Series ANTOGageUpRamp = CheckANTOGageRampingRate(antoGH_h, antoQC_h);
    ANTOGageUpRamp = CheckSourceSeries(antoGH_h, ANTOGageUpRamp);
    ANTOGageUpRamp = CheckSourceSeries(antoQC_h, ANTOGageUpRamp);
    pDB.AddSeries(ANTOGageUpRamp, ckFldr);
}
/// <summary>
/// Writes a watershed's ensemble forecasts to the timeseries_blob table of the
/// SQLite file, batching inserts every 1000 rows; optionally also builds a
/// pisces catalog (folder per location, series rows pointing at the blobs via
/// connection strings).
/// </summary>
public static void WriteWithDataTable(string filename, Watershed watershed, bool compress = false, bool createPiscesDB = false)
{
    var server = SqLiteEnsemble.GetServer(filename);
    byte[] uncompressed = null;
    Reclamation.TimeSeries.TimeSeriesDatabase db;
    int locIdx = 1;
    int WatershedFolderIndex = 1;
    int scIndex = 0;
    int rowCounter = 0;
    Reclamation.TimeSeries.TimeSeriesDatabaseDataSet.SeriesCatalogDataTable sc = null;
    if (createPiscesDB)
    {
        db = new Reclamation.TimeSeries.TimeSeriesDatabase(server);
        // limit how much we query.
        //var where = "id = (select max(id) from seriescatalog) or id = parentid";
        var where = "id = (select max(id) from seriescatalog)";
        sc = db.GetSeriesCatalog(where);
        WatershedFolderIndex = sc.AddFolder(watershed.Name); // creates root level folder
        scIndex = WatershedFolderIndex + 2;
    }
    else
    {
    }
    var timeSeriesTable = GetBlobTable(server);
    int index = server.NextID("timeseries_blob", "id");
    foreach (Location loc in watershed.Locations)
    {
        if (createPiscesDB)
        {
            // one catalog folder per location
            locIdx = sc.AddFolder(loc.Name, ++scIndex, WatershedFolderIndex);
        }
        foreach (Forecast f in loc.Forecasts)
        {
            var t = f.IssueDate;
            var timeseries_start_date = f.TimeStamps[0];
            index++;
            var row = timeSeriesTable.NewRow();// create rows in separate loop first
            row["id"] = index;
            row["issue_date"] = f.IssueDate;
            row["watershed"] = watershed.Name;
            row["location_name"] = loc.Name;
            row["timeseries_start_date"] = timeseries_start_date;
            // ensemble dimensions: [member, timestep]
            row["member_length"] = f.Ensemble.GetLength(1);
            row["member_count"] = f.Ensemble.GetLength(0);
            row["compressed"] = compress ? 1 : 0;
            row["byte_value_array"] = ConvertToBytes(f.Ensemble, compress, ref uncompressed);
            if (createPiscesDB)
            {
                // connection string lets pisces series locate their blob slice
                string connectionString = "timeseries_blobs.id=" + index
                    + ";member_length=" + f.Ensemble.GetLength(1)
                    + ";ensemble_member_index={member_index}"
                    + ";timeseries_start_date=" + timeseries_start_date.ToString("yyyy-MM-dd HH:mm:ss");
                scIndex = AddPiscesSeries(loc.Name, scIndex, sc, f, locIdx, connectionString);
            }
            timeSeriesTable.Rows.Add(row);
            rowCounter++;
            // flush in batches of 1000 rows to bound memory use
            if (rowCounter % 1000 == 0)
            {
                server.SaveTable(timeSeriesTable);
                timeSeriesTable.Rows.Clear();
                timeSeriesTable.AcceptChanges();
            }
        }
    }
    if (createPiscesDB)
    {
        server.SaveTable(sc);
    }
    server.SaveTable(timeSeriesTable);
}
/// <summary>
/// Resolves the backing file from the connection string and derives the
/// scenario name from that file's base name.
/// </summary>
private void Init(TimeSeriesDatabase db)
{
    var resolved = ConnectionStringUtility.GetFileName(ConnectionString, db.DataSource);
    fileName = resolved;
    ScenarioName = Path.GetFileNameWithoutExtension(resolved);
}
/// <summary>
/// Utility bound to a database and an URGWOM Excel workbook.
/// </summary>
public UrgwomUtility(TimeSeriesDatabase db, string excelFilename)
{
    m_db = db;
    this.excelFilename = excelFilename;
    xls = new ExcelUtility(excelFilename);
}
/// <summary>
/// Series backed by an external SQLite source; marks the data as external and
/// resolves the backing file via Init.
/// </summary>
public SQLiteSeries(TimeSeriesDatabase db, Reclamation.TimeSeries.TimeSeriesDatabaseDataSet.SeriesCatalogRow sr)
    : base(db, sr)
{
    ExternalDataSource = true;
    Init(db);
}
/// <summary>
/// Property accessor for one series: caches the database reference and the
/// (refreshed) series-properties table for the given series id.
/// </summary>
public SeriesProperties(int id, TimeSeriesDatabase db)
{
    m_db = db;
    m_seriesID = id;
    m_seriesProperties = db.GetSeriesProperties(true);
}
/// <summary>
/// Connects to the given server, wrapping it in a TimeSeriesDatabase and
/// applying the default configuration.
/// </summary>
public void Connect(BasicDBServer svr)
{
    var database = new TimeSeriesDatabase(svr, false);
    m_db = database;
    Defaults(database);
}
/// <summary>Creates a factory bound to the given database.</summary>
public PiscesFactory(TimeSeriesDatabase db)
{
    this.db = db;
}