// Exercises the SiteInfo API design: create a site ("BOII"), attach a series
// to it, then read the site metadata and the site's series list back.
// NOTE(review): this "design test" prints results but asserts nothing.
public void SiteInfoDesignTest()
{
    var fn = FileUtility.GetTempFileName(".pdb");
    SQLiteServer svr = new SQLiteServer(fn);
    var db = new TimeSeriesDatabase(svr,false);

    // register a site, then a series that points at it via SiteID
    var siteCatalog = db.GetSiteCatalog();
    siteCatalog.AddsitecatalogRow("BOII", "Boise station", "ID");
    svr.SaveTable(siteCatalog);
    var sc = db.GetSeriesCatalog();
    var s = new Series();
    s.SiteID = "BOII";
    sc.AddSeriesCatalogRow(s, sc.NextID(), db.GetRootObjects()[0].ID);
    sc.Save();

    var si = db.SiteInfo("BOII");
    bool idaho = si.state == "ID"; //idaho
    // NOTE(review): idaho and timezone are computed but never asserted —
    // presumably intentional API exploration; confirm or add assertions.
    var timezone = si.timezone;

    // edit a catalog row through the site's series list and save it
    TimeSeriesDatabaseDataSet.SeriesCatalogDataTable d = si.SeriesList();
    TimeSeriesDatabaseDataSet.SeriesCatalogRow row = d[0];
    row.Parameter = "Asce ET #5";
    d.Save();
    // round-trip check of the Parameter edit (printed, not asserted)
    Console.WriteLine(si.SeriesList()[0].Parameter);
    //var goodStats = (si.Parameters()[0].statistic == "Avg");
}
/// <summary>
/// Reads the VIC series named vicName+period+scenario from dbVic and writes a
/// monthly "<paramref name="name"/>_Forecast" series (units acre-feet) into the
/// per-period/scenario .pdb database. January–June points get a volume summed
/// through <paramref name="thruMonth"/>.
/// </summary>
private static void AddForecastSeries(string period, string scenario, TimeSeriesDatabase dbVic, string name, int thruMonth, string vicName)
{
    // target database: one .pdb per period/scenario combination
    var outputFile = period + scenario + ".pdb";
    var server = new SQLiteServer(outputFile);
    var targetDb = new TimeSeriesDatabase(server);

    var source = dbVic.GetSeriesFromName(vicName + period + scenario);
    source.Read();

    var forecast = new Series(name + "_Forecast");
    forecast.TimeInterval = TimeInterval.Monthly;
    forecast.TimeSeriesDatabase = targetDb;
    forecast.Units = "acre-feet";

    //initial model data start date and value
    // (1.98347 * days-in-month — presumably cfs-to-acre-feet conversion; confirm)
    forecast.Add(source[0].DateTime, source[0].Value * 1.98347 * source[0].DateTime.EndOfMonth().Day);

    // forecast points exist only for January through June
    for (int idx = 0; idx < source.Count; idx++)
    {
        var when = source[idx].DateTime;
        if (when.Month <= 6)
        {
            var pt = new Point();
            pt.DateTime = when;
            pt.Value = SumThruMonthToAcreFt(source, when, thruMonth);
            forecast.Add(pt);
        }
    }

    targetDb.AddSeries(forecast);
    SetSeriesDatesToBeginningOfMonth(forecast);
    ConsolePrintSeriesNameAndCount(forecast);
}
/// <summary>
/// Opens the pisces database m_dbName, reads each series listed in
/// m_seriesName over [m_t1, m_t2], and hands the list to the Riverware writer.
/// Throws if any named series is missing from the database.
/// </summary>
private void ReadFromPisces()
{
    Logger.WriteLine("opening " + m_dbName);
    var server = new SQLiteServer(m_dbName);
    var database = new TimeSeriesDatabase(server);
    var results = new SeriesList();
    for (int idx = 0; idx < m_seriesName.Count; idx++)
    {
        string seriesName = m_seriesName[idx];
        Logger.WriteLine("looking for series '" + seriesName + "'");
        var series = database.GetSeriesFromName(seriesName);
        // fail fast on a missing series rather than writing partial output
        if (series == null)
            throw new Exception("unable to find series '" + seriesName + "' in pisces database '" + m_dbName + "'");
        series.Read(m_t1, m_t2);
        results.Add(series);
    }
    WriteToRiverwareFiles(results);
}
/// <summary>
/// Test fixture setup: creates a fresh pisces database at C:\temp\factory.pdb
/// and copies the text/excel test inputs into tmpDir, repointing the filename
/// fields at the copies. Finally seeds some series for the export tests.
/// </summary>
public TimeSeriesDatabaseTest()
{
    if (!Directory.Exists(tmpDir))
    {
        Directory.CreateDirectory(tmpDir);
    }
    string fn = Path.Combine(@"C:\temp", "factory.pdb");
    // NOTE: removed a stray FileUtility.GetTempFileNameInDirectory(@"C:\temp\", ".pdb")
    // call whose return value was discarded — it only leaked an unused temp file.
    SQLiteServer.CreateNewDatabase(fn);
    SQLiteServer svr = new SQLiteServer(fn);
    db = new TimeSeriesDatabase(svr, false);

    //string dataPath = ReclamationTesting.Properties.Settings.Default.DataPath;
    string dataPath = TestData.DataPath;

    // copy test inputs into tmpDir (overwrite any stale copy) and point the
    // filename fields at the copies so tests never touch the originals
    File.Copy(Path.Combine(dataPath, textFileName), Path.Combine(tmpDir, textFileName), true);
    textFileName = Path.Combine(tmpDir, textFileName);
    File.Copy(Path.Combine(dataPath, excelFileName), Path.Combine(tmpDir, excelFileName), true);
    excelFileName = Path.Combine(tmpDir, excelFileName);
    File.Copy(Path.Combine(dataPath, updatedExcelFileName), Path.Combine(tmpDir, updatedExcelFileName), true);
    updatedExcelFileName = Path.Combine(tmpDir, updatedExcelFileName);

    // Add some data for export test
    Series s;
    int c;
    int sdi;
    AddExcelSeries(out s, out c, out sdi);
    AddTextSeries(out s, out c, out sdi);
}
// End-to-end test: a CalculationSeries driven by a file rating table should be
// recomputed automatically when its input series is imported with
// computeDependencies: true.
public void ReservoirContentsWithDatabase()
{
    Logger.EnableLogger();

    // guaranteed-fresh temporary database
    var databaseFile = FileUtility.GetTempFileName(".pdb");
    System.IO.File.Delete(databaseFile);
    var server = new SQLiteServer(databaseFile);
    var database = new TimeSeriesDatabase(server, Reclamation.TimeSeries.Parser.LookupOption.TableName, false);

    // acre-feet series computed from instant_karl_fb via a file rating table
    var ratingPath = Path.Combine(Path.Combine(Globals.TestDataPath, "rating_tables"), "karl_af.txt");
    var calc = new CalculationSeries("instant_karl_af");
    calc.Expression = "FileRatingTable(instant_karl_fb,\"" + ratingPath + "\")";
    calc.TimeInterval = TimeInterval.Irregular;
    database.AddSeries(calc);

    var forebay = new Series("instant_karl_fb");
    forebay.TimeInterval = TimeInterval.Irregular;
    database.AddSeries(forebay);
    var dates = new[] { "1-1-2013", "1-2-2013", "1-3-2013", "1-4-2013" };
    for (int i = 0; i < dates.Length; i++)
        forebay.Add(dates[i], i + 1);

    // importing forebay data should force the dependent calculation
    var importer = new TimeSeriesImporter(database);
    importer.Import(forebay, computeDependencies: true);

    var result = database.GetSeriesFromTableName("instant_karl_af");
    Assert.NotNull(result, "Series not created");
    result.Read();
    Assert.AreEqual(4, result.Count);
    Assert.AreEqual(300, result[2].Value);
}
// Verifies InterpolateWithStyle: the interpolated output, summed over the
// estimate's range, must match the sum of the monthly estimate (to 2 places).
public void InterpolateWithStyle()
{
    // unzip the canned test database to a temporary .pdb
    string zipFile = Path.Combine(Globals.TestDataPath, "UofIDisaggregationTest.zip");
    var databaseFile = FileUtility.GetTempFileName(".pdb");
    ZipFile.UnzipFile(zipFile, databaseFile);

    var server = new Reclamation.Core.SQLiteServer(databaseFile);
    var database = new TimeSeriesDatabase(server);

    Series observed = database.GetSeriesFromName("SS_Day_Mean");
    observed.Read();
    Series estimated = database.GetSeriesFromName("TS_Mon_Mean");
    estimated.Read();

    Series interpolated = Reclamation.TimeSeries.Math.InterpolateWithStyle(observed, estimated, "2001-3-1", "2003-12-1");

    // Can't get the Series.Values.Sum() extension... workaround below.
    double estimatedSum = 0, interpolatedSum = 0;
    for (int i = 0; i < estimated.Count; i++)
    {
        estimatedSum += estimated[i].Value;
        interpolatedSum += interpolated[i].Value;
    }
    // totals must agree after rounding to 2 decimal places
    Assert.AreEqual(System.Math.Round(estimatedSum, 2), System.Math.Round(interpolatedSum, 2));
}
/// <summary>
/// Entry point: imports USACE site data listed in a csv catalog, hourly or
/// daily, optionally dumping results into a pisces database (third argument).
/// </summary>
static void Main(string[] args)
{
    Logger.EnableLogger();
    if (args.Length < 2 || args.Length > 3)
    {
        Console.WriteLine("Usage: GetUsace site_list.csv hourly|daily [dump.pdb] ");
        Console.WriteLine("Where: site_list.csv is a catalog of sites to import");
        Console.WriteLine("       hourly or daily data"); // typo fix: was "houly"
        Console.WriteLine("       dump.pdb creates a test pisces database for comparison to hydromet"); // fix: said "dump.db", inconsistent with the usage line
        return;
    }
    FileUtility.CleanTempPath();
    CsvFile csv = new CsvFile(args[0]);
    //interval,filename,cbtt,pcode,header1,header2,header3,header4,header5
    //instant,gcl_h.dat,GCL,FB,Forebay,(ft),,,
    //instant,gcl_h.dat,GCL,TW,Tailwatr,(ft),,,
    //instant,gcl_h.dat,GCL,QE,Generatn,Flow,(kcfs),,
    TimeSeriesDatabase db = null;
    if (args.Length == 3)
    {
        // optional third argument: dump imported data into a pisces database
        SQLiteServer svr = new SQLiteServer(args[2]);
        db = new TimeSeriesDatabase(svr);
    }
    // only process catalog rows matching the requested interval
    var rows = csv.Select("interval = '" + args[1] + "'");
    var interval = TimeInterval.Daily;
    if (args[1] == "hourly")
        interval = TimeInterval.Hourly;
    Console.WriteLine("Processing " + rows.Length + " parameters");
    for (int i = 0; i < rows.Length; i++)
    {
        var url = rows[i]["url"].ToString();
        var cbtt = rows[i]["cbtt"].ToString();
        var pcode = rows[i]["pcode"].ToString();
        string[] headers = GetHeaders(rows[i]);
        // blank offset column means no offset applied
        var soffset = rows[i]["offset"].ToString();
        double offset = 0;
        if (soffset.Trim() != "")
        {
            offset = double.Parse(soffset);
        }
        var s = ProcessFile(url, interval, cbtt, pcode, offset, true, headers);
        if (db != null)
        {
            SaveToDatabase(args, db, cbtt, pcode, s);
        }
    }
}
// Fixture setup: points the shared db field at a fresh temporary pisces
// database using table-name based series lookup.
public TestMidnightCalculation()
{
    FileUtility.CleanTempPath();
    string databaseFile = FileUtility.GetTempFileName(".pdb");
    Console.WriteLine(databaseFile);
    var server = new SQLiteServer(databaseFile);
    db = new TimeSeriesDatabase(server, Reclamation.TimeSeries.Parser.LookupOption.TableName, false);
}
/// <summary>
/// Resets the database file: closes any pooled connections, then deletes
/// <paramref name="filename"/> if it exists.
/// NOTE(review): despite the name, this method does not itself write a new
/// file — callers (see the test fixture) construct a fresh SQLiteServer
/// afterwards, which creates the database. Confirm the SQLiteServer
/// constructor here does not eagerly create the file before the delete.
/// </summary>
/// <param name="filename">path to the SQLite (.pdb) database file</param>
public static void CreateNewDatabase(string filename)
{
    // connection-string form so pooled connections can be closed below
    SQLiteServer db = new SQLiteServer("Data Source=" + filename + ";");
    if (File.Exists(filename))
    {
        // SQLite holds file locks via pooled connections; release them
        // before attempting the delete
        db.CloseAllConnections();
        File.Delete(filename);
    }
}
// End-to-end test: import decodes data for site 'lapo', compute flow (Q) from
// gage height (GH) through a file rating table, and verify that the saved
// quality limits produce the expected '+'/'-' flags on both the raw gage
// height and the computed flow.
public void ImportDecodesAndProcessWithFlagLimits()
{
    Logger.EnableLogger();
    FileUtility.CleanTempPath();
    var fn1 = FileUtility.GetTempFileName(".pdb");
    Console.WriteLine(fn1);
    var svr = new SQLiteServer(fn1);
    var db = new TimeSeriesDatabase(svr, Reclamation.TimeSeries.Parser.LookupOption.TableName,false);
    var tmpDir = CopyTestDecodesFileToTempDirectory("decodes_lapo.txt");
    // rating table points: stage 3.50->1, 3.54->2, 3.55->3, 5.54->10
    var rtlapo = CreateTempRatingTable("lapo.csv", new double[] {3.50,3.54,3.55,5.54 }, new double[] {1,2,3,10 });
    // set limits gh: low=3.53, high 3.6, rate of change/hour 1
    Quality q = new Quality(db);
    q.SaveLimits("instant_lapo_gh", 3.6, 3.53, 1.0);
    q.SaveLimits("instant_lapo_q", 5, 1.1, 0);
    var site = db.GetSiteCatalog();
    site.AddsitecatalogRow("lapo", "", "OR");
    db.Server.SaveTable(site);
    // flow is calculated from the site's gage height via the rating table;
    // %site% expands to the series' SiteID
    var c = new CalculationSeries("instant_lapo_q");
    c.SiteID = "lapo";
    c.Expression = "FileRatingTable(%site%_gh,\""+rtlapo+"\")";
    db.AddSeries(c);
    //SeriesExpressionParser.Debug = true;
    FileImporter import = new FileImporter(db);
    import.Import(tmpDir,RouteOptions.None,computeDependencies:true,searchPattern:"*.txt");
    db.Inventory();
    var s = db.GetSeriesFromTableName("instant_lapo_gh");
    // '+' = above high limit, '-' = below low limit
    // (presumably — confirm against the Quality flagging convention)
    var expectedFlags = new string[] { "", "", "", "+", "", "", "", "-" };
    for (int i = 0; i < s.Count; i++)
    {
        Assert.AreEqual(expectedFlags[i], s[i].Flag, " flag not expected ");
    }
    s = db.GetSeriesFromTableName("instant_lapo_q");
    s.Read();
    Assert.IsTrue(s.Count > 0, "No flow data computed lapo");
    s.WriteToConsole(true);
    // computed flows should be: 2 2 2 10 2 2 1
    // NOTE(review): comment above lists 7 values but 8 flags are checked below
    expectedFlags = new string[]{"","","","+","","","","-"}; //q>=1 and q<= 5
    for (int i = 0; i < s.Count; i++)
    {
        Assert.AreEqual(expectedFlags[i], s[i].Flag.Trim()," Flag check on Flow (Q) ");
    }
    SeriesExpressionParser.Debug = false;
}
// Imports a decodes file whose parameter name contains an underscore;
// currently only verifies the import runs and prints the inventory.
public void ImportParameterWithUnderscore()
{
    Logger.OnLogEvent += Logger_OnLogEvent;

    var databaseFile = FileUtility.GetTempFileName(".pdb");
    Console.WriteLine(databaseFile);
    var server = new SQLiteServer(databaseFile);
    var database = new TimeSeriesDatabase(server, Reclamation.TimeSeries.Parser.LookupOption.TableName, false);

    var importDir = TestRatingTableDependency.CopyTestDecodesFileToTempDirectory("instant_20150708152901.txt");
    var importer = new FileImporter(database);
    importer.Import(importDir, RouteOptions.None, computeDependencies: true, searchPattern: "*.txt");
    database.Inventory();
    //Assert.IsFalse(anyErrors);
}
// Imports a scenario configuration workbook into a scratch database and
// checks that at least one scenario mapping was read.
public void ReadExcelScenario()
{
    string configFile = Path.Combine(TestData.DataPath, "Scenarios", "InputScenarioConfig.xlsx");
    var dataSet = new ScenarioManagement.ScenarioDataSet();

    var databaseFile = FileUtility.GetTempFileName(".pdb");
    var server = new SQLiteServer(databaseFile);
    var database = new TimeSeriesDatabase(server, false);

    dataSet.Import(configFile, database);
    Assert.IsTrue(dataSet.ScenarioMapping.Count > 0);

    // create Scenarios
    // release pooled connections so the scratch file can be deleted
    server.CloseAllConnections();
    File.Delete(databaseFile);
}
// Regression test: a CalculationSeries ("pal_af + pal_af") added to the
// database must calculate, persist, and read back with data.
public void AddBug()
{
    var start = new DateTime(2013, 1, 1);
    var end = start.AddDays(365);

    var dbFile = FileUtility.GetTempFileNameInDirectory(@"c:\temp\", ".pdb");
    var server = new SQLiteServer(dbFile);
    var database = new Reclamation.TimeSeries.TimeSeriesDatabase(server, false);

    // source data: hydromet daily series pal/af
    var source = new HydrometDailySeries("pal", "af");
    source.Name = "pal_af";
    source.Read(start, end);
    database.AddSeries(source);

    // calculated series that doubles the source
    var calc = new CalculationSeries("add_test");
    calc.Expression = "pal_af + pal_af";
    database.AddSeries(calc);

    calc = database.GetSeriesFromName("add_test") as CalculationSeries;
    calc.Calculate(start, end);

    // re-fetch and read back to confirm the calculation persisted
    calc = database.GetSeriesFromName("add_test") as CalculationSeries;
    calc.Read();
    Assert.IsTrue(calc.Count > 0);
}
// End-to-end test of FileLookupInterpolate2D: total flow rir_q is the sum of
// two 2D lookups (forebay elevation vs. two gate openings). Importing the
// forebay series with computeDependencies should trigger the calculation;
// results are checked against known Hydromet flows.
public void Interpolate2DWithDatabase()
{
    Logger.EnableLogger();

    // guaranteed-fresh temporary database
    var databaseFile = FileUtility.GetTempFileName(".pdb");
    File.Delete(databaseFile);
    var server = new SQLiteServer(databaseFile);
    var database = new TimeSeriesDatabase(server, false);

    var ratingPath = Path.Combine(Path.Combine(TestData.DataPath, "rating_tables"), "rir_q.txt");
    var calc = new CalculationSeries("rir_q");
    calc.Expression = "FileLookupInterpolate2D(rir_fb, rir_ra, \"" + ratingPath + "\")"
                    + " + FileLookupInterpolate2D(rir_fb, rir_rb, \"" + ratingPath + "\")";
    calc.TimeInterval = TimeInterval.Irregular;
    database.AddSeries(calc);

    var dates = new[] { "6-1-2011", "6-2-2011", "6-3-2011", "6-4-2011" };

    var forebay = new Series("rir_fb");
    forebay.TimeInterval = TimeInterval.Irregular;
    database.AddSeries(forebay);
    var fbValues = new[] { 5110.99, 5111.31, 5111.71, 5112.09 };
    for (int i = 0; i < dates.Length; i++)
        forebay.Add(dates[i], fbValues[i]);

    var raSeries = new Series("rir_ra");
    raSeries.TimeInterval = TimeInterval.Irregular;
    var raValues = new[] { 2.1, 1.29, 1.29, 1.29 };
    for (int i = 0; i < dates.Length; i++)
        raSeries.Add(dates[i], raValues[i]);
    database.AddSeries(raSeries);

    var rbSeries = new Series("rir_rb");
    rbSeries.TimeInterval = TimeInterval.Irregular;
    var rbValues = new[] { 2.1, 1.28, 1.28, 1.28 };
    for (int i = 0; i < dates.Length; i++)
        rbSeries.Add(dates[i], rbValues[i]);
    database.AddSeries(rbSeries);

    // importing the forebay data should force the dependent calculation
    var importer = new TimeSeriesImporter(database);
    importer.Import(forebay, computeDependencies: true);

    var flow = database.GetSeriesFromTableName("rir_q");
    Assert.NotNull(flow, "Series not created");
    flow.Read();
    /*
     * Flows from Hydromet
     * 6-1-2011, 1009.87
     * 6-2-2011, 602.24
     * 6-3-2011, 603.32
     * 6-4-2011, 604.34
     */
    Assert.AreEqual(4, flow.Count);
    Assert.AreEqual(1009.87, System.Math.Round(flow[0].Value, 2));
    Assert.AreEqual(603.32, System.Math.Round(flow[2].Value, 2));
}
/// <summary>
/// Builds (or overwrites) the per-period/scenario .pdb database and, for each
/// correlation row with a non-blank equation, adds the Gain and Negative
/// MODSIM series resolved against the VIC database.
/// </summary>
private static void CreatePiscesDatabaseWithModsimNodeNames(
    TimeSeriesDatabase dbVic, DataTable correlation, string period, string scenario = "")
{
    string outputFile = period + scenario + ".pdb";
    if (File.Exists(outputFile))
    {
        Console.WriteLine("Warning: Overwriting existing file " + outputFile);
        File.Delete(outputFile);
    }
    else
    {
        Console.WriteLine("creating " + outputFile);
    }
    var server = new SQLiteServer(outputFile);
    var targetDb = new TimeSeriesDatabase(server);
    var resolver = new VariableResolver(dbVic, LookupOption.SeriesName);

    Console.WriteLine("-------------------------------");
    Console.WriteLine("database records");
    Console.WriteLine("Series saved");
    Console.WriteLine("-------------------------------");

    foreach (DataRow row in correlation.Rows)
    {
        string gain = row["ModsimGain"].ToString();
        string neg = row["ModsimNeg"].ToString();
        string equation = row["Equation"].ToString().Trim();
        // rows without an equation are skipped entirely
        if (equation == "")
            continue;
        equation = IncludeScenarioAndPeriod(equation, period, scenario);
        AddSeries(period, scenario, targetDb, resolver, gain, equation, ModsimType.Gain);
        AddSeries(period, scenario, targetDb, resolver, neg, equation, ModsimType.Negative);
    }
}
// ENTRY POINT FOR PROCESSING
// Builds a pisces database (piscesFile) of Rogue River BiOP data over [t1,t2]:
// 1. imports raw instant hydromet series into a "RawData" folder,
// 2. derives hourly series into an "HourlyData" folder,
// 3. runs ramping-rate / flow checks into a "RampingRateChecks" folder.
static void ProcessRogueBiOP(DateTime t1, DateTime t2, string piscesFile)
{
    // Toggle to read flagged data
    HydrometInstantSeries.KeepFlaggedData = true;

    // Create pisces database to store data (always start from a fresh file)
    if (File.Exists(piscesFile))
        File.Delete(piscesFile);
    var DB = new SQLiteServer(piscesFile);
    var pDB = new TimeSeriesDatabase(DB);

    // PROCESS INSTANT DATA
    // pull each cbtt/pcode pair from hydromet into the RawData folder
    PiscesFolder rFldr = pDB.AddFolder("RawData");
    Console.Write("Processing Instant Series... ");
    var emiQ = GetInstantSeries("EMI", "Q", t1, t2, pDB, rFldr);
    var emiQC = GetInstantSeries("EMI", "QC", t1, t2, pDB, rFldr);
    var basoQ = GetInstantSeries("BASO", "Q", t1, t2, pDB, rFldr);
    var taloQC = GetInstantSeries("TALO", "QC", t1, t2, pDB, rFldr);
    var bctoQ = GetInstantSeries("BCTO", "Q", t1, t2, pDB, rFldr);
    var phxoQC = GetInstantSeries("PHXO", "QC", t1, t2, pDB, rFldr);
    var giloQ = GetInstantSeries("GILO", "Q", t1, t2, pDB, rFldr);
    var giloGH = GetInstantSeries("GILO", "GH", t1, t2, pDB, rFldr);
    var dicoQC = GetInstantSeries("DICO", "QC", t1, t2, pDB, rFldr);
    var slboQC = GetInstantSeries("SLBO", "QC", t1, t2, pDB, rFldr);
    var antoQ = GetInstantSeries("ANTO", "Q", t1, t2, pDB, rFldr);
    var antoQC = GetInstantSeries("ANTO", "QC", t1, t2, pDB, rFldr);
    var antoGH = GetInstantSeries("ANTO", "GH", t1, t2, pDB, rFldr);
    Console.WriteLine("Done importing instant data!");

    // PROCESS HOURLY DATA
    PiscesFolder dFldr = pDB.AddFolder("HourlyData");
    Console.WriteLine("");
    // NOTE(review): the string literal below spans a line break in the
    // original source — likely an extraction artifact; confirm the intended
    // literal is "Processing Hourly Series... ".
    Console.Write("Processing Hourly Series... 
");
    var emiQ_h = ProcessHourlySeries(emiQ, "EMI_Q", pDB, dFldr);
    var emiQC_h = ProcessHourlySeries(emiQC, "EMI_QC", pDB, dFldr);
    var basoQ_h = ProcessHourlySeries(basoQ, "BASO_Q", pDB, dFldr);
    var taloQC_h = ProcessHourlySeries(taloQC, "TALO_QC", pDB, dFldr);
    var bctoQ_h = ProcessHourlySeries(bctoQ, "BCTO_Q", pDB, dFldr);
    var phxoQC_h = ProcessHourlySeries(phxoQC, "PHXO_QC", pDB, dFldr);
    var giloQ_h = ProcessHourlySeries(giloQ, "GILO_Q", pDB, dFldr);
    var giloGH_h = ProcessHourlySeries(giloGH, "GILO_GH", pDB, dFldr);
    var dicoQC_h = ProcessHourlySeries(dicoQC, "DICO_QC", pDB, dFldr);
    var slboQC_h = ProcessHourlySeries(slboQC, "SLBO_QC", pDB, dFldr);
    var antoQ_h = ProcessHourlySeries(antoQ, "ANTO_Q", pDB, dFldr);
    var antoQC_h = ProcessHourlySeries(antoQC, "ANTO_QC", pDB, dFldr);
    var antoGH_h = ProcessHourlySeries(antoGH, "ANTO_GH", pDB, dFldr);
    Console.WriteLine("Done computing hourly data!");

    // CHECK BIOP STUFF
    // each check produces a result series which is then cross-checked against
    // its source series (CheckSourceSeries) before being stored
    Console.WriteLine("");
    Console.WriteLine("Data Processing: Checking Ramping Rates and Flows...");
    Console.WriteLine("");
    PiscesFolder ckFldr = pDB.AddFolder("RampingRateChecks");

    Console.WriteLine("Checking EMI flows");
    Series EMIHourlyDownRamp = CheckEMIHourlyDownRampingRate(emiQ_h);
    EMIHourlyDownRamp = CheckSourceSeries(emiQ_h, EMIHourlyDownRamp);
    pDB.AddSeries(EMIHourlyDownRamp, ckFldr);
    Series EMIDailyDownRamp = CheckEMIDailyDownRampingRate(emiQ_h);
    EMIDailyDownRamp = CheckSourceSeries(emiQ_h, EMIDailyDownRamp);
    pDB.AddSeries(EMIDailyDownRamp, ckFldr);
    Series EMIHourlyUpRamp = CheckEMIUpRampingRate(emiQ_h);
    EMIHourlyUpRamp = CheckSourceSeries(emiQ_h, EMIHourlyUpRamp);
    pDB.AddSeries(EMIHourlyUpRamp, ckFldr);

    Console.WriteLine("Checking BASO flows");
    Series BASOHourlyDownRamp = CheckBASODownRampingRate(basoQ_h, taloQC_h);
    BASOHourlyDownRamp = CheckSourceSeries(basoQ_h, BASOHourlyDownRamp);
    BASOHourlyDownRamp = CheckSourceSeries(taloQC_h, BASOHourlyDownRamp);
    pDB.AddSeries(BASOHourlyDownRamp, ckFldr);

    Console.WriteLine("Checking BCTO flows");
    Series BCTOHourlyDownRamp = CheckBCTODownRampingRate(bctoQ_h, phxoQC_h);
    BCTOHourlyDownRamp = CheckSourceSeries(bctoQ_h, BCTOHourlyDownRamp);
    BCTOHourlyDownRamp = CheckSourceSeries(phxoQC_h, BCTOHourlyDownRamp);
    pDB.AddSeries(BCTOHourlyDownRamp, ckFldr);

    Console.WriteLine("Checking GILO flows and gage height");
    Series GILOUpRamp = new Series();
    Series GILODownRamp = new Series();
    CheckGILOFlowRampingRate(giloQ_h, slboQC_h, dicoQC_h, out GILODownRamp, out GILOUpRamp);
    GILODownRamp = CheckSourceSeries(giloQ_h, GILODownRamp);
    GILODownRamp = CheckSourceSeries(slboQC_h, GILODownRamp);
    GILODownRamp = CheckSourceSeries(dicoQC_h, GILODownRamp);
    GILOUpRamp = CheckSourceSeries(giloQ_h, GILOUpRamp);
    GILOUpRamp = CheckSourceSeries(slboQC_h, GILOUpRamp);
    GILOUpRamp = CheckSourceSeries(dicoQC_h, GILOUpRamp);
    pDB.AddSeries(GILOUpRamp, ckFldr);
    pDB.AddSeries(GILODownRamp, ckFldr);
    Series GILOGageUpRamp = CheckGILOGageRampingRate(giloGH_h, slboQC_h, dicoQC_h);
    GILOGageUpRamp = CheckSourceSeries(giloGH_h, GILOGageUpRamp);
    GILOGageUpRamp = CheckSourceSeries(slboQC_h, GILOGageUpRamp);
    GILOGageUpRamp = CheckSourceSeries(dicoQC_h, GILOGageUpRamp);
    pDB.AddSeries(GILOGageUpRamp, ckFldr);

    Console.WriteLine("Checking ANTO flows and gage height");
    Series ANTOUpRamp = new Series();
    Series ANTODownRamp = new Series();
    CheckANTOFlowRampingRate(antoQ_h, antoQC_h, out ANTODownRamp, out ANTOUpRamp);
    ANTODownRamp = CheckSourceSeries(antoQ_h, ANTODownRamp);
    ANTODownRamp = CheckSourceSeries(antoQC_h, ANTODownRamp);
    ANTOUpRamp = CheckSourceSeries(antoQ_h, ANTOUpRamp);
    ANTOUpRamp = CheckSourceSeries(antoQC_h, ANTOUpRamp);
    pDB.AddSeries(ANTOUpRamp, ckFldr);
    pDB.AddSeries(ANTODownRamp, ckFldr);
    Series ANTOGageUpRamp = CheckANTOGageRampingRate(antoGH_h, antoQC_h);
    ANTOGageUpRamp = CheckSourceSeries(antoGH_h, ANTOGageUpRamp);
    ANTOGageUpRamp = CheckSourceSeries(antoQC_h, ANTOGageUpRamp);
    pDB.AddSeries(ANTOGageUpRamp, ckFldr);
}
// Runs the shared BasicDatabaseTest suite against a SQLite-backed database.
public void TestSQLite()
{
    // start from a guaranteed-fresh database file
    var databaseFile = FileUtility.GetTempFileName(".pdb");
    System.IO.File.Delete(databaseFile);
    var server = new SQLiteServer(databaseFile);
    var database = new TimeSeriesDatabase(server, false);
    BasicDatabaseTest(database);
}
// Imports a decodes file with gaps in the gage-height record and verifies the
// dependent flow calculation still yields data with no missing points.
public void ImportDecodesWithMissingGageHeight()
{
    FileUtility.CleanTempPath();
    var databaseFile = FileUtility.GetTempFileName(".pdb");
    Console.WriteLine(databaseFile);
    var server = new SQLiteServer(databaseFile);
    var database = new TimeSeriesDatabase(server, Reclamation.TimeSeries.Parser.LookupOption.TableName, false);
    Logger.EnableLogger();

    var importDir = CopyTestDecodesFileToTempDirectory("decodes_mabo_missing_gh.txt");
    // rating table mapping x -> x*10 over [2.37, 2.8] (presumably the stage range)
    var ratingFile = CreateTempRatingTable("mabo.csv", 2.37, 2.8, x => (x * 10));

    var calc = new CalculationSeries("instant_mabo_q");
    calc.Expression = "FileRatingTable(mabo_gh,\"" + ratingFile + "\")";
    database.AddSeries(calc);

    var importer = new FileImporter(database);
    importer.Import(importDir, RouteOptions.Outgoing, computeDependencies: true, searchPattern: "*.txt");
    database.Inventory();

    var flow = database.GetSeriesFromTableName("instant_mabo_q");
    flow.Read();
    Assert.IsTrue(flow.CountMissing() == 0);
    Assert.IsTrue(flow.Count > 0, "No flow data computed");
}
/// <summary>
/// Organizing VIC climate unregulated data from one large VIC Pisces database
/// into 20 Monthly MODSIM compatible *.PDB.
/// args[0]: source .pdb, args[1]: .xls/.xlsx workbook with "Locals" and
/// "Forecasts" sheets.
/// </summary>
/// <param name="args"></param>
static void Main(string[] args)
{
    if (args.Length != 2)
    {
        PrintUsage();
        return;
    }
    // first argument must be an existing .pdb file
    string pdbPath = args[0];
    if (!File.Exists(pdbPath) || !pdbPath.EndsWith(".pdb"))
    {
        PrintUsage();
        return;
    }
    // second argument must be an existing .xls or .xlsx workbook
    string excelPath = args[1];
    bool excelOk = File.Exists(excelPath) && (excelPath.EndsWith(".xls") || excelPath.EndsWith(".xlsx"));
    if (!excelOk)
    {
        PrintUsage();
        return;
    }

    //inputs should be good, get to work
    SQLiteServer vicServer = new SQLiteServer(pdbPath);
    Console.WriteLine("opening " + pdbPath);
    TimeSeriesDatabase vicDb = new TimeSeriesDatabase(vicServer);

    var workbook = new NpoiExcel(excelPath);
    Console.WriteLine("reading " + excelPath);
    var correlation = workbook.ReadDataTable("Locals");
    var forecasts = workbook.ReadDataTable("Forecasts");

    var periods = new[] { "2020", "2040", "2060", "2080" };
    var scenarios = new[] { "Median", "MoreWarmingDry", "MoreWarmingWet", "LessWarmingDry", "LessWarmingWet" };

    // baseline first, then every period/scenario combination
    CreatePiscesDatabaseWithModsimNodeNames(vicDb, correlation, "", "Baseline");
    AddForecastsToPiscesDatabase(vicDb, forecasts, "", "Baseline");
    foreach (var period in periods)
    {
        foreach (var scenario in scenarios)
        {
            CreatePiscesDatabaseWithModsimNodeNames(vicDb, correlation, period, scenario);
            AddForecastsToPiscesDatabase(vicDb, forecasts, period, scenario);
        }
    }
}
// Fixture setup: fresh temporary database shared via the svr/db fields.
public TestAlarms()
{
    var databaseFile = FileUtility.GetTempFileName(".pdb");
    this.svr = new SQLiteServer(databaseFile);
    this.db = new TimeSeriesDatabase(this.svr);
}
/// <summary>
/// Prompts for an existing pisces (.pdb) database file and inserts it into
/// the current folder, reporting timing and remembering the chosen file.
/// </summary>
private void addPiscesDatabase_Click(object sender, EventArgs e)
{
    try
    {
        Cursor = Cursors.WaitCursor;
        Performance p = new Performance();
        OpenFileDialog fd = new OpenFileDialog();
        fd.DefaultExt = "*.pdb";
        fd.Filter = "Pisces database (*.pdb)|*.pdb";
        if (fd.ShowDialog() == DialogResult.OK)
        {
            SQLiteServer svr = new SQLiteServer(fd.FileName);
            TimeSeriesDatabase db = new TimeSeriesDatabase(svr, false);
            DB.InsertDatabase(CurrentFolder, db);
            DatabaseChanged();
            // bug fix: only persist the preference and report when the user
            // actually chose a file — previously these ran on Cancel too,
            // saving an empty/stale FileName
            UserPreference.Save("fileName", fd.FileName);
            p.Report("done reading " + fd.FileName);
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
    finally
    {
        // always restore the cursor, even on failure
        Cursor = Cursors.Default;
    }
}