/// <summary>
/// Adds one series-catalog row per series in the graph dataset, encoding the
/// HDB connection details into each row's connection string, then saves the catalog.
/// </summary>
public static bool Import(GraphData ds, TimeSeriesDatabase db, PiscesFolder folder)
{
    var catalog = db.GetSeriesCatalog();
    int nextID = db.NextSDI();
    foreach (var row in ds.SeriesRows)
    {
        var instantInterval = ds.GraphRow.InstantInterval;
        // Default host is used when no live HDB connection is available.
        string host = Hdb.Instance != null ? Hdb.Instance.Server.Host : "hdbhost";
        string connection =
            "hdb_r_table=" + row.hdb_r_table +
            ";hdb_site_datatype_id=" + row.hdb_site_datatype_id +
            ";hdb_interval=" + row.Interval +
            ";hdb_instant_interval=" + instantInterval +
            ";hdb_time_zone=" + ds.GraphRow.TimeZone +
            ";hdb_host_name=" + host;
        string displayName = row.SiteName + " " + row.ParameterType + " " + row.Units;
        catalog.AddSeriesCatalogRow(nextID++, folder.ID, false, 1, "Hdb", displayName,
            row.SiteName, row.Units, IntervalString(row.hdb_r_table), row.ParameterType,
            "", "HdbOracleSeries", connection, "", "", true);
    }
    db.Server.SaveTable(catalog);
    return true;
}
/// <summary>
/// Builds the properties dialog for a folder, seeding the window
/// title and the editable name box from the folder's name.
/// </summary>
public FolderProperties(PiscesFolder folder)
{
    InitializeComponent();
    this.folder = folder;
    Text = folder.Name + " Properties";
    textBoxName.Text = folder.Name;
}
/// <summary>
/// Builds the properties dialog for a folder: seeds the title and name box,
/// and shows the folder's database id / parent id in the info label.
/// </summary>
public FolderProperties(PiscesFolder folder)
{
    InitializeComponent();
    this.folder = folder;
    Text = folder.Name + " Properties";
    textBoxName.Text = folder.Name;
    labelInfo.Text = "id = " + folder.ID + " parentid = " + folder.ParentID;
}
/// <summary>
/// Prompts for a Corps data query and, if accepted, adds the
/// resulting USACE series to the database under the selected folder.
/// </summary>
public void ModifyDatabase(Reclamation.TimeSeries.TimeSeriesDatabase db, PiscesFolder selectedFolder)
{
    var query = new ImportCorpsDataQuery();
    if (query.ShowDialog() != System.Windows.Forms.DialogResult.OK)
    {
        return; // user cancelled
    }
    db.AddSeries(new Usace.UsaceSeries(query.DssPath), selectedFolder);
}
// Reads a Hydromet instant (15-minute) series for the given station/parameter
// over [t1, t2], names it "<cbtt>_<pcode>15min", and stores it in the database.
private static Series GetInstantSeries(string CBTT, string PCODE, DateTime t1, DateTime t2, TimeSeriesDatabase pDB, PiscesFolder rFldr)
{
    Console.Write(CBTT + "_" + PCODE + ", ");
    Series result = new HydrometInstantSeries(CBTT, PCODE);
    result.Read(t1, t2);
    result.Name = CBTT + "_" + PCODE + "15min";
    pDB.AddSeries(result, rFldr);
    return result;
}
/// <summary>
/// Reads a MODSIM xy model file plus its Access OUTPUT database and builds a
/// Pisces series-catalog tree (reservoirs, demands, river links, totals) under root.
/// Throws FileNotFoundException when either input file is missing.
/// </summary>
public static void CreatePiscesTree(string fileName, PiscesFolder root, TimeSeriesDatabase db)
{
    mi = new Model();
    s_db = db;
    sdi = db.NextSDI();
    studyFolderID = sdi;
    int parentID = root.ID;
    seriesCatalog = new TimeSeriesDatabaseDataSet.SeriesCatalogDataTable();

    if (!File.Exists(fileName))
    {
        throw new FileNotFoundException("Modsim xy file is not found " + fileName);
    }
    XYFileReader.Read(mi, fileName);
    m_xyFilename = Path.GetFileNameWithoutExtension(fileName);

    // Prefer the newer Access (.accdb) output; fall back to the Jet (.mdb) file.
    string mdbJetName = Path.Combine(Path.GetDirectoryName(fileName), m_xyFilename + "OUTPUT.mdb");
    string mdbAceName = Path.Combine(Path.GetDirectoryName(fileName), m_xyFilename + "OUTPUT.accdb");
    m_databaseName = File.Exists(mdbAceName) ? mdbAceName : mdbJetName;

    if (!File.Exists(m_databaseName))
    {
        throw new FileNotFoundException(" MODSIM output not found " + m_databaseName);
    }
    m_db = new AccessDB(m_databaseName);
    dir = Path.GetDirectoryName(Path.GetFullPath(m_databaseName));
    // Root row for this study, named after the xy file.
    AddNewRow(sdi, parentID, true, "", Path.GetFileNameWithoutExtension(fileName), "");
    ReservoirsTree();
    DemandsTree();
    RiverLinksTree();
    TotalsTree();

    db.Server.SaveTable(seriesCatalog);
    db.RefreshFolder(root);
}
/// <summary>
/// Loads a time-series XML file and imports every graph it
/// contains into the database under the given folder.
/// </summary>
public static bool Import(string filename, TimeSeriesDatabase db, PiscesFolder folder)
{
    var ds = new TimeSeriesDataSet();
    ds.Clear();
    ds.ReadXmlFile(filename);
    for (int g = 0; g < ds.Graph.Count; g++)
    {
        Import(new GraphData(ds, ds.Graph[g].GraphNumber), db, folder);
    }
    return true;
}
/// <summary>
/// Prompts the user for a HEC-DSS file and, if one is chosen,
/// imports its contents into the database under the selected folder.
/// </summary>
public void ModifyDatabase(Reclamation.TimeSeries.TimeSeriesDatabase db, PiscesFolder selectedFolder)
{
    var openDlg = new OpenFileDialog
    {
        Filter = "Hec Dss File (*.dss)|*.dss|All Files (*.*)|*.*",
        DefaultExt = ".dss"
    };
    if (openDlg.ShowDialog() == System.Windows.Forms.DialogResult.OK)
    {
        Hec.HecDssTree.AddDssFileToDatabase(openDlg.FileName, selectedFolder, db);
    }
}
/// <summary>
/// Builds the "URGSiM" branch of the Pisces tree from the WWCRA variables
/// csv (folder,variable per line); each variable becomes a monthly UrgsimSeries.
/// </summary>
public static void CreateTree(TimeSeriesDatabase DB, PiscesFolder selectedFolder)
{
    string[] variablesFile = Web.GetPage(UrgsimPath + "WWCRA_variables.csv", true);

    var root = DB.AddFolder(selectedFolder, "URGSiM");
    var sc = DB.GetSeriesCatalog();
    var sr = DB.GetNewSeriesRow();
    int id = sr.id;
    string currentFolderName = "";
    int folderID = root.ID;

    // Skip the header row; add each urgsim variable as a series in the tree.
    for (int line = 1; line < variablesFile.Length; line++)
    {
        string[] fields = variablesFile[line].Trim().Split(',');
        if (fields.Length < 2)
        {
            continue; // malformed line
        }
        string piscesFolderName = fields[0];
        string variableName = fields[1];
        // "input-" folders read from the inputdata directory; all others from rawflows.
        string dataFolderName = piscesFolderName.StartsWith("input-") ? "inputdata" : "rawflows";
        var s = new Urgsim.UrgsimSeries(UrgsimPath, dataFolderName + "/bccr_bcm2_0.1.sresa1b", variableName);
        s.TimeInterval = TimeInterval.Monthly;
        if (currentFolderName != piscesFolderName)
        {
            // Folder name changed: start a new folder under the root.
            sc.AddFolder(piscesFolderName, id, root.ID);
            folderID = id;
            id++;
            currentFolderName = piscesFolderName;
        }
        sc.AddSeriesCatalogRow(s, id, folderID, "");
        id++;
    }
    DB.Server.SaveTable(sc);
}
// Handles dropping dragged tree nodes: dropping "inside" a folder re-parents
// the dragged objects into that folder; dropping before/after a node
// re-parents them to the target's parent and renumbers their sort order.
private void treeView1_DragDrop(object sender, DragEventArgs e)
{
    treeView1.BeginUpdate();
    TreeNodeAdv[] nodes = (TreeNodeAdv[])e.Data.GetData(typeof(TreeNodeAdv[]));
    TreeNodeAdv dropNodeAdv = treeView1.DropPosition.Node;
    PiscesObject dropNode = treeView1.DropPosition.Node.Tag as PiscesObject;
    if (treeView1.DropPosition.Position == NodePosition.Inside && dropNode is PiscesFolder)
    {
        // Dropped directly onto a folder: move every dragged object into it.
        foreach (TreeNodeAdv n in nodes)
        {
            PiscesObject o = n.Tag as PiscesObject;
            OnNodeParentChanged(new ParentChangedEventArgs(o, dropNode as PiscesFolder));
        }
        treeView1.DropPosition.Node.IsExpanded = true;
    }
    else
    {
        // Dropped between siblings: the new parent is the drop target's parent.
        // NOTE(review): assumes dropNodeAdv.Parent.Tag is a PiscesFolder; a
        // root-level drop would leave 'parent' null — confirm this cannot occur.
        PiscesFolder parent = dropNodeAdv.Parent.Tag as PiscesFolder;
        PiscesObject nextItem = dropNode;
        int sortOrder = nextItem.SortOrder;
        if (treeView1.DropPosition.Position == NodePosition.Before)
        {
            sortOrder--; // place dragged items just above the target
        }
        foreach (TreeNodeAdv node in nodes)
        {
            OnNodeParentChanged(new ParentChangedEventArgs(node.Tag as PiscesObject, parent));
        }
        Console.WriteLine("sort order for dragged object = " + sortOrder);
        // Second pass: assign all dragged objects the same computed sort order.
        foreach (TreeNodeAdv node in nodes)
        {
            PiscesObject o = node.Tag as PiscesObject;
            OnNodeSortOrderChanged(new SortChangedEventArgs(o, sortOrder));
        }
    }
    treeView1.EndUpdate();
}
/// <summary>
/// Imports the catalog of a HEC-DSS file into the database: one folder per
/// distinct DSS 'A' part, one catalog row per path. Errors are reported to
/// the user via a message box rather than thrown.
/// </summary>
public static void AddDssFileToDatabase(string dssFilename, PiscesFolder parent, TimeSeriesDatabase db)
{
    if (parent == null)
    {
        parent = db.RootFolder;
    }
    PiscesFolder root = parent;
    try
    {
        string[] paths = GetCatalog(dssFilename);
        root = db.AddFolder(parent, Path.GetFileName(dssFilename));
        var sc = db.GetSeriesCatalog();
        int folderID = root.ID;
        string previousA = "";
        for (int i = 0; i < paths.Length; i++)
        {
            var path = new HecDssPath(paths[i]);
            // Start a new sub-folder whenever the DSS 'A' part changes.
            if (i == 0 || path.A != previousA)
            {
                folderID = sc.AddFolder(path.A, root.ID);
                previousA = path.A;
            }
            var series = new HecDssSeries(dssFilename, paths[i]);
            sc.AddSeriesCatalogRow(series, sc.NextID(), folderID);
        }
        db.Server.SaveTable(sc);
    }
    catch (Exception ex)
    {
        System.Windows.Forms.MessageBox.Show(ex.Message);
    }
}
// Reads one water year (Oct 1 – Sep 30) of daily data, shifts it to year 2000
// so different years overlay on one plot, labels it, and saves it to the
// database when the year is reasonably complete (fewer than 3 missing values).
static Series LabelAndSave30Year(TimeSeriesDatabase db1, int year, string cbtt, string pcode, PiscesFolder folder, HydrometHost host)
{
    Series s = new HydrometDailySeries(cbtt, pcode, host);
    var start = new DateTime(year - 1, 10, 1); // water-year start
    var end = new DateTime(year, 9, 30);       // water-year end
    s.Read(start, end);
    s.Provider = "Series";
    s.Source = "";
    s = Reclamation.TimeSeries.Math.ShiftToYear(s, 2000);
    s.Name = (year - 1) + "-" + year + cbtt + " " + pcode;
    s.Table.TableName = year + cbtt + pcode;
    if (s.Count > 0 && s.CountMissing() < 3)
    {
        db1.AddSeries(s, folder);
    }
    return s;
}
/// <summary>
/// Shows a properties dialog for the single selected tree object
/// (series or folder) and persists any edits the user accepts.
/// </summary>
private void Properties(object sender, EventArgs e)
{
    try
    {
        PiscesObject selected = tree1.SelectedObject;
        if (selected is Series)
        {
            var series = (Series)selected;
            string tmpExp = series.Expression;
            var dlg = new SeriesProperties(series, DB);
            if (dlg.ShowDialog() == DialogResult.OK)
            {
                DB.SaveProperties(series);
                if (series is CalculationSeries && tmpExp != series.Expression && series.Expression.Trim() != "")
                {
                    // Automatic recalculation on expression change is currently disabled.
                }
                DrawBasedOnTreeSelection();
            }
        }
        else if (selected is PiscesFolder)
        {
            var folder = (PiscesFolder)selected;
            var dlg = new FolderProperties(folder);
            if (dlg.ShowDialog() == DialogResult.OK)
            {
                DB.SaveProperties(folder);
            }
        }
    }
    catch (Exception propEx)
    {
        MessageBox.Show(propEx.Message);
    }
}
// Creates a new, unnamed folder under the currently selected folder.
private void newFolder_Click(object sender, EventArgs e)
{
    DB.AddFolder(CurrentFolder, "");
}
/// <summary>
/// Captures the object that was moved and the folder that became its parent.
/// </summary>
public ParentChangedEventArgs(PiscesObject o, PiscesFolder folder)
{
    m_folder = folder;
    m_piscesObject = o;
}
// Adds a new, unnamed folder beneath the currently selected folder.
private void toolStripMenuItemAddFolder_Click(object sender, EventArgs e)
{
    DB.AddFolder(CurrentFolder, "");
}
/// <summary>
/// Console entry point. Builds a Pisces SQLite database of 30-year-average
/// daily Hydromet data for the stations listed in a csv configuration file.
/// Arguments: config.csv  group|all  output.db  boise|yakima  [hmet.txt]
/// </summary>
static void Main(string[] args)
{
    if (args.Length < 4 || args.Length > 5)
    {
        // Print usage help and exit.
        Console.WriteLine("Usage:");
        Console.WriteLine("30_year_avg creates 30 year average pisces database");
        Console.WriteLine("Usage: 30_year_avg config.csv group|all output.db boise|yakima [hmet.txt]");
        Console.WriteLine("config.csv example below:\n");
        Console.WriteLine("group,station,daily_pcode,title,ylabel");
        Console.WriteLine("Boise Payette,plei,qd,\"Payette River near Letha, ID\",Discharge - cfs");
        Console.WriteLine(" Boise Payette,emm,qd,\"Payette River near Emmett, ID\",Discharge - cfs");
        Console.WriteLine("\ngroup is used to filter specific parts of config file. enter all to disable filtering");
        Console.WriteLine("output.db is the name of a pisces database that will be created.");
        Console.WriteLine("boise|yakima specifiy which hydromet server to read data from");
        Console.WriteLine("hmet.txt is an optional output with hydromet daily format");
        return;
    }
    // args[2] is the output database; always start from a fresh file.
    string fn = args[2];
    if (File.Exists(fn))
    {
        Console.WriteLine("Deleting existing database ");
        File.Delete(fn);
    }
    var svr = new SQLiteServer(fn);
    var db = new TimeSeriesDatabase(svr);
    // args[3] selects the Hydromet server (defaults to PN unless "yakima").
    HydrometHost host = HydrometHost.PN;
    if (args[3] == "yakima")
    {
        host = HydrometHost.Yakima;
    }
    DataTable config = new CsvFile(args[0]);
    if (args[1] != "all")
    {
        // filter out specific group
        config = DataTableUtility.Select(config, "group = '" + args[1] + "'", "");
    }
    // Optional 5th argument: hydromet-format output file; delete any stale copy.
    if (args.Length == 5 && File.Exists(args[4]))
    {
        Console.WriteLine("deleting " + args[4]);
        File.Delete(args[4]);
    }
    // Guid guarantees the first row always creates a folder.
    var prevFolderName = Guid.NewGuid().ToString();
    PiscesFolder folder = null;
    for (int x = 0; x < config.Rows.Count; x++)
    {
        var row = config.Rows[x];
        string folderName = row["group"].ToString();
        if (prevFolderName != folderName)
        {
            prevFolderName = folderName;
            folder = db.AddFolder(folderName);
        }
        string CBTT = row["station"].ToString();
        string Pcode = row["daily_pcode"].ToString();
        Console.WriteLine(CBTT + " " + Pcode);
        Series s = new HydrometDailySeries(CBTT, Pcode, host);
        // Data ranges collected: water years 1981-2010.
        var t1 = new DateTime(1980, 10, 1);
        var t2 = new DateTime(2010, 9, 30);
        s.Read(t1, t2);
        // NOTE(review): 7100/8110/6190 appear to be encoded display years passed
        // as the 'year' argument — confirm against LabelAndSave30Year.
        var s7100 = LabelAndSave30Year(db, 7100, CBTT, Pcode, folder, host);
        var s8110 = LabelAndSave30Year(db, 8110, CBTT, Pcode, folder, host);
        var s6190 = LabelAndSave30Year(db, 6190, CBTT, Pcode, folder, host);
        //Creates thirty-year average from raw data and adds to database
        var avg = Reclamation.TimeSeries.Math.MultiYearDailyAverage(s, 10);
        avg.Name = "avg 1981-2010 " + CBTT + " " + Pcode;
        avg.Table.TableName = "avg_1981_2010" + CBTT + "" + Pcode;
        db.AddSeries(avg, folder);
        // NOTE(review): shifting the average to "year" 8109 — presumably a
        // hydromet display convention; verify intended value.
        avg = Reclamation.TimeSeries.Math.ShiftToYear(avg, 8109);
        if (args.Length == 5)
        {
            HydrometDailySeries.WriteToArcImportFile(avg, CBTT, Pcode, args[4], true);
        }
    }
}
// ENTRY POINT FOR PROCESSING.
// Builds a fresh Pisces database at piscesFile, imports 15-minute Hydromet
// data for the Rogue BiOP gages over [t1, t2], averages it to hourly, and
// runs the BiOP ramping-rate / flow checks, storing all results in the db.
static void ProcessRogueBiOP(DateTime t1, DateTime t2, string piscesFile)
{
    // Toggle to read flagged data
    HydrometInstantSeries.KeepFlaggedData = true;
    // Create pisces database to store data (delete any stale copy first).
    if (File.Exists(piscesFile))
    {
        File.Delete(piscesFile);
    }
    var DB = new SQLiteServer(piscesFile);
    var pDB = new TimeSeriesDatabase(DB);

    // PROCESS INSTANT DATA: raw 15-minute series, one per station/parameter.
    PiscesFolder rFldr = pDB.AddFolder("RawData");
    Console.Write("Processing Instant Series... ");
    var emiQ = GetInstantSeries("EMI", "Q", t1, t2, pDB, rFldr);
    var emiQC = GetInstantSeries("EMI", "QC", t1, t2, pDB, rFldr);
    var basoQ = GetInstantSeries("BASO", "Q", t1, t2, pDB, rFldr);
    var taloQC = GetInstantSeries("TALO", "QC", t1, t2, pDB, rFldr);
    var bctoQ = GetInstantSeries("BCTO", "Q", t1, t2, pDB, rFldr);
    var phxoQC = GetInstantSeries("PHXO", "QC", t1, t2, pDB, rFldr);
    var giloQ = GetInstantSeries("GILO", "Q", t1, t2, pDB, rFldr);
    var giloGH = GetInstantSeries("GILO", "GH", t1, t2, pDB, rFldr);
    var dicoQC = GetInstantSeries("DICO", "QC", t1, t2, pDB, rFldr);
    var slboQC = GetInstantSeries("SLBO", "QC", t1, t2, pDB, rFldr);
    var antoQ = GetInstantSeries("ANTO", "Q", t1, t2, pDB, rFldr);
    var antoQC = GetInstantSeries("ANTO", "QC", t1, t2, pDB, rFldr);
    var antoGH = GetInstantSeries("ANTO", "GH", t1, t2, pDB, rFldr);
    Console.WriteLine("Done importing instant data!");

    // PROCESS HOURLY DATA: hourly averages of each instant series.
    PiscesFolder dFldr = pDB.AddFolder("HourlyData");
    Console.WriteLine("");
    Console.Write("Processing Hourly Series... ");
    var emiQ_h = ProcessHourlySeries(emiQ, "EMI_Q", pDB, dFldr);
    var emiQC_h = ProcessHourlySeries(emiQC, "EMI_QC", pDB, dFldr);
    var basoQ_h = ProcessHourlySeries(basoQ, "BASO_Q", pDB, dFldr);
    var taloQC_h = ProcessHourlySeries(taloQC, "TALO_QC", pDB, dFldr);
    var bctoQ_h = ProcessHourlySeries(bctoQ, "BCTO_Q", pDB, dFldr);
    var phxoQC_h = ProcessHourlySeries(phxoQC, "PHXO_QC", pDB, dFldr);
    var giloQ_h = ProcessHourlySeries(giloQ, "GILO_Q", pDB, dFldr);
    var giloGH_h = ProcessHourlySeries(giloGH, "GILO_GH", pDB, dFldr);
    var dicoQC_h = ProcessHourlySeries(dicoQC, "DICO_QC", pDB, dFldr);
    var slboQC_h = ProcessHourlySeries(slboQC, "SLBO_QC", pDB, dFldr);
    var antoQ_h = ProcessHourlySeries(antoQ, "ANTO_Q", pDB, dFldr);
    var antoQC_h = ProcessHourlySeries(antoQC, "ANTO_QC", pDB, dFldr);
    var antoGH_h = ProcessHourlySeries(antoGH, "ANTO_GH", pDB, dFldr);
    Console.WriteLine("Done computing hourly data!");

    // CHECK BIOP STUFF: each check produces a series that is then masked by
    // CheckSourceSeries against every hourly input it depends on, then stored.
    Console.WriteLine("");
    Console.WriteLine("Data Processing: Checking Ramping Rates and Flows...");
    Console.WriteLine("");
    PiscesFolder ckFldr = pDB.AddFolder("RampingRateChecks");

    Console.WriteLine("Checking EMI flows");
    Series EMIHourlyDownRamp = CheckEMIHourlyDownRampingRate(emiQ_h);
    EMIHourlyDownRamp = CheckSourceSeries(emiQ_h, EMIHourlyDownRamp);
    pDB.AddSeries(EMIHourlyDownRamp, ckFldr);
    Series EMIDailyDownRamp = CheckEMIDailyDownRampingRate(emiQ_h);
    EMIDailyDownRamp = CheckSourceSeries(emiQ_h, EMIDailyDownRamp);
    pDB.AddSeries(EMIDailyDownRamp, ckFldr);
    Series EMIHourlyUpRamp = CheckEMIUpRampingRate(emiQ_h);
    EMIHourlyUpRamp = CheckSourceSeries(emiQ_h, EMIHourlyUpRamp);
    pDB.AddSeries(EMIHourlyUpRamp, ckFldr);

    Console.WriteLine("Checking BASO flows");
    Series BASOHourlyDownRamp = CheckBASODownRampingRate(basoQ_h, taloQC_h);
    BASOHourlyDownRamp = CheckSourceSeries(basoQ_h, BASOHourlyDownRamp);
    BASOHourlyDownRamp = CheckSourceSeries(taloQC_h, BASOHourlyDownRamp);
    pDB.AddSeries(BASOHourlyDownRamp, ckFldr);

    Console.WriteLine("Checking BCTO flows");
    Series BCTOHourlyDownRamp = CheckBCTODownRampingRate(bctoQ_h, phxoQC_h);
    BCTOHourlyDownRamp = CheckSourceSeries(bctoQ_h, BCTOHourlyDownRamp);
    BCTOHourlyDownRamp = CheckSourceSeries(phxoQC_h, BCTOHourlyDownRamp);
    pDB.AddSeries(BCTOHourlyDownRamp, ckFldr);

    Console.WriteLine("Checking GILO flows and gage height");
    Series GILOUpRamp = new Series();
    Series GILODownRamp = new Series();
    CheckGILOFlowRampingRate(giloQ_h, slboQC_h, dicoQC_h, out GILODownRamp, out GILOUpRamp);
    GILODownRamp = CheckSourceSeries(giloQ_h, GILODownRamp);
    GILODownRamp = CheckSourceSeries(slboQC_h, GILODownRamp);
    GILODownRamp = CheckSourceSeries(dicoQC_h, GILODownRamp);
    GILOUpRamp = CheckSourceSeries(giloQ_h, GILOUpRamp);
    GILOUpRamp = CheckSourceSeries(slboQC_h, GILOUpRamp);
    GILOUpRamp = CheckSourceSeries(dicoQC_h, GILOUpRamp);
    pDB.AddSeries(GILOUpRamp, ckFldr);
    pDB.AddSeries(GILODownRamp, ckFldr);
    Series GILOGageUpRamp = CheckGILOGageRampingRate(giloGH_h, slboQC_h, dicoQC_h);
    GILOGageUpRamp = CheckSourceSeries(giloGH_h, GILOGageUpRamp);
    GILOGageUpRamp = CheckSourceSeries(slboQC_h, GILOGageUpRamp);
    GILOGageUpRamp = CheckSourceSeries(dicoQC_h, GILOGageUpRamp);
    pDB.AddSeries(GILOGageUpRamp, ckFldr);

    Console.WriteLine("Checking ANTO flows and gage height");
    Series ANTOUpRamp = new Series();
    Series ANTODownRamp = new Series();
    CheckANTOFlowRampingRate(antoQ_h, antoQC_h, out ANTODownRamp, out ANTOUpRamp);
    ANTODownRamp = CheckSourceSeries(antoQ_h, ANTODownRamp);
    ANTODownRamp = CheckSourceSeries(antoQC_h, ANTODownRamp);
    ANTOUpRamp = CheckSourceSeries(antoQ_h, ANTOUpRamp);
    ANTOUpRamp = CheckSourceSeries(antoQC_h, ANTOUpRamp);
    pDB.AddSeries(ANTOUpRamp, ckFldr);
    pDB.AddSeries(ANTODownRamp, ckFldr);
    Series ANTOGageUpRamp = CheckANTOGageRampingRate(antoGH_h, antoQC_h);
    ANTOGageUpRamp = CheckSourceSeries(antoGH_h, ANTOGageUpRamp);
    ANTOGageUpRamp = CheckSourceSeries(antoQC_h, ANTOGageUpRamp);
    pDB.AddSeries(ANTOGageUpRamp, ckFldr);
}
// Converts an instant series to hourly averages, names it, and
// stores the result in the Pisces database under the given folder.
private static Series ProcessHourlySeries(Series sIn, string sName, TimeSeriesDatabase pDB, PiscesFolder dFldr)
{
    Console.Write(sName + ", ");
    // Hourly averaging of the instant input.
    Series hourly = Reclamation.TimeSeries.Math.Average(sIn, TimeInterval.Hourly);
    hourly.Provider = "Series";
    hourly.Name = sName;
    pDB.AddSeries(hourly, dFldr);
    return hourly;
}
/**********************************
 *
 * Create a tree file that is usable by pisces
 *
 * input : riverware rdf file
 * output : comma separated tree file.
 *
 * // example portion of input file. (this input file is using snapshots in riverware)
 * -----------------------------------
 * object_type: SnapShotObj
 * object_name: Most Likely 2
 * slot_name: Andrews Gage 12447390 at RM 3_5_Gage Outflow
 * END_SLOT_PREAMBLE
 * units: cfs
 *
 * // example output. There is no nesting of tree levels for now.
 * RiverwareName,Description,RiverwareDataType,Level,Units
 * Riverware Results,,,0,
 * Yakima River at Parker PARW,Yakima River at Parker PARW,Gage Outflow,1,cfs
 * Yakima River at Grandview,Yakima River at Grandview,Gage Inflow,1,cfs
 * ...
 **********************************/
// Scans a RiverWare rdf file and builds a two-level Pisces catalog tree
// (object_type folder -> object_name folder -> one series per slot).
// For multi-run files, also prompts for a scenario-naming convention and
// registers one scenario row per run.
public static void AddRiverWareFileToDatabase(string rdfFilename, PiscesFolder parent, TimeSeriesDatabase db)
{
    Reclamation.Core.TextFile tf = new Reclamation.Core.TextFile(rdfFilename);
    #region notes
    /* SnapShotStyle...
       -------------------------------------
       2001-9-29 24:00
       2001-9-30 24:00
       object_type: SnapShotObj
       object_name: Most Likely 2   ### scenario name
       slot_name: Andrews Gage 12447390 at RM 3_5_Gage Outflow # object_name slotName are combined.
       END_SLOT_PREAMBLE
       units: cfs

       Regular Style ...
       ---------------------------------------
       END_COLUMN
       END_SLOT
       object_type: StreamGage
       object_name: Yakima 202_0 at Easton EASW
       slot_name: Gage Outflow
       END_SLOT_PREAMBLE
       units: cfs
       scale: 1
    */
    #endregion
    int number_of_runs = LookupNumberOfRuns(tf);
    PiscesFolder folder = parent;
    // Single-run files get their own folder named after the rdf file.
    if (number_of_runs == 1)
        folder = db.AddFolder(parent, Path.GetFileNameWithoutExtension(rdfFilename));

    int sz = tf.Length;
    // object_type and object_name should occur on consecutive lines.
    int index = tf.IndexOfBoth("object_name:", "slot_name:", 0);
    var objectList = new List<string>(); //list to avoid duplicates in tree
    Performance p1 = new Performance();
    Performance p2 = new Performance();
    p2.Pause();
    int counter = 0;
    db.SuspendTreeUpdates();
    var sc = db.GetSeriesCatalog();
    // Maps folder display name -> catalog id for the two tree levels.
    Dictionary<string, int> objTypeID = new Dictionary<string, int>();
    Dictionary<string, int> objNameID = new Dictionary<string, int>();
    while (index < sz && index > 0)
    {
        //slot_name: Andrews Gage 12447390 at RM 3_5_Gage Outflow
        string slot_name = tf[index + 1].Substring(11); //Andrews Gage 12447390 at RM 3_5_Gage Outflow
        string object_type = tf[index - 1].Substring(13);
        string object_name = tf[index].Substring(13);
        string units = tf[index + 3].Substring(6).Trim();
        string tag = object_name + ":" + slot_name;
        if (!objectList.Contains(tag))
        {
            // -1 means "no scenario"; multi-run files start at scenario 1.
            int scenarioNumber = -1;
            if (number_of_runs > 1)
                scenarioNumber = 1;
            RiverWareSeries s;
            // Snapshot slots carry no object name (it is folded into slot_name).
            if (object_type == "SnapShotObj")
                s = new RiverWareSeries(rdfFilename, "", slot_name, scenarioNumber, true, units);
            else
                s = new RiverWareSeries(rdfFilename, object_name, slot_name, scenarioNumber, false, units);
            s.Units = units;
            s.ConnectionString = ConnectionStringUtility.MakeFileNameRelative(s.ConnectionString, db.DataSource);
            p2.Continue();
            // Collapse specialized object types into generic folder names.
            if (object_type.Contains("Reservoir")) { object_type = "Reservoir"; }
            else if (object_type.Contains("Reach")) { object_type = "Reach"; }
            else if (object_type.Contains("Diversion")) { object_type = "Diversion"; }
            else if (object_type.Contains("Canal")) { object_type = "Canal"; }
            int id = sc.NextID();
            // NOTE(review): if a folder already exists in the catalog but was not
            // created in this pass, objTypeID/objNameID will not contain the key
            // and the lookups below would throw — confirm FolderExists can only be
            // true for folders added above.
            if (!sc.FolderExists(object_type, folder.ID))
            {
                objTypeID.Add(object_type, id);
                sc.AddFolder(object_type, id, folder.ID);
                id++;
            }
            if (!sc.FolderExists(object_name, objTypeID[object_type]))
            {
                objNameID.Add(object_name, id);
                sc.AddFolder(object_name, id, objTypeID[object_type]);
                id++;
            }
            sc.AddSeriesCatalogRow(s, id, objNameID[object_name], "");
            objectList.Add(tag);
        }
        index = tf.IndexOfBoth("object_name:", "slot_name:", index + 2);
        counter++;
    }
    p1.Report("total");
    p2.Report("db.add()");
    // Historical timings kept for reference:
    //398.7732813 seconds elapsed. total
    //384.6792607 seconds elapsed. db.add()
    // disable tree refresh (doubles perf)
    // 255.9736646 seconds elapsed. total
    // 241.7702669 seconds elapsed. db.add()
    // implemented member ExternalDataSource
    //34.8756696 seconds elapsed. total
    //20.3753912 seconds elapsed. db.add()
    var convention = Reclamation.TimeSeries.RiverWare.ImportRiverWare.ScenarioConvention.Default;
    if (number_of_runs > 1) // Multiple runs.
    {// show dialog to allow water year naming or traces
        var dlg = new Reclamation.TimeSeries.RiverWare.ImportRiverWare();
        if (dlg.ShowDialog() == System.Windows.Forms.DialogResult.OK)
        {
            convention = dlg.NamingConvention;
        }
        // Add Scenarios.
        var tblScen = db.GetScenarios();
        for (int i = 0; i < number_of_runs; i++)
        {
            string name = "Run" + i;
            if (convention == RiverWare.ImportRiverWare.ScenarioConvention.ByYear)
            {
                name = (dlg.FirstYear + i).ToString();
            }
            //string scenarioPath = ConnectionStringUtility.MakeFileNameRelative("FileName=" + item, DB.Filename);
            tblScen.AddScenarioRow(name, true, "ScenarioNumber=" + (i + 1).ToString(), 0);
        }
        db.Server.SaveTable(tblScen);
    }
    db.Server.SaveTable(sc);
    db.ResumeTreeUpdates();
    db.RefreshFolder(parent);
}
/**********************************
 *
 * Create a tree file that is usable by pisces
 *
 * input : riverware rdf file
 * output : comma separated tree file.
 *
 * // example portion of input file.
 * (this input file is using snapshots in riverware)
 * -----------------------------------
 * object_type: SnapShotObj
 * object_name: Most Likely 2
 * slot_name: Andrews Gage 12447390 at RM 3_5_Gage Outflow
 * END_SLOT_PREAMBLE
 * units: cfs
 *
 * // example output. There is no nesting of tree levels for now.
 * RiverwareName,Description,RiverwareDataType,Level,Units
 * Riverware Results,,,0,
 * Yakima River at Parker PARW,Yakima River at Parker PARW,Gage Outflow,1,cfs
 * Yakima River at Grandview,Yakima River at Grandview,Gage Inflow,1,cfs
 * ...
 *
 **********************************/
// Scans a RiverWare rdf file and builds a two-level Pisces catalog tree
// (object_type folder -> object_name folder -> one series per slot).
// For multi-run files, also prompts for a scenario-naming convention and
// registers one scenario row per run.
public static void AddRiverWareFileToDatabase(string rdfFilename, PiscesFolder parent, TimeSeriesDatabase db)
{
    Reclamation.Core.TextFile tf = new Reclamation.Core.TextFile(rdfFilename);
    #region notes
    /*
     * SnapShotStyle...
     * -------------------------------------
     * 2001-9-29 24:00
     * 2001-9-30 24:00
     * object_type: SnapShotObj
     * object_name: Most Likely 2   ### scenario name
     * slot_name: Andrews Gage 12447390 at RM 3_5_Gage Outflow # object_name slotName are combined.
     * END_SLOT_PREAMBLE
     * units: cfs
     *
     * Regular Style ...
     * ---------------------------------------
     * END_COLUMN
     * END_SLOT
     * object_type: StreamGage
     * object_name: Yakima 202_0 at Easton EASW
     * slot_name: Gage Outflow
     * END_SLOT_PREAMBLE
     * units: cfs
     * scale: 1
     *
     */
    #endregion
    int number_of_runs = LookupNumberOfRuns(tf);
    PiscesFolder folder = parent;
    // Single-run files get their own folder named after the rdf file.
    if (number_of_runs == 1)
    {
        folder = db.AddFolder(parent, Path.GetFileNameWithoutExtension(rdfFilename));
    }
    int sz = tf.Length;
    // object_type and object_name should occur on consecutive lines.
    int index = tf.IndexOfBoth("object_name:", "slot_name:", 0);
    var objectList = new List <string>(); //list to avoid duplicates in tree
    Performance p1 = new Performance();
    Performance p2 = new Performance();
    p2.Pause();
    int counter = 0;
    db.SuspendTreeUpdates();
    var sc = db.GetSeriesCatalog();
    // Maps folder display name -> catalog id for the two tree levels.
    Dictionary <string, int> objTypeID = new Dictionary <string, int>();
    Dictionary <string, int> objNameID = new Dictionary <string, int>();
    while (index < sz && index > 0)
    {
        //slot_name: Andrews Gage 12447390 at RM 3_5_Gage Outflow
        string slot_name = tf[index + 1].Substring(11); //Andrews Gage 12447390 at RM 3_5_Gage Outflow
        string object_type = tf[index - 1].Substring(13);
        string object_name = tf[index].Substring(13);
        string units = tf[index + 3].Substring(6).Trim();
        string tag = object_name + ":" + slot_name;
        if (!objectList.Contains(tag))
        {
            // -1 means "no scenario"; multi-run files start at scenario 1.
            int scenarioNumber = -1;
            if (number_of_runs > 1)
            {
                scenarioNumber = 1;
            }
            RiverWareSeries s;
            // Snapshot slots carry no object name (it is folded into slot_name).
            if (object_type == "SnapShotObj")
            {
                s = new RiverWareSeries(rdfFilename, "", slot_name, scenarioNumber, true, units);
            }
            else
            {
                s = new RiverWareSeries(rdfFilename, object_name, slot_name, scenarioNumber, false, units);
            }
            s.Units = units;
            s.ConnectionString = ConnectionStringUtility.MakeFileNameRelative(s.ConnectionString, db.DataSource);
            p2.Continue();
            // Collapse specialized object types into generic folder names.
            if (object_type.Contains("Reservoir"))
            {
                object_type = "Reservoir";
            }
            else if (object_type.Contains("Reach"))
            {
                object_type = "Reach";
            }
            else if (object_type.Contains("Diversion"))
            {
                object_type = "Diversion";
            }
            else if (object_type.Contains("Canal"))
            {
                object_type = "Canal";
            }
            int id = sc.NextID();
            // NOTE(review): if a folder already exists in the catalog but was not
            // created in this pass, objTypeID/objNameID will not contain the key
            // and the lookups below would throw — confirm FolderExists can only be
            // true for folders added above.
            if (!sc.FolderExists(object_type, folder.ID))
            {
                objTypeID.Add(object_type, id);
                sc.AddFolder(object_type, id, folder.ID);
                id++;
            }
            if (!sc.FolderExists(object_name, objTypeID[object_type]))
            {
                objNameID.Add(object_name, id);
                sc.AddFolder(object_name, id, objTypeID[object_type]);
                id++;
            }
            sc.AddSeriesCatalogRow(s, id, objNameID[object_name], "");
            objectList.Add(tag);
        }
        index = tf.IndexOfBoth("object_name:", "slot_name:", index + 2);
        counter++;
    }
    p1.Report("total");
    p2.Report("db.add()");
    // Historical timings kept for reference:
    //398.7732813 seconds elapsed. total
    //384.6792607 seconds elapsed. db.add()
    // disable tree refresh (doubles perf)
    // 255.9736646 seconds elapsed. total
    // 241.7702669 seconds elapsed. db.add()
    // implemented member ExternalDataSource
    //34.8756696 seconds elapsed. total
    //20.3753912 seconds elapsed. db.add()
    var convention = Reclamation.TimeSeries.RiverWare.ImportRiverWare.ScenarioConvention.Default;
    if (number_of_runs > 1) // Multiple runs.
    {
        // show dialog to allow water year naming or traces
        var dlg = new Reclamation.TimeSeries.RiverWare.ImportRiverWare();
        if (dlg.ShowDialog() == System.Windows.Forms.DialogResult.OK)
        {
            convention = dlg.NamingConvention;
        }
        // Add Scenarios.
        var tblScen = db.GetScenarios();
        for (int i = 0; i < number_of_runs; i++)
        {
            string name = "Run" + i;
            if (convention == RiverWare.ImportRiverWare.ScenarioConvention.ByYear)
            {
                name = (dlg.FirstYear + i).ToString();
            }
            //string scenarioPath = ConnectionStringUtility.MakeFileNameRelative("FileName=" + item, DB.Filename);
            // NOTE(review): another copy of this importer in the codebase passes a
            // trailing 0 (sort order?) to AddScenarioRow — confirm which overload is intended.
            tblScen.AddScenarioRow(name, true, "ScenarioNumber=" + (i + 1).ToString());
        }
        db.Server.SaveTable(tblScen);
    }
    db.Server.SaveTable(sc);
    db.ResumeTreeUpdates();
    db.RefreshFolder(parent);
}