/// <summary>
/// Reads one forecast CSV per day over [startDate, endDate] and assembles them
/// into a single Watershed. Returns null when the date range is invalid.
/// </summary>
/// <param name="watershedName">Name of the watershed to load.</param>
/// <param name="startDate">First issue date (inclusive).</param>
/// <param name="endDate">Last issue date (inclusive).</param>
public Watershed Read(string watershedName, DateTime startDate, DateTime endDate)
{
    // Reject invalid ranges up front.
    if (!ValidDates(startDate, endDate))
        return null;

    var result = new Watershed(watershedName);

    // One CSV per day; each CSV carries one ensemble per location.
    for (DateTime day = startDate; day <= endDate; day = day.AddDays(1))
    {
        var csv = Read(watershedName, day);
        if (csv == null)
            continue; // day has no file — skip it

        foreach (string location in csv.LocationNames)
        {
            Forecast forecast = result.AddForecast(location, day, csv.GetEnsemble(location), csv.TimeStamps);
            forecast.TimeStamps = csv.TimeStamps;
        }
    }
    return result;
}
/// <summary>
/// Parallel variant of Read: fans out one task per day in [startDate, endDate]
/// and merges the results into a single Watershed. Returns null when the date
/// range is invalid.
/// </summary>
/// <param name="watershedName">Name of the watershed to load.</param>
/// <param name="startDate">First issue date (inclusive).</param>
/// <param name="endDate">Last issue date (inclusive).</param>
public Watershed ReadParallel(string watershedName, DateTime startDate, DateTime endDate)
{
    if (!ValidDates(startDate, endDate)) { return(null); }
    var output = new Watershed(watershedName);
    // Each forecast is one day; +1 because both endpoints are inclusive.
    int numTotal = (int)Math.Round((endDate - startDate).TotalDays) + 1;
    Parallel.For(0, numTotal, i =>
    {
        DateTime day = startDate.AddDays(i);
        // NOTE(review): assumes Read(watershedName, day) is safe to call concurrently —
        // the sequential Read only says "Seems threadsafe at a glance"; confirm.
        var csv = Read(watershedName, day);
        if (csv != null)
        {
            // AddForecast mutates shared Watershed state, so all writers are serialized.
            // NOTE(review): locking the result object works because it is not visible to
            // callers until this method returns, but a private gate object would be safer.
            lock (output)
            {
                foreach (string locName in csv.LocationNames)
                {
                    Forecast f = output.AddForecast(locName, day, csv.GetEnsemble(locName), csv.TimeStamps);
                    f.TimeStamps = csv.TimeStamps;
                }
            }
        }
    });
    return(output);
}
/// <summary>
/// Loads a Watershed from an HDF5 file. Groups are laid out as
/// Watersheds/&lt;watershed&gt;/&lt;location&gt;/&lt;yyyy_ddd&gt;/{Times,Values}.
/// Forecast groups whose name cannot be parsed as an issue date are reported
/// to the console and skipped.
/// </summary>
/// <param name="h5r">Open HDF5 reader.</param>
/// <param name="watershedName">Watershed group name under the root.</param>
public static Watershed Read(H5Reader h5r, string watershedName)
{
    string root = Path(H5Reader.Root, "Watersheds", watershedName);
    var result = new Watershed(watershedName);

    long[] ticks = null;
    float[,] values = null;

    foreach (var location in h5r.GetGroupNames(root))
    {
        foreach (var forecastName in h5r.GetGroupNames(Path(root, location)))
        {
            string forecastPath = Path(root, location, forecastName);

            if (!TryParseIssueDate(forecastName, out DateTime issueDate))
            {
                Console.WriteLine("ERROR IN HDF5 PATH: " + forecastPath);
                continue;
            }

            h5r.ReadDataset(Path(forecastPath, "Times"), ref ticks);
            h5r.ReadDataset(Path(forecastPath, "Values"), ref values);

            DateTime[] times = ticks.Select(t => new DateTime(t)).ToArray();
            result.AddForecast(location, issueDate, values, times);
        }
    }
    return result;
}
/// <summary>
/// Loads a Watershed from a DSS file, accumulating consecutive ensemble-member
/// time series from the (sorted) catalog and flushing each group as one 2-D
/// ensemble per issue date.
/// </summary>
/// <param name="watershedName">Matched case-insensitively against the path A-part.</param>
/// <param name="start">First issue date (inclusive).</param>
/// <param name="end">Last issue date (inclusive).</param>
/// <param name="dssPath">DSS file to read.</param>
public static Watershed Read(string watershedName, DateTime start, DateTime end, string dssPath)
{
    Watershed rval = new Watershed(watershedName);
    // DssReader.UseTrainingWheels = false;
    using (DssReader dss = new DssReader(dssPath, DssReader.MethodID.MESS_METHOD_GENERAL_ID, DssReader.LevelID.MESS_LEVEL_NONE))
    {
        Console.WriteLine("Reading " + dssPath);
        DssPathCollection dssPaths = dss.GetCatalog(); // sorted
        int size = dssPaths.Count;
        if (size == 0)
        { throw new Exception("Empty DSS catalog"); }
        // Path shape: /RUSSIANNAPA/APCC1/FLOW/01SEP2019/1HOUR/C:000002|T:0212019/
        var seriesList = new List<Hec.Dss.TimeSeries>();
        for (int i = 0; i < size; i++)
        {
            if (i % 100 == 0) { Console.Write("."); } // progress dots
            DssPath path = dssPaths[i];
            string location = path.Bpart; // NOTE(review): unused local
            float[,] ensemble = null;
            ParseFPart(path.Fpart, out int memberidx, out DateTime issueDate);
            if (issueDate >= start && issueDate <= end && string.Equals(path.Apart, watershedName, StringComparison.OrdinalIgnoreCase))
            {
                // Passing in 'path' (not the dateless string) is important, path without
                // date triggers a heinous case in the dss low-level code
                var ts = dss.GetTimeSeries(path);
                // Flush the accumulated members when ts starts a new forecast, or when we
                // have reached the end of the catalog.
                if (NextForecast(seriesList, ts) || i == size - 1)
                {
                    if (i == size - 1) { seriesList.Add(ts); }
                    ConvertListToEnsembleArray(seriesList, ref ensemble);
                    rval.AddForecast(path.Bpart, issueDate, ensemble, ts.Times);
                    seriesList.Clear();
                    // NOTE(review): on the last iteration ts was added above AND is added
                    // again below (into a list that is then discarded) — harmless but
                    // confirm this is intended.
                }
                seriesList.Add(ts);
                // NOTE(review): if the final matching path is not at index size-1, the
                // last accumulated group is never flushed — verify against the data layout.
            }
        }
    }
    return(rval);
}
/// <summary>
/// Creates a copy of this watershed containing at most <paramref name="takeCount"/>
/// forecasts, taken in location/forecast iteration order.
/// </summary>
/// <param name="takeCount">Maximum number of forecasts to copy; 0 or negative yields an empty clone.</param>
/// <returns>A new Watershed with the same name and up to takeCount forecasts.</returns>
public Watershed CloneSubset(int takeCount)
{
    var retn = new Watershed(this.Name);
    int count = 0;
    foreach (Location loc in this.Locations)
    {
        foreach (Forecast f in loc.Forecasts)
        {
            // Stop before adding once the cap is reached. The previous version used
            // 'break', which only exited the inner loop, so every remaining location
            // leaked one extra forecast past takeCount.
            if (count >= takeCount)
                return retn;
            retn.AddForecast(f.Location.Name, f.IssueDate, f.Ensemble, f.TimeStamps);
            count++;
        }
    }
    return retn;
}
/// <summary>
/// Reads time-series-profile records (one 2-D profile per forecast) from a DSS
/// file for the given watershed and issue-date window.
/// </summary>
/// <param name="watershedName">Matched case-insensitively against the path A-part,
/// consistent with the ensemble DSS reader.</param>
/// <param name="start">First issue date (inclusive).</param>
/// <param name="end">Last issue date (inclusive).</param>
/// <param name="dssFileName">DSS file to read.</param>
public static Watershed ReadTimeSeriesProfiles(string watershedName, DateTime start, DateTime end, string dssFileName)
{
    Watershed rval = new Watershed(watershedName);
    float[,] profile = null;
    using (DssReader dss = new DssReader(dssFileName, DssReader.MethodID.MESS_METHOD_GENERAL_ID, DssReader.LevelID.MESS_LEVEL_NONE))
    {
        // Fixed log message: was "Reading" with no trailing space.
        Console.WriteLine("Reading " + dssFileName);
        DssPathCollection dssPaths = dss.GetCatalog(); // sorted
        int size = dssPaths.Count; // Count property, not the Count() extension method
        if (size == 0)
        {
            throw new Exception("Empty DSS catalog");
        }
        // Path shape: /RUSSIANNAPA/APCC1/FLOW/01SEP2019/1HOUR/|T:0212019/
        for (int i = 0; i < size; i++)
        {
            if (i % 100 == 0) { Console.Write("."); } // progress dots
            DssPath path = dssPaths[i];
            DateTime issueDate = ParseIssueDate(path.Fpart);
            // Case-insensitive watershed match, consistent with the ensemble reader
            // (the previous '==' comparison was case-sensitive).
            if (issueDate >= start && issueDate <= end
                && string.Equals(path.Apart, watershedName, StringComparison.OrdinalIgnoreCase))
            {
                var ts = dss.GetTimeSeriesProfile(path);
                // Reuses 'profile' as a scratch buffer across iterations.
                ArrayUtility.TransposeDoubleToFloat(ts.Values, ref profile);
                rval.AddForecast(path.Bpart, issueDate, profile, ts.Times);
            }
        }
    }
    return rval;
}
/// <summary>
/// Reads a Watershed from a SQLite database: one forecast per row, filtered by
/// watershed name and issue-date window, ordered by watershed/issue_date/location.
/// </summary>
/// <param name="watershedName">Watershed to select (exact match in SQL).</param>
/// <param name="startTime">First issue date (inclusive).</param>
/// <param name="endTime">Last issue date (inclusive).</param>
/// <param name="fileName">SQLite database file.</param>
/// <exception cref="Exception">Thrown when the query returns no rows.</exception>
public static Watershed Read(string watershedName, DateTime startTime, DateTime endTime, string fileName)
{
    SQLiteServer server = GetServer(fileName);
    var rval = new Watershed(watershedName);
    // SECURITY(review): the SQL is built by string concatenation and watershedName
    // reaches the query unescaped. Safe only while watershedName comes from trusted
    // internal callers — prefer a parameterized query if SQLiteServer supports one.
    var sql = "select * from " + TableName
        + " WHERE issue_date >= '" + startTime.ToString(DateTimeFormat) + "' "
        + " AND issue_date <= '" + endTime.ToString(DateTimeFormat) + "' "
        + " AND watershed = '" + watershedName + "' ";
    sql += " order by watershed,issue_date,location_name";
    var table = server.Table(TableName, sql);
    if (table.Rows.Count == 0)
    {
        throw new Exception("no data");
    }
    float[,] values = null;
    foreach (DataRow row in table.Rows)
    {
        // (Removed an unused 'prevIssueDate' local and a redundant pre-loop
        // conversion of row 0's issue_date — the loop converts every row anyway.)
        DateTime issueDate = Convert.ToDateTime(row["issue_date"]);
        var times = GetTimes(row);
        GetValues(row, ref values); // reuses 'values' as a scratch buffer across rows
        rval.AddForecast(row["location_name"].ToString(), issueDate, values, times);
    }
    return rval;
}