/// <summary>
/// Downloads an update file from the given URL if a newer version is available.
/// </summary>
/// <param name="url">Source URL of the (possibly zipped) update file.</param>
/// <param name="zipfilename">Local destination file name for the download.</param>
/// <param name="filename">Unused here — NOTE(review): kept for interface compatibility, confirm callers.</param>
/// <returns>Status flags describing the download result.</returns>
private DOWNLOADFILESTATUS GetUpdateFromURL(string url, string zipfilename, string filename)
{
    // delegate directly to the auto-decompressing web client
    return new AutoDecompressionWebClient().DownloadFileIfNewer(url, zipfilename, true, true, Password);
}
/// <summary>
/// Downloads an aircraft database file from a URL (if newer than the local copy),
/// parses it as JSON and inserts/updates the aircraft database.
/// </summary>
/// <param name="url">Source URL of the (possibly zipped) JSON file.</param>
/// <param name="filename">Local destination file name.</param>
/// <returns>True when the database was processed successfully, false on error.</returns>
private bool ReadAircraftsFromURL(string url, string filename)
{
    try
    {
        AutoDecompressionWebClient cl = new AutoDecompressionWebClient();
        DOWNLOADFILESTATUS status = cl.DownloadFileIfNewer(url, filename, true, true, Password);
        // FIX: the original tested the ERROR flag twice ("A && A"); a single test is sufficient
        if ((status & DOWNLOADFILESTATUS.ERROR) > 0)
        {
            this.ReportProgress(-1, "Error while downloading and extracting " + filename);
            return (false);
        }
        else if (((status & DOWNLOADFILESTATUS.NEWER) > 0) || ((status & DOWNLOADFILESTATUS.NOTNEWER) > 0))
        {
            string json = "";
            using (StreamReader sr = new StreamReader(filename))
                json = sr.ReadToEnd();
            List<AirScout.Aircrafts.AircraftDesignator> ads = AircraftData.Database.AircraftFromJSON(json);
            // check for invalid entries and substitute a placeholder
            foreach (AircraftDesignator ad in ads)
            {
                if (String.IsNullOrEmpty(ad.Call))
                {
                    ad.Call = "[unknown]";
                }
                if (String.IsNullOrEmpty(ad.Reg))
                {
                    ad.Reg = "[unknown]";
                }
                if (String.IsNullOrEmpty(ad.TypeCode))
                {
                    ad.TypeCode = "[unknown]";
                }
            }
            // check for empty database
            if (AircraftData.Database.AircraftCount() == 0)
            {
                // do bulk insert
                AircraftData.Database.AircraftBulkInsert(ads);
            }
            else
            {
                // do bulk update
                AircraftData.Database.AircraftBulkInsertOrUpdateIfNewer(ads);
            }
            return (true);
        }
    }
    catch (Exception ex)
    {
        // Error loading database
        this.ReportProgress(-1, "[" + url + "]: " + ex.ToString());
    }
    return (false);
}
/// <summary>
/// Downloads a single SRTM1 elevation tile into the configured data path.
/// </summary>
/// <param name="tile">Tile descriptor; silently ignored when null or without a URL.</param>
public void DownloadTile(SRTM1TileDesignator tile)
{
    // nothing to do without a tile or a download location
    if (tile == null)
    {
        return;
    }
    if (String.IsNullOrEmpty(tile.URL))
    {
        return;
    }
    AutoDecompressionWebClient cl = new AutoDecompressionWebClient();
    // Path.GetFileName already returns everything after the last '/',
    // so the original Substring(LastIndexOf('/')) step was redundant
    string filename = Path.GetFileName(tile.URL);
    cl.DownloadFile(tile.URL, Path.Combine(Properties.Settings.Default.SRTM1_DataPath, filename));
}
/// <summary>
/// Downloads a station location file from a URL (if newer than the local copy),
/// parses the JSON content and merges it into the station database.
/// </summary>
/// <param name="url">Source URL of the (possibly zipped) JSON file.</param>
/// <param name="filename">Local destination file name.</param>
/// <returns>True on success, false on error or pending cancellation.</returns>
private bool ReadLocationsFromURL(string url, string filename)
{
    try
    {
        DOWNLOADFILESTATUS result = new AutoDecompressionWebClient().DownloadFileIfNewer(url, filename, true, true);
        if ((result & DOWNLOADFILESTATUS.ERROR) > 0)
        {
            Log.WriteMessage("Error while downloading and extracting " + filename, LogLevel.Error);
            return (false);
        }
        if (((result & DOWNLOADFILESTATUS.NEWER) > 0) || ((result & DOWNLOADFILESTATUS.NOTNEWER) > 0))
        {
            string content;
            using (StreamReader reader = new StreamReader(filename))
                content = reader.ReadToEnd();
            List<LocationDesignator> locations = StationData.Database.LocationFromJSON(content);
            // check for empty database
            if (StationData.Database.LocationCount() == 0)
            {
                // empty database --> a single bulk insert is fastest
                StationData.Database.LocationBulkInsert(locations);
            }
            else
            {
                // update element by element, bailing out when cancellation is pending
                foreach (LocationDesignator location in locations)
                {
                    StationData.Database.LocationInsertOrUpdateIfNewer(location);
                    if (bw_DatabaseUpdater.CancellationPending)
                        return (false);
                }
            }
            return (true);
        }
    }
    catch (Exception ex)
    {
        // Error loading database
        Log.WriteMessage("[" + url + "]: " + ex.ToString(), LogLevel.Error);
    }
    return (false);
}
/// <summary>
/// (Legacy) Downloads an aircraft JSON file (if newer), parses its arrays and
/// stores selected columns into the old aircraft database table.
/// </summary>
/// <param name="url">Source URL of the (possibly zipped) JSON file.</param>
/// <param name="filename">Local destination file name.</param>
private void ReadAircraftsFromURL(string url, string filename)
{
    string json = "";
    try
    {
        AutoDecompressionWebClient cl = new AutoDecompressionWebClient();
        cl.DownloadFileIfNewer(url, filename, true);
        if (!File.Exists(filename))
        {
            return;
        }
        // FIX: the original never read the downloaded file, so "json" stayed empty
        // and DeserializeObject always produced null
        using (StreamReader sr = new StreamReader(filename))
            json = sr.ReadToEnd();
        // deserialize JSON file
        JObject o = (JObject)JsonConvert.DeserializeObject(json);
        // clear collections
        JArrays.Clear();
        JProperties.Clear();
        // parse all child tokens recursively --> can be either a property or an array
        ParseToken(o);
        // we've got all properties and arrays here
        // store array values in DataTable; columns 0..2 and 6 of each JSON array are used
        DataTableAircrafts dt = new DataTableAircrafts();
        foreach (KeyValuePair<string, JArray> a in JArrays)
        {
            DataRow row = dt.NewRow();
            row[0] = a.Value[0].ToString();
            row[1] = a.Value[1].ToString();
            row[2] = a.Value[2].ToString();
            row[3] = a.Value[6].ToString();
            dt.Rows.Add(row);
        }
        AircraftDatabase_old.InsertOrUpdateTable(dt);
    }
    catch (Exception ex)
    {
        // Error loading database
        Console.WriteLine(System.Reflection.MethodBase.GetCurrentMethod().ToString() + "[" + url + "]: " + ex.Message);
    }
}
/// <summary>
/// Creates an elevation tile catalogue for the given rectangle by downloading
/// the catalogue file from the web and indexing all tiles whose square lies
/// inside the rectangle.
/// </summary>
/// <param name="caller">Optional BackgroundWorker used for progress reports (may be null).</param>
/// <param name="baseurl">Base URL of the elevation data web directory.</param>
/// <param name="basedir">Local directory for catalogue files.</param>
/// <param name="minlat">Southern boundary (degrees).</param>
/// <param name="minlon">Western boundary (degrees).</param>
/// <param name="maxlat">Northern boundary (degrees).</param>
/// <param name="maxlon">Eastern boundary (degrees).</param>
public ElevationCatalogue(BackgroundWorker caller, string baseurl, string basedir, double minlat, double minlon, double maxlat, double maxlon)
{
    BaseURL = baseurl;
    // ensure trailing slash for URL concatenation
    if (!BaseURL.EndsWith("/"))
    {
        BaseURL = BaseURL + "/";
    }
    BaseDir = basedir;
    MinLat = minlat;
    MinLon = minlon;
    MaxLat = maxlat;
    MaxLon = maxlon;
    // build web catalogue first
    // check and download catalogue file
    string url = BaseURL + "files.zip";
    string zipfilename = Path.Combine(BaseDir, "files.zip");
    string filename = Path.Combine(BaseDir, "files.cat");
    // fill a dictionary with needed squares covering the rectangle
    List<string> sq = ElevationData.Database.GetLocsFromRect(minlat, minlon, maxlat, maxlon, 2);
    SortedDictionary<string, string> squares = new SortedDictionary<string, string>();
    foreach (string s in sq)
    {
        squares.Add(s, null);
    }
    // report progress
    if ((caller != null) && caller.WorkerReportsProgress)
    {
        caller.ReportProgress(0, "Downloading elevation tile catalogue from web (please wait)...");
    }
    // get locs catalogue from web
    AutoDecompressionWebClient client = new AutoDecompressionWebClient();
    client.DownloadFileIfNewer(url, zipfilename, true, true);
    if (!File.Exists(filename))
        return;
    // get LastModified from catalogue
    LastModified = File.GetLastWriteTimeUtc(filename);
    // read catalogue and fill LastUpdated timestamp
    // expected line format: "<tilename>;<yyyy-MM-dd HH:mm:ssZ>" -- TODO confirm against catalogue writer
    using (StreamReader sr = new StreamReader(File.OpenRead(filename)))
    {
        Stopwatch st = new Stopwatch();
        st.Start();
        int i = 0;
        while (!sr.EndOfStream)
        {
            string s = sr.ReadLine();
            try
            {
                // skip empty lines and lines starting with "/" (treated as comments)
                if (!String.IsNullOrEmpty(s) && !s.StartsWith("/"))
                {
                    string[] a = s.Split(';');
                    string square = a[0].Substring(0, 4).ToUpper();
                    if ((caller != null) && caller.WorkerReportsProgress && (i % 1000 == 0))
                    {
                        caller.ReportProgress(0, "Updating elevation tile information [" + i.ToString() + "], please wait...");
                    }
                    // FIX: use ContainsKey for pure membership test (original used TryGetValue with a dummy out)
                    DateTime lastupdated;
                    if (squares.ContainsKey(square) && !this.Files.TryGetValue(a[0], out lastupdated))
                    {
                        this.Files.Add(a[0], DateTime.ParseExact(a[1], "yyyy-MM-dd HH:mm:ssZ", CultureInfo.InvariantCulture).ToUniversalTime());
                    }
                }
            }
            catch (Exception ex)
            {
                // FIX: the original swallowed per-line errors silently; keep the
                // best-effort semantics (continue reading) but log the problem
                Console.WriteLine("Error parsing catalogue line [" + s + "]: " + ex.Message);
            }
            i++;
        }
        st.Stop();
        Console.WriteLine("Reading catalogue: " + st.ElapsedMilliseconds.ToString() + " ms.");
    }
}
// TRICKY: process a single elevation tile.
// Checks the database whether the elevation tile is already inside and up to date.
// Returns true + elevation tile --> caller collects the tile for bulk insert.
// Returns true + null          --> elevation tile was found and updated in place (or was up to date).
// Returns false + null         --> error or cancellation.
private bool UpdateElevationTileFromURL(string tilename, DateTime lastupdated, ELEVATIONMODEL model, out ElevationTileDesignator tile)
{
    try
    {
        // this.ReportProgress((int)DATABASESTATUS.UPDATING, "Processing " + tilename + "...");
        DateTime tilelastupdated = ElevationData.Database.ElevationTileFindLastUpdated(new ElevationTileDesignator(tilename.Substring(0, 6).ToUpper()), model);
        // elevation tile found --> tilelastupdated contains timestamp
        // elevation tile not found --> tilelastupdated = DateTime.MinValue
        TimeSpan diff = lastupdated - tilelastupdated;
        // check if catalogue tile is newer (5 minute slack to ignore clock jitter -- TODO confirm intent)
        if (diff.TotalMinutes > 5)
        {
            // download elevation zip file and unzip
            string square = tilename.Substring(0, 4).ToUpper();
            string zipfilename = Path.Combine(ElevationData.Database.DefaultDatabaseDirectory(model), square + ".zip");
            string zipurl = ElevationData.Database.UpdateURL(model) + "/" + tilename.Substring(0, 2) + "/" + tilename.Substring(0, 4) + ".zip";
            string filename = Path.Combine(ElevationData.Database.DefaultDatabaseDirectory(model), tilename);
            if (!File.Exists(filename))
            {
                this.ReportProgress(0, StartOptions.Name + ": downloading " + Path.GetFileName(zipfilename) + "...");
                try
                {
                    // download zipfile if newer (the client also decompresses it)
                    AutoDecompressionWebClient client = new AutoDecompressionWebClient();
                    DOWNLOADFILESTATUS status = client.DownloadFileIfNewer(zipurl, zipfilename, true, true);
                }
                catch (Exception ex)
                {
                    this.ReportProgress(-1, ex.ToString());
                    try
                    {
                        // try to delete zip file anyway (it may be partially written)
                        File.Delete(zipfilename);
                    }
                    catch
                    {
                    }
                }
                try
                {
                    // delete zipfile if cache is disabled
                    if (!StartOptions.FileCacheEnabled)
                    {
                        File.Delete(zipfilename);
                    }
                }
                catch (Exception ex)
                {
                    this.ReportProgress(-1, "Error deleting zipfile [" + zipfilename + "]: " + ex.ToString());
                }
                // new zip file extracted, assuming that the remaining *.loc files are orphans
                // --> try to delete everything but current square and catalogue
                // cleanup all *.loc files
                foreach (string f in Directory.EnumerateFiles(ElevationData.Database.DefaultDatabaseDirectory(model), "*.loc"))
                {
                    try
                    {
                        if (!f.Contains(square))
                        {
                            File.Delete(f);
                        }
                    }
                    catch (Exception ex)
                    {
                        this.ReportProgress(-1, "Error deleting locfile [" + f + "]: " + ex.ToString());
                    }
                }
            }
            // wait at least 60 sec (600 x 100 ms) for the file being unzipped,
            // or throw FileNotFoundException if it never appears;
            // the unzip procedure sometimes delivers its results with delay
            int timeout = 0;
            while (!File.Exists(filename))
            {
                if (timeout > 600)
                {
                    throw new FileNotFoundException("Elevation file not found. " + filename);
                }
                Thread.Sleep(100);
                // NOTE(review): on cancellation this breaks out and the read below may
                // throw FileNotFoundException, which lands in the outer catch --> false
                if (this.CancellationPending)
                {
                    break;
                }
                timeout++;
            }
            // read the whole tile file (JSON) into memory
            string json = "";
            using (StreamReader sr = new StreamReader(File.OpenRead(filename)))
            {
                json = sr.ReadToEnd();
            }
            if (!string.IsNullOrEmpty(json))
            {
                // deserialize with UTC timestamps and stable float formatting
                JsonSerializerSettings settings = new JsonSerializerSettings();
                settings.DateTimeZoneHandling = DateTimeZoneHandling.Utc;
                settings.FloatFormatHandling = FloatFormatHandling.String;
                settings.Formatting = Newtonsoft.Json.Formatting.Indented;
                tile = JsonConvert.DeserializeObject <ElevationTileDesignator>(json, settings);
                // perform a single update if elevation tile was already found in database
                if (tilelastupdated != DateTime.MinValue)
                {
                    ElevationData.Database.ElevationTileUpdate(tile, model);
                    tile = null;
                }
                // return tile to be collected for bulk insert in main procedure
                return(true);
            }
            // empty tile file --> discard it and fall through to the error return
            File.Delete(filename);
        }
        else
        {
            // tile found and up to date --> nothing to do
            tile = null;
            return(true);
        }
    }
    catch (Exception ex)
    {
        this.ReportProgress(-1, ex.ToString() + ": tilename=" + tilename);
    }
    tile = null;
    return(false);
}