public void DoClean()
{
    if (IsTimeToClean == false)
    {
        return;
    }

    m_timer.Reset();

    string msg = $"Deleting old daily file(s) in {Filename.Directory} older than {DaysToKeep} days";
    Stopwatch watch = Stopwatch.StartNew();

    DirectoryCleaner cleaner = new DirectoryCleaner(Filename.Directory, "*.json");
    cleaner.Clean(fi =>
    {
        int age = (int)((DateTime.Now - fi.CreationTime).TotalDays);
        //log.LogInformation($"{fi.FullName} is {age} days old");
        bool to_delete = age >= DaysToKeep;
        if (to_delete)
        {
            msg += $"\nDeleting {fi.FullName} -- it is {age} days old";
        }
        return to_delete;
    });

    msg += $"\n\nDone deleting old daily file(s) -- it took {watch.ElapsedMilliseconds} ms";

    logging.EventLog log = new ApplicationEventLog();
    log.LogInformation(msg);
}
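// The DirectoryCleaner type used above is part of this codebase and its implementation is not
// shown here. As an illustration only, a minimal sketch of the predicate-based Clean() contract
// that DoClean() relies on (the callback returns true for files that should be deleted) might
// look like the following; the real class may differ. Requires System and System.IO.
public class DirectoryCleanerSketch
{
    private readonly string m_directory;
    private readonly string m_pattern;

    public DirectoryCleanerSketch(string directory, string pattern)
    {
        m_directory = directory;
        m_pattern = pattern;
    }

    // Invoke the predicate for every matching file and delete the ones it selects.
    public void Clean(Func<FileInfo, bool> should_delete)
    {
        foreach (string path in Directory.GetFiles(m_directory, m_pattern))
        {
            FileInfo fi = new FileInfo(path);
            if (should_delete(fi))
            {
                fi.Delete();
            }
        }
    }
}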
private void CheckForConfigurationChanges(DataStorage storage, SQLiteConnection conn)
{
    Database db = new Database();
    DateTimeOffset? configuration_update = db.GetLastConfigurationUpdateAttribute(conn);

    if (m_last_configuration_update == DateTimeOffset.MinValue ||
        (configuration_update.HasValue && configuration_update.Value != m_last_configuration_update))
    {
        if (configuration_update.HasValue)
        {
            m_last_configuration_update = configuration_update.Value;
        }

        logging.EventLog elog = new ApplicationEventLog();
        elog.LogInformation("Loading configuration from database");

        //db.Initialize();
        SystemConfiguration config = SystemConfigurationStore.Get(false, conn);
        m_system_device = new SystemDevice(config, storage);

        DeleteDays delete_days = new DeleteDays();
        int? days = delete_days.GetValueAsInt(conn);
        m_days_to_keep = days ?? 180;
        m_daily_file_cleaner.DaysToKeep = m_days_to_keep;
    }
}
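// DeleteDays (above), and CompressDailyFile and DeleteDailyFileAfterCompression (in DoWrite below),
// appear to be typed wrappers around named settings stored in the database; their implementations
// are not shown here. As an assumption-labeled sketch only, such a wrapper might read one value and
// convert it roughly like this. The table and column names are hypothetical, not the real schema.
public class IntAttributeSketch
{
    private readonly string m_name;

    public IntAttributeSketch(string name)
    {
        m_name = name;
    }

    public int? GetValueAsInt(SQLiteConnection conn)
    {
        // Hypothetical "Attributes(Name, Value)" table -- illustrative only.
        using (SQLiteCommand cmd = new SQLiteCommand("SELECT Value FROM Attributes WHERE Name = @name", conn))
        {
            cmd.Parameters.AddWithValue("@name", m_name);
            object value = cmd.ExecuteScalar();
            if (value == null || value == DBNull.Value)
            {
                return null;
            }
            return int.TryParse(value.ToString(), out int parsed) ? (int?)parsed : null;
        }
    }
}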
public void Startup()
{
    if (m_thread != null)
    {
        throw new Exception("Startup: Starting while already running");
    }

    logging.EventLog elog = new ApplicationEventLog();
    elog.LogInformation("Starting");

    GlobalIsRunning.IsRunning = true;
    m_shutdown.Reset();

    elog.LogInformation("Initializing database");
    Stopwatch watch = Stopwatch.StartNew();
    Database db = new Database();
    new Initializer(null).Initialize(db);
    elog.LogInformation($"Database initialization took {watch.ElapsedMilliseconds} ms");

    using (SQLiteConnection conn = db.Connection)
    {
        conn.Open();
        db.Attribute attr = new db.Attribute();
        attr.Set("service.startup_time", DateTimeOffset.Now.ToString("o"), conn);

        string assembly_ver = Assembly.GetExecutingAssembly().GetName().Version.ToString();
        attr.Set("software.version", assembly_ver, conn);
    }

    elog.LogInformation("Setting up responders");
    m_responders.ForEach(r => RequestBus.Instance.Subscribe(r));

    elog.LogInformation("Starting web server");
    watch.Restart();
    m_host.Start();
    elog.LogInformation($"Web server startup took {watch.ElapsedMilliseconds} ms");

    elog.LogInformation("Starting work thread");
    watch.Restart();
    m_thread = new Thread(new ThreadStart(ThreadFunc));
    m_thread.Start();

    // Wait for the thread to start
    while (!m_thread.IsAlive)
    {
        Thread.Sleep(25);
    }
    elog.LogInformation($"Work thread startup took {watch.ElapsedMilliseconds} ms");

    elog.LogInformation("Completed startup");
}
public void CleanOldData(int days_to_keep, SQLiteConnection conn)
{
    if (GlobalIsRunning.IsRunning == false)
    {
        return;
    }

    try
    {
        DateTimeOffset dt = DateTimeOffset.Now - TimeSpan.FromDays(days_to_keep);
        string dt_as_8601 = dt.DayBeginAs8601();
        //log.LogInformation("Deleting old data before " + dt_as_8601);

        Stopwatch watch = Stopwatch.StartNew();

        Deleter deleter = new Deleter("NetworkStatus", $"DatePingAttempted < '{dt_as_8601}'", conn);
        deleter.Execute();

        // To keep a single delete from taking too long, only delete 100 rows at a time.
        int count = 100;
        string clause = $"DataID IN (SELECT DataID FROM Data WHERE TimeStamp < '{dt_as_8601}' ORDER BY TimeStamp ASC LIMIT {count})";

        if (GlobalIsRunning.IsRunning)
        {
            deleter = new Deleter("MostRecentDataPerCollector", clause, conn);
            deleter.Execute();
        }

        if (GlobalIsRunning.IsRunning)
        {
            deleter = new Deleter("Data", clause, conn);
            deleter.Execute();
        }

        if (GlobalIsRunning.IsRunning)
        {
            // Don't delete from the DeviceStatus table if the status is still valid. Only rows
            // that have been superseded should be deleted.
            clause = $"Date < '{dt_as_8601}' AND IsValid = 0";
            deleter = new Deleter("DeviceStatus", clause, conn);
            deleter.Execute();
        }

        watch.Stop();
        long elapsed = watch.ElapsedMilliseconds;
        if (elapsed > 1000)
        {
            logging.EventLog elog = new ApplicationEventLog();
            elog.LogInformation($"Finished deleting old data prior to {dt_as_8601}. It took {elapsed} ms");
        }
    }
    catch (Exception ex)
    {
        ILog log = LogManager.GetLogger(typeof(Database));
        log.Error("CleanOldData:");
        log.Error(ex);
    }
}
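// For context: assuming Deleter builds a statement of the form "DELETE FROM <table> WHERE <clause>"
// (an assumption -- its implementation is not shown here), the batched Data delete above would issue
// SQL along these lines, removing at most 100 of the oldest rows per call:
//
//   DELETE FROM Data
//   WHERE DataID IN (SELECT DataID FROM Data
//                    WHERE TimeStamp < '<dt_as_8601>'
//                    ORDER BY TimeStamp ASC
//                    LIMIT 100);
//
// Calling CleanOldData() periodically therefore trims old rows gradually instead of holding a long
// write lock on the SQLite database for one large delete.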
public void Shutdown()
{
    logging.EventLog elog = new ApplicationEventLog();
    elog.LogInformation("Stopping");

    Stopwatch watch = Stopwatch.StartNew();
    GlobalIsRunning.IsRunning = false;
    m_shutdown.Set();

    while (m_thread != null && m_thread.ThreadState == System.Threading.ThreadState.Running)
    {
        Thread.Sleep(100);
    }
    elog.LogInformation($"Stopping worker thread took {watch.ElapsedMilliseconds} ms");

    Database db = new Database();
    using (SQLiteConnection conn = db.Connection)
    {
        conn.Open();
        db.Attribute attr = new db.Attribute();
        attr.Set("service.stop_time", DateTimeOffset.Now.ToString("o"), conn);
    }

    elog.LogInformation("Stopping web server");
    m_host.Stop();
    m_host.Dispose();

    elog.LogInformation("Clearing responders");
    m_responders.ForEach(r => RequestBus.Instance.Unsubscribe(r));

    m_thread = null;

    elog.LogInformation("Completed stopping");
}
public void BeingCollected(long collector_id, bool is_being_collected)
{
    try
    {
        Updater updater = new Updater("Collectors", $"CollectorID = {collector_id}", Conn);
        updater.Set("CurrentlyBeingCollected", is_being_collected ? 1 : 0);
        updater.Execute();
    }
    catch (Exception e)
    {
        logging.EventLog log = new ApplicationEventLog();
        log.LogInformation($"Error in BeingCollected({collector_id}, {is_being_collected})");
        log.Log(e);
    }
}
public CollectorInfo CollectNow(long collector_id)
{
    CollectorInfo collector_info = null;
    try
    {
        Updater updater = new Updater("Collectors", $"CollectorID = {collector_id}", Conn);
        // Set the time to null so it will rise to the top of the to-do list.
        updater.SetNull("NextCollectionTime");
        updater.Execute();

        collector_info = GetCollectorInfo(collector_id);
    }
    catch (Exception e)
    {
        logging.EventLog log = new ApplicationEventLog();
        log.LogInformation($"Error in CollectNow({collector_id})");
        log.Log(e);
    }
    return collector_info;
}
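// Why clearing NextCollectionTime works: in SQLite, NULL sorts before any non-NULL value in an
// ascending ORDER BY. Assuming the scheduler picks work with a query along these lines (illustrative
// only -- the real query is not shown here), a collector whose NextCollectionTime was just set to
// NULL is returned first:
//
//   SELECT CollectorID FROM Collectors
//   WHERE IsEnabled = 1
//   ORDER BY NextCollectionTime ASC;   -- NULLs first, so CollectNow() jumps the queue
//
// The "IsEnabled = 1" filter is hypothetical; the ordering behavior is the relevant part.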
public void DoWrite(SQLiteConnection conn)
{
    if (GlobalIsRunning.IsRunning == false)
    {
        return;
    }
    if (IsTimeToWrite == false)
    {
        return;
    }

    m_timer.Reset();

    logging.EventLog log = new ApplicationEventLog();
    Stopwatch watch = Stopwatch.StartNew();

    List<DailyReport> reports = new Retriever().GetIncompleteDailyReports(true, conn);
    string directory = Filename.Directory;
    log.LogInformation($"Writing {reports.Count} daily file(s)");

    JsonSerializer serializer = new JsonSerializer();
    foreach (DailyReport report in reports)
    {
        Stopwatch watch2 = Stopwatch.StartNew();

        Filename f = new Filename(report.countryCode, report.siteName, report.day);
        string filename_base = directory + "\\" + f.Name;
        string filename_json = filename_base + ".json";

        using (StreamWriter sw = new StreamWriter(filename_json))
        using (JsonWriter writer = new JsonTextWriter(sw))
        {
            writer.Formatting = Formatting.Indented;
            serializer.Serialize(writer, report);
        }

        CompressDailyFile compress = new CompressDailyFile();
        bool? do_compress = compress.GetValueAsBoolean(conn);
        if (do_compress.HasValue && do_compress.Value)
        {
            try
            {
                using (ZipFile zip = new ZipFile())
                {
                    ZipEntry entry = zip.AddFile(filename_json);
                    entry.FileName = f.Name + ".json";

                    // Note that we save the file as .zip.json, and not the more likely .json.zip.
                    //
                    // This is because the daily-file-upload tool just uploads files from COMMON
                    // (or whoever else might be generating daily files), and the back-end
                    // processing of those uploaded files uses the extension to decide on any
                    // additional processing. We don't want compressed and uncompressed JSON files
                    // to be processed differently, so we use this unconventional naming convention.
                    zip.Save(filename_base + ".zip.json");
                }

                // Optionally delete the original JSON file once it's compressed
                DeleteDailyFileAfterCompression del = new DeleteDailyFileAfterCompression();
                bool? do_delete = del.GetValueAsBoolean(conn);
                if (do_delete.HasValue && do_delete.Value)
                {
                    File.Delete(filename_json);
                }
            }
            catch (Exception e)
            {
                log.LogError(e.Message);
            }
        }

        log.LogInformation($"Writing {filename_json} took {watch2.ElapsedMilliseconds} ms");
    }

    watch.Stop();
    log.LogInformation($"Done writing {reports.Count} daily file(s). It took {watch.ElapsedMilliseconds} ms");
}
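// For completeness, a hedged sketch of how a consumer could read back one of the compressed daily
// files written above. This method is not part of the service; it assumes the same libraries
// DoWrite() uses (DotNetZip / Ionic.Zip and Json.NET), and the method name and parameter are
// hypothetical.
public static DailyReport ReadCompressedDailyFile(string zip_json_path)
{
    // The archive holds a single entry named "<base>.json" (see entry.FileName above).
    using (ZipFile zip = ZipFile.Read(zip_json_path))
    using (Stream entry_stream = zip[0].OpenReader())
    using (StreamReader sr = new StreamReader(entry_stream))
    using (JsonReader reader = new JsonTextReader(sr))
    {
        return new JsonSerializer().Deserialize<DailyReport>(reader);
    }
}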