// Reparse a set of journal readers. If a fireback callback is given, every entry read is handed
// to it directly; otherwise entries are de-duplicated against the DB (per TLU, per timestamp)
// and new ones are stored inside a single transaction per file.
// updateProgress receives a percentage and the current file name; -1 signals completion.
public void ProcessDetectedNewFiles(List<EDJournalReader> readersToUpdate, Action<int, string> updateProgress, Action<JournalEntry> fireback = null)
{
    for (int rdrno = 0; rdrno < readersToUpdate.Count; rdrno++)
    {
        EDJournalReader rdr = readersToUpdate[rdrno];

        updateProgress(rdrno * 100 / readersToUpdate.Count, rdr.TravelLogUnit.Name);

        // this may create new commanders, and may write to the TLU db
        rdr.ReadJournal(out List<JournalReaderEntry> entries, out List<UIEvent> uievents, historyrefreshparsing: true, resetOnError: true);

        UserDatabase.Instance.ExecuteWithDatabase(db =>
        {
            if (entries.Count > 0)
            {
                if (fireback != null)
                {
                    // caller wants the entries handed back - no DB storage
                    foreach (JournalReaderEntry ent in entries)
                        fireback(ent.JournalEntry);
                }
                else
                {
                    // index what is already stored for this TLU by timestamp so duplicate checks are cheap
                    ILookup<DateTime, JournalEntry> stored = JournalEntry.GetAllByTLU(rdr.TravelLogUnit.id, db.Connection).ToLookup(e => e.EventTimeUTC);

                    using (DbTransaction txn = db.Connection.BeginTransaction())
                    {
                        foreach (JournalReaderEntry ent in entries)
                        {
                            JObject json = ent.Json;

                            // store only entries not already present at the same timestamp
                            bool duplicate = stored[ent.JournalEntry.EventTimeUTC].Any(e => JournalEntry.AreSameEntry(ent.JournalEntry, e, db.Connection, ent1jo: json));

                            if (!duplicate)
                                ent.JournalEntry.Add(json, db.Connection, txn);
                        }

                        txn.Commit();
                    }
                }
            }

            rdr.TravelLogUnit.Update(db.Connection);        // record the new file position
            updateProgress((rdrno + 1) * 100 / readersToUpdate.Count, rdr.TravelLogUnit.Name);
            lastnfi = rdr;
        });
    }

    updateProgress(-1, "");
}
// Called by ScanForNewEntries (from EDJournalClass Scan Tick Worker) to scan a NFI for new entries.
// New journal entries are appended to 'entries' and UI events to 'uientries'. If anything failed,
// the TLU position is rewound so the data is re-read on the next tick, then the exception is rethrown.
private void ScanReader(EDJournalReader nfi, List<JournalEntry> entries, List<UIEvent> uientries)
{
    int restorepos = 0;     // TLU position to roll back to on failure

    try
    {
        if (nfi.TravelLogUnit.id == 0)          // not yet registered in the DB - add it first
        {
            nfi.TravelLogUnit.type = TravelLogUnit.JournalType;
            nfi.TravelLogUnit.Add();
        }

        restorepos = nfi.TravelLogUnit.Size;

        bool gotdata = nfi.ReadJournal(out List<JournalEntry> newents, out List<UIEvent> newuis, historyrefreshparsing: false, resetOnError: false);

        uientries.AddRange(newuis);

        if (gotdata)        // if we read, we must update the travel log pos
        {
            UserDatabase.Instance.ExecuteWithDatabase(db =>
            {
                using (DbTransaction txn = db.Connection.BeginTransaction())
                {
                    // keep only entries not already stored in the DB
                    newents = newents.Where(je => JournalEntry.FindEntry(je, db, je.GetJson()).Count == 0).ToList();

                    foreach (JournalEntry je in newents)
                    {
                        entries.Add(je);
                        je.Add(je.GetJson(), db.Connection, txn);
                    }

                    nfi.TravelLogUnit.Update(db.Connection);    // store the new file position
                    txn.Commit();
                }
            });
        }
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("Exception " + ex.Message);

        // Revert and re-read the failed entries
        if (nfi != null && nfi.TravelLogUnit != null)
        {
            nfi.TravelLogUnit.Size = restorepos;
        }

        throw;
    }
}
// Called by ScanForNewEntries (from EDJournalClass Scan Tick Worker) to scan a NFI for new entries.
// Reads new entries / UI events from lastnfi. With StoreToDBDuringUpdateRead set, new entries are
// de-duplicated and written to the DB inside one transaction and the TLU position is saved with it;
// otherwise only the TLU position is updated (when anything was read).
private void ScanReader(List<JournalEntry> entries, List<UIEvent> uientries)
{
    // must have committed it at this point, prev code checked for it but it must have been done
    System.Diagnostics.Debug.Assert(lastnfi.ID != 0);
    // must have added to netlogreaders.. double check
    System.Diagnostics.Debug.Assert(netlogreaders.ContainsKey(lastnfi.FullName));

    bool gotdata = lastnfi.ReadJournal(entries, uientries, historyrefreshparsing: false);

    if (StoreToDBDuringUpdateRead)
    {
        if (entries.Count > 0 || gotdata)
        {
            UserDatabase.Instance.ExecuteWithDatabase(db =>
            {
                var timer = new System.Diagnostics.Stopwatch();
                timer.Start();

                using (DbTransaction txn = db.Connection.BeginTransaction())
                {
                    if (entries.Count > 0)
                    {
                        // NOTE(review): this rebinds the captured 'entries' variable only - the
                        // caller's list keeps the pre-filter contents. Confirm that is intended.
                        entries = entries.Where(je => JournalEntry.FindEntry(je, db, je.GetJson(db.Connection, txn)).Count == 0).ToList();

                        foreach (JournalEntry je in entries)
                        {
                            var json = je.GetJson(db.Connection, txn);
                            je.Add(json, db.Connection, txn);
                        }
                    }

                    lastnfi.TravelLogUnit.Update(db.Connection, txn);       // update TLU pos
                    txn.Commit();
                }

                if (timer.ElapsedMilliseconds >= 50)    // this is written to the log to try and debug bad DB behaviour
                {
                    System.Diagnostics.Trace.WriteLine("Warning access to DB to write new journal entries slow " + timer.ElapsedMilliseconds);

                    foreach (var e in entries)
                        System.Diagnostics.Trace.WriteLine(".." + e.EventTimeUTC + " " + e.EventTypeStr);
                }
            });
        }
    }
    else if (gotdata)
    {
        lastnfi.TravelLogUnit.Update();
    }
}
// Called by ScanForNewEntries (from EDJournalClass Scan Tick Worker) to scan a NFI for new entries.
// Variant using a direct SQLiteConnectionUser. New entries are appended to 'entries' and stored to
// the DB; on error the TLU position is rewound and the exception rethrown so the data is re-read.
private void ScanReader(EDJournalReader nfi, List<JournalEntry> entries)
{
    int restorepos = 0;     // TLU position to roll back to on failure

    try
    {
        if (nfi.TravelLogUnit.id == 0)          // not yet registered in the DB - add it first
        {
            nfi.TravelLogUnit.type = 3;         // journal type - presumably same as TravelLogUnit.JournalType, confirm
            nfi.TravelLogUnit.Add();
        }

        restorepos = nfi.TravelLogUnit.Size;

        bool gotdata = nfi.ReadJournal(out List<JournalReaderEntry> newents, historyrefreshparsing: false, resetOnError: false);

        if (gotdata)        // if we read, we must update the travel log pos
        {
            using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
            {
                using (DbTransaction txn = cn.BeginTransaction())
                {
                    // keep only entries not already stored in the DB
                    newents = newents.Where(je => JournalEntry.FindEntry(je.JournalEntry, je.Json).Count == 0).ToList();

                    foreach (JournalReaderEntry je in newents)
                    {
                        entries.Add(je.JournalEntry);
                        je.JournalEntry.Add(je.Json, cn, txn);
                        ticksNoActivity = 0;        // activity seen - reset the idle tick counter
                    }

                    System.Diagnostics.Debug.WriteLine("Wrote " + newents.Count() + " to db and updated TLU");

                    nfi.TravelLogUnit.Update(cn);       // store the new file position
                    txn.Commit();
                }
            }
        }
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("Exception " + ex.Message);

        // Revert and re-read the failed entries
        if (nfi != null && nfi.TravelLogUnit != null)
        {
            nfi.TravelLogUnit.Size = restorepos;
        }

        throw;
    }
}
// Scan WatcherFolder for journal files, register a TLU for each new one, then read every file whose
// stored position does not match its length (plus the newest file, so a live file is always picked up)
// and store new, non-duplicate entries to the DB.
// FIX: cancelRequested was accepted but never consulted; it is now polled between files so a long
// rescan can be abandoned. updateProgress receives a percentage and file name; -1 always signals the end.
public void ParseJournalFiles(Func<bool> cancelRequested, Action<int, string> updateProgress, bool forceReload = false)
{
    // known journal TLUs by name ((type & 0xFF) == 3 selects the journal type); first of each name wins
    Dictionary<string, TravelLogUnit> m_travelogUnits = TravelLogUnit.GetAll()
            .Where(t => (t.type & 0xFF) == 3)
            .GroupBy(t => t.Name).Select(g => g.First())
            .ToDictionary(t => t.Name);

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(WatcherFolder, journalfilematch, SearchOption.AllDirectories)
            .Select(f => new FileInfo(f))
            .OrderBy(p => p.LastWriteTime)
            .ToArray();

    List<EDJournalReader> readersToUpdate = new List<EDJournalReader>();

    for (int i = 0; i < allFiles.Length; i++)
    {
        if (cancelRequested())              // FIX: honour the cancellation callback between files
            break;

        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi, m_travelogUnits);       // open it

        if (!m_travelogUnits.ContainsKey(reader.TravelLogUnit.Name))
        {
            m_travelogUnits[reader.TravelLogUnit.Name] = reader.TravelLogUnit;
            reader.TravelLogUnit.type = 3;
            reader.TravelLogUnit.Add();
        }

        if (!netlogreaders.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogreaders[reader.TravelLogUnit.Name] = reader;
        }

        if (forceReload)
        {
            // Force a reload of the travel log
            reader.TravelLogUnit.Size = 0;
        }

        if (reader.filePos != fi.Length || i == allFiles.Length - 1)        // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    for (int i = 0; i < readersToUpdate.Count; i++)
    {
        if (cancelRequested())              // FIX: allow abandoning mid-rescan; the -1 end marker is still sent below
            break;

        using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
        {
            EDJournalReader reader = readersToUpdate[i];

            updateProgress(i * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            // this may create new commanders, and may write to the TLU db
            reader.ReadJournal(out List<JournalReaderEntry> entries, historyrefreshparsing: true, resetOnError: true);

            // entries already stored for this TLU, keyed by timestamp, for duplicate rejection
            ILookup<DateTime, JournalEntry> existing = JournalEntry.GetAllByTLU(reader.TravelLogUnit.id).ToLookup(e => e.EventTimeUTC);

            using (DbTransaction tn = cn.BeginTransaction())
            {
                foreach (JournalReaderEntry jre in entries)
                {
                    if (!existing[jre.JournalEntry.EventTimeUTC].Any(e => JournalEntry.AreSameEntry(jre.JournalEntry, e, ent1jo: jre.Json)))
                    {
                        jre.JournalEntry.Add(jre.Json, cn, tn);
                    }
                }

                tn.Commit();
            }

            reader.TravelLogUnit.Update(cn);        // record the new file position

            updateProgress((i + 1) * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            lastnfi = reader;
        }
    }

    updateProgress(-1, "");
}
// given a list of files to reparse, read them and store to db or fire them back (and set firebacklastn to make it work)
// With a fireback, only entries from the last 'firebacklastn' readers are fired; otherwise entries
// are de-duplicated against the DB and stored, with the TLU position updated in the same transaction.
// updateProgress receives a percentage and the file name; -1 signals completion.
public void ProcessDetectedNewFiles(List<EDJournalReader> readersToUpdate, Action<int, string> updateProgress, Action<JournalEntry, int, int, int, int> fireback = null, int firebacklastn = 0)
{
    for (int rdrno = 0; rdrno < readersToUpdate.Count; rdrno++)
    {
        EDJournalReader rdr = readersToUpdate[rdrno];

        var newentries = new List<JournalEntry>();
        var newuievents = new List<UIEvent>();

        // this may create new commanders, and may write to the TLU
        bool gotdata = rdr.ReadJournal(newentries, newuievents, historyrefreshparsing: true);

        if (fireback != null)
        {
            if (gotdata)            // need to update TLU pos if read anything
                rdr.TravelLogUnit.Update();

            if (rdrno >= readersToUpdate.Count - firebacklastn)     // if within fireback window
            {
                for (int e = 0; e < newentries.Count; e++)
                    fireback(newentries[e], rdrno, readersToUpdate.Count, e, newentries.Count);
            }
        }
        else
        {
            UserDatabase.Instance.ExecuteWithDatabase(db =>
            {
                // only lookup TLUs if there is actually anything to compare against
                ILookup<DateTime, JournalEntry> stored = newentries.Count > 0 ?
                        JournalEntry.GetAllByTLU(rdr.ID, db.Connection).ToLookup(e => e.EventTimeUTC) :
                        null;

                using (DbTransaction txn = db.Connection.BeginTransaction())
                {
                    foreach (JournalEntry je in newentries)     // 'stored' is non-null whenever this loop body runs
                    {
                        if (!stored[je.EventTimeUTC].Any(e => JournalEntry.AreSameEntry(je, e, db.Connection, ent1jo: je.GetJson(db.Connection, txn))))
                        {
                            BaseUtils.JSON.JObject json = je.GetJson(db.Connection, txn);
                            je.Add(json, db.Connection, txn);
                        }
                        // else: duplicate of an entry already in the DB - skip
                    }

                    if (gotdata)            // need to update TLU pos if read anything
                    {
                        rdr.TravelLogUnit.Update(db.Connection, txn);
                    }

                    txn.Commit();
                }
            });
        }

        updateProgress((rdrno + 1) * 100 / readersToUpdate.Count, rdr.FullName);
        lastnfi = rdr;
    }

    updateProgress(-1, "");
}