public void FillEDSM(HistoryEntry syspos)       // call to fill in EDSM data for entry, and also fills in all others pointing to the system object
{
    if (syspos.System.status == SystemStatusEnum.EDSM || syspos.System.EDSMID == -1)   // if set already, or we tried and failed..
    {
        //System.Diagnostics.Debug.WriteLine("Checked System {0} already id {1} ", syspos.System.name , syspos.System.id_edsm);
        return;
    }

    ISystem edsmsys = SystemCache.FindSystem(syspos.System);           // see if we have it..

    if (edsmsys != null)                                               // if we found it externally, fill in info
    {
        using (SQLiteConnectionUser uconn = new SQLiteConnectionUser(utc: true))   // let's do this in a transaction for speed.
        {
            using (DbTransaction txn = uconn.BeginTransaction())
            {
                FillInSystemFromDBInt(syspos, edsmsys, uconn, txn);    // and fill in using this connection/tx
                txn.Commit();
            }
        }
    }
    else
    {
        FillInSystemFromDBInt(syspos, null, null, null);               // else fill in using null system, which means just mark it checked
    }
}
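Every write path in these listings follows the same shape: open a SQLiteConnectionUser, begin one DbTransaction, perform all the inserts or updates, then Commit. The "in a transaction for speed" comment above is the reason: without an explicit transaction, SQLite wraps each statement in its own implicit transaction and journal flush, so batching the writes amortises that cost. Below is a minimal, self-contained sketch of the pattern, written against Microsoft.Data.Sqlite rather than the project's SQLiteConnectionUser wrapper so it compiles on its own; the table and column names are illustrative only.

using System;
using Microsoft.Data.Sqlite;

public static class BatchWriteSketch
{
    // Apply a batch of EDSM id updates inside a single transaction.
    // Hypothetical schema: Systems(Id INTEGER PRIMARY KEY, EdsmId INTEGER).
    public static void UpdateEdsmIds(string dbPath, (long id, long edsmId)[] updates)
    {
        using (var cn = new SqliteConnection($"Data Source={dbPath}"))
        {
            cn.Open();

            using (var txn = cn.BeginTransaction())     // one journal flush for the whole batch
            {
                foreach (var (id, edsmId) in updates)
                {
                    using (var cmd = cn.CreateCommand())
                    {
                        cmd.Transaction = txn;
                        cmd.CommandText = "UPDATE Systems SET EdsmId = @edsmid WHERE Id = @id";
                        cmd.Parameters.AddWithValue("@id", id);
                        cmd.Parameters.AddWithValue("@edsmid", edsmId);
                        cmd.ExecuteNonQuery();
                    }
                }

                txn.Commit();       // nothing is durable until here; an exception rolls the batch back
            }
        }
    }
}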
// Called by ScanForNewEntries (from EDJournalClass Scan Tick Worker) to scan an NFI for new entries
private void ScanReader(EDJournalReader nfi, List<JournalEntry> entries, List<UIEvent> uientries)
{
    int netlogpos = 0;

    try
    {
        if (nfi.TravelLogUnit.id == 0)
        {
            nfi.TravelLogUnit.type = 3;
            nfi.TravelLogUnit.Add();
        }

        netlogpos = nfi.TravelLogUnit.Size;

        bool readanything = nfi.ReadJournal(out List<JournalReaderEntry> ents, out List<UIEvent> uie, historyrefreshparsing: false, resetOnError: false);

        uientries.AddRange(uie);

        if (readanything)           // if we read, we must update the travel log pos
        {
            //System.Diagnostics.Debug.WriteLine("ScanReader " + Path.GetFileName(nfi.FileName) + " read " + ents.Count + " ui " + uientries.Count + " size " + netlogpos);

            using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
            {
                using (DbTransaction txn = cn.BeginTransaction())
                {
                    ents = ents.Where(jre => JournalEntry.FindEntry(jre.JournalEntry, jre.Json).Count == 0).ToList();

                    foreach (JournalReaderEntry jre in ents)
                    {
                        entries.Add(jre.JournalEntry);
                        jre.JournalEntry.Add(jre.Json, cn, txn);
                    }

                    // System.Diagnostics.Debug.WriteLine("Wrote " + ents.Count() + " to db and updated TLU");

                    nfi.TravelLogUnit.Update(cn);

                    txn.Commit();
                }
            }
        }
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("Exception " + ex.Message);

        // Revert and re-read the failed entries
        if (nfi != null && nfi.TravelLogUnit != null)
        {
            nfi.TravelLogUnit.Size = netlogpos;
        }

        throw;
    }
}
private bool ImportJump(ImportSystem sys)
{
    DateTime eventtime = DateTime.SpecifyKind((DateTime)sys.Timestamp, DateTimeKind.Local).ToUniversalTime();

    try
    {
        using (SQLiteConnectionUser conn = new SQLiteConnectionUser(true, EDDbAccessMode.Writer))
        {
            using (DbCommand exists = conn.CreateCommand("SELECT 1 FROM JournalEntries WHERE EventTypeId = 280 AND commanderId = @com AND EventTime between @low AND @high"))
            {
                AddParameterWithValue(exists, "@com", Commander);
                AddParameterWithValue(exists, "@low", eventtime.AddSeconds(-10));
                AddParameterWithValue(exists, "@high", eventtime.AddSeconds(10));

                using (DbDataReader rdr = exists.ExecuteReader())       // dispose the reader so the connection is free for the insert below
                {
                    if (rdr.Read())
                    {
                        return true;            // a jump already exists within +/- 10 seconds of this timestamp, nothing to import
                    }
                }
            }
        }

        using (SQLiteConnectionUser conn = new SQLiteConnectionUser(true, EDDbAccessMode.Writer))
        {
            using (DbTransaction txn = conn.BeginTransaction())
            {
                using (DbCommand cmd = conn.CreateCommand("Insert into JournalEntries (TravelLogId,CommanderId,EventTypeId,EventType,EventTime,EventData,EdsmId,Synced) " +
                                                          "values (@tli,@cid,@eti,@et,@etime,@edata,@edsmid,@synced)", txn))
                {
                    AddParameterWithValue(cmd, "@tli", 0);
                    AddParameterWithValue(cmd, "@cid", Commander);
                    AddParameterWithValue(cmd, "@eti", 280);            // EDDiscovery.EliteDangerous.JournalTypeEnum.FSDJump
                    AddParameterWithValue(cmd, "@et", "FSDJump");

                    JObject je = new JObject();
                    je["timestamp"] = eventtime.ToString("yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'");
                    je["event"] = "FSDJump";
                    je["StarSystem"] = sys.SysName;

                    AddParameterWithValue(cmd, "@etime", eventtime);
                    AddParameterWithValue(cmd, "@edata", je.ToString());
                    AddParameterWithValue(cmd, "@edsmid", 0);
                    AddParameterWithValue(cmd, "@synced", 0);

                    cmd.ExecuteNonQuery();

                    txn.Commit();

                    return true;
                }
            }
        }
    }
    catch
    {
        return false;
    }
}
// Called by ScanForNewEntries (from EDJournalClass Scan Tick Worker) to scan an NFI for new entries
private void ScanReader(EDJournalReader nfi, List<JournalEntry> entries)
{
    int netlogpos = 0;

    try
    {
        if (nfi.TravelLogUnit.id == 0)
        {
            nfi.TravelLogUnit.type = 3;
            nfi.TravelLogUnit.Add();
        }

        netlogpos = nfi.TravelLogUnit.Size;

        List<JournalReaderEntry> ents = nfi.ReadJournalLog().ToList();

        //System.Diagnostics.Debug.WriteLine("ScanReader " + Path.GetFileName(nfi.FileName) + " read " + ents.Count + " size " + netlogpos);

        if (ents.Count > 0)
        {
            using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
            {
                using (DbTransaction txn = cn.BeginTransaction())
                {
                    ents = ents.Where(jre => JournalEntry.FindEntry(jre.JournalEntry, jre.Json).Count == 0).ToList();

                    foreach (JournalReaderEntry jre in ents)
                    {
                        entries.Add(jre.JournalEntry);
                        jre.JournalEntry.Add(jre.Json, cn, txn);
                        ticksNoActivity = 0;
                    }

                    nfi.TravelLogUnit.Update(cn);

                    txn.Commit();
                }
            }
        }
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("Exception " + ex.Message);

        // Revert and re-read the failed entries
        if (nfi != null && nfi.TravelLogUnit != null)
        {
            nfi.TravelLogUnit.Size = netlogpos;
        }

        throw;
    }
}
private void ScanReader(EDJournalReader nfi, List<JournalEntry> entries)
{
    int netlogpos = 0;

    try
    {
        if (nfi.TravelLogUnit.id == 0)
        {
            nfi.TravelLogUnit.type = 3;
            nfi.TravelLogUnit.Add();
        }

        netlogpos = nfi.TravelLogUnit.Size;

        List<JournalReaderEntry> ents = nfi.ReadJournalLog().ToList();

        if (ents.Count > 0)
        {
            using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
            {
                using (DbTransaction txn = cn.BeginTransaction())
                {
                    ents = ents.Where(jre => JournalEntry.FindEntry(jre.JournalEntry, jre.Json).Count == 0).ToList();

                    foreach (JournalReaderEntry jre in ents)
                    {
                        entries.Add(jre.JournalEntry);
                        jre.JournalEntry.Add(jre.Json, cn, txn);
                        ticksNoActivity = 0;
                    }

                    nfi.TravelLogUnit.Update(cn);

                    txn.Commit();
                }
            }
        }
    }
    catch
    {
        // Revert and re-read the failed entries
        if (nfi != null && nfi.TravelLogUnit != null)
        {
            nfi.TravelLogUnit.Size = netlogpos;
        }

        throw;
    }
}
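The ScanReader variants above share one recovery idiom: capture the current read offset (TravelLogUnit.Size) before reading, write the new entries inside a transaction, and restore the saved offset in the catch block so a failed pass is simply re-read on the next scan tick. Below is a self-contained sketch of that tail-and-revert idea, assuming a plain text log and a caller-supplied per-line handler; both are stand-ins for the project's journal reader and DB write, and the real reader also copes with partially written last lines.

using System;
using System.Collections.Generic;
using System.IO;

public sealed class LogTailSketch
{
    // In the real code this offset lives in TravelLogUnit.Size and is persisted to the DB.
    public long Position { get; private set; }

    public List<string> ScanForNewLines(string path, Action<string> processLine)
    {
        long savedPos = Position;                   // equivalent of netlogpos
        var newLines = new List<string>();

        try
        {
            using (var fs = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
            {
                fs.Seek(Position, SeekOrigin.Begin);

                using (var sr = new StreamReader(fs))
                {
                    string line;
                    while ((line = sr.ReadLine()) != null)
                    {
                        processLine(line);          // e.g. parse and queue a DB insert
                        newLines.Add(line);
                    }
                }

                Position = fs.Length;               // advance only after the whole batch succeeded
            }

            return newLines;
        }
        catch
        {
            Position = savedPos;                    // revert so the failed entries are re-read next tick
            throw;
        }
    }
}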
public void FillInPositionsFSDJumps()       // call if you want to ensure we have the best possible position data on FSD Jumps. Only occurs on pre 2.1 with lazy load of just name/edsmid
{
    List<Tuple<HistoryEntry, ISystem>> updatesystems = new List<Tuple<HistoryEntry, ISystem>>();

    using (SQLiteConnectionSystem cn = new SQLiteConnectionSystem())
    {
        foreach (HistoryEntry he in historylist)
        {
            if (he.IsFSDJump && !he.System.HasCoordinate)       // try and load ones without position.. if it's got pos we are happy
            {
                // done in two IFs for debugging, in case you're wondering why!

                if (he.System.status != SystemStatusEnum.EDSM && he.System.EDSMID == 0)     // and it's not from EDSM and we have not already tried
                {
                    ISystem found = SystemCache.FindSystem(he.System, cn);

                    if (found != null)
                    {
                        updatesystems.Add(new Tuple<HistoryEntry, ISystem>(he, found));
                    }
                }
            }
        }
    }

    if (updatesystems.Count > 0)
    {
        using (SQLiteConnectionUser uconn = new SQLiteConnectionUser(utc: true))
        {
            using (DbTransaction txn = uconn.BeginTransaction())        // take a transaction over this
            {
                foreach (Tuple<HistoryEntry, ISystem> he in updatesystems)
                {
                    FillInSystemFromDBInt(he.Item1, he.Item2, uconn, txn);      // fill, we already have an EDSM system to use
                }

                txn.Commit();
            }
        }
    }
}
static public void ParseFiles(string datapath, out string error, int defaultMapColour, Func<bool> cancelRequested, Action<int, string> updateProgress,
                              bool forceReload = false, Dictionary<string, NetLogFileReader> netlogreaders = null, int currentcmdrid = -1)
{
    error = null;

    if (datapath == null)
    {
        error = "Netlog directory not set!";
        return;
    }

    if (!Directory.Exists(datapath))            // if logfiles directory is not found
    {
        error = "Netlog directory is not present!";
        return;
    }

    if (netlogreaders == null)
    {
        netlogreaders = new Dictionary<string, NetLogFileReader>();
    }

    if (currentcmdrid < 0)
    {
        currentcmdrid = EDDConfig.Instance.CurrentCmdrID;
    }

    // TLUs
    List<TravelLogUnit> tlus = TravelLogUnit.GetAll();
    Dictionary<string, TravelLogUnit> netlogtravelogUnits = tlus.Where(t => t.type == 1).GroupBy(t => t.Name).Select(g => g.First()).ToDictionary(t => t.Name);
    Dictionary<long, string> travellogunitid2name = netlogtravelogUnits.Values.ToDictionary(t => t.id, t => t.Name);
    Dictionary<string, List<JournalLocOrJump>> vsc_lookup = JournalEntry.GetAll().OfType<JournalLocOrJump>()
                                                                .GroupBy(v => v.TLUId)
                                                                .Where(g => travellogunitid2name.ContainsKey(g.Key))
                                                                .ToDictionary(g => travellogunitid2name[g.Key], g => g.ToList());

    // list of systems in journal, sorted by time
    List<JournalLocOrJump> vsSystemsEnts = JournalEntry.GetAll(currentcmdrid).OfType<JournalLocOrJump>().OrderBy(j => j.EventTimeUTC).ToList();

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(datapath, "netLog.*.log", SearchOption.AllDirectories).Select(f => new FileInfo(f)).OrderBy(p => p.LastWriteTime).ToArray();

    List<NetLogFileReader> readersToUpdate = new List<NetLogFileReader>();

    for (int i = 0; i < allFiles.Length; i++)
    {
        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi, netlogtravelogUnits, vsc_lookup, netlogreaders);

        if (!netlogtravelogUnits.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogtravelogUnits[reader.TravelLogUnit.Name] = reader.TravelLogUnit;
            reader.TravelLogUnit.Add();
        }

        if (!netlogreaders.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogreaders[reader.TravelLogUnit.Name] = reader;
        }

        if (forceReload)
        {
            // Force a reload of the travel log
            reader.TravelLogUnit.Size = 0;
        }

        if (reader.filePos != fi.Length || i == allFiles.Length - 1)        // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    for (int i = 0; i < readersToUpdate.Count; i++)
    {
        using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
        {
            int ji = 0;

            NetLogFileReader reader = readersToUpdate[i];
            updateProgress(i * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            using (DbTransaction tn = cn.BeginTransaction())
            {
                foreach (JObject jo in reader.ReadSystems(cancelRequested, currentcmdrid))
                {
                    jo["EDDMapColor"] = defaultMapColour;

                    JournalLocOrJump je = new JournalFSDJump(jo)
                    {
                        TLUId = (int)reader.TravelLogUnit.id,
                        CommanderId = currentcmdrid,
                    };

                    while (ji < vsSystemsEnts.Count && vsSystemsEnts[ji].EventTimeUTC < je.EventTimeUTC)
                    {
                        ji++;   // move to next entry which is bigger in time or equal to ours.
                    }

                    JournalLocOrJump prev = (ji > 0 && (ji - 1) < vsSystemsEnts.Count) ? vsSystemsEnts[ji - 1] : null;
                    JournalLocOrJump next = ji < vsSystemsEnts.Count ? vsSystemsEnts[ji] : null;

                    bool previssame = (prev != null && prev.StarSystem.Equals(je.StarSystem, StringComparison.CurrentCultureIgnoreCase) &&
                                       (!prev.HasCoordinate || !je.HasCoordinate || (prev.StarPos - je.StarPos).LengthSquared < 0.01));
                    bool nextissame = (next != null && next.StarSystem.Equals(je.StarSystem, StringComparison.CurrentCultureIgnoreCase) &&
                                       (!next.HasCoordinate || !je.HasCoordinate || (next.StarPos - je.StarPos).LengthSquared < 0.01));

                    // System.Diagnostics.Debug.WriteLine("{0} {1} {2}", ji, vsSystemsEnts[ji].EventTimeUTC, je.EventTimeUTC);

                    if (!(previssame || nextissame))
                    {
                        je.Add(cn, tn);
                        System.Diagnostics.Debug.WriteLine("Add {0} {1}", je.EventTimeUTC, je.EventDataString);
                    }
                }

                tn.Commit();

                reader.TravelLogUnit.Update();
            }

            if (updateProgress != null)
            {
                updateProgress((i + 1) * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);
            }
        }
    }
}
public static HistoryList LoadHistory(EDJournalClass journalmonitor, Func<bool> cancelRequested, Action<int, string> reportProgress,
                                      string NetLogPath = null, bool ForceNetLogReload = false, bool ForceJournalReload = false,
                                      int CurrentCommander = Int32.MinValue, bool Keepuievents = true)
{
    HistoryList hist = new HistoryList();
    EDCommander cmdr = null;

    if (CurrentCommander >= 0)
    {
        cmdr = EDCommander.GetCommander(CurrentCommander);
        journalmonitor.ParseJournalFiles(() => cancelRequested(), (p, s) => reportProgress(p, s), forceReload: ForceJournalReload);     // Parse files stop monitor..

        if (NetLogPath != null)
        {
            string errstr = null;
            NetLogClass.ParseFiles(NetLogPath, out errstr, EliteConfigInstance.InstanceConfig.DefaultMapColour, () => cancelRequested(), (p, s) => reportProgress(p, s), ForceNetLogReload, currentcmdrid: CurrentCommander);
        }
    }

    reportProgress(-1, "Resolving systems");

    List<JournalEntry> jlist = JournalEntry.GetAll(CurrentCommander).OrderBy(x => x.EventTimeUTC).ThenBy(x => x.Id).ToList();
    List<Tuple<JournalEntry, HistoryEntry>> jlistUpdated = new List<Tuple<JournalEntry, HistoryEntry>>();

    using (SQLiteConnectionSystem conn = new SQLiteConnectionSystem())
    {
        HistoryEntry prev = null;
        JournalEntry jprev = null;

        foreach (JournalEntry je in jlist)
        {
            if (MergeEntries(jprev, je))        // if we merge.. we may have updated info, so reprint.
            {
                jprev.FillInformation(out prev.EventSummary, out prev.EventDescription, out prev.EventDetailedInfo);    // need to keep this up to date..
                continue;
            }

            if (je.IsUIEvent && !Keepuievents)  // filter out any UI events
            {
                //System.Diagnostics.Debug.WriteLine("**** Filter out " + je.EventTypeStr + " on " + je.EventTimeLocal.ToString());
                continue;
            }

            bool journalupdate = false;
            HistoryEntry he = HistoryEntry.FromJournalEntry(je, prev, out journalupdate, conn, cmdr);

            prev = he;
            jprev = je;

            hist.historylist.Add(he);

            if (journalupdate)
            {
                jlistUpdated.Add(new Tuple<JournalEntry, HistoryEntry>(je, he));
                Debug.WriteLine("Queued update requested {0} {1}", he.System.EDSMID, he.System.Name);
            }
        }
    }

    if (jlistUpdated.Count > 0)
    {
        reportProgress(-1, "Updating journal entries");

        using (SQLiteConnectionUser conn = new SQLiteConnectionUser(utc: true))
        {
            using (DbTransaction txn = conn.BeginTransaction())
            {
                foreach (Tuple<JournalEntry, HistoryEntry> jehe in jlistUpdated)
                {
                    JournalEntry je = jehe.Item1;
                    HistoryEntry he = jehe.Item2;

                    double dist = (je is JournalFSDJump) ? (je as JournalFSDJump).JumpDist : 0;
                    bool updatecoord = (je is JournalLocOrJump) ? (!(je as JournalLocOrJump).HasCoordinate && he.System.HasCoordinate) : false;

                    Debug.WriteLine("Push update {0} {1} to JE {2} HE {3}", he.System.EDSMID, he.System.Name, je.Id, he.Indexno);
                    JournalEntry.UpdateEDSMIDPosJump(je.Id, he.System, updatecoord, dist, conn, txn);
                }

                txn.Commit();
            }
        }
    }

    // now database has been updated due to initial fill, now fill in stuff which needs the user database

    hist.CommanderId = CurrentCommander;

    hist.ProcessUserHistoryListEntries(h => h.ToList());        // here, we update the DBs in HistoryEntry and any global DBs in historylist

    return hist;
}
static private bool SendToEDSM(List<HistoryEntry> hl, EDCommander cmdr, out string errmsg)
{
    EDSMClass edsm = new EDSMClass(cmdr);       // Ensure we use the commander's EDSM credentials.
    errmsg = null;

    List<JObject> entries = new List<JObject>();

    foreach (HistoryEntry he in hl)
    {
        JournalEntry je = he.journalEntry;

        if (je == null)
        {
            je = JournalEntry.Get(he.Journalid);
        }

        JObject json = je.GetJson();
        RemoveCommonKeys(json);

        if (je.EventTypeID == JournalTypeEnum.FSDJump && json["FuelUsed"].Empty())
        {
            json["_convertedNetlog"] = true;
        }

        if (json["StarPosFromEDSM"].Bool(false))        // Remove star pos from EDSM
        {
            json.Remove("StarPos");
        }

        json.Remove("StarPosFromEDSM");

        json["_systemName"] = he.System.Name;
        json["_systemCoordinates"] = new JArray(he.System.X, he.System.Y, he.System.Z);

        if (he.System.SystemAddress != null)
        {
            json["_systemAddress"] = he.System.SystemAddress;
        }

        if (he.IsDocked)
        {
            json["_stationName"] = he.StationName;

            if (he.MarketID != null)
            {
                json["_stationMarketId"] = he.MarketID;
            }
        }

        json["_shipId"] = he.ShipId;

        entries.Add(json);
    }

    List<JObject> results = edsm.SendJournalEvents(entries, out errmsg);

    if (results == null)
    {
        return false;
    }
    else
    {
        using (var cn = new SQLiteConnectionUser(utc: true))
        {
            using (var txn = cn.BeginTransaction())
            {
                for (int i = 0; i < hl.Count && i < results.Count; i++)
                {
                    HistoryEntry he = hl[i];
                    JObject result = results[i];
                    int msgnr = result["msgnum"].Int();
                    int systemId = result["systemId"].Int();
                    bool systemCreated = result["systemCreated"].Bool();

                    if ((msgnr >= 100 && msgnr < 200) || msgnr == 500)
                    {
                        if (he.EntryType == JournalTypeEnum.FSDJump || he.EntryType == JournalTypeEnum.Location)
                        {
                            if (systemId != 0)
                            {
                                he.System.EDSMID = systemId;
                                JournalEntry.UpdateEDSMIDPosJump(he.Journalid, he.System, false, 0, cn, txn);
                            }

                            if (systemCreated)
                            {
                                he.SetFirstDiscover(true);
                            }
                        }

                        he.SetEdsmSync(cn, txn);

                        if (msgnr == 500)
                        {
                            System.Diagnostics.Trace.WriteLine($"Warning submitting event {he.Journalid} \"{he.EventSummary}\": {msgnr} {result["msg"].Str()}");
                        }
                    }
                    else
                    {
                        System.Diagnostics.Trace.WriteLine($"Error submitting event {he.Journalid} \"{he.EventSummary}\": {msgnr} {result["msg"].Str()}");
                    }
                }

                txn.Commit();
            }
        }

        return true;
    }
}
private bool ImportNote(ImportSystem sys)
{
    long noteID = -1;
    long journalID = 0;
    string existingNote = "";

    try
    {
        using (SQLiteConnectionUser conn = new SQLiteConnectionUser(true, EDDbAccessMode.Writer))
        {
            using (DbCommand exists = conn.CreateCommand("SELECT id, note FROM SystemNote WHERE Name = @sys"))
            {
                AddParameterWithValue(exists, "@sys", sys.SysName);

                using (DbDataReader rdr = exists.ExecuteReader())       // dispose the reader before reusing the connection
                {
                    if (rdr.Read())
                    {
                        noteID = rdr.GetInt64(0);
                        existingNote = rdr.GetString(1);
                    }
                }
            }

            if (noteID < 0)
            {
                // no existing note - attach the new one to the most recent FSDJump (event type 280) mentioning this system
                using (DbCommand getjournal = conn.CreateCommand(string.Format("select max(id) from journalentries where eventtypeid = 280 and eventdata like '%{0}%'", sys.SysName)))
                {
                    using (DbDataReader dr = getjournal.ExecuteReader())
                    {
                        if (dr.Read())
                        {
                            journalID = dr.GetInt64(0);
                        }
                    }
                }
            }
        }

        using (SQLiteConnectionUser conn = new SQLiteConnectionUser(true, EDDbAccessMode.Writer))
        {
            if (noteID > 0)
            {
                using (DbTransaction txn = conn.BeginTransaction())
                {
                    using (DbCommand update = conn.CreateCommand("UPDATE SystemNote SET note = @note WHERE id = @id", txn))
                    {
                        string newNote = String.IsNullOrEmpty(existingNote) ? sys.Notes : existingNote + " | " + sys.Notes;

                        AddParameterWithValue(update, "@note", newNote);
                        AddParameterWithValue(update, "@id", noteID);

                        update.ExecuteNonQuery();
                    }

                    txn.Commit();
                }
            }
            else
            {
                using (DbTransaction txn = conn.BeginTransaction())
                {
                    using (DbCommand insert = conn.CreateCommand("Insert into SystemNote (Name, Time, Note, journalid, edsmid) values (@name, @time, @note, @journalid, @edsmid)", txn))
                    {
                        AddParameterWithValue(insert, "@name", sys.SysName);
                        AddParameterWithValue(insert, "@time", sys.Timestamp);
                        AddParameterWithValue(insert, "@note", sys.Notes);
                        AddParameterWithValue(insert, "@journalid", journalID);
                        AddParameterWithValue(insert, "@edsmid", -1);

                        insert.ExecuteNonQuery();
                    }

                    txn.Commit();
                }
            }
        }

        return true;
    }
    catch
    {
        return false;
    }
}
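One detail worth flagging in ImportNote: the max(id) lookup builds its LIKE pattern with string.Format, while every other query in these listings goes through AddParameterWithValue, so a system name containing a quote would break that SQL. Below is a sketch of the parameterised equivalent, written against Microsoft.Data.Sqlite so it stands alone; the table layout matches the query above, and ExecuteScalar also sidesteps the DBNull that max() returns when nothing matches.

using System;
using Microsoft.Data.Sqlite;

public static class JournalLookupSketch
{
    // Return the id of the most recent FSDJump (eventtypeid 280) whose event data mentions the system name, or 0 if none.
    public static long FindLatestJumpEntry(SqliteConnection conn, string sysName)
    {
        using (var cmd = conn.CreateCommand())
        {
            cmd.CommandText = "SELECT max(id) FROM journalentries WHERE eventtypeid = 280 AND eventdata LIKE @pattern";
            cmd.Parameters.AddWithValue("@pattern", "%" + sysName + "%");

            object result = cmd.ExecuteScalar();        // DBNull when there is no matching row
            return (result == null || result is DBNull) ? 0 : Convert.ToInt64(result);
        }
    }
}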
public void ParseJournalFiles(Func<bool> cancelRequested, Action<int, string> updateProgress, bool forceReload = false)
{
    System.Diagnostics.Trace.WriteLine("Scanned " + WatcherFolder);

    Dictionary<string, TravelLogUnit> m_travelogUnits = TravelLogUnit.GetAll().Where(t => (t.type & 0xFF) == 3).GroupBy(t => t.Name).Select(g => g.First()).ToDictionary(t => t.Name);

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(WatcherFolder, journalfilematch, SearchOption.AllDirectories).Select(f => new FileInfo(f)).OrderBy(p => p.LastWriteTime).ToArray();

    List<EDJournalReader> readersToUpdate = new List<EDJournalReader>();

    for (int i = 0; i < allFiles.Length; i++)
    {
        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi, m_travelogUnits);       // open it

        if (!m_travelogUnits.ContainsKey(reader.TravelLogUnit.Name))
        {
            m_travelogUnits[reader.TravelLogUnit.Name] = reader.TravelLogUnit;
            reader.TravelLogUnit.type = 3;
            reader.TravelLogUnit.Add();
        }

        if (!netlogreaders.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogreaders[reader.TravelLogUnit.Name] = reader;
        }

        if (forceReload)
        {
            // Force a reload of the travel log
            reader.TravelLogUnit.Size = 0;
        }

        if (reader.filePos != fi.Length || i == allFiles.Length - 1)        // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    for (int i = 0; i < readersToUpdate.Count; i++)
    {
        using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
        {
            EDJournalReader reader = readersToUpdate[i];
            updateProgress(i * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            List<JournalReaderEntry> entries = reader.ReadJournalLog(true).ToList();        // this may create new commanders, and may write to the TLU db

            ILookup<DateTime, JournalEntry> existing = JournalEntry.GetAllByTLU(reader.TravelLogUnit.id).ToLookup(e => e.EventTimeUTC);

            using (DbTransaction tn = cn.BeginTransaction())
            {
                foreach (JournalReaderEntry jre in entries)
                {
                    if (!existing[jre.JournalEntry.EventTimeUTC].Any(e => JournalEntry.AreSameEntry(jre.JournalEntry, e, ent1jo: jre.Json)))
                    {
                        jre.JournalEntry.Add(jre.Json, cn, tn);

                        System.Diagnostics.Trace.WriteLine(string.Format("Write Journal to db {0} {1}", jre.JournalEntry.EventTimeUTC, jre.JournalEntry.EventTypeStr));
                    }
                }

                tn.Commit();
            }

            reader.TravelLogUnit.Update(cn);

            updateProgress((i + 1) * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            lastnfi = reader;
        }
    }

    updateProgress(-1, "");
}
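The duplicate check in ParseJournalFiles is worth isolating: rather than comparing every freshly read entry against everything already stored, it builds an ILookup keyed on EventTimeUTC and only runs the expensive AreSameEntry comparison against rows that share the candidate's timestamp. Below is a stripped-down sketch of that idea with a stand-in record type and equality rule; the real code compares parsed JSON, not raw strings.

using System;
using System.Collections.Generic;
using System.Linq;

public sealed record Entry(DateTime EventTimeUtc, string EventType, string Json);

public static class DedupSketch
{
    // Keep only the incoming entries that have no equal counterpart among the existing ones,
    // testing each candidate solely against rows with the same timestamp.
    public static List<Entry> NewEntriesOnly(IEnumerable<Entry> existing, IEnumerable<Entry> incoming)
    {
        ILookup<DateTime, Entry> byTime = existing.ToLookup(e => e.EventTimeUtc);

        return incoming
            .Where(e => !byTime[e.EventTimeUtc].Any(x => x.EventType == e.EventType && x.Json == e.Json))
            .ToList();
    }
}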
public static HistoryList LoadHistory(EDJournalClass journalmonitor, Func<bool> cancelRequested, Action<int, string> reportProgress,
                                      string NetLogPath = null, bool ForceNetLogReload = false, bool ForceJournalReload = false,
                                      bool CheckEdsm = false, int CurrentCommander = Int32.MinValue)
{
    HistoryList hist = new HistoryList();
    EDCommander cmdr = null;

    if (CurrentCommander >= 0)
    {
        cmdr = EDCommander.GetCommander(CurrentCommander);
        journalmonitor.ParseJournalFiles(() => cancelRequested(), (p, s) => reportProgress(p, s), forceReload: ForceJournalReload);     // Parse files stop monitor..

        if (NetLogPath != null)
        {
            string errstr = null;
            NetLogClass.ParseFiles(NetLogPath, out errstr, EliteConfigInstance.InstanceConfig.DefaultMapColour, () => cancelRequested(), (p, s) => reportProgress(p, s), ForceNetLogReload, currentcmdrid: CurrentCommander);
        }
    }

    reportProgress(-1, "Resolving systems");

    List<JournalEntry> jlist = JournalEntry.GetAll(CurrentCommander).OrderBy(x => x.EventTimeUTC).ThenBy(x => x.Id).ToList();
    List<Tuple<JournalEntry, HistoryEntry>> jlistUpdated = new List<Tuple<JournalEntry, HistoryEntry>>();

    using (SQLiteConnectionSystem conn = new SQLiteConnectionSystem())
    {
        HistoryEntry prev = null;

        foreach (JournalEntry inje in jlist)
        {
            foreach (JournalEntry je in hist.ProcessJournalEntry(inje))
            {
                bool journalupdate = false;
                HistoryEntry he = HistoryEntry.FromJournalEntry(je, prev, CheckEdsm, out journalupdate, conn, cmdr);
                prev = he;

                hist.historylist.Add(he);

                if (journalupdate)
                {
                    jlistUpdated.Add(new Tuple<JournalEntry, HistoryEntry>(je, he));
                }
            }
        }
    }

    if (jlistUpdated.Count > 0)
    {
        reportProgress(-1, "Updating journal entries");

        using (SQLiteConnectionUser conn = new SQLiteConnectionUser(utc: true))
        {
            using (DbTransaction txn = conn.BeginTransaction())
            {
                foreach (Tuple<JournalEntry, HistoryEntry> jehe in jlistUpdated)
                {
                    JournalEntry je = jehe.Item1;
                    HistoryEntry he = jehe.Item2;
                    JournalFSDJump jfsd = je as JournalFSDJump;

                    if (jfsd != null)
                    {
                        JournalEntry.UpdateEDSMIDPosJump(jfsd.Id, he.System, !jfsd.HasCoordinate && he.System.HasCoordinate, jfsd.JumpDist, conn, txn);
                    }
                }

                txn.Commit();
            }
        }
    }

    // now database has been updated due to initial fill, now fill in stuff which needs the user database

    hist.CommanderId = CurrentCommander;

    hist.ProcessUserHistoryListEntries(h => h.ToList());        // here, we update the DBs in HistoryEntry and any global DBs in historylist

    hist.SendEDSMStatusInfo(hist.GetLast, true);

    return hist;
}
public List<JournalEntry> ScanForNewEntries()
{
    var entries = new List<JournalEntry>();

    int netlogpos = 0;
    EDJournalReader nfi = null;

    try
    {
        string filename = null;

        if (m_netLogFileQueue.TryDequeue(out filename))      // if a new one queued, we swap to using it
        {
            nfi = OpenFileReader(new FileInfo(filename));
            lastnfi = nfi;

            System.Diagnostics.Trace.WriteLine(string.Format("Change in file, scan {0}", lastnfi.FileName));
        }
        else if (ticksNoActivity >= 30 && (lastnfi == null || (!File.Exists(lastnfi.FileName) || lastnfi.filePos >= new FileInfo(lastnfi.FileName).Length)))
        {
            if (lastnfi == null)
            {
                Trace.Write($"No last file - scanning for journals");
            }
            else if (!File.Exists(lastnfi.FileName))
            {
                Trace.WriteLine($"File {lastnfi.FileName} not found - scanning for journals");
            }
            else
            {
                Trace.WriteLine($"No activity on {lastnfi.FileName} for 60 seconds ({lastnfi.filePos} >= {new FileInfo(lastnfi.FileName).Length}) - scanning for new journals");
            }

            HashSet<string> tlunames = new HashSet<string>(TravelLogUnit.GetAllNames());
            string[] filenames = Directory.EnumerateFiles(m_watcherfolder, "Journal.*.log", SearchOption.AllDirectories)
                                          .Select(s => new { name = Path.GetFileName(s), fullname = s })
                                          .Where(s => !tlunames.Contains(s.name))
                                          .OrderBy(s => s.name)
                                          .Select(s => s.fullname)
                                          .ToArray();

            ticksNoActivity = 0;

            foreach (var name in filenames)
            {
                nfi = OpenFileReader(new FileInfo(name));
                lastnfi = nfi;
                break;      // take the first journal not already recorded as a TLU
            }
        }
        else
        {
            nfi = lastnfi;
        }

        ticksNoActivity++;

        if (nfi != null)
        {
            if (nfi.TravelLogUnit.id == 0)
            {
                nfi.TravelLogUnit.type = 3;
                nfi.TravelLogUnit.Add();
            }

            netlogpos = nfi.TravelLogUnit.Size;

            List<JournalEntry> ents = nfi.ReadJournalLog().ToList();

            if (ents.Count > 0)
            {
                using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
                {
                    using (DbTransaction txn = cn.BeginTransaction())
                    {
                        ents = ents.Where(je => JournalEntry.FindEntry(je).Count == 0).ToList();

                        foreach (JournalEntry je in ents)
                        {
                            entries.Add(je);
                            je.Add(cn, txn);
                            ticksNoActivity = 0;
                        }

                        nfi.TravelLogUnit.Update(cn);

                        txn.Commit();
                    }
                }
            }
        }

        return entries;
    }
    catch (Exception ex)
    {
        // Revert and re-read the failed entries
        if (nfi != null && nfi.TravelLogUnit != null)
        {
            nfi.TravelLogUnit.Size = netlogpos;
        }

        System.Diagnostics.Trace.WriteLine("Net tick exception : " + ex.Message);
        System.Diagnostics.Trace.WriteLine(ex.StackTrace);
        throw;
    }
}