/// <summary>
/// Remove consecutive duplicate FSD jump entries from the journal for a commander.
/// Two adjacent (time-ordered) FSD jumps count as duplicates when they name the
/// same star system and, where both carry coordinates, lie within 0.1 ly of each
/// other; the earlier entry of the pair is deleted.
/// </summary>
/// <param name="currentcmdrid">Commander id whose journal is scanned.</param>
/// <returns>Number of duplicate entries deleted.</returns>
public static int RemoveDuplicateFSDEntries(int currentcmdrid)
{
    // All location/jump journal entries for this commander, oldest first.
    List<JournalLocOrJump> systemEntries = JournalEntry.GetAll(currentcmdrid)
                                                       .OfType<JournalLocOrJump>()
                                                       .OrderBy(j => j.EventTimeUTC)
                                                       .ToList();

    int removed = 0;

    using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
    {
        // Walk adjacent pairs; only pairs where BOTH entries are FSD jumps are considered.
        for (int index = 1; index < systemEntries.Count; index++)
        {
            if (systemEntries[index - 1] is JournalEvents.JournalFSDJump earlier &&
                systemEntries[index] is JournalEvents.JournalFSDJump later)
            {
                bool sameSystem = earlier.StarSystem.Equals(later.StarSystem, StringComparison.CurrentCultureIgnoreCase);

                // If either side lacks coordinates we cannot disprove the duplicate, so treat as close enough.
                bool closeEnough = !earlier.HasCoordinate || !later.HasCoordinate ||
                                   (earlier.StarPos - later.StarPos).LengthSquared < 0.01;

                if (sameSystem && closeEnough)
                {
                    Delete(earlier.Id, cn);     // drop the older of the two duplicates
                    removed++;
                    System.Diagnostics.Debug.WriteLine("Dup {0} {1} {2} {3}", earlier.Id, later.Id, earlier.StarSystem, later.StarSystem);
                }
            }
        }
    }

    return (removed);
}
/// <summary>
/// Scan the netlog directory for netLog.*.log files and import any FSD jump
/// entries not already present in the journal DB for the given commander.
/// New travel log units are registered, and each reader's position is persisted
/// after its file is processed.
/// </summary>
/// <param name="datapath">Directory holding the netLog files; sets error if null or missing.</param>
/// <param name="error">Null on success, otherwise a user-displayable error string.</param>
/// <param name="defaultMapColour">Colour stamped into each created entry as EDDMapColor.</param>
/// <param name="cancelRequested">Polled by the reader; return true to abort.</param>
/// <param name="updateProgress">Optional (percent, TLU name) progress callback; may be null.</param>
/// <param name="forceReload">True to re-read every file from the start (TLU size reset to 0).</param>
/// <param name="netlogreaders">Optional reader cache keyed by TLU name; created if null.</param>
/// <param name="currentcmdrid">Commander id; defaults to EDCommander.CurrentCmdrID when negative.</param>
static public void ParseFiles(string datapath, out string error, int defaultMapColour, Func<bool> cancelRequested, Action<int, string> updateProgress, bool forceReload = false, Dictionary<string, NetLogFileReader> netlogreaders = null, int currentcmdrid = -1)
{
    error = null;

    if (datapath == null)
    {
        error = "Netlog directory not set!";
        return;
    }

    if (!Directory.Exists(datapath))   // if logfiles directory is not found
    {
        error = "Netlog directory is not present!";
        return;
    }

    if (netlogreaders == null)
    {
        netlogreaders = new Dictionary<string, NetLogFileReader>();
    }

    if (currentcmdrid < 0)
    {
        currentcmdrid = EDCommander.CurrentCmdrID;
    }

    // TLUs - known travel log units of netlog type (type == 1), de-duplicated by name
    List<TravelLogUnit> tlus = TravelLogUnit.GetAll();
    Dictionary<string, TravelLogUnit> netlogtravelogUnits = tlus.Where(t => t.type == 1).GroupBy(t => t.Name).Select(g => g.First()).ToDictionary(t => t.Name);
    Dictionary<long, string> travellogunitid2name = netlogtravelogUnits.Values.ToDictionary(t => t.id, t => t.Name);

    // journal loc/jump entries grouped per TLU name, handed to OpenFileReader so it can resume mid-file
    Dictionary<string, List<JournalLocOrJump>> vsc_lookup = JournalEntry.GetAll().OfType<JournalLocOrJump>().GroupBy(v => v.TLUId).Where(g => travellogunitid2name.ContainsKey(g.Key)).ToDictionary(g => travellogunitid2name[g.Key], g => g.ToList());

    // list of systems in journal, sorted by time
    List<JournalLocOrJump> vsSystemsEnts = JournalEntry.GetAll(currentcmdrid).OfType<JournalLocOrJump>().OrderBy(j => j.EventTimeUTC).ToList();

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(datapath, "netLog.*.log", SearchOption.AllDirectories).Select(f => new FileInfo(f)).OrderBy(p => p.LastWriteTime).ToArray();

    List<NetLogFileReader> readersToUpdate = new List<NetLogFileReader>();

    for (int i = 0; i < allFiles.Length; i++)
    {
        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi, netlogtravelogUnits, vsc_lookup, netlogreaders);

        if (!netlogtravelogUnits.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogtravelogUnits[reader.TravelLogUnit.Name] = reader.TravelLogUnit;
            reader.TravelLogUnit.Add();     // brand new TLU - persist it
        }

        if (!netlogreaders.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogreaders[reader.TravelLogUnit.Name] = reader;
        }

        if (forceReload)
        {
            // Force a reload of the travel log
            reader.TravelLogUnit.Size = 0;
        }

        if (reader.filePos != fi.Length || i == allFiles.Length - 1)  // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    for (int i = 0; i < readersToUpdate.Count; i++)
    {
        using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
        {
            int ji = 0;     // cursor into vsSystemsEnts, advanced in step with the file's time order

            NetLogFileReader reader = readersToUpdate[i];

            // FIX: this call was previously unguarded while the end-of-loop call below
            // null-checks updateProgress - guard both consistently since callers may pass null.
            if (updateProgress != null)
            {
                updateProgress(i * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);
            }

            using (DbTransaction tn = cn.BeginTransaction())    // batch the inserts for speed
            {
                foreach (JObject jo in reader.ReadSystems(cancelRequested, currentcmdrid))
                {
                    jo["EDDMapColor"] = defaultMapColour;

                    JournalLocOrJump je = new JournalFSDJump(jo)
                    {
                        TLUId = (int)reader.TravelLogUnit.id,
                        CommanderId = currentcmdrid,
                    };

                    while (ji < vsSystemsEnts.Count && vsSystemsEnts[ji].EventTimeUTC < je.EventTimeUTC)
                    {
                        ji++;   // move to next entry which is bigger in time or equal to ours.
                    }

                    JournalLocOrJump prev = (ji > 0 && (ji - 1) < vsSystemsEnts.Count) ? vsSystemsEnts[ji - 1] : null;
                    JournalLocOrJump next = ji < vsSystemsEnts.Count ? vsSystemsEnts[ji] : null;

                    // duplicate if neighbour names the same system and (when both have coords) is within 0.1 ly
                    bool previssame = (prev != null && prev.StarSystem.Equals(je.StarSystem, StringComparison.CurrentCultureIgnoreCase) && (!prev.HasCoordinate || !je.HasCoordinate || (prev.StarPos - je.StarPos).LengthSquared < 0.01));
                    bool nextissame = (next != null && next.StarSystem.Equals(je.StarSystem, StringComparison.CurrentCultureIgnoreCase) && (!next.HasCoordinate || !je.HasCoordinate || (next.StarPos - je.StarPos).LengthSquared < 0.01));

                    // System.Diagnostics.Debug.WriteLine("{0} {1} {2}", ji, vsSystemsEnts[ji].EventTimeUTC, je.EventTimeUTC);

                    if (!(previssame || nextissame))    // only add entries not already recorded in the journal
                    {
                        je.Add(jo, cn, tn);
                        System.Diagnostics.Debug.WriteLine("Add {0} {1}", je.EventTimeUTC, jo.ToString());
                    }
                }

                tn.Commit();

                reader.TravelLogUnit.Update();      // persist the new read position for this TLU
            }

            if (updateProgress != null)
            {
                updateProgress((i + 1) * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);
            }
        }
    }
}
/// <summary>
/// Build a HistoryList for a commander: parse journal (and optionally netlog)
/// files into the DB, read all journal entries back in time order, merge/filter
/// them into HistoryEntry objects, then push any EDSM id/position/jump updates
/// back into the journal DB in one transaction.
/// </summary>
/// <param name="journalmonitor">Journal monitor used to parse journal files (stops monitoring while parsing).</param>
/// <param name="cancelRequested">Return true to abort file parsing.</param>
/// <param name="reportProgress">(percent, message) progress callback; -1 percent means indeterminate.</param>
/// <param name="NetLogPath">Optional netlog directory; when non-null netlogs are also imported.</param>
/// <param name="ForceNetLogReload">Re-read all netlog files from the start.</param>
/// <param name="ForceJournalReload">Re-read all journal files from the start.</param>
/// <param name="CurrentCommander">Commander id; file parsing only happens when >= 0.</param>
/// <param name="Keepuievents">False to filter UI events out of the history.</param>
/// <returns>The populated HistoryList.</returns>
public static HistoryList LoadHistory(EDJournalClass journalmonitor, Func<bool> cancelRequested, Action<int, string> reportProgress, string NetLogPath = null, bool ForceNetLogReload = false, bool ForceJournalReload = false, int CurrentCommander = Int32.MinValue, bool Keepuievents = true)
{
    HistoryList hist = new HistoryList();
    EDCommander cmdr = null;

    if (CurrentCommander >= 0)
    {
        cmdr = EDCommander.GetCommander(CurrentCommander);
        journalmonitor.ParseJournalFiles(() => cancelRequested(), (p, s) => reportProgress(p, s), forceReload: ForceJournalReload);  // Parse files stop monitor..

        if (NetLogPath != null)
        {
            string errstr = null;   // netlog errors are ignored here - best effort import
            NetLogClass.ParseFiles(NetLogPath, out errstr, EliteConfigInstance.InstanceConfig.DefaultMapColour, () => cancelRequested(), (p, s) => reportProgress(p, s), ForceNetLogReload, currentcmdrid: CurrentCommander);
        }
    }

    reportProgress(-1, "Resolving systems");

    // all journal entries for commander, oldest first, ties broken by DB id
    List<JournalEntry> jlist = JournalEntry.GetAll(CurrentCommander).OrderBy(x => x.EventTimeUTC).ThenBy(x => x.Id).ToList();

    // entries whose journal record needs an EDSM id/pos/jump-dist write-back after the pass
    List<Tuple<JournalEntry, HistoryEntry>> jlistUpdated = new List<Tuple<JournalEntry, HistoryEntry>>();

    using (SQLiteConnectionSystem conn = new SQLiteConnectionSystem())
    {
        HistoryEntry prev = null;       // last history entry created
        JournalEntry jprev = null;      // journal entry behind prev

        foreach (JournalEntry je in jlist)
        {
            // NOTE(review): assumes MergeEntries(null, je) returns false on the first
            // iteration, otherwise jprev/prev would be null here - confirm in MergeEntries.
            if (MergeEntries(jprev, je))        // if we merge.. we may have updated info, so reprint.
            {
                jprev.FillInformation(out prev.EventSummary, out prev.EventDescription, out prev.EventDetailedInfo);    // need to keep this up to date..
                continue;
            }

            if (je.IsUIEvent && !Keepuievents)      // filter out any UI events
            {
                //System.Diagnostics.Debug.WriteLine("**** Filter out " + je.EventTypeStr + " on " + je.EventTimeLocal.ToString());
                continue;
            }

            bool journalupdate = false;
            HistoryEntry he = HistoryEntry.FromJournalEntry(je, prev, out journalupdate, conn, cmdr);

            prev = he;
            jprev = je;

            hist.historylist.Add(he);

            if (journalupdate)      // FromJournalEntry found better system data - queue a DB write-back
            {
                jlistUpdated.Add(new Tuple<JournalEntry, HistoryEntry>(je, he));
                Debug.WriteLine("Queued update requested {0} {1}", he.System.EDSMID, he.System.Name);
            }
        }
    }

    if (jlistUpdated.Count > 0)
    {
        reportProgress(-1, "Updating journal entries");

        // single transaction for all write-backs - much faster than per-entry commits
        using (SQLiteConnectionUser conn = new SQLiteConnectionUser(utc: true))
        {
            using (DbTransaction txn = conn.BeginTransaction())
            {
                foreach (Tuple<JournalEntry, HistoryEntry> jehe in jlistUpdated)
                {
                    JournalEntry je = jehe.Item1;
                    HistoryEntry he = jehe.Item2;

                    double dist = (je is JournalFSDJump) ? (je as JournalFSDJump).JumpDist : 0;
                    // only push coordinates when the journal lacked them but the resolved system has them
                    bool updatecoord = (je is JournalLocOrJump) ? (!(je as JournalLocOrJump).HasCoordinate && he.System.HasCoordinate) : false;

                    Debug.WriteLine("Push update {0} {1} to JE {2} HE {3}", he.System.EDSMID, he.System.Name, je.Id, he.Indexno);
                    JournalEntry.UpdateEDSMIDPosJump(je.Id, he.System, updatecoord, dist, conn, txn);
                }

                txn.Commit();
            }
        }
    }

    // now database has been updated due to initial fill, now fill in stuff which needs the user database
    hist.CommanderId = CurrentCommander;

    hist.ProcessUserHistoryListEntries(h => h.ToList());    // here, we update the DBs in HistoryEntry and any global DBs in historylist

    return (hist);
}
/// <summary>
/// Build a HistoryList for a commander (EDSM-check variant): parse journal and
/// optional netlog files into the DB, replay all journal entries through
/// hist.ProcessJournalEntry into HistoryEntry objects, write back any improved
/// FSD-jump system data, then send final status to EDSM.
/// </summary>
/// <param name="journalmonitor">Journal monitor used to parse journal files (stops monitoring while parsing).</param>
/// <param name="cancelRequested">Return true to abort file parsing.</param>
/// <param name="reportProgress">(percent, message) progress callback; -1 percent means indeterminate.</param>
/// <param name="NetLogPath">Optional netlog directory; when non-null netlogs are also imported.</param>
/// <param name="ForceNetLogReload">Re-read all netlog files from the start.</param>
/// <param name="ForceJournalReload">Re-read all journal files from the start.</param>
/// <param name="CheckEdsm">Passed to HistoryEntry.FromJournalEntry to enable EDSM lookups.</param>
/// <param name="CurrentCommander">Commander id; file parsing only happens when >= 0.</param>
/// <returns>The populated HistoryList.</returns>
public static HistoryList LoadHistory(EDJournalClass journalmonitor, Func<bool> cancelRequested, Action<int, string> reportProgress, string NetLogPath = null, bool ForceNetLogReload = false, bool ForceJournalReload = false, bool CheckEdsm = false, int CurrentCommander = Int32.MinValue)
{
    HistoryList hist = new HistoryList();
    EDCommander cmdr = null;

    if (CurrentCommander >= 0)
    {
        cmdr = EDCommander.GetCommander(CurrentCommander);
        journalmonitor.ParseJournalFiles(() => cancelRequested(), (p, s) => reportProgress(p, s), forceReload: ForceJournalReload);  // Parse files stop monitor..

        if (NetLogPath != null)
        {
            string errstr = null;   // netlog errors are ignored - best effort import
            NetLogClass.ParseFiles(NetLogPath, out errstr, EliteConfigInstance.InstanceConfig.DefaultMapColour, () => cancelRequested(), (p, s) => reportProgress(p, s), ForceNetLogReload, currentcmdrid: CurrentCommander);
        }
    }

    reportProgress(-1, "Resolving systems");

    // all journal entries for commander, oldest first, ties broken by DB id
    List<JournalEntry> jlist = JournalEntry.GetAll(CurrentCommander).OrderBy(x => x.EventTimeUTC).ThenBy(x => x.Id).ToList();

    // entries whose journal record needs an EDSM id/pos/jump-dist write-back after the pass
    List<Tuple<JournalEntry, HistoryEntry>> jlistUpdated = new List<Tuple<JournalEntry, HistoryEntry>>();

    using (SQLiteConnectionSystem conn = new SQLiteConnectionSystem())
    {
        HistoryEntry prev = null;

        foreach (JournalEntry inje in jlist)
        {
            // ProcessJournalEntry may expand/filter an incoming entry into zero or more entries
            foreach (JournalEntry je in hist.ProcessJournalEntry(inje))
            {
                bool journalupdate = false;
                HistoryEntry he = HistoryEntry.FromJournalEntry(je, prev, CheckEdsm, out journalupdate, conn, cmdr);
                prev = he;

                hist.historylist.Add(he);

                if (journalupdate)      // FromJournalEntry found better system data - queue a DB write-back
                {
                    jlistUpdated.Add(new Tuple<JournalEntry, HistoryEntry>(je, he));
                }
            }
        }
    }

    if (jlistUpdated.Count > 0)
    {
        reportProgress(-1, "Updating journal entries");

        // single transaction for all write-backs - much faster than per-entry commits
        using (SQLiteConnectionUser conn = new SQLiteConnectionUser(utc: true))
        {
            using (DbTransaction txn = conn.BeginTransaction())
            {
                foreach (Tuple<JournalEntry, HistoryEntry> jehe in jlistUpdated)
                {
                    JournalEntry je = jehe.Item1;
                    HistoryEntry he = jehe.Item2;

                    JournalFSDJump jfsd = je as JournalFSDJump;     // only FSD jumps get written back in this variant
                    if (jfsd != null)
                    {
                        // push coords only when the journal lacked them but the resolved system has them
                        JournalEntry.UpdateEDSMIDPosJump(jfsd.Id, he.System, !jfsd.HasCoordinate && he.System.HasCoordinate, jfsd.JumpDist, conn, txn);
                    }
                }

                txn.Commit();
            }
        }
    }

    // now database has been updated due to initial fill, now fill in stuff which needs the user database
    hist.CommanderId = CurrentCommander;

    hist.ProcessUserHistoryListEntries(h => h.ToList());    // here, we update the DBs in HistoryEntry and any global DBs in historylist

    hist.SendEDSMStatusInfo(hist.GetLast, true);

    return (hist);
}
// History load system, read DB for entries and make a history up
/// <summary>
/// Build a HistoryList from the journal DB only (no file parsing): read entries
/// (optionally limited to essential events older than a day cutoff), create the
/// history list with merge/discard filtering, then run an analysis pass that
/// updates stats, ledger, ships, missions, suits and the visits/scan database.
/// </summary>
/// <param name="reportProgress">Status-message callback.</param>
/// <param name="CurrentCommander">Commander id whose journal is loaded.</param>
/// <param name="fullhistoryloaddaylimit">When > 0, full history is loaded only for the last N days; older entries are restricted to the essential-event set.</param>
/// <param name="essentialitems">Name of the JournalEssentialEvents set to use for the older entries (falls back to EssentialEvents).</param>
/// <returns>The populated HistoryList.</returns>
public static HistoryList LoadHistory(Action<string> reportProgress, int CurrentCommander, int fullhistoryloaddaylimit, string essentialitems)
{
    HistoryList hist = new HistoryList();

    Trace.WriteLine(BaseUtils.AppTicks.TickCountLapDelta("HLL", true).Item1 + " History Load");

    reportProgress("Reading Database");

    List<JournalEntry> jlist;       // returned in date ascending, oldest first order.

    if (fullhistoryloaddaylimit > 0)
    {
        // select the essential event set by name; anything unrecognised falls back to EssentialEvents
        var list = (essentialitems == nameof(JournalEssentialEvents.JumpScanEssentialEvents)) ? JournalEssentialEvents.JumpScanEssentialEvents :
                   (essentialitems == nameof(JournalEssentialEvents.JumpEssentialEvents)) ? JournalEssentialEvents.JumpEssentialEvents :
                   (essentialitems == nameof(JournalEssentialEvents.NoEssentialEvents)) ? JournalEssentialEvents.NoEssentialEvents :
                   (essentialitems == nameof(JournalEssentialEvents.FullStatsEssentialEvents)) ? JournalEssentialEvents.FullStatsEssentialEvents :
                   JournalEssentialEvents.EssentialEvents;

        // essential events only before the cutoff, everything after it
        jlist = JournalEntry.GetAll(CurrentCommander, ids: list, allidsafterutc: DateTime.UtcNow.Subtract(new TimeSpan(fullhistoryloaddaylimit, 0, 0, 0)));
    }
    else
    {
        jlist = JournalEntry.GetAll(CurrentCommander);
    }

    Trace.WriteLine(BaseUtils.AppTicks.TickCountLapDelta("HLL").Item1 + " Journals read from DB");

    reportProgress("Creating History");

    HistoryEntry hprev = null;      // last history entry created, used for merging and removal checks

    foreach (JournalEntry je in jlist)
    {
        if (MergeOrDiscardEntries(hprev?.journalEntry, je))     // if we merge, don't store into HE
        {
            continue;
        }

        // Clean up "UnKnown" systems from EDSM log
        if (je is JournalFSDJump && ((JournalFSDJump)je).StarSystem == "UnKnown")
        {
            JournalEntry.Delete(je.Id);
            continue;
        }

        if (je is EliteDangerousCore.JournalEvents.JournalMusic)    // remove music.. not shown.. now UI event. remove it for backwards compatibility
        {
            //System.Diagnostics.Debug.WriteLine("**** Filter out " + je.EventTypeStr + " on " + je.EventTimeLocal.ToString());
            continue;
        }

        HistoryEntry he = HistoryEntry.FromJournalEntry(je, hprev);     // create entry

        he.UpdateMaterialsCommodities(hist.MaterialCommoditiesMicroResources.Process(je));

        if (CheckForRemoval(he, hprev))     // check here to see if we want to remove the entry.. can move this lower later, but at first point where we have the data
        {
            continue;
        }

        hist.historylist.Add(he);       // now add it to the history
        hprev = he;
    }

    Trace.WriteLine(BaseUtils.AppTicks.TickCountLapDelta("HLL").Item1 + " History List Created");

    reportProgress("Analysing History");

    // second pass: run every entry through the accumulators in order, so running
    // totals (ledger, ships, missions, suits) are correct at each history point
    for (int i = 0; i < hist.historylist.Count; i++)
    {
        HistoryEntry he = hist.historylist[i];
        JournalEntry je = he.journalEntry;

        he.UpdateStats(je, hist.statisticsaccumulator, he.StationFaction);
        he.UpdateSystemNote();

        hist.CashLedger.Process(je);        // update the ledger
        he.Credits = hist.CashLedger.CashTotal;

        hist.Shipyards.Process(je);
        hist.Outfitting.Process(je);

        Tuple<ShipInformation, ModulesInStore> ret = hist.ShipInformationList.Process(je, he.WhereAmI, he.System);  // the ships
        he.UpdateShipInformation(ret.Item1);
        he.UpdateShipStoredModules(ret.Item2);

        he.UpdateMissionList(hist.MissionListAccumulator.Process(je, he.System, he.WhereAmI));

        he.UpdateWeapons(hist.WeaponList.Process(je, he.WhereAmI, he.System));      // update the entries in suit entry list
        he.UpdateSuits(hist.SuitList.Process(je, he.WhereAmI, he.System));
        he.UpdateLoadouts(hist.SuitLoadoutList.Process(je, hist.WeaponList, he.WhereAmI, he.System));

        AddToVisitsScan(hist, i, null);     // add to scan but don't complain if can't add
    }

    //for (int i = hist.Count - 10; i < hist.Count; i++) System.Diagnostics.Debug.WriteLine("Hist {0} {1} {2}", hist[i].EventTimeUTC, hist[i].Indexno , hist[i].EventSummary);

    // now database has been updated due to initial fill, now fill in stuff which needs the user database

    Trace.WriteLine(BaseUtils.AppTicks.TickCountLapDelta("HLL").Item1 + " Anaylsis End");

    hist.CommanderId = CurrentCommander;

#if LISTSCANS
    {
        // debug aid - dump every scanned body to a CSV for offline inspection
        using (var fileout = new System.IO.StreamWriter(@"c:\code\scans.csv"))
        {
            fileout.WriteLine($"System,0,fullname,ownname,customname,bodyname,bodydesignation, bodyid,parentlist");
            foreach (var sn in hist.StarScan.ScansSortedByName())
            {
                foreach (var body in sn.Bodies)
                {
                    string pl = body.ScanData?.ParentList();
                    fileout.WriteLine($"{sn.System.Name},0, {body.FullName},{body.OwnName},{body.CustomName},{body.ScanData?.BodyName},{body.ScanData?.BodyDesignation},{body.BodyID},{pl}");
                }
            }
        }
    }
#endif

    return (hist);
}
// History load system, read DB for entries and make a history up
/// <summary>
/// Build a HistoryList from the journal DB only (single-pass variant): read
/// entries (optionally limited to essential events older than a day cutoff),
/// and in one pass per entry run merge/discard filtering, create the
/// HistoryEntry, feed every accumulator (materials, suits, stats, ledger,
/// ships, missions), then add via ReorderRemove so entries may be reordered or
/// dropped before landing in the history list.
/// </summary>
/// <param name="reportProgress">Status-message callback.</param>
/// <param name="CurrentCommander">Commander id whose journal is loaded.</param>
/// <param name="fullhistoryloaddaylimit">When > 0, full history is loaded only for the last N days; older entries are restricted to the essential-event set.</param>
/// <param name="essentialitems">Name of the JournalEssentialEvents set to use for the older entries (falls back to EssentialEvents).</param>
/// <returns>The populated HistoryList.</returns>
public static HistoryList LoadHistory(Action<string> reportProgress, int CurrentCommander, int fullhistoryloaddaylimit, string essentialitems)
{
    HistoryList hist = new HistoryList();

    Trace.WriteLine(BaseUtils.AppTicks.TickCountLapDelta("HLL", true).Item1 + " History Load");

    reportProgress("Reading Database");

    List<JournalEntry> jlist;       // returned in date ascending, oldest first order.

    if (fullhistoryloaddaylimit > 0)
    {
        // select the essential event set by name; anything unrecognised falls back to EssentialEvents
        var list = (essentialitems == nameof(JournalEssentialEvents.JumpScanEssentialEvents)) ? JournalEssentialEvents.JumpScanEssentialEvents :
                   (essentialitems == nameof(JournalEssentialEvents.JumpEssentialEvents)) ? JournalEssentialEvents.JumpEssentialEvents :
                   (essentialitems == nameof(JournalEssentialEvents.NoEssentialEvents)) ? JournalEssentialEvents.NoEssentialEvents :
                   (essentialitems == nameof(JournalEssentialEvents.FullStatsEssentialEvents)) ? JournalEssentialEvents.FullStatsEssentialEvents :
                   JournalEssentialEvents.EssentialEvents;

        // essential events only before the cutoff, everything after it
        jlist = JournalEntry.GetAll(CurrentCommander, ids: list, allidsafterutc: DateTime.UtcNow.Subtract(new TimeSpan(fullhistoryloaddaylimit, 0, 0, 0)));
    }
    else
    {
        jlist = JournalEntry.GetAll(CurrentCommander);
    }

    Trace.WriteLine(BaseUtils.AppTicks.TickCountLapDelta("HLL").Item1 + " Journals read from DB");

    reportProgress("Creating History");

    hist.hlastprocessed = null;     // last history entry processed, tracked on the list itself in this variant

    foreach (JournalEntry je in jlist)
    {
        if (MergeOrDiscardEntries(hist.hlastprocessed?.journalEntry, je))   // if we merge, don't store into HE
        {
            continue;
        }

        // Clean up "UnKnown" systems from EDSM log
        if (je is JournalFSDJump && ((JournalFSDJump)je).StarSystem == "UnKnown")
        {
            JournalEntry.Delete(je.Id);
            continue;
        }

        HistoryEntry he = HistoryEntry.FromJournalEntry(je, hist.hlastprocessed);   // create entry

        he.UpdateMaterialsCommodities(hist.MaterialCommoditiesMicroResources.Process(je, hist.hlastprocessed?.journalEntry, he.Status.TravelState == HistoryEntryStatus.TravelStateType.SRV));

        // IN THIS order, so suits can be added, then weapons, then loadouts
        he.UpdateSuits(hist.SuitList.Process(je, he.WhereAmI, he.System));
        he.UpdateWeapons(hist.WeaponList.Process(je, he.WhereAmI, he.System));      // update the entries in suit entry list
        he.UpdateLoadouts(hist.SuitLoadoutList.Process(je, hist.WeaponList, he.WhereAmI, he.System));

        he.UpdateStats(je, hist.statisticsaccumulator, he.StationFaction);
        he.UpdateSystemNote();

        hist.CashLedger.Process(je);        // update the ledger
        he.Credits = hist.CashLedger.CashTotal;

        hist.Shipyards.Process(je);
        hist.Outfitting.Process(je);

        Tuple<ShipInformation, ModulesInStore> ret = hist.ShipInformationList.Process(je, he.WhereAmI, he.System);  // the ships
        he.UpdateShipInformation(ret.Item1);
        he.UpdateShipStoredModules(ret.Item2);

        he.UpdateMissionList(hist.MissionListAccumulator.Process(je, he.System, he.WhereAmI));

        hist.hlastprocessed = he;

        // ReorderRemove may hold entries back, drop them, or release several at once
        var reorderlist = hist.ReorderRemove(he);

        foreach (var heh in reorderlist.EmptyIfNull())
        {
            // System.Diagnostics.Debug.WriteLine("   ++ {0} {1}", heh.EventTimeUTC.ToString(), heh.EntryType);
            heh.Index = hist.historylist.Count;     // store its index for quick ordering, after all removal etc
            hist.historylist.Add(heh);              // then add to history
            hist.AddToVisitsScan(null);             // add to scan database but don't complain
        }
    }

    Trace.WriteLine(BaseUtils.AppTicks.TickCountLapDelta("HLL").Item1 + " History List Created");

    // report scan events whose system could never be matched
    foreach (var s in hist.StarScan.ToProcess)
    {
        System.Diagnostics.Debug.WriteLine("StarScan could not find " + s.Item2.SystemAddress + " at " + s.Item1.EventTimeUTC);
    }

    //for (int i = hist.Count - 10; i < hist.Count; i++) System.Diagnostics.Debug.WriteLine("Hist {0} {1} {2}", hist[i].EventTimeUTC, hist[i].Indexno , hist[i].EventSummary);

    // now database has been updated due to initial fill, now fill in stuff which needs the user database

    Trace.WriteLine(BaseUtils.AppTicks.TickCountLapDelta("HLL").Item1 + " Anaylsis End");

    hist.CommanderId = CurrentCommander;

#if LISTSCANS
    {
        // debug aid - dump every scanned body to a CSV for offline inspection
        using (var fileout = new System.IO.StreamWriter(@"c:\code\scans.csv"))
        {
            fileout.WriteLine($"System,0,fullname,ownname,customname,bodyname,bodydesignation, bodyid,parentlist");
            foreach (var sn in hist.StarScan.ScansSortedByName())
            {
                foreach (var body in sn.Bodies)
                {
                    string pl = body.ScanData?.ParentList();
                    fileout.WriteLine($"{sn.System.Name},0, {body.FullName},{body.OwnName},{body.CustomName},{body.ScanData?.BodyName},{body.ScanData?.BodyDesignation},{body.BodyID},{pl}");
                }
            }
        }
    }
#endif

    return (hist);
}
/// <summary>
/// Scan the netlog directory for netLog.*.log files and import any FSD jump
/// entries not already present in the journal DB for the given commander
/// (UserDatabase variant). New travel log units are committed in one
/// transaction up front, then each file is read and its new systems added.
/// </summary>
/// <param name="datapath">Directory holding the netLog files; sets error if null or missing.</param>
/// <param name="error">Null on success, otherwise a user-displayable error string.</param>
/// <param name="defaultMapColour">Colour stamped into each created entry as EDDMapColor.</param>
/// <param name="cancelRequested">Polled by the reader; return true to abort.</param>
/// <param name="updateProgress">Optional (percent, TLU full name) progress callback; may be null.</param>
/// <param name="forceReload">True to re-read every file from the start (position reset to 0).</param>
/// <param name="currentcmdrid">Commander id the entries are attributed to.</param>
static public void ParseFiles(string datapath, out string error, int defaultMapColour, Func<bool> cancelRequested, Action<int, string> updateProgress, bool forceReload, int currentcmdrid)
{
    error = null;

    if (datapath == null)
    {
        error = "Netlog directory not set!";
        return;
    }

    if (!Directory.Exists(datapath))   // if logfiles directory is not found
    {
        error = "Netlog directory is not present!";
        return;
    }

    // list of systems in journal, sorted by time
    List<JournalLocOrJump> vsSystemsEnts = JournalEntry.GetAll(currentcmdrid).OfType<JournalLocOrJump>().OrderBy(j => j.EventTimeUTC).ToList();

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(datapath, "netLog.*.log", SearchOption.AllDirectories).Select(f => new FileInfo(f)).OrderBy(p => p.LastWriteTime).ToArray();

    List<NetLogFileReader> readersToUpdate = new List<NetLogFileReader>();
    List<TravelLogUnit> tlutoadd = new List<TravelLogUnit>();

    for (int i = 0; i < allFiles.Length; i++)
    {
        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi.FullName, currentcmdrid);

        if (reader.ID == 0)         // if not present, add to commit add list
        {
            tlutoadd.Add(reader.TravelLogUnit);
        }

        if (forceReload)            // Force a reload of the travel log
        {
            reader.Pos = 0;
        }

        if (reader.Pos != fi.Length || i == allFiles.Length - 1)    // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    if (tlutoadd.Count > 0)         // now, on spinning rust, this takes ages for 600+ log files first time, so transaction it
    {
        UserDatabase.Instance.ExecuteWithDatabase(cn =>
        {
            using (DbTransaction txn = cn.Connection.BeginTransaction())
            {
                foreach (var tlu in tlutoadd)
                {
                    tlu.Add(cn.Connection, txn);
                }

                txn.Commit();
            }
        });
    }

    for (int i = 0; i < readersToUpdate.Count; i++)
    {
        // NOTE(review): lambda captures loop variable i - relies on ExecuteWithDatabase
        // running the callback synchronously before the next iteration; confirm.
        UserDatabase.Instance.ExecuteWithDatabase(cn =>
        {
            int ji = 0;     // cursor into vsSystemsEnts, advanced in step with the file's time order

            NetLogFileReader reader = readersToUpdate[i];

            // FIX: this call was previously unguarded while the end-of-loop call below
            // null-checks updateProgress - guard both consistently since callers may pass null.
            if (updateProgress != null)
            {
                updateProgress(i * 100 / readersToUpdate.Count, reader.TravelLogUnit.FullName);
            }

            var systems = JournalEntry.GetAllByTLU(reader.ID, cn.Connection).OfType<JournalLocOrJump>().ToList();
            var last = systems.LastOrDefault();     // find last system recorded for this TLU, may be null if no systems..

            using (DbTransaction tn = cn.Connection.BeginTransaction())     // batch the inserts for speed
            {
                var ienum = reader.ReadSystems(last, cancelRequested, currentcmdrid);

                System.Diagnostics.Debug.WriteLine("Scanning TLU " + reader.ID + " " + reader.FullName);

                foreach (JObject jo in ienum)
                {
                    jo["EDDMapColor"] = defaultMapColour;

                    JournalLocOrJump je = new JournalFSDJump(jo);
                    je.SetTLUCommander(reader.TravelLogUnit.ID, currentcmdrid);

                    while (ji < vsSystemsEnts.Count && vsSystemsEnts[ji].EventTimeUTC < je.EventTimeUTC)
                    {
                        ji++;   // move to next entry which is bigger in time or equal to ours.
                    }

                    JournalLocOrJump prev = (ji > 0 && (ji - 1) < vsSystemsEnts.Count) ? vsSystemsEnts[ji - 1] : null;
                    JournalLocOrJump next = ji < vsSystemsEnts.Count ? vsSystemsEnts[ji] : null;

                    // duplicate if neighbour names the same system and (when both have coords) is within 0.1 ly
                    bool previssame = (prev != null && prev.StarSystem.Equals(je.StarSystem, StringComparison.CurrentCultureIgnoreCase) && (!prev.HasCoordinate || !je.HasCoordinate || (prev.StarPos - je.StarPos).LengthSquared < 0.01));
                    bool nextissame = (next != null && next.StarSystem.Equals(je.StarSystem, StringComparison.CurrentCultureIgnoreCase) && (!next.HasCoordinate || !je.HasCoordinate || (next.StarPos - je.StarPos).LengthSquared < 0.01));

                    // System.Diagnostics.Debug.WriteLine("{0} {1} {2}", ji, vsSystemsEnts[ji].EventTimeUTC, je.EventTimeUTC);

                    if (!(previssame || nextissame))    // only add entries not already recorded in the journal
                    {
                        je.Add(jo, cn.Connection, tn);
                        System.Diagnostics.Debug.WriteLine("Add {0} {1}", je.EventTimeUTC, jo.ToString());
                    }
                }

                reader.TravelLogUnit.Update(cn.Connection, tn);     // persist the new read position for this TLU

                tn.Commit();
            }

            if (updateProgress != null)
            {
                updateProgress((i + 1) * 100 / readersToUpdate.Count, reader.TravelLogUnit.FullName);
            }
        });
    }
}
/// <summary>
/// Build a HistoryList for a commander (UI-scanner variant): parse journal files
/// via watchers (and optionally netlogs), read entries back (optionally limited
/// to essential events older than a day cutoff), and in one pass merge/filter
/// them into HistoryEntry objects while feeding all accumulators. Finally sets
/// the commander FID and, when netlogs were read, fills in FSD jump positions.
/// </summary>
/// <param name="journalmonitor">UI scanner whose watchers parse the journal files.</param>
/// <param name="cancelRequested">Return true to abort netlog parsing.</param>
/// <param name="reportProgress">(percent, message) progress callback; -1 percent means indeterminate.</param>
/// <param name="NetLogPath">Optional netlog directory; when non-null netlogs are also imported.</param>
/// <param name="ForceNetLogReload">Re-read all netlog files from the start.</param>
/// <param name="ForceJournalReload">Re-read all journal files from the start.</param>
/// <param name="CurrentCommander">Commander id; file parsing only happens when >= 0.</param>
/// <param name="fullhistoryloaddaylimit">When > 0, full history is loaded only for the last N days; older entries are restricted to the essential-event set.</param>
/// <param name="essentialitems">Name of the JournalEssentialEvents set to use for the older entries (falls back to EssentialEvents).</param>
/// <returns>The populated HistoryList.</returns>
public static HistoryList LoadHistory(EDJournalUIScanner journalmonitor, Func<bool> cancelRequested, Action<int, string> reportProgress, string NetLogPath = null, bool ForceNetLogReload = false, bool ForceJournalReload = false, int CurrentCommander = Int32.MinValue, int fullhistoryloaddaylimit = 0, string essentialitems = "")
{
    HistoryList hist = new HistoryList();

    if (CurrentCommander >= 0)
    {
        journalmonitor.SetupWatchers();     // Parse files stop monitor..
        int forcereloadoflastn = ForceJournalReload ? int.MaxValue / 2 : 0;     // if forcing a reload, we indicate that by setting the reload count to a very high value, but not enough to cause int wrap
        journalmonitor.ParseJournalFilesOnWatchers((p, s) => reportProgress(p, s), forcereloadoflastn);

        if (NetLogPath != null)
        {
            string errstr = null;   // netlog errors are ignored - best effort import
            NetLogClass.ParseFiles(NetLogPath, out errstr, EDCommander.Current.MapColour, () => cancelRequested(), (p, s) => reportProgress(p, s), ForceNetLogReload, currentcmdrid: CurrentCommander);
        }
    }

    Trace.WriteLine(BaseUtils.AppTicks.TickCountLap() + " Files read ");

    reportProgress(-1, "Reading Database");

    List<JournalEntry> jlist;       // returned in date ascending, oldest first order.

    System.Diagnostics.Debug.WriteLine(BaseUtils.AppTicks.TickCountLapDelta("HLL", true) + "History Load");

    if (fullhistoryloaddaylimit > 0)
    {
        // select the essential event set by name; anything unrecognised falls back to EssentialEvents
        var list = (essentialitems == nameof(JournalEssentialEvents.JumpScanEssentialEvents)) ? JournalEssentialEvents.JumpScanEssentialEvents :
                   (essentialitems == nameof(JournalEssentialEvents.JumpEssentialEvents)) ? JournalEssentialEvents.JumpEssentialEvents :
                   (essentialitems == nameof(JournalEssentialEvents.NoEssentialEvents)) ? JournalEssentialEvents.NoEssentialEvents :
                   (essentialitems == nameof(JournalEssentialEvents.FullStatsEssentialEvents)) ? JournalEssentialEvents.FullStatsEssentialEvents :
                   JournalEssentialEvents.EssentialEvents;

        // essential events only before the cutoff, everything after it
        jlist = JournalEntry.GetAll(CurrentCommander, ids: list, allidsafterutc: DateTime.UtcNow.Subtract(new TimeSpan(fullhistoryloaddaylimit, 0, 0, 0)));
    }
    else
    {
        jlist = JournalEntry.GetAll(CurrentCommander);
    }

    System.Diagnostics.Debug.WriteLine(BaseUtils.AppTicks.TickCountLapDelta("HLL") + "History Load END");

    Trace.WriteLine(BaseUtils.AppTicks.TickCountLap() + " Database read " + jlist.Count);

    HistoryEntry hprev = null;      // last history entry created
    JournalEntry jprev = null;      // journal entry behind hprev, used for merging

    reportProgress(-1, "Creating History");

    Stopwatch sw = new Stopwatch();
    sw.Start();

    foreach (JournalEntry je in jlist)
    {
        if (MergeEntries(jprev, je))        // if we merge, don't store into HE
        {
            continue;
        }

        // Clean up "UnKnown" systems from EDSM log
        if (je is JournalFSDJump && ((JournalFSDJump)je).StarSystem == "UnKnown")
        {
            JournalEntry.Delete(je.Id);
            continue;
        }

        if (je is EliteDangerousCore.JournalEvents.JournalMusic)    // remove music.. not shown.. now UI event. remove it for backwards compatibility
        {
            //System.Diagnostics.Debug.WriteLine("**** Filter out " + je.EventTypeStr + " on " + je.EventTimeLocal.ToString());
            continue;
        }

        long timetoload = sw.ElapsedMilliseconds;       // NOTE(review): unused local - likely leftover timing instrumentation

        HistoryEntry he = HistoryEntry.FromJournalEntry(je, hprev);

        // **** REMEMBER NEW Journal entry needs this too *****************

        he.UpdateMaterialsCommodities(je, hprev?.MaterialCommodity);    // update material commodities
        Debug.Assert(he.MaterialCommodity != null);

        if (CheckForRemoval(he, hprev))     // check here to see if we want to remove the entry.. can move this lower later, but at first point where we have the data
        {
            continue;
        }

        he.UpdateStats(je, hprev?.Stats, he.StationFaction);
        he.UpdateSystemNote();

        hist.CashLedger.Process(je);        // update the ledger
        he.Credits = hist.CashLedger.CashTotal;

        hist.Shipyards.Process(je);
        hist.Outfitting.Process(je);

        Tuple<ShipInformation, ModulesInStore> ret = hist.ShipInformationList.Process(je, he.WhereAmI, he.System);  // the ships
        he.UpdateShipInformation(ret.Item1);
        he.UpdateShipStoredModules(ret.Item2);

        he.UpdateMissionList(hist.missionlistaccumulator.Process(je, he.System, he.WhereAmI));

        hist.historylist.Add(he);       // now add it to the history

        AddToVisitsScan(hist, he, null);    // add to scan but don't complain if can't add. Do this AFTER add, as it uses the history list

        hprev = he;
        jprev = je;
    }

    //for (int i = hist.Count - 10; i < hist.Count; i++) System.Diagnostics.Debug.WriteLine("Hist {0} {1} {2}", hist[i].EventTimeUTC, hist[i].Indexno , hist[i].EventSummary);

    // now database has been updated due to initial fill, now fill in stuff which needs the user database

    hist.CommanderId = CurrentCommander;

    EDCommander.Current.FID = hist.GetCommanderFID();   // ensure FID is set.. the other place it gets changed is a read of LoadGame.

    if (NetLogPath != null)
    {
        reportProgress(-1, "Netlog Updating System Positions");
        hist.FillInPositionsFSDJumps();     // if netlog reading, try and resolve systems..
    }

    reportProgress(-1, "Done");

    return (hist);
}