public static void ParseFiles(string datapath, out string error, int defaultMapColour, Func<bool> cancelRequested, Action<int, string> updateProgress,
                              bool forceReload = false, Dictionary<string, NetLogFileReader> netlogreaders = null, int currentcmdrid = -1)
{
    error = null;

    if (datapath == null)
    {
        error = "Netlog directory not set!";
        return;
    }

    if (!Directory.Exists(datapath))            // if logfiles directory is not found
    {
        error = "Netlog directory is not present!";
        return;
    }

    if (netlogreaders == null)
    {
        netlogreaders = new Dictionary<string, NetLogFileReader>();
    }

    if (currentcmdrid < 0)
    {
        currentcmdrid = EDDConfig.Instance.CurrentCmdrID;
    }

    // TLUs - netlog travel log units, deduplicated by file name
    List<TravelLogUnit> tlus = TravelLogUnit.GetAll();
    Dictionary<string, TravelLogUnit> netlogtravelogUnits = tlus.Where(t => t.type == 1).GroupBy(t => t.Name).Select(g => g.First()).ToDictionary(t => t.Name);
    Dictionary<long, string> travellogunitid2name = netlogtravelogUnits.Values.ToDictionary(t => t.id, t => t.Name);

    // netlog file name -> location/jump entries already recorded against that TLU
    Dictionary<string, List<JournalLocOrJump>> vsc_lookup = JournalEntry.GetAll().OfType<JournalLocOrJump>()
                                                                .GroupBy(v => v.TLUId)
                                                                .Where(g => travellogunitid2name.ContainsKey(g.Key))
                                                                .ToDictionary(g => travellogunitid2name[g.Key], g => g.ToList());

    // list of systems in journal, sorted by time
    List<JournalLocOrJump> vsSystemsEnts = JournalEntry.GetAll(currentcmdrid).OfType<JournalLocOrJump>().OrderBy(j => j.EventTimeUTC).ToList();

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(datapath, "netLog.*.log", SearchOption.AllDirectories).Select(f => new FileInfo(f)).OrderBy(p => p.LastWriteTime).ToArray();

    List<NetLogFileReader> readersToUpdate = new List<NetLogFileReader>();

    // scan all netlog files; register any TLUs not yet in the DB and select the files needing a (re)parse
    for (int i = 0; i < allFiles.Length; i++)
    {
        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi, netlogtravelogUnits, vsc_lookup, netlogreaders);

        if (!netlogtravelogUnits.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogtravelogUnits[reader.TravelLogUnit.Name] = reader.TravelLogUnit;
            reader.TravelLogUnit.Add();
        }

        if (!netlogreaders.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogreaders[reader.TravelLogUnit.Name] = reader;
        }

        if (forceReload)
        {
            // Force a reload of the travel log
            reader.TravelLogUnit.Size = 0;
        }

        if (reader.filePos != fi.Length || i == allFiles.Length - 1)    // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    // parse each selected file, inserting only systems not already present in the journal at (roughly) the same time
    for (int i = 0; i < readersToUpdate.Count; i++)
    {
        using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
        {
            int ji = 0;

            NetLogFileReader reader = readersToUpdate[i];

            if (updateProgress != null)
            {
                updateProgress(i * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);
            }

            using (DbTransaction tn = cn.BeginTransaction())
            {
                foreach (JObject jo in reader.ReadSystems(cancelRequested, currentcmdrid))
                {
                    jo["EDDMapColor"] = defaultMapColour;

                    JournalLocOrJump je = new JournalFSDJump(jo)
                    {
                        TLUId = (int)reader.TravelLogUnit.id,
                        CommanderId = currentcmdrid,
                    };

                    while (ji < vsSystemsEnts.Count && vsSystemsEnts[ji].EventTimeUTC < je.EventTimeUTC)
                    {
                        ji++;   // move to next entry which is bigger in time or equal to ours
                    }

                    JournalLocOrJump prev = (ji > 0 && (ji - 1) < vsSystemsEnts.Count) ? vsSystemsEnts[ji - 1] : null;
                    JournalLocOrJump next = ji < vsSystemsEnts.Count ? vsSystemsEnts[ji] : null;

                    // treat the new entry as a duplicate if a neighbouring journal entry names the same system at (nearly) the same position
                    bool previssame = (prev != null && prev.StarSystem.Equals(je.StarSystem, StringComparison.CurrentCultureIgnoreCase) &&
                                       (!prev.HasCoordinate || !je.HasCoordinate || (prev.StarPos - je.StarPos).LengthSquared < 0.01));
                    bool nextissame = (next != null && next.StarSystem.Equals(je.StarSystem, StringComparison.CurrentCultureIgnoreCase) &&
                                       (!next.HasCoordinate || !je.HasCoordinate || (next.StarPos - je.StarPos).LengthSquared < 0.01));

                    // System.Diagnostics.Debug.WriteLine("{0} {1} {2}", ji, vsSystemsEnts[ji].EventTimeUTC, je.EventTimeUTC);

                    if (!(previssame || nextissame))
                    {
                        je.Add(cn, tn);
                        System.Diagnostics.Debug.WriteLine("Add {0} {1}", je.EventTimeUTC, je.EventDataString);
                    }
                }

                tn.Commit();

                reader.TravelLogUnit.Update();
            }

            if (updateProgress != null)
            {
                updateProgress((i + 1) * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);
            }
        }
    }
}
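// Usage sketch (illustrative only, not part of the original class): one way a caller could drive the
// void overload above from a background refresh, mapping a CancellationToken onto the cancelRequested
// delegate and reporting progress through Debug output. The method name and parameters are hypothetical.
public static void RefreshNetLogsExample(string netlogDirectory, int defaultMapColour, System.Threading.CancellationToken cancel)
{
    string error;

    ParseFiles(netlogDirectory,
               out error,
               defaultMapColour,
               () => cancel.IsCancellationRequested,                                       // cancelRequested: poll the token
               (pct, name) => System.Diagnostics.Debug.WriteLine("{0}% {1}", pct, name),   // updateProgress
               forceReload: false);                                                        // only read data appended since the last parse

    if (error != null)
        System.Diagnostics.Debug.WriteLine("Netlog parse failed: " + error);
}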
public static List<JournalEntry> ParseFiles(string datapath, out string error, int defaultMapColour, Func<bool> cancelRequested, Action<int, string> updateProgress,
                                            bool forceReload = false, Dictionary<string, NetLogFileReader> netlogreaders = null, int currentcmdrid = -1)
{
    error = null;

    if (datapath == null)
    {
        error = "Netlog directory not set!";
        return null;
    }

    if (!Directory.Exists(datapath))            // if logfiles directory is not found
    {
        error = "Netlog directory is not present!";
        return null;
    }

    if (netlogreaders == null)
    {
        netlogreaders = new Dictionary<string, NetLogFileReader>();
    }

    if (currentcmdrid < 0)
    {
        currentcmdrid = EDDConfig.Instance.CurrentCmdrID;
    }

    List<JournalLocOrJump> visitedSystems = new List<JournalLocOrJump>();

    // netlog travel log units, deduplicated by file name
    Dictionary<string, TravelLogUnit> m_travelogUnits = TravelLogUnit.GetAll().Where(t => t.type == 1).GroupBy(t => t.Name).Select(g => g.First()).ToDictionary(t => t.Name);
    Dictionary<long, string> travellogunitid2name = m_travelogUnits.Values.ToDictionary(t => t.id, t => t.Name);

    // netlog file name -> location/jump entries already recorded against that TLU
    Dictionary<string, List<JournalLocOrJump>> vsc_lookup = JournalEntry.GetAll().OfType<JournalLocOrJump>()
                                                                .GroupBy(v => v.TLUId)
                                                                .Where(g => travellogunitid2name.ContainsKey(g.Key))
                                                                .ToDictionary(g => travellogunitid2name[g.Key], g => g.ToList());

    HashSet<long> journalids = new HashSet<long>(m_travelogUnits.Values.Select(t => t.id).ToList());
    List<JournalLocOrJump> vsSystemsList = JournalEntry.GetAll(currentcmdrid).OfType<JournalLocOrJump>().Where(j => journalids.Contains(j.TLUId)).ToList();

    if (vsSystemsList != null)
    {
        foreach (JournalLocOrJump vs in vsSystemsList)
        {
            if (visitedSystems.Count == 0)
                visitedSystems.Add(vs);
            else if (!visitedSystems.Last().StarSystem.Equals(vs.StarSystem, StringComparison.CurrentCultureIgnoreCase))    // avoid duplicates when consecutive entries for the same system come from different files
                visitedSystems.Add(vs);
        }
    }

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(datapath, "netLog.*.log", SearchOption.AllDirectories).Select(f => new FileInfo(f)).OrderBy(p => p.LastWriteTime).ToArray();

    List<NetLogFileReader> readersToUpdate = new List<NetLogFileReader>();

    // scan all netlog files; register any TLUs not yet in the DB and select the files needing a (re)parse
    for (int i = 0; i < allFiles.Length; i++)
    {
        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi, m_travelogUnits, vsc_lookup, netlogreaders);

        if (!m_travelogUnits.ContainsKey(reader.TravelLogUnit.Name))
        {
            m_travelogUnits[reader.TravelLogUnit.Name] = reader.TravelLogUnit;
            reader.TravelLogUnit.Add();
        }

        if (!netlogreaders.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogreaders[reader.TravelLogUnit.Name] = reader;
        }

        if (forceReload)
        {
            // Force a reload of the travel log
            reader.TravelLogUnit.Size = 0;
        }

        if (reader.filePos != fi.Length || i == allFiles.Length - 1)    // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    // parse each selected file, storing every system read and appending it to the visited list
    using (SQLiteConnectionUserUTC cn = new SQLiteConnectionUserUTC())
    {
        for (int i = 0; i < readersToUpdate.Count; i++)
        {
            NetLogFileReader reader = readersToUpdate[i];

            if (updateProgress != null)
            {
                updateProgress(i * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);
            }

            using (DbTransaction tn = cn.BeginTransaction())
            {
                foreach (JObject jo in reader.ReadSystems(cancelRequested, currentcmdrid))
                {
                    jo["EDDMapColor"] = defaultMapColour;

                    JournalLocOrJump je = new JournalFSDJump(jo)
                    {
                        TLUId = (int)reader.TravelLogUnit.id,
                        CommanderId = currentcmdrid,
                    };

                    je.Add(cn, tn);
                    visitedSystems.Add(je);
                }

                tn.Commit();

                reader.TravelLogUnit.Update();
            }

            if (updateProgress != null)
            {
                updateProgress((i + 1) * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);
            }
        }
    }

    return visitedSystems.ToList<JournalEntry>();
}
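// Usage sketch (illustrative only, not part of the original class): consuming the list-returning
// overload above, which hands back the combined visited-system history so the caller can merge it
// into an in-memory list. All local names here are hypothetical.
public static void LoadHistoryExample(string netlogDirectory, int defaultMapColour)
{
    string error;

    List<JournalEntry> visited = ParseFiles(netlogDirectory,
                                            out error,
                                            defaultMapColour,
                                            () => false,                               // cancelRequested: never cancel in this sketch
                                            (pct, name) => System.Diagnostics.Debug.WriteLine("{0}% {1}", pct, name),
                                            forceReload: true);                        // re-read every netlog from the start

    if (visited == null)
        System.Diagnostics.Debug.WriteLine("Netlog parse failed: " + error);
    else
        System.Diagnostics.Debug.WriteLine("Loaded {0} location/jump entries", visited.Count);
}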