/// <summary>
/// Get all journal entries matching the filter parameters, ordered by EventTime ascending.
/// </summary>
/// <param name="commander">Commander id to filter by; -999 means any commander.</param>
/// <param name="afterutc">If set, only entries with EventTime >= this UTC time.</param>
/// <param name="beforeutc">If set, only entries with EventTime <= this UTC time.</param>
/// <param name="ids">If set, restrict to these event types (see allidsafterutc).</param>
/// <param name="allidsafterutc">If set with ids, entries after this UTC time are included regardless of type.</param>
/// <returns>List of journal entries; beta flag is filled in from the owning TravelLogUnit.</returns>
public static List<JournalEntry> GetAll(int commander = -999, DateTime? afterutc = null, DateTime? beforeutc = null, JournalTypeEnum[] ids = null, DateTime? allidsafterutc = null)
{
    Dictionary<long, TravelLogUnit> tlus = TravelLogUnit.GetAll().ToDictionary(t => t.id);

    DbCommand cmd = null;
    DbDataReader reader = null;
    List<JournalEntry> entries = new List<JournalEntry>();

    try
    {
        cmd = UserDatabase.Instance.ExecuteWithDatabase(cn => cn.Connection.CreateCommand("select * from JournalEntries"));

        // build the where clause and open the reader under the database lock
        reader = UserDatabase.Instance.ExecuteWithDatabase(cn =>
        {
            string cnd = "";

            if (commander != -999)
            {
                cnd = cnd.AppendPrePad("CommanderID = @commander", " and ");
                cmd.AddParameterWithValue("@commander", commander);
            }

            if (afterutc != null)
            {
                cnd = cnd.AppendPrePad("EventTime >= @after", " and ");
                cmd.AddParameterWithValue("@after", afterutc.Value);
            }

            if (beforeutc != null)
            {
                cnd = cnd.AppendPrePad("EventTime <= @before", " and ");
                cmd.AddParameterWithValue("@before", beforeutc.Value);
            }

            if (ids != null)
            {
                int[] array = Array.ConvertAll(ids, x => (int)x);

                if (allidsafterutc != null)
                {
                    // entries of the named types, OR anything after the given date
                    cmd.AddParameterWithValue("@idafter", allidsafterutc.Value);
                    cnd = cnd.AppendPrePad("(EventTypeId in (" + string.Join(",", array) + ") Or EventTime>=@idafter)", " and ");
                }
                else
                {
                    cnd = cnd.AppendPrePad("EventTypeId in (" + string.Join(",", array) + ")", " and ");
                }
            }

            if (cnd.HasChars())
            {
                cmd.CommandText += " where " + cnd;
            }

            cmd.CommandText += " Order By EventTime ASC";
            return cmd.ExecuteReader();
        });

        List<JournalEntry> retlist = null;

        do
        {
            // read in chunks of 1000 so the database lock is released between chunks
            retlist = UserDatabase.Instance.ExecuteWithDatabase(cn =>
            {
                List<JournalEntry> list = new List<JournalEntry>();

                while (list.Count < 1000 && reader.Read())
                {
                    JournalEntry sys = JournalEntry.CreateJournalEntry(reader);
                    // single dictionary lookup instead of ContainsKey + indexer; unknown TLU => not beta
                    sys.beta = tlus.TryGetValue(sys.TLUId, out TravelLogUnit tlu) && tlu.Beta;
                    list.Add(sys);
                }

                return list;
            });

            entries.AddRange(retlist);
        } while (retlist != null && retlist.Count != 0);

        return entries;
    }
    finally
    {
        // close the reader / dispose the command under the database lock, matching how they were created
        if (reader != null || cmd != null)
        {
            UserDatabase.Instance.ExecuteWithDatabase(cn =>
            {
                reader?.Close();
                cmd?.Dispose();
            });
        }
    }
}
/// <summary>
/// Scan the watcher folder for journal files, register any new ones as TravelLogUnits,
/// and read new entries from changed files (and always the newest file) into the journal DB.
/// </summary>
/// <param name="cancelRequested">NOTE(review): declared but never consulted in this version — cancellation is not honoured here; confirm intent.</param>
/// <param name="updateProgress">Called with (percent, TLU name); -1 signals completion.</param>
/// <param name="forceReload">If true, zero each TLU size so every file is re-read from the start.</param>
public void ParseJournalFiles(Func <bool> cancelRequested, Action <int, string> updateProgress, bool forceReload = false)
{
    // System.Diagnostics.Trace.WriteLine(BaseUtils.AppTicks.TickCountLap("PJF", true), "Scanned " + WatcherFolder);

    // known journal-type TLUs by name; GroupBy/First collapses any duplicate names in the DB
    Dictionary <string, TravelLogUnit> m_travelogUnits = TravelLogUnit.GetAll().Where(t => (t.type & TravelLogUnit.TypeMask) == TravelLogUnit.JournalType).GroupBy(t => t.Name).Select(g => g.First()).ToDictionary(t => t.Name);

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(WatcherFolder, journalfilematch, SearchOption.AllDirectories).Select(f => new FileInfo(f)).OrderBy(p => p.LastWriteTime).ToArray();

    List <EDJournalReader> readersToUpdate = new List <EDJournalReader>();

    for (int i = 0; i < allFiles.Length; i++)
    {
        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi, m_travelogUnits);       // open it

        // file not seen before - register its TLU in the cache and the DB
        if (!m_travelogUnits.ContainsKey(reader.TravelLogUnit.Name))
        {
            m_travelogUnits[reader.TravelLogUnit.Name] = reader.TravelLogUnit;
            reader.TravelLogUnit.type = TravelLogUnit.JournalType;
            reader.TravelLogUnit.Add();
        }

        if (!netlogreaders.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogreaders[reader.TravelLogUnit.Name] = reader;
        }

        if (forceReload)
        {
            // Force a reload of the travel log
            reader.TravelLogUnit.Size = 0;
        }

        if (reader.filePos != fi.Length || i == allFiles.Length - 1)  // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    //System.Diagnostics.Trace.WriteLine(BaseUtils.AppTicks.TickCountLap("PJF"), "Ready to update");

    for (int i = 0; i < readersToUpdate.Count; i++)
    {
        EDJournalReader reader = readersToUpdate[i];

        updateProgress(i * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

        //System.Diagnostics.Trace.WriteLine(BaseUtils.AppTicks.TickCountLap("PJF"), i + " read ");

        // NOTE(review): uievents is read but discarded here - presumably only wanted during live monitoring; confirm.
        reader.ReadJournal(out List <JournalReaderEntry> entries, out List <UIEvent> uievents, historyrefreshparsing: true, resetOnError: true);      // this may create new commanders, and may write to the TLU db

        // write any entries not already present for this TLU, all under one DB lock / transaction
        UserDatabase.Instance.ExecuteWithDatabase(cn =>
        {
            if (entries.Count > 0)
            {
                // lookup of existing entries for this TLU keyed by event time, used for duplicate detection
                ILookup <DateTime, JournalEntry> existing = JournalEntry.GetAllByTLU(reader.TravelLogUnit.id, cn.Connection).ToLookup(e => e.EventTimeUTC);

                //System.Diagnostics.Trace.WriteLine(BaseUtils.AppTicks.TickCountLap("PJF"), i + " into db");

                using (DbTransaction tn = cn.Connection.BeginTransaction())
                {
                    foreach (JournalReaderEntry jre in entries)
                    {
                        // only add if no existing entry at the same time matches (AreSameEntry does the deep compare)
                        if (!existing[jre.JournalEntry.EventTimeUTC].Any(e => JournalEntry.AreSameEntry(jre.JournalEntry, e, cn.Connection, ent1jo: jre.Json)))
                        {
                            jre.JournalEntry.Add(jre.Json, cn.Connection, tn);
                            //System.Diagnostics.Trace.WriteLine(string.Format("Write Journal to db {0} {1}", jre.JournalEntry.EventTimeUTC, jre.JournalEntry.EventTypeStr));
                        }
                    }

                    tn.Commit();
                }
            }

            // persist the updated file position / size for this TLU
            reader.TravelLogUnit.Update(cn.Connection);

            updateProgress((i + 1) * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            lastnfi = reader;
        });
    }

    updateProgress(-1, "");
}
// Primary method to fill historylist
// Get All journals matching parameters.
// if callback set, then each JE is passed back thru callback and not accumulated. Callback is in a thread. Callback can stop the accumulation if it returns false
/// <summary>
/// Get all journal entries matching the filter parameters, ordered by EventTime then Id ascending.
/// If a callback is supplied each entry is fed to it instead of being accumulated; returning false
/// from the callback stops the scan. Reads are chunked (chunksize rows per DB lock).
/// </summary>
/// <returns>Accumulated entries (empty when a callback is used or on error).</returns>
public static List<JournalEntry> GetAll(int commander = -999, DateTime? startdateutc = null, DateTime? enddateutc = null, JournalTypeEnum[] ids = null, DateTime? allidsafterutc = null, Func<JournalEntry, Object, bool> callback = null, Object callbackobj = null, int chunksize = 1000)
{
    var tluslist = TravelLogUnit.GetAll();
    Dictionary<long, TravelLogUnit> tlus = tluslist.ToDictionary(t => t.ID);

    DbCommand cmd = null;
    DbDataReader reader = null;
    List<JournalEntry> entries = new List<JournalEntry>();

    try
    {
        cmd = UserDatabase.Instance.ExecuteWithDatabase(cn => cn.Connection.CreateCommand("select Id,TravelLogId,CommanderId,EventData,Synced from JournalEntries"));

        // build the where clause and open the reader under the database lock
        reader = UserDatabase.Instance.ExecuteWithDatabase(cn =>
        {
            string cnd = "";

            if (commander != -999)
            {
                cnd = cnd.AppendPrePad("CommanderID = @commander", " and ");
                cmd.AddParameterWithValue("@commander", commander);
            }

            if (startdateutc != null)
            {
                cnd = cnd.AppendPrePad("EventTime >= @after", " and ");
                cmd.AddParameterWithValue("@after", startdateutc.Value);
            }

            if (enddateutc != null)
            {
                cnd = cnd.AppendPrePad("EventTime <= @before", " and ");
                cmd.AddParameterWithValue("@before", enddateutc.Value);
            }

            if (ids != null)
            {
                int[] array = Array.ConvertAll(ids, x => (int)x);

                if (allidsafterutc != null)
                {
                    // entries of the named types, OR anything after the given date
                    cmd.AddParameterWithValue("@idafter", allidsafterutc.Value);
                    cnd = cnd.AppendPrePad("(EventTypeId in (" + string.Join(",", array) + ") Or EventTime>=@idafter)", " and ");
                }
                else
                {
                    cnd = cnd.AppendPrePad("EventTypeId in (" + string.Join(",", array) + ")", " and ");
                }
            }

            if (cnd.HasChars())
            {
                cmd.CommandText += " where " + cnd;
            }

            cmd.CommandText += " Order By EventTime,Id ASC";
            return cmd.ExecuteReader();
        });

        List<JournalEntry> retlist = null;

#if TIMESCAN
        Dictionary<string, List<long>> times = new Dictionary<string, List<long>>();
        Stopwatch sw = new Stopwatch();
        sw.Start();
#endif

        do
        {
            // experiments state that reading the DL is not the time sink, its creating the journal entries
            retlist = UserDatabase.Instance.ExecuteWithDatabase(cn =>           // split into smaller chunks to allow other things access..
            {
                List<JournalEntry> list = new List<JournalEntry>();

                while (list.Count < chunksize && reader.Read())
                {
#if TIMESCAN
                    long t = sw.ElapsedTicks;
#endif
                    JournalEntry je = JournalEntry.CreateJournalEntryFixedPos(reader);
                    // single dictionary lookup instead of ContainsKey + indexer; unknown TLU => not beta
                    je.beta = tlus.TryGetValue(je.TLUId, out TravelLogUnit tlu) && tlu.Beta;
                    list.Add(je);
#if TIMESCAN
                    // BUGFIX: was "sys.EventTypeStr" - no local named sys exists; would not compile with TIMESCAN defined
                    long tw = sw.ElapsedTicks - t;
                    if (!times.TryGetValue(je.EventTypeStr, out var timelist))
                    {
                        timelist = new List<long>();
                        times[je.EventTypeStr] = timelist;
                    }
                    timelist.Add(tw);
#endif
                }

                return list;
            });

            if (callback != null)           // collated, now process them, if callback, feed them thru callback procedure
            {
                foreach (var e in retlist)
                {
                    if (!callback.Invoke(e, callbackobj))       // if indicate stop
                    {
                        retlist = null;     // terminates the outer do/while
                        break;
                    }
                }
            }
            else
            {
                entries.AddRange(retlist);
            }
        } while (retlist != null && retlist.Count != 0);

#if TIMESCAN
        List<Results> res = new List<Results>();

        foreach (var kvp in times)
        {
            Results r = new Results();
            r.name = kvp.Key;
            r.avg = kvp.Value.Average();
            r.min = kvp.Value.Min();
            r.max = kvp.Value.Max();
            r.total = kvp.Value.Sum();
            r.avgtime = ((double)r.avg / Stopwatch.Frequency * 1000);
            r.sumtime = ((double)r.total / Stopwatch.Frequency * 1000);
            r.count = kvp.Value.Count;
            res.Add(r);
        }

        //res.Sort(delegate (Results l, Results r) { return l.sumtime.CompareTo(r.sumtime); });
        res.Sort(delegate(Results l, Results r) { return (l.avgtime.CompareTo(r.avgtime)); });

        string rs = "";
        foreach (var r in res)
        {
            rs = rs + Environment.NewLine + string.Format("Time {0} min {1} max {2} avg {3} ms count {4} totaltime {5} ms", r.name, r.min, r.max, r.avgtime.ToString("#.#########"), r.count, r.sumtime.ToString("#.#######"));
        }

        System.Diagnostics.Trace.WriteLine(rs);
        //File.WriteAllText(@"c:\code\times.txt", rs);
#endif
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("Getall Exception " + ex);
    }
    finally
    {
        // close the reader / dispose the command under the database lock, matching how they were created
        if (reader != null || cmd != null)
        {
            UserDatabase.Instance.ExecuteWithDatabase(cn =>
            {
                reader?.Close();
                cmd?.Dispose();
            });
        }
    }

    return entries;
}
/// <summary>
/// Scan the watcher folder for "Journal.*.log" files, register any new ones as TravelLogUnits,
/// and write new entries from changed files (and always the newest file) into the journal DB.
/// </summary>
/// <param name="cancelRequested">NOTE(review): declared but never consulted in this version; confirm intent.</param>
/// <param name="updateProgress">Called with (percent, TLU name); -1 signals completion.</param>
/// <param name="forceReload">If true, zero each TLU size so every file is re-read from the start.</param>
public void ParseJournalFiles(Func <bool> cancelRequested, Action <int, string> updateProgress, bool forceReload = false)
{
    System.Diagnostics.Trace.WriteLine("Scanned " + m_watcherfolder);

    // known journal-type TLUs by name; 0xFF/3 look like the raw values of TravelLogUnit.TypeMask/JournalType - TODO confirm
    // GroupBy/First collapses any duplicate names in the DB
    Dictionary <string, TravelLogUnit> m_travelogUnits = TravelLogUnit.GetAll().Where(t => (t.type & 0xFF) == 3).GroupBy(t => t.Name).Select(g => g.First()).ToDictionary(t => t.Name);

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(m_watcherfolder, "Journal.*.log", SearchOption.AllDirectories).Select(f => new FileInfo(f)).OrderBy(p => p.LastWriteTime).ToArray();

    List <EDJournalReader> readersToUpdate = new List <EDJournalReader>();

    for (int i = 0; i < allFiles.Length; i++)
    {
        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi, m_travelogUnits);

        // file not seen before - register its TLU in the cache and the DB
        if (!m_travelogUnits.ContainsKey(reader.TravelLogUnit.Name))
        {
            m_travelogUnits[reader.TravelLogUnit.Name] = reader.TravelLogUnit;
            reader.TravelLogUnit.type = 3;      // journal type - presumed same constant as the 3 in the filter above
            reader.TravelLogUnit.Add();
        }

        if (!netlogreaders.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogreaders[reader.TravelLogUnit.Name] = reader;
        }

        if (forceReload)
        {
            // Force a reload of the travel log
            reader.TravelLogUnit.Size = 0;
        }

        if (reader.filePos != fi.Length || i == allFiles.Length - 1)  // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    for (int i = 0; i < readersToUpdate.Count; i++)
    {
        // one connection per file; utc:true presumably makes the connection store times as UTC - confirm
        using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
        {
            EDJournalReader reader = readersToUpdate[i];

            updateProgress(i * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            List <JournalReaderEntry> entries = reader.ReadJournalLog(true).ToList();      // this may create new commanders, and may write to the TLU db

            // lookup of existing entries for this TLU keyed by event time, used for duplicate detection
            ILookup <DateTime, JournalEntry> existing = JournalEntry.GetAllByTLU(reader.TravelLogUnit.id).ToLookup(e => e.EventTimeUTC);

            using (DbTransaction tn = cn.BeginTransaction())
            {
                foreach (JournalReaderEntry jre in entries)
                {
                    // only add if no existing entry at the same time matches (AreSameEntry does the deep compare)
                    if (!existing[jre.JournalEntry.EventTimeUTC].Any(e => JournalEntry.AreSameEntry(jre.JournalEntry, e, ent1jo: jre.Json)))
                    {
                        System.Diagnostics.Trace.WriteLine(string.Format("Write Journal to db {0} {1}", jre.JournalEntry.EventTimeUTC, jre.JournalEntry.EventTypeStr));
                        jre.JournalEntry.Add(jre.Json, cn, tn);
                    }
                }

                tn.Commit();
            }

            // persist the updated file position / size for this TLU
            reader.TravelLogUnit.Update(cn);

            updateProgress((i + 1) * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            lastnfi = reader;
        }
    }

    updateProgress(-1, "");
}
// ordered in time, id order, ascending, oldest first
/// <summary>
/// Get all journal entries matching the filter parameters, ordered by EventTime then Id ascending.
/// Reads are chunked (1000 rows per DB lock) so other threads can access the database between chunks.
/// </summary>
/// <returns>List of journal entries; empty on error. Beta flag is filled in from the owning TravelLogUnit.</returns>
public static List<JournalEntry> GetAll(int commander = -999, DateTime? afterutc = null, DateTime? beforeutc = null, JournalTypeEnum[] ids = null, DateTime? allidsafterutc = null)
{
    var tluslist = TravelLogUnit.GetAll();
    Dictionary<long, TravelLogUnit> tlus = tluslist.ToDictionary(t => t.ID);

    DbCommand cmd = null;
    DbDataReader reader = null;
    List<JournalEntry> entries = new List<JournalEntry>();

    try
    {
        cmd = UserDatabase.Instance.ExecuteWithDatabase(cn => cn.Connection.CreateCommand("select * from JournalEntries"));

        // build the where clause and open the reader under the database lock
        reader = UserDatabase.Instance.ExecuteWithDatabase(cn =>
        {
            string cnd = "";

            if (commander != -999)
            {
                cnd = cnd.AppendPrePad("CommanderID = @commander", " and ");
                cmd.AddParameterWithValue("@commander", commander);
            }

            if (afterutc != null)
            {
                cnd = cnd.AppendPrePad("EventTime >= @after", " and ");
                cmd.AddParameterWithValue("@after", afterutc.Value);
            }

            if (beforeutc != null)
            {
                cnd = cnd.AppendPrePad("EventTime <= @before", " and ");
                cmd.AddParameterWithValue("@before", beforeutc.Value);
            }

            if (ids != null)
            {
                int[] array = Array.ConvertAll(ids, x => (int)x);

                if (allidsafterutc != null)
                {
                    // entries of the named types, OR anything after the given date
                    cmd.AddParameterWithValue("@idafter", allidsafterutc.Value);
                    cnd = cnd.AppendPrePad("(EventTypeId in (" + string.Join(",", array) + ") Or EventTime>=@idafter)", " and ");
                }
                else
                {
                    cnd = cnd.AppendPrePad("EventTypeId in (" + string.Join(",", array) + ")", " and ");
                }
            }

            if (cnd.HasChars())
            {
                cmd.CommandText += " where " + cnd;
            }

            cmd.CommandText += " Order By EventTime,Id ASC";
            return cmd.ExecuteReader();
        });

        List<JournalEntry> retlist = null;

        do
        {
            // experiments state that reading the DL takes 270/4000ms, reading json -> 1250, then the rest is creating and decoding the fields
            // not much scope to improve it outside of the core json speed.
            retlist = UserDatabase.Instance.ExecuteWithDatabase(cn =>           // split into smaller chunks to allow other things access..
            {
                List<JournalEntry> list = new List<JournalEntry>();

                while (list.Count < 1000 && reader.Read())
                {
                    JournalEntry sys = JournalEntry.CreateJournalEntry(reader);
                    // single dictionary lookup instead of ContainsKey + indexer; unknown TLU => not beta
                    sys.beta = tlus.TryGetValue(sys.TLUId, out TravelLogUnit tlu) && tlu.Beta;
                    list.Add(sys);
                }

                return list;
            });

            entries.AddRange(retlist);
        } while (retlist != null && retlist.Count != 0);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("Getall Exception " + ex);
    }
    finally
    {
        // close the reader / dispose the command under the database lock, matching how they were created
        if (reader != null || cmd != null)
        {
            UserDatabase.Instance.ExecuteWithDatabase(cn =>
            {
                reader?.Close();
                cmd?.Dispose();
            });
        }
    }

    return entries;
}
// called during start up and if refresh history is pressed
/// <summary>
/// Parse netlog files: dedupe the stored visited-system history, register new log files as
/// TravelLogUnits, and read new systems from changed files (and always the newest) into the DB.
/// </summary>
/// <param name="error">Set to an error message when the netlog directory is missing, else null.</param>
/// <param name="defaultMapColour">Map colour assigned to newly stored systems.</param>
/// <param name="cancelRequested">Polled by the file readers to abort the scan.</param>
/// <param name="updateProgress">Called with (percent, TLU name); may be null.</param>
/// <returns>The accumulated visited-system list, or null on directory errors.</returns>
public List<VisitedSystemsClass> ParseFiles(out string error, int defaultMapColour, Func<bool> cancelRequested, Action<int, string> updateProgress)
{
    error = null;

    string datapath = GetNetLogPath();

    if (datapath == null)
    {
        error = "Netlog directory not found!" + Environment.NewLine + "Specify location in settings tab";
        return null;
    }

    if (!Directory.Exists(datapath))   // if logfiles directory is not found
    {
        error = "Netlog directory is not present!" + Environment.NewLine + "Specify location in settings tab";
        return null;
    }

    List<VisitedSystemsClass> vsSystemsList = VisitedSystemsClass.GetAll(EDDConfig.Instance.CurrentCmdrID);

    List<VisitedSystemsClass> visitedSystems = new List<VisitedSystemsClass>();

    // netlog-type TLUs by name; GroupBy/First collapses any duplicate names in the DB
    Dictionary<string, TravelLogUnit> m_travelogUnits = TravelLogUnit.GetAll().Where(t => t.type == 1).GroupBy(t => t.Name).Select(g => g.First()).ToDictionary(t => t.Name);
    Dictionary<string, List<VisitedSystemsClass>> vsc_lookup = VisitedSystemsClass.GetAll().GroupBy(v => v.Unit).ToDictionary(g => g.Key, g => g.ToList());

    if (vsSystemsList != null)
    {
        foreach (VisitedSystemsClass vs in vsSystemsList)
        {
            if (visitedSystems.Count == 0)
            {
                visitedSystems.Add(vs);
            }
            else if (!visitedSystems.Last<VisitedSystemsClass>().Name.Equals(vs.Name))  // Avoid duplicate if times exist in same system from different files.
            {
                visitedSystems.Add(vs);
            }
            else
            {
                VisitedSystemsClass vs2 = (VisitedSystemsClass)visitedSystems.Last<VisitedSystemsClass>();
                if (vs2.id != vs.id)
                {
                    vs.Commander = -2;  // Move to dupe user
                    vs.Update();
                }
            }
        }
    }

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(datapath, "netLog.*.log", SearchOption.AllDirectories).Select(f => new FileInfo(f)).OrderBy(p => p.LastWriteTime).ToArray();

    List<NetLogFileReader> readersToUpdate = new List<NetLogFileReader>();

    for (int i = 0; i < allFiles.Length; i++)
    {
        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi, m_travelogUnits, vsc_lookup);

        if (!m_travelogUnits.ContainsKey(reader.TravelLogUnit.Name))
        {
            m_travelogUnits[reader.TravelLogUnit.Name] = reader.TravelLogUnit;
            reader.TravelLogUnit.Add();
        }

        if (!netlogreaders.ContainsKey(reader.TravelLogUnit.Name))
        {
            // BUGFIX: was "= lastnfi" (the previous reader, possibly null) - every parallel
            // version of this loop stores the reader just opened; store that here too.
            netlogreaders[reader.TravelLogUnit.Name] = reader;
        }

        if (reader.filePos != fi.Length || i == allFiles.Length - 1)  // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    using (SQLiteConnectionED cn = new SQLiteConnectionED())
    {
        for (int i = 0; i < readersToUpdate.Count; i++)
        {
            NetLogFileReader reader = readersToUpdate[i];

            // guard with ?. since the caller may pass null (the trailing call below was already guarded)
            updateProgress?.Invoke(i * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            using (DbTransaction tn = cn.BeginTransaction())
            {
                foreach (VisitedSystemsClass ps in reader.ReadSystems(cancelRequested))
                {
                    ps.EDSM_sync = false;
                    ps.MapColour = defaultMapColour;
                    ps.Commander = EDDConfig.Instance.CurrentCmdrID;

                    ps.Add(cn, tn);
                    visitedSystems.Add(ps);
                }

                reader.TravelLogUnit.Update(cn, tn);

                tn.Commit();
            }

            updateProgress?.Invoke((i + 1) * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            lastnfi = reader;
        }
    }

    return visitedSystems;
}
/// <summary>
/// Background thread body: set up a FileSystemWatcher on the netlog folder, then poll every
/// 2 seconds for growth of the most recent netlog file, parsing any new systems into the DB.
/// Runs until Exit is set. Exceptions inside the loop are logged and the loop continues.
/// </summary>
private void NetLogMain()
{
    try
    {
        m_Watcher = new System.IO.FileSystemWatcher();

        if (Directory.Exists(GetNetLogPath()))
        {
            m_Watcher.Path = GetNetLogPath() + "\\";
            m_Watcher.Filter = "netLog*.log";
            m_Watcher.IncludeSubdirectories = true;
            m_Watcher.NotifyFilter = NotifyFilters.FileName; // | NotifyFilters.Size;
            m_Watcher.Changed += new FileSystemEventHandler(OnChanged);
            m_Watcher.Created += new FileSystemEventHandler(OnChanged);
            m_Watcher.Deleted += new FileSystemEventHandler(OnChanged);
            m_Watcher.EnableRaisingEvents = true;
        }
    }
    catch (Exception ex)
    {
        System.Windows.Forms.MessageBox.Show("Net log watcher exception : " + ex.Message, "EDDiscovery Error");
        System.Diagnostics.Trace.WriteLine("NetlogMAin exception : " + ex.Message);
        System.Diagnostics.Trace.WriteLine(ex.StackTrace);
    }

    List <TravelLogUnit> travelogUnits;

    // Get TravelLogUnits;
    travelogUnits = null;

    TravelLogUnit tlUnit = null;
    SQLiteDBClass db = new SQLiteDBClass();

    int ii = 0;     // poll counter; every 5th pass forces a re-parse even if the size looks unchanged

    while (!Exit)
    {
        try
        {
            ii++;
            Thread.Sleep(2000);

            EliteDangerous.CheckED();

            if (NoEvents == false)
            {
                if (lastnfi != null)
                {
                    FileInfo fi = new FileInfo(lastnfi.FileName);

                    // re-parse when the file grew, or periodically as a safety net
                    if (fi.Length != lastnfi.fileSize || ii % 5 == 0)
                    {
                        if (tlUnit == null || !tlUnit.Name.Equals(Path.GetFileName(lastnfi.FileName)))  // Create / find new travellog unit
                        {
                            travelogUnits = TravelLogUnit.GetAll();

                            // Check if we have already parsed the file and stored it in the DB.
                            if (tlUnit == null)
                            {
                                tlUnit = (from c in travelogUnits where c.Name == fi.Name select c).FirstOrDefault <TravelLogUnit>();
                            }

                            if (tlUnit == null)
                            {
                                tlUnit = new TravelLogUnit();
                                tlUnit.Name = fi.Name;
                                tlUnit.Path = Path.GetDirectoryName(fi.FullName);
                                tlUnit.Size = 0;  // Add real size after data is in DB //;(int)fi.Length;
                                tlUnit.type = 1;  // netlog type - presumed same constant used by the netlog TLU filters elsewhere
                                tlUnit.Add();
                                travelogUnits.Add(tlUnit);
                            }
                        }

                        int nrsystems = visitedSystems.Count;
                        ParseFile(fi, visitedSystems);

                        if (nrsystems < visitedSystems.Count) // if we have more systems
                        {
                            System.Diagnostics.Trace.WriteLine("New systems " + nrsystems.ToString() + ":" + visitedSystems.Count.ToString());
                            for (int nr = nrsystems; nr < visitedSystems.Count; nr++)  // add the new ones to the local database log
                            {
                                VisitedSystemsClass dbsys = new VisitedSystemsClass();

                                dbsys.Name = visitedSystems[nr].Name;
                                dbsys.Time = visitedSystems[nr].time;
                                dbsys.Source = tlUnit.id;
                                dbsys.EDSM_sync = false;
                                dbsys.Unit = fi.Name;
                                dbsys.MapColour = db.GetSettingInt("DefaultMap", Color.Red.ToArgb());
                                dbsys.Unit = fi.Name;      // NOTE(review): duplicate assignment of Unit, harmless
                                dbsys.Commander = 0;

                                if (!tlUnit.Beta)  // dont store history in DB for beta (YET)
                                {
                                    dbsys.Add();
                                }
                                visitedSystems[nr].vs = dbsys;
                            }
                        }
                        else
                        {
                            //System.Diagnostics.Trace.WriteLine("No change");
                        }
                    }
                }
            }
        }
        catch (Exception ex)
        {
            // keep the polling thread alive on any per-iteration failure
            System.Diagnostics.Trace.WriteLine("NetlogMAin exception : " + ex.Message);
            System.Diagnostics.Trace.WriteLine(ex.StackTrace);
        }
    }
}
/// <summary>
/// Parse all netlog files into the visited-systems list, loading prior history from the DB,
/// deduplicating, and storing newly found systems. Progress/errors are appended to the
/// supplied rich text box (which may be null for the error path).
/// </summary>
/// <param name="richTextBox_History">UI log target; written via AppendText.</param>
/// <param name="defaultMapColour">Map colour assigned to newly stored systems.</param>
/// <returns>The accumulated visited-system list, or null on directory errors.</returns>
public List<SystemPosition> ParseFiles(RichTextBox richTextBox_History, int defaultMapColour)
{
    string datapath;
    DirectoryInfo dirInfo;

    datapath = GetNetLogPath();

    if (datapath == null)
    {
        AppendText(richTextBox_History, "Netlog directory not found!" + Environment.NewLine + "Specify location in settings tab" + Environment.NewLine, Color.Red);
        return null;
    }

    if (!Directory.Exists(datapath))   // if logfiles directory is not found
    {
        if (richTextBox_History != null)
        {
            richTextBox_History.Clear();
            AppendText(richTextBox_History, "Netlog directory not found!" + Environment.NewLine + "Specify location in settings tab" + Environment.NewLine, Color.Red);
            //MessageBox.Show("Netlog directory not found!" + Environment.NewLine + "Specify location in settings tab", "EDDiscovery Error", MessageBoxButtons.OK);
        }
        return null;
    }

    try
    {
        dirInfo = new DirectoryInfo(datapath);
    }
    catch (Exception ex)
    {
        AppendText(richTextBox_History, "Could not create Directory info: " + ex.Message + Environment.NewLine, Color.Red);
        return null;
    }

    // Get TravelLogUnits;
    tlUnits = TravelLogUnit.GetAll();

    List<VisitedSystemsClass> vsSystemsList = VisitedSystemsClass.GetAll(ActiveCommander);

    visitedSystems.Clear();

    // Add systems in local DB, deduplicating consecutive identical names.
    if (vsSystemsList != null)
    {
        foreach (VisitedSystemsClass vs in vsSystemsList)
        {
            if (visitedSystems.Count == 0)
            {
                visitedSystems.Add(new SystemPosition(vs));
            }
            else if (!visitedSystems.Last<SystemPosition>().Name.Equals(vs.Name))  // Avoid duplicate if times exist in same system from different files.
            {
                visitedSystems.Add(new SystemPosition(vs));
            }
            else
            {
                vs.Commander = -2;  // Move to dupe user
                vs.Update();
            }
        }
    }

    FileInfo[] allFiles = dirInfo.GetFiles("netLog.*.log", SearchOption.AllDirectories).OrderBy(p => p.Name).ToArray();

    NoEvents = true;    // suppress watcher-driven parsing while we do the bulk scan

    foreach (FileInfo fi in allFiles)
    {
        TravelLogUnit lu = null;
        bool parsefile = true;

        // Check if we have already parsed the file and stored it in the DB.
        if (tlUnits != null)
        {
            lu = (from c in tlUnits where c.Name == fi.Name select c).FirstOrDefault<TravelLogUnit>();
        }

        if (lu != null)
        {
            if (lu.Size == fi.Length)  // File is already in DB:
            {
                parsefile = false;
            }
        }
        else
        {
            lu = new TravelLogUnit();
            lu.Name = fi.Name;
            lu.Path = Path.GetDirectoryName(fi.FullName);
            lu.Size = 0;  // Add real size after data is in DB //;(int)fi.Length;
            lu.type = 1;
            lu.Add();
        }

        if (parsefile)
        {
            int nr = 0;
            List<SystemPosition> tempVisitedSystems = new List<SystemPosition>();
            ParseFile(fi, tempVisitedSystems);

            foreach (SystemPosition ps in tempVisitedSystems)
            {
                SystemPosition ps2;
                ps2 = (from c in visitedSystems where c.Name == ps.Name && c.time == ps.time select c).FirstOrDefault<SystemPosition>();
                if (ps2 == null)
                {
                    VisitedSystemsClass dbsys = new VisitedSystemsClass();

                    dbsys.Name = ps.Name;
                    dbsys.Time = ps.time;
                    dbsys.Source = lu.id;
                    dbsys.EDSM_sync = false;
                    dbsys.Unit = fi.Name;
                    dbsys.MapColour = defaultMapColour;
                    dbsys.Commander = ActiveCommander;

                    if (!lu.Beta)  // dont store history in DB for beta (YET)
                    {
                        VisitedSystemsClass last = VisitedSystemsClass.GetLast();

                        if (last == null || !last.Name.Equals(dbsys.Name))  // If same name as last system. Dont Add. otherwise we get a duplet with last from logfile before with different time.
                        {
                            if (!VisitedSystemsClass.Exist(dbsys.Name, dbsys.Time))
                            {
                                dbsys.Add();
                                visitedSystems.Add(ps);
                                nr++;
                            }
                        }
                    }
                }
            }

            lu.Size = (int)fi.Length;
            lu.Update();
            AppendText(richTextBox_History, fi.Name + " " + nr.ToString() + " added to local database." + Environment.NewLine, Color.Black);
        }
    }

    NoEvents = false;

    //var result = visitedSystems.OrderByDescending(a => a.time).ToList<SystemPosition>();

    return visitedSystems;
}
/// <summary>
/// Parse netlog files under datapath into the journal DB as FSD-jump entries, skipping
/// systems that already exist in the journal at the same position in the time sequence.
/// </summary>
/// <param name="datapath">Netlog directory; error is set and the method returns if missing.</param>
/// <param name="error">Set to an error message on directory problems, else null.</param>
/// <param name="defaultMapColour">Stored into each entry's JSON as EDDMapColor.</param>
/// <param name="cancelRequested">Polled by the file readers to abort the scan.</param>
/// <param name="updateProgress">Called with (percent, TLU name). NOTE(review): the first call in the loop is unguarded but the last is null-checked - confirm whether null is a legal argument.</param>
/// <param name="forceReload">If true, zero each TLU size so every file is re-read from the start.</param>
/// <param name="netlogreaders">Optional reader cache shared with the caller; created locally if null.</param>
/// <param name="currentcmdrid">Commander to attribute entries to; negative selects the current commander.</param>
static public void ParseFiles(string datapath, out string error, int defaultMapColour, Func <bool> cancelRequested, Action <int, string> updateProgress, bool forceReload = false, Dictionary <string, NetLogFileReader> netlogreaders = null, int currentcmdrid = -1)
{
    error = null;

    if (datapath == null)
    {
        error = "Netlog directory not set!";
        return;
    }

    if (!Directory.Exists(datapath))   // if logfiles directory is not found
    {
        error = "Netlog directory is not present!";
        return;
    }

    if (netlogreaders == null)
    {
        netlogreaders = new Dictionary <string, NetLogFileReader>();
    }

    if (currentcmdrid < 0)
    {
        currentcmdrid = EDCommander.CurrentCmdrID;
    }

    // TLUs - netlog type (type == 1) keyed by name; GroupBy/First collapses duplicate names in the DB
    List <TravelLogUnit> tlus = TravelLogUnit.GetAll();
    Dictionary <string, TravelLogUnit> netlogtravelogUnits = tlus.Where(t => t.type == 1).GroupBy(t => t.Name).Select(g => g.First()).ToDictionary(t => t.Name);
    Dictionary <long, string> travellogunitid2name = netlogtravelogUnits.Values.ToDictionary(t => t.id, t => t.Name);

    // existing location/jump journal entries grouped by the TLU file name they came from
    Dictionary <string, List <JournalLocOrJump> > vsc_lookup = JournalEntry.GetAll().OfType <JournalLocOrJump>().GroupBy(v => v.TLUId).Where(g => travellogunitid2name.ContainsKey(g.Key)).ToDictionary(g => travellogunitid2name[g.Key], g => g.ToList());

    // list of systems in journal, sorted by time - used below to detect entries already recorded
    List <JournalLocOrJump> vsSystemsEnts = JournalEntry.GetAll(currentcmdrid).OfType <JournalLocOrJump>().OrderBy(j => j.EventTimeUTC).ToList();

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(datapath, "netLog.*.log", SearchOption.AllDirectories).Select(f => new FileInfo(f)).OrderBy(p => p.LastWriteTime).ToArray();

    List <NetLogFileReader> readersToUpdate = new List <NetLogFileReader>();

    for (int i = 0; i < allFiles.Length; i++)
    {
        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi, netlogtravelogUnits, vsc_lookup, netlogreaders);

        // file not seen before - register its TLU in the cache and the DB
        if (!netlogtravelogUnits.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogtravelogUnits[reader.TravelLogUnit.Name] = reader.TravelLogUnit;
            reader.TravelLogUnit.Add();
        }

        if (!netlogreaders.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogreaders[reader.TravelLogUnit.Name] = reader;
        }

        if (forceReload)
        {
            // Force a reload of the travel log
            reader.TravelLogUnit.Size = 0;
        }

        if (reader.filePos != fi.Length || i == allFiles.Length - 1)  // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    for (int i = 0; i < readersToUpdate.Count; i++)
    {
        using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
        {
            int ji = 0;     // walking index into vsSystemsEnts, advanced in time-order alongside the file entries

            NetLogFileReader reader = readersToUpdate[i];

            updateProgress(i * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            using (DbTransaction tn = cn.BeginTransaction())
            {
                foreach (JObject jo in reader.ReadSystems(cancelRequested, currentcmdrid))
                {
                    jo["EDDMapColor"] = defaultMapColour;

                    JournalLocOrJump je = new JournalFSDJump(jo);
                    je.SetTLUCommander(reader.TravelLogUnit.id, currentcmdrid);

                    while (ji < vsSystemsEnts.Count && vsSystemsEnts[ji].EventTimeUTC < je.EventTimeUTC)
                    {
                        ji++;   // move to next entry which is bigger in time or equal to ours.
                    }

                    // neighbours in time around the insertion point, used to spot already-recorded systems
                    JournalLocOrJump prev = (ji > 0 && (ji - 1) < vsSystemsEnts.Count) ? vsSystemsEnts[ji - 1] : null;
                    JournalLocOrJump next = ji < vsSystemsEnts.Count ? vsSystemsEnts[ji] : null;

                    // same system if the name matches and, where both have coordinates, they are within 0.1 ly (0.01 squared)
                    bool previssame = (prev != null && prev.StarSystem.Equals(je.StarSystem, StringComparison.CurrentCultureIgnoreCase) && (!prev.HasCoordinate || !je.HasCoordinate || (prev.StarPos - je.StarPos).LengthSquared < 0.01));
                    bool nextissame = (next != null && next.StarSystem.Equals(je.StarSystem, StringComparison.CurrentCultureIgnoreCase) && (!next.HasCoordinate || !je.HasCoordinate || (next.StarPos - je.StarPos).LengthSquared < 0.01));

                    // System.Diagnostics.Debug.WriteLine("{0} {1} {2}", ji, vsSystemsEnts[ji].EventTimeUTC, je.EventTimeUTC);

                    if (!(previssame || nextissame))
                    {
                        je.Add(jo, cn, tn);
                        System.Diagnostics.Debug.WriteLine("Add {0} {1}", je.EventTimeUTC, jo.ToString());
                    }
                }

                tn.Commit();

                // NOTE(review): Update() takes no connection here, unlike sibling versions which pass cn/tn -
                // presumably it opens its own connection while tn is still live; confirm this is intended.
                reader.TravelLogUnit.Update();
            }

            if (updateProgress != null)
            {
                updateProgress((i + 1) * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);
            }
        }
    }
}
/// <summary>
/// Get all journal entries matching the filter parameters, ordered by EventTime ascending.
/// Single-connection, single-pass variant: the reader is drained in one go under one using scope.
/// </summary>
/// <param name="commander">Commander id to filter by; -999 means any commander.</param>
/// <param name="after">If set, only entries with EventTime >= this time.</param>
/// <param name="before">If set, only entries with EventTime <= this time.</param>
/// <param name="ids">If set, restrict to these event types (see allidsafter).</param>
/// <param name="allidsafter">If set with ids, entries after this time are included regardless of type.</param>
/// <returns>List of journal entries; beta flag is filled in from the owning TravelLogUnit.</returns>
public static List<JournalEntry> GetAll(int commander = -999, DateTime? after = null, DateTime? before = null, JournalTypeEnum[] ids = null, DateTime? allidsafter = null)
{
    Dictionary<long, TravelLogUnit> tlus = TravelLogUnit.GetAll().ToDictionary(t => t.id);

    List<JournalEntry> list = new List<JournalEntry>();

    using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
    {
        using (DbCommand cmd = cn.CreateCommand("select * from JournalEntries"))
        {
            string cnd = "";

            if (commander != -999)
            {
                cnd = cnd.AppendPrePad("CommanderID = @commander", " and ");
                cmd.AddParameterWithValue("@commander", commander);
            }

            if (after != null)
            {
                cnd = cnd.AppendPrePad("EventTime >= @after", " and ");
                cmd.AddParameterWithValue("@after", after.Value);
            }

            if (before != null)
            {
                cnd = cnd.AppendPrePad("EventTime <= @before", " and ");
                cmd.AddParameterWithValue("@before", before.Value);
            }

            if (ids != null)
            {
                int[] array = Array.ConvertAll(ids, x => (int)x);

                if (allidsafter != null)
                {
                    // entries of the named types, OR anything after the given date
                    cmd.AddParameterWithValue("@idafter", allidsafter.Value);
                    cnd = cnd.AppendPrePad("(EventTypeId in (" + string.Join(",", array) + ") Or EventTime>=@idafter)", " and ");
                }
                else
                {
                    cnd = cnd.AppendPrePad("EventTypeId in (" + string.Join(",", array) + ")", " and ");
                }
            }

            if (cnd.HasChars())
            {
                cmd.CommandText += " where " + cnd;
            }

            cmd.CommandText += " Order By EventTime ASC";

            using (DbDataReader reader = cmd.ExecuteReader())
            {
                while (reader.Read())
                {
                    JournalEntry sys = JournalEntry.CreateJournalEntry(reader);
                    // single dictionary lookup instead of ContainsKey + indexer; unknown TLU => not beta
                    sys.beta = tlus.TryGetValue(sys.TLUId, out TravelLogUnit tlu) && tlu.Beta;
                    list.Add(sys);
                }
            }

            return list;
        }
    }
}