// Open a new file for watching and place it into the netlogreaders cache.
// fi: the journal file on disk.
// delayadd: if true, a brand-new TravelLogUnit is NOT written to the DB here;
//           the caller is expected to Add() it later.
// Always returns a reader: cached, rebuilt from the DB record, or freshly created.
private EDJournalReader OpenFileReader(FileInfo fi, bool delayadd = false)
{
    EDJournalReader reader;
    TravelLogUnit tlu;

    if (netlogreaders.ContainsKey(fi.Name))             // already open - reuse cached reader
    {
        reader = netlogreaders[fi.Name];
    }
    else if (TravelLogUnit.TryGet(fi.Name, out tlu))    // known to the DB - rebuild reader from its TLU
    {
        tlu.Path = fi.DirectoryName;                    // record the file's current directory on the TLU
        reader = new EDJournalReader(tlu);
        netlogreaders[fi.Name] = reader;
    }
    else                                                // brand new file - make a journal-type TLU for it
    {
        reader = new EDJournalReader(fi.FullName);
        reader.TravelLogUnit.type = TravelLogUnit.JournalType;
        if (!delayadd)
        {
            reader.TravelLogUnit.Add();                 // persist the new TLU now unless the caller defers
        }
        netlogreaders[fi.Name] = reader;
    }

    return(reader);
}
// Open a new file for watching, place it into the netlogreaders cache. Always returns a reader.
// filepath: full path of the journal file (used as the cache and DB key).
// delayadd: if true, a brand-new TravelLogUnit is not Add()ed to the DB here; caller does it later.
private EDJournalReader OpenFileReader(string filepath, bool delayadd = false)
{
    EDJournalReader reader;

    if (netlogreaders.ContainsKey(filepath)) // cache
    {
        reader = netlogreaders[filepath];
    }
    else if (TravelLogUnit.TryGet(filepath, out TravelLogUnit tlu)) // from db
    {
        reader = new EDJournalReader(tlu);
        netlogreaders[filepath] = reader;
    }
    else                                                            // new file - make a journal-type TLU for it
    {
        reader = new EDJournalReader(filepath);
        reader.TravelLogUnit.Type = TravelLogUnit.JournalType;
        if (!delayadd)
        {
            reader.TravelLogUnit.Add();                             // persist now unless the caller defers
        }
        netlogreaders[filepath] = reader;
    }

    return(reader);
}
// Return all journal entries, oldest first, with each entry's beta flag filled in from its TLU.
// commander: restrict to this commander id; the default sentinel -999 means all commanders.
static public List<JournalEntry> GetAll(int commander = -999)
{
    Dictionary<long, TravelLogUnit> tlus = TravelLogUnit.GetAll().ToDictionary(t => t.id);

    List<JournalEntry> list = new List<JournalEntry>();

    using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
    {
        using (DbCommand cmd = cn.CreateCommand("select * from JournalEntries Order by EventTime ASC"))
        {
            if (commander != -999)      // only bind the parameter when the query actually uses it
            {
                cmd.CommandText = "select * from JournalEntries where CommanderID=@commander Order by EventTime ASC";
                cmd.AddParameterWithValue("@commander", commander);
            }

            DataSet ds = SQLiteDBClass.SQLQueryText(cn, cmd);

            if (ds.Tables.Count == 0 || ds.Tables[0].Rows.Count == 0)
            {
                return(list);       // nothing matched - empty list, never null
            }

            foreach (DataRow dr in ds.Tables[0].Rows)
            {
                JournalEntry sys = JournalEntry.CreateJournalEntry(dr);
                // single dictionary lookup instead of ContainsKey + indexer
                sys.beta = tlus.TryGetValue(sys.TLUId, out TravelLogUnit tlu) && tlu.Beta;
                list.Add(sys);
            }

            return(list);
        }
    }
}
// Open a new file for watching and place it into the netlogreaders cache.
// tlu_lookup: optional pre-fetched name->TLU map, consulted before the DB to save a query.
// Returns a reader sourced from, in priority order: the cache, the supplied lookup,
// the DB, or newly created (note: a new file's TLU is not typed or Add()ed here).
private EDJournalReader OpenFileReader(FileInfo fi, Dictionary<string, TravelLogUnit> tlu_lookup = null)
{
    EDJournalReader reader;
    TravelLogUnit tlu;

    if (netlogreaders.ContainsKey(fi.Name))                         // cache hit
    {
        reader = netlogreaders[fi.Name];
    }
    else if (tlu_lookup != null && tlu_lookup.ContainsKey(fi.Name)) // caller-supplied TLU map
    {
        tlu = tlu_lookup[fi.Name];
        tlu.Path = fi.DirectoryName;                                // record the file's current directory
        reader = new EDJournalReader(tlu);
        netlogreaders[fi.Name] = reader;
    }
    else if (TravelLogUnit.TryGet(fi.Name, out tlu))                // from db
    {
        tlu.Path = fi.DirectoryName;
        reader = new EDJournalReader(tlu);
        netlogreaders[fi.Name] = reader;
    }
    else                                                            // new file
    {
        reader = new EDJournalReader(fi.FullName);
        netlogreaders[fi.Name] = reader;
    }

    return(reader);
}
// Open a new file for watching, place it into the netlogreaders cache. Always returns a reader.
// delayadd: if true, a brand-new TravelLogUnit is not Add()ed to the DB here; caller does it later.
// Files named JournalBeta.* / JournalAlpha.* get the BetaMarker flag OR'ed into their TLU type.
private EDJournalReader OpenFileReader(string filepath, bool delayadd = false)
{
    EDJournalReader reader;

    if (netlogreaders.ContainsKey(filepath)) // cache
    {
        reader = netlogreaders[filepath];
    }
    else if (TravelLogUnit.TryGet(filepath, out TravelLogUnit tlu)) // from db
    {
        reader = new EDJournalReader(tlu);
        netlogreaders[filepath] = reader;
    }
    else
    {
        reader = new EDJournalReader(filepath);
        reader.TravelLogUnit.Type = TravelLogUnit.JournalType;

        // beta/alpha game builds use differently-named journal files - mark the TLU accordingly
        var filename = Path.GetFileName(filepath);
        if (filename.StartsWith("JournalBeta.", StringComparison.InvariantCultureIgnoreCase) ||
            filename.StartsWith("JournalAlpha.", StringComparison.InvariantCultureIgnoreCase))
        {
            reader.TravelLogUnit.Type |= TravelLogUnit.BetaMarker;
        }

        if (!delayadd)
        {
            reader.TravelLogUnit.Add();     // persist now unless the caller defers
        }

        netlogreaders[filepath] = reader;
    }

    return(reader);
}
// Return all journal entries of the given event type for a commander within [start,stop]
// inclusive, in ascending event time order. Each entry's beta flag is filled in from its TLU.
public static List<JournalEntry> GetByEventType(JournalTypeEnum eventtype, int commanderid, DateTime start, DateTime stop)
{
    Dictionary<long, TravelLogUnit> tlus = TravelLogUnit.GetAll().ToDictionary(t => t.id);

    List<JournalEntry> vsc = new List<JournalEntry>();

    using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
    {
        using (DbCommand cmd = cn.CreateCommand("SELECT * FROM JournalEntries WHERE EventTypeID = @eventtype and CommanderID=@commander and EventTime >=@start and EventTime<=@Stop ORDER BY EventTime ASC"))
        {
            cmd.AddParameterWithValue("@eventtype", (int)eventtype);
            cmd.AddParameterWithValue("@commander", (int)commanderid);
            cmd.AddParameterWithValue("@start", start);
            cmd.AddParameterWithValue("@stop", stop);

            using (DbDataReader reader = cmd.ExecuteReader())
            {
                while (reader.Read())
                {
                    JournalEntry je = CreateJournalEntry(reader);
                    // single dictionary lookup instead of ContainsKey + indexer
                    je.beta = tlus.TryGetValue(je.TLUId, out TravelLogUnit tlu) && tlu.Beta;
                    vsc.Add(je);
                }
            }
        }
    }

    return(vsc);
}
// Obtain a reader for a journal file. Sources, in priority order: the netlogreaders cache,
// the caller-supplied name->TLU lookup, the TLU table in the DB, or a freshly created reader.
// The reader is cached under the file name before returning.
private EDJournalReader OpenFileReader(FileInfo fi, Dictionary<string, TravelLogUnit> tlu_lookup = null)
{
    EDJournalReader reader;
    TravelLogUnit tlu;

    if (netlogreaders.ContainsKey(fi.Name))                         // cache hit
    {
        reader = netlogreaders[fi.Name];
    }
    else if (tlu_lookup != null && tlu_lookup.ContainsKey(fi.Name)) // pre-fetched TLU map
    {
        tlu = tlu_lookup[fi.Name];
        tlu.Path = fi.DirectoryName;                                // record the file's current directory
        reader = new EDJournalReader(tlu);
        netlogreaders[fi.Name] = reader;
    }
    else if (TravelLogUnit.TryGet(fi.Name, out tlu))                // from db
    {
        tlu.Path = fi.DirectoryName;
        reader = new EDJournalReader(tlu);
        netlogreaders[fi.Name] = reader;
    }
    else                                                            // new file
    {
        // (a long-dead '#if false' block that tried to carry the commander over from the
        //  previous journal part was removed here - it never compiled into the build)
        reader = new EDJournalReader(fi.FullName);
        netlogreaders[fi.Name] = reader;
    }

    return(reader);
}
// Return all journal entries of the given event type for a commander within [startutc,stoputc]
// inclusive, ascending event time order, with each entry's beta flag filled in from its TLU.
// Rows are pulled in chunks of up to 1000, taking the DB lock (ExecuteWithDatabase) per chunk
// so other database users can interleave. The finally block closes the reader and disposes
// the command under the same lock.
public static List<JournalEntry> GetByEventType(JournalTypeEnum eventtype, int commanderid, DateTime startutc, DateTime stoputc)
{
    Dictionary<long, TravelLogUnit> tlus = TravelLogUnit.GetAll().ToDictionary(t => t.id);

    DbCommand cmd = null;
    DbDataReader reader = null;
    List<JournalEntry> entries = new List<JournalEntry>();

    try
    {
        cmd = UserDatabase.Instance.ExecuteWithDatabase(cn => cn.Connection.CreateCommand("SELECT * FROM JournalEntries WHERE EventTypeID = @eventtype and CommanderID=@commander and EventTime >=@start and EventTime<=@Stop ORDER BY EventTime ASC"));

        reader = UserDatabase.Instance.ExecuteWithDatabase(cn =>
        {
            cmd.AddParameterWithValue("@eventtype", (int)eventtype);
            cmd.AddParameterWithValue("@commander", (int)commanderid);
            cmd.AddParameterWithValue("@start", startutc);
            cmd.AddParameterWithValue("@stop", stoputc);
            return(cmd.ExecuteReader());
        });

        List<JournalEntry> retlist = null;

        do
        {
            // read up to 1000 entries per lock acquisition
            retlist = UserDatabase.Instance.ExecuteWithDatabase(cn =>
            {
                List<JournalEntry> vsc = new List<JournalEntry>();

                while (vsc.Count < 1000 && reader.Read())
                {
                    JournalEntry je = CreateJournalEntry(reader);
                    je.beta = tlus.ContainsKey(je.TLUId) ? tlus[je.TLUId].Beta : false;
                    vsc.Add(je);
                }

                return(vsc);
            });

            entries.AddRange(retlist);
        } while (retlist != null && retlist.Count != 0);    // stop on an empty chunk

        return(entries);
    }
    finally
    {
        if (reader != null || cmd != null)
        {
            UserDatabase.Instance.ExecuteWithDatabase(cn =>
            {
                reader?.Close();
                cmd?.Dispose();
            });
        }
    }
}
// Construct a netlog reader for an existing TLU; use the supplied visited-systems list
// when one is given, otherwise load the systems recorded for this TLU.
public NetLogFileReader(TravelLogUnit tlu, List<VisitedSystemsClass> vsclist = null) : base(tlu)
{
    systems = vsclist ?? VisitedSystemsClass.GetAll(tlu);
}
// Construct a reader for a file not yet known to the DB: describe it with a fresh
// TravelLogUnit whose Size is 0 (nothing has been read from it yet).
public LogReaderBase(string filename)
{
    FileInfo fi = new FileInfo(filename);

    var tlu = new TravelLogUnit();
    tlu.Name = fi.Name;
    tlu.Path = fi.DirectoryName;
    tlu.Size = 0;

    this.TravelLogUnit = tlu;
}
// Construct a netlog reader for an existing TLU; use the supplied location/jump list
// when one is given, otherwise load the entries recorded for this TLU.
public NetLogFileReader(TravelLogUnit tlu, List<JournalLocOrJump> vsclist = null) : base(tlu)
{
    systems = vsclist ?? JournalEntry.GetAllByTLU(tlu.id).OfType<JournalLocOrJump>().ToList();
}
// Construct a netlog reader for an existing TLU. Uses the supplied location/jump list if
// given, otherwise loads the entries recorded for this TLU under the user DB lock.
public NetLogFileReader(TravelLogUnit tlu, List<JournalLocOrJump> vsclist = null) : base(tlu)
{
    if (vsclist != null)
    {
        systems = vsclist;
    }
    else
    {
        UserDatabase.Instance.ExecuteWithDatabase(cn =>
        {
            systems = JournalEntry.GetAllByTLU(tlu.id, cn.Connection).OfType<JournalLocOrJump>().ToList();
        });
    }
}
// Obtain a netlog reader for a file path: rebuilt from the DB's TLU record when one
// exists, otherwise freshly created as a netlog-type TLU owned by the given commander.
private static NetLogFileReader OpenFileReader(string filepath, int cmdrid)
{
    if (TravelLogUnit.TryGet(filepath, out TravelLogUnit tlu))
    {
        return new NetLogFileReader(tlu);           // known file - reuse its DB record
    }

    var reader = new NetLogFileReader(filepath);    // new file - type it and assign the commander
    reader.TravelLogUnit.Type = TravelLogUnit.NetLogType;
    reader.TravelLogUnit.CommanderId = cmdrid;
    return reader;
}
// Scan the watcher folder for journal files and return the readers that need processing.
// A reader is returned when its file has unread content (filePos != length) or it is the
// newest file. forceAllReload re-reads every file from the start; forceLastReload re-reads
// just the newest one.
public List<EDJournalReader> ScanJournalFiles(bool forceAllReload = false, bool forceLastReload = false)
{
    // pre-fetch the journal-type TLUs keyed by name (first wins on duplicate names)
    // so each file can be matched without per-file DB queries
    Dictionary<string, TravelLogUnit> m_travelogUnits = TravelLogUnit.GetAll().Where(t => (t.type & TravelLogUnit.TypeMask) == TravelLogUnit.JournalType).GroupBy(t => t.Name).Select(g => g.First()).ToDictionary(t => t.Name);

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(WatcherFolder, journalfilematch, SearchOption.AllDirectories).Select(f => new FileInfo(f)).OrderBy(p => p.LastWriteTime).ToArray();

    List<EDJournalReader> readersToUpdate = new List<EDJournalReader>();

    for (int i = 0; i < allFiles.Length; i++)
    {
        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi, m_travelogUnits);               // open it

        if (!m_travelogUnits.ContainsKey(reader.TravelLogUnit.Name))    // a new file - type its TLU and persist it
        {
            m_travelogUnits[reader.TravelLogUnit.Name] = reader.TravelLogUnit;
            reader.TravelLogUnit.type = TravelLogUnit.JournalType;
            reader.TravelLogUnit.Add();
        }

        if (!netlogreaders.ContainsKey(reader.TravelLogUnit.Name))      // ensure the reader is cached
        {
            netlogreaders[reader.TravelLogUnit.Name] = reader;
        }

        bool islast = (i == allFiles.Length - 1);

        if (forceAllReload || (islast && forceLastReload))  // Force a reload of the travel log
        {
            reader.TravelLogUnit.Size = 0;                  // by setting the start zero (reader.filePos is the same as Size)
        }

        if (reader.filePos != fi.Length || islast)          // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    return(readersToUpdate);
}
// Return all journal entries belonging to the given travel log unit, oldest first.
// cn: open user DB connection to run the query on.
// (An unused TravelLogUnit.Get(tluid) call was removed - it cost a DB round-trip
//  and its result was never read.)
internal static List<JournalEntry> GetAllByTLU(long tluid, SQLiteConnectionUser cn)
{
    List<JournalEntry> vsc = new List<JournalEntry>();

    using (DbCommand cmd = cn.CreateCommand("SELECT * FROM JournalEntries WHERE TravelLogId = @source ORDER BY EventTime ASC"))
    {
        cmd.AddParameterWithValue("@source", tluid);

        using (DbDataReader reader = cmd.ExecuteReader())
        {
            while (reader.Read())
            {
                vsc.Add(CreateJournalEntry(reader));
            }
        }
    }

    return(vsc);
}
// Return all journal entries whose EventType string matches eventtype (no ordering applied).
// Note: unlike the other Get* variants, the beta flag is NOT filled in here.
// (An unused TravelLogUnit dictionary was removed - it loaded every TLU from the DB
//  and was never read.)
static internal List<JournalEntry> Get(string eventtype, SQLiteConnectionUser cn, DbTransaction tn = null)
{
    using (DbCommand cmd = cn.CreateCommand("select * from JournalEntries where EventType=@ev", tn))
    {
        cmd.AddParameterWithValue("@ev", eventtype);

        using (DbDataReader reader = cmd.ExecuteReader())
        {
            List<JournalEntry> entries = new List<JournalEntry>();

            while (reader.Read())
            {
                entries.Add(CreateJournalEntry(reader));
            }

            return(entries);
        }
    }
}
// Obtain a netlog reader for a file. Priority: the caller's reader cache, the caller's
// name->TLU lookup, the TLU table in the DB, else a freshly created reader.
// vsc_lookup optionally supplies already-known location/jump entries for the file so the
// reader need not reload them. The reader is stored back into the caller's cache when
// one was supplied.
private static NetLogFileReader OpenFileReader(FileInfo fi, Dictionary<string, TravelLogUnit> tlu_lookup = null, Dictionary<string, List<JournalLocOrJump>> vsc_lookup = null, Dictionary<string, NetLogFileReader> netlogreaders = null)
{
    NetLogFileReader reader;
    TravelLogUnit tlu;

    List<JournalLocOrJump> vsclist = null;
    if (vsc_lookup != null && vsc_lookup.ContainsKey(fi.Name))          // pre-fetched systems for this file, if any
    {
        vsclist = vsc_lookup[fi.Name];
    }

    if (netlogreaders != null && netlogreaders.ContainsKey(fi.Name))    // caller's reader cache
    {
        return(netlogreaders[fi.Name]);
    }
    else if (tlu_lookup != null && tlu_lookup.ContainsKey(fi.Name))     // caller's TLU map
    {
        tlu = tlu_lookup[fi.Name];
        tlu.Path = fi.DirectoryName;                                    // record the file's current directory
        reader = new NetLogFileReader(tlu, vsclist);
    }
    else if (TravelLogUnit.TryGet(fi.Name, out tlu))                    // from db
    {
        tlu.Path = fi.DirectoryName;
        reader = new NetLogFileReader(tlu, vsclist);
    }
    else                                                                // new file
    {
        reader = new NetLogFileReader(fi.FullName);
    }

    if (netlogreaders != null)
    {
        netlogreaders[fi.Name] = reader;    // store back into the caller's cache
    }

    return(reader);
}
// Parse the header of a netlog file to determine when it was started and in what timezone.
// Outputs: lastlog = best-effort start time (UTC), tzi/tzoffset = timezone info and offset
// used, is23netlog = true when the 2.3-style header (the "====" banner) was detected.
// Returns false if the file is missing, empty, unreadable, or its header does not match the
// expected netlog header regex. Contains heuristics to round up near-miss minute offsets
// and to correct or invent timezone data when the named zone is unknown.
public static bool ReadLogTimeInfo(TravelLogUnit tlu, out DateTime lastlog, out TimeZoneInfo tzi, out TimeSpan tzoffset, out bool is23netlog)
{
    string line = null;
    string filename = Path.Combine(tlu.Path, tlu.Name);

    // defaults in case we bail out early
    lastlog = DateTime.UtcNow;
    tzi = TimeZoneInfo.Local;
    tzoffset = tzi.GetUtcOffset(lastlog);
    is23netlog = false;

    if (!File.Exists(filename))
    {
        return(false);
    }

    // Try to read the first line of the log file
    try
    {
        using (Stream stream = File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        {
            using (TextReader reader = new StreamReader(stream))
            {
                line = reader.ReadLine();

                if (line == null)
                {
                    return(false);
                }

                Regex re = netlogHeaderRe;
                string timefmt = "yy-MM-dd-HH:mm";
                string gmtimefmt = "h\\:mm";

                // 2.3-style logs start with a banner line and use a different header layout
                if (line == "============================================")
                {
                    re = netlogHeaderRe23;
                    is23netlog = true;
                    reader.ReadLine();
                    line = reader.ReadLine();
                    timefmt = "yyyy-MM-dd HH:mm";
                }

                // Extract the start time from the first line
                Match match = re.Match(line);
                if (match != null && match.Success)
                {
                    string localtimestr = match.Groups["Localtime"].Value;
                    string timezonename = match.Groups["Timezone"].Value.Trim();
                    string gmtimestr = is23netlog ? null : match.Groups["GMT"].Value;

                    if (is23netlog)
                    {
                        // 2.3 logs carry the GMT time a couple of lines later in a "{hh:mm:ss G..." line
                        reader.ReadLine();
                        line = reader.ReadLine();
                        if (line == null || line.Length < 13 || line[0] != '{' || line[3] != ':' || line[6] != ':' || line[9] != 'G')
                        {
                            return(false);
                        }
                        gmtimestr = line.Substring(1, 5);
                    }

                    DateTime localtime = DateTime.MinValue;
                    TimeSpan gmtime = TimeSpan.MinValue;

                    // try to resolve the named timezone against the system's zones
                    tzi = TimeZoneInfo.GetSystemTimeZones().FirstOrDefault(t => t.DaylightName.Trim() == timezonename || t.StandardName.Trim() == timezonename);

                    if (tzi != null)
                    {
                        tzoffset = tzi.GetUtcOffset(lastlog);
                    }

                    if (DateTime.TryParseExact(localtimestr, timefmt, CultureInfo.InvariantCulture, DateTimeStyles.None, out localtime) &&
                        TimeSpan.TryParseExact(gmtimestr, gmtimefmt, CultureInfo.InvariantCulture, out gmtime))
                    {
                        // Grab the timezone offset
                        tzoffset = localtime.TimeOfDay - gmtime;

                        // round up offsets that are a minute short of a quarter-hour boundary
                        if (tzoffset.Minutes == 59 || tzoffset.Minutes == 44 || tzoffset.Minutes == 29 || tzoffset.Minutes == 14)
                        {
                            tzoffset = tzoffset + new TimeSpan(0, 1, 0);
                        }

                        if (tzi != null)
                        {
                            // Correct for wildly inaccurate values
                            if (tzoffset > tzi.BaseUtcOffset + TimeSpan.FromHours(18))
                            {
                                tzoffset -= TimeSpan.FromHours(24);
                            }
                            else if (tzoffset < tzi.BaseUtcOffset - TimeSpan.FromHours(18))
                            {
                                tzoffset += TimeSpan.FromHours(24);
                            }
                        }
                        else
                        {
                            // No timezone specified - try to make the timezone offset make sense
                            // Unfortunately anything east of Tonga (GMT+13) or west of Hawaii (GMT-10)
                            // will be a day off.
                            if (tzoffset <= TimeSpan.FromHours(-10.5))
                            {
                                tzoffset += TimeSpan.FromHours(24);
                            }
                            else if (tzoffset > TimeSpan.FromHours(13.5))
                            {
                                tzoffset -= TimeSpan.FromHours(24);
                            }

                            // synthesize a GMT+/-hhmm custom timezone from the computed offset
                            double tzhrs = tzoffset.TotalHours;
                            bool tzneg = tzhrs < 0;
                            if (tzneg)
                            {
                                tzhrs = -tzhrs;
                            }
                            int tzmins = (int)Math.Truncate(tzhrs * 60) % 60;
                            tzhrs = Math.Truncate(tzhrs);

                            string tzname = tzhrs == 0 ? "GMT" : $"GMT{(tzneg ? "-" : "+")}{tzhrs.ToString("00", CultureInfo.InvariantCulture)}{tzmins.ToString("00", CultureInfo.InvariantCulture)}";
                            tzi = TimeZoneInfo.CreateCustomTimeZone(tzname, tzoffset, tzname, tzname);
                        }

                        // Set the start time, timezone info and timezone offset
                        lastlog = localtime - tzoffset;
                    }

                    if (is23netlog)
                    {
                        // 2.3 logs are treated as UTC regardless of the computed offset
                        tzoffset = TimeSpan.Zero;
                        tzi = TimeZoneInfo.Utc;
                    }

                    return(true);
                }
            }
        }
    }
    catch
    {
        return(false);      // any IO/parse failure means no usable header
    }

    return(false);
}
// Parse all netlog files under datapath and add any systems not already present to the DB.
// error is set (and the method returns) if the directory is unset or missing. Known netlog
// TLUs and their location/jump entries are prefetched to avoid per-file queries. Each
// reader with unread content (or the newest file) is replayed: every system read is
// compared, by name and (when both have coordinates) position, against the time-sorted
// journal systems of the current commander, and only non-duplicates are inserted, one
// transaction per file. updateProgress is called before and after each file;
// cancelRequested can abort reading inside ReadSystems.
static public void ParseFiles(string datapath, out string error, int defaultMapColour, Func<bool> cancelRequested, Action<int, string> updateProgress, bool forceReload = false, Dictionary<string, NetLogFileReader> netlogreaders = null, int currentcmdrid = -1)
{
    error = null;

    if (datapath == null)
    {
        error = "Netlog directory not set!";
        return;
    }

    if (!Directory.Exists(datapath))   // if logfiles directory is not found
    {
        error = "Netlog directory is not present!";
        return;
    }

    if (netlogreaders == null)
    {
        netlogreaders = new Dictionary<string, NetLogFileReader>();
    }

    if (currentcmdrid < 0)
    {
        currentcmdrid = EDDConfig.Instance.CurrentCmdrID;
    }

    // TLUs - netlog type only (t.type == 1), keyed by name, first wins on duplicates
    List<TravelLogUnit> tlus = TravelLogUnit.GetAll();
    Dictionary<string, TravelLogUnit> netlogtravelogUnits = tlus.Where(t => t.type == 1).GroupBy(t => t.Name).Select(g => g.First()).ToDictionary(t => t.Name);
    Dictionary<long, string> travellogunitid2name = netlogtravelogUnits.Values.ToDictionary(t => t.id, t => t.Name);
    Dictionary<string, List<JournalLocOrJump>> vsc_lookup = JournalEntry.GetAll().OfType<JournalLocOrJump>().GroupBy(v => v.TLUId).Where(g => travellogunitid2name.ContainsKey(g.Key)).ToDictionary(g => travellogunitid2name[g.Key], g => g.ToList());

    // list of systems in journal, sorted by time
    List<JournalLocOrJump> vsSystemsEnts = JournalEntry.GetAll(currentcmdrid).OfType<JournalLocOrJump>().OrderBy(j => j.EventTimeUTC).ToList();

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(datapath, "netLog.*.log", SearchOption.AllDirectories).Select(f => new FileInfo(f)).OrderBy(p => p.LastWriteTime).ToArray();

    List<NetLogFileReader> readersToUpdate = new List<NetLogFileReader>();

    for (int i = 0; i < allFiles.Length; i++)
    {
        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi, netlogtravelogUnits, vsc_lookup, netlogreaders);

        if (!netlogtravelogUnits.ContainsKey(reader.TravelLogUnit.Name))    // a new file - persist its TLU
        {
            netlogtravelogUnits[reader.TravelLogUnit.Name] = reader.TravelLogUnit;
            reader.TravelLogUnit.Add();
        }

        if (!netlogreaders.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogreaders[reader.TravelLogUnit.Name] = reader;
        }

        if (forceReload)
        {
            // Force a reload of the travel log
            reader.TravelLogUnit.Size = 0;
        }

        if (reader.filePos != fi.Length || i == allFiles.Length - 1)        // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    for (int i = 0; i < readersToUpdate.Count; i++)
    {
        using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
        {
            int ji = 0;     // index into vsSystemsEnts, advanced in time order alongside the file

            NetLogFileReader reader = readersToUpdate[i];
            updateProgress(i * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            using (DbTransaction tn = cn.BeginTransaction())    // one transaction per file
            {
                foreach (JObject jo in reader.ReadSystems(cancelRequested, currentcmdrid))
                {
                    jo["EDDMapColor"] = defaultMapColour;

                    JournalLocOrJump je = new JournalFSDJump(jo)
                    {
                        TLUId = (int)reader.TravelLogUnit.id,
                        CommanderId = currentcmdrid,
                    };

                    while (ji < vsSystemsEnts.Count && vsSystemsEnts[ji].EventTimeUTC < je.EventTimeUTC)
                    {
                        ji++;   // move to next entry which is bigger in time or equal to ours.
                    }

                    JournalLocOrJump prev = (ji > 0 && (ji - 1) < vsSystemsEnts.Count) ? vsSystemsEnts[ji - 1] : null;
                    JournalLocOrJump next = ji < vsSystemsEnts.Count ? vsSystemsEnts[ji] : null;

                    // duplicate if the neighbouring entry in time has the same system name and
                    // (when both have coordinates) essentially the same position
                    bool previssame = (prev != null && prev.StarSystem.Equals(je.StarSystem, StringComparison.CurrentCultureIgnoreCase) && (!prev.HasCoordinate || !je.HasCoordinate || (prev.StarPos - je.StarPos).LengthSquared < 0.01));
                    bool nextissame = (next != null && next.StarSystem.Equals(je.StarSystem, StringComparison.CurrentCultureIgnoreCase) && (!next.HasCoordinate || !je.HasCoordinate || (next.StarPos - je.StarPos).LengthSquared < 0.01));

                    if (!(previssame || nextissame))
                    {
                        je.Add(cn, tn);
                        System.Diagnostics.Debug.WriteLine("Add {0} {1}", je.EventTimeUTC, je.EventDataString);
                    }
                }

                tn.Commit();

                reader.TravelLogUnit.Update();      // record how far we have read
            }

            if (updateProgress != null)
            {
                updateProgress((i + 1) * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);
            }
        }
    }
}
// Merge FSD jump logs downloaded from EDSM into the local DB.
// Local FSD/location entries around [logstarttime,logendtime] are fetched and each EDSM
// entry is matched by system name plus exact timestamp; a second pass tolerates whole-hour
// offsets of up to +/-2h (DST errors), cross-checked by EDSM ID. Matches only update the
// first-discover flag when it differs; non-matches are written to the DB under a new
// EDSM-type TravelLogUnit. Fires OnDownloadedSystems when anything was added.
void Process(List<JournalFSDJump> edsmlogs, DateTime logstarttime, DateTime logendtime)
{
    // Get all of the local entries now that we have the entries from EDSM
    // Moved here to avoid the race that could have been causing duplicate entries
    // EDSM only returns FSD entries, so only look for them. Tested 27/4/2018 after the HE optimisations
    List<HistoryEntry> hlfsdlist = JournalEntry.GetAll(Commander.Nr, logstarttime.AddDays(-1), logendtime.AddDays(1)).
                OfType<JournalLocOrJump>().OrderBy(je => je.EventTimeUTC).
                Select(je => HistoryEntry.FromJournalEntry(je, null, out bool jupdate)).ToList();   // using HE just because of the FillEDSM func

    HistoryList hl = new HistoryList(hlfsdlist);    // just so we can access the FillEDSM func

    List<JournalFSDJump> toadd = new List<JournalFSDJump>();

    int previdx = -1;
    foreach (JournalFSDJump jfsd in edsmlogs)       // find out list of ones not present
    {
        // exact match on system name and timestamp
        int index = hlfsdlist.FindIndex(x => x.System.Name.Equals(jfsd.StarSystem, StringComparison.InvariantCultureIgnoreCase) && x.EventTimeUTC.Ticks == jfsd.EventTimeUTC.Ticks);

        if (index < 0)
        {
            // Look for any entries where DST may have thrown off the time
            foreach (var vi in hlfsdlist.Select((v, i) => new { v = v, i = i }).Where(vi => vi.v.System.Name.Equals(jfsd.StarSystem, StringComparison.InvariantCultureIgnoreCase)))
            {
                if (vi.i > previdx)
                {
                    double hdiff = vi.v.EventTimeUTC.Subtract(jfsd.EventTimeUTC).TotalHours;
                    if (hdiff >= -2 && hdiff <= 2 && hdiff == Math.Floor(hdiff))    // whole-hour offset only
                    {
                        if (vi.v.System.EDSMID <= 0)    // if we don't have a valid EDMSID..
                        {
                            vi.v.System.EDSMID = 0;
                            hl.FillEDSM(vi.v);
                        }

                        if (vi.v.System.EDSMID <= 0 || vi.v.System.EDSMID == jfsd.EdsmID)
                        {
                            index = vi.i;
                            break;
                        }
                    }
                }
            }
        }

        if (index < 0)      // its not a duplicate, add to db
        {
            toadd.Add(jfsd);
        }
        else
        {
            // it is a duplicate, check if the first discovery flag is set right
            JournalFSDJump existingfsd = hlfsdlist[index].journalEntry as JournalFSDJump;

            if (existingfsd != null && existingfsd.EDSMFirstDiscover != jfsd.EDSMFirstDiscover)   // if we have a FSD one, and first discover is different
            {
                existingfsd.UpdateFirstDiscover(jfsd.EDSMFirstDiscover);
            }

            previdx = index;
        }
    }

    if (toadd.Count > 0)    // if we have any, we can add
    {
        System.Diagnostics.Debug.WriteLine($"Adding EDSM logs count {toadd.Count}");

        TravelLogUnit tlu = new TravelLogUnit();    // need a tlu for it
        tlu.type = TravelLogUnit.EDSMType;          // EDSM
        tlu.Name = "EDSM-" + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
        tlu.Size = 0;
        tlu.Path = "EDSM";
        tlu.CommanderId = EDCommander.CurrentCmdrID;
        tlu.Add();  // Add to Database

        UserDatabase.Instance.ExecuteWithDatabase(cn =>
        {
            foreach (JournalFSDJump jfsd in toadd)
            {
                System.Diagnostics.Trace.WriteLine(string.Format("Add {0} {1}", jfsd.EventTimeUTC, jfsd.StarSystem));
                jfsd.SetTLUCommander(tlu.id, tlu.CommanderId.Value);        // update its TLU id to the TLU made above
                jfsd.Add(jfsd.CreateFSDJournalEntryJson(), cn.Connection);  // add it to the db with the JSON created
            }
        });

        LogLine($"Retrieved {toadd.Count} log entries from EDSM, from {logstarttime.ToLocalTime().ToString()} to {logendtime.ToLocalTime().ToString()}");

        OnDownloadedSystems?.Invoke();
    }
}
// Background thread: periodically sync journal logs with EDSM for this commander.
// Loop (nominally every 2s, longer while backing off): refresh EDSM comments at most
// hourly; then, if credentials are valid and sync-from-EDSM is enabled, fetch logs after
// LastEventTime (or, failing that, before FirstEventTime back to GammaStart). HTTP 429 or
// an implausible returned time range grows the backoff by 60s. On success (res == 100)
// the downloaded FSD jumps are de-duplicated against local entries (exact timestamp match,
// then a whole-hour DST tolerance cross-checked by EDSM ID) and the remainder written
// under a new EDSM TLU; first/last event times are advanced and OnDownloadedSystems fired
// when appropriate. Exits when ExitRequested is signalled.
private void FetcherThreadProc()
{
    Trace.WriteLine($"EDSM Thread logs start");
    bool jupdate = false;
    DateTime lastCommentFetch = DateTime.MinValue;

    int waittime = 2000; // Max 1 request every 2 seconds, with a backoff if the rate limit is hit

    if (EDSMRequestBackoffTime > DateTime.UtcNow)
    {
        waittime = (int)Math.Min(EDSMMaxLogAgeMinutes * 60000, Math.Min(BackoffInterval.TotalSeconds * 1000, EDSMRequestBackoffTime.Subtract(DateTime.UtcNow).TotalSeconds * 1000));
    }

    while (!ExitRequested.WaitOne(waittime))
    {
        EDSMClass edsm = new EDSMClass(Commander);

        // refresh comments at most once an hour
        if (edsm.ValidCredentials && DateTime.UtcNow > lastCommentFetch.AddHours(1))
        {
            edsm.GetComments(l => Trace.WriteLine(l));
            lastCommentFetch = DateTime.UtcNow;
        }

        DateTime logstarttime = DateTime.MinValue;  // return what we got..
        DateTime logendtime = DateTime.MinValue;
        List<JournalFSDJump> edsmlogs = null;
        int res = -1;   // return code

        if (edsm.ValidCredentials && Commander.SyncFromEdsm && DateTime.UtcNow > EDSMRequestBackoffTime)
        {
            if (DateTime.UtcNow.Subtract(LastEventTime).TotalMinutes >= EDSMMaxLogAgeMinutes)
            {
                // fetch forwards from the most recent event we hold
                System.Diagnostics.Debug.WriteLine($"Retrieving EDSM logs starting {LastEventTime}");
                res = edsm.GetLogs(LastEventTime, null, out edsmlogs, out logstarttime, out logendtime);
            }
            else if (FirstEventTime > GammaStart)
            {
                // fetch backwards towards GammaStart
                System.Diagnostics.Debug.WriteLine($"Retrieving EDSM logs ending {FirstEventTime}");
                res = edsm.GetLogs(null, FirstEventTime, out edsmlogs, out logstarttime, out logendtime);
            }
        }

        if (ExitRequested.WaitOne(0))
        {
            return;
        }

        if (res == 429) // Rate Limit Exceeded
        {
            Trace.WriteLine($"EDSM Log request rate limit hit - backing off for {BackoffInterval.TotalSeconds}s");
            EDSMRequestBackoffTime = DateTime.UtcNow + BackoffInterval;
            BackoffInterval = BackoffInterval + TimeSpan.FromSeconds(60);
        }
        else if (logstarttime > LastEventTime && logendtime < FirstEventTime)
        {
            Trace.WriteLine($"Bad start and/or end times returned by EDSM - backing off for {BackoffInterval.TotalSeconds}s");
            EDSMRequestBackoffTime = DateTime.UtcNow + BackoffInterval;
            BackoffInterval = BackoffInterval + TimeSpan.FromSeconds(60);
        }
        else if (res == 100 && edsmlogs != null)
        {
            if (edsmlogs.Count > 0)     // if anything to process..
            {
                BackoffInterval = TimeSpan.FromSeconds(60);     // reset the backoff on success

                // clamp the returned range to sane values
                if (logendtime > DateTime.UtcNow)
                {
                    logendtime = DateTime.UtcNow;
                }
                if (logstarttime < DateTime.MinValue.AddDays(1))
                {
                    logstarttime = DateTime.MinValue.AddDays(1);
                }

                // Get all of the local entries now that we have the entries from EDSM
                // Moved here to avoid the race that could have been causing duplicate entries
                // EDSM only returns FSD entries, so only look for them. Tested 27/4/2018 after the HE optimisations
                List<HistoryEntry> hlfsdlist = JournalEntry.GetAll(Commander.Nr, logstarttime.AddDays(-1), logendtime.AddDays(1)).
                    OfType<JournalLocOrJump>().OrderBy(je => je.EventTimeUTC).
                    Select(je => HistoryEntry.FromJournalEntry(je, null, out jupdate)).ToList();    // using HE just because of the FillEDSM func

                HistoryList hl = new HistoryList(hlfsdlist);    // just so we can access the FillEDSM func

                List<JournalFSDJump> toadd = new List<JournalFSDJump>();

                int previdx = -1;
                foreach (JournalFSDJump jfsd in edsmlogs)       // find out list of ones not present
                {
                    // exact match on system name and timestamp
                    int index = hlfsdlist.FindIndex(x => x.System.Name.Equals(jfsd.StarSystem, StringComparison.InvariantCultureIgnoreCase) && x.EventTimeUTC.Ticks == jfsd.EventTimeUTC.Ticks);

                    if (index < 0)
                    {
                        // Look for any entries where DST may have thrown off the time
                        foreach (var vi in hlfsdlist.Select((v, i) => new { v = v, i = i }).Where(vi => vi.v.System.Name.Equals(jfsd.StarSystem, StringComparison.InvariantCultureIgnoreCase)))
                        {
                            if (vi.i > previdx)
                            {
                                double hdiff = vi.v.EventTimeUTC.Subtract(jfsd.EventTimeUTC).TotalHours;
                                if (hdiff >= -2 && hdiff <= 2 && hdiff == Math.Floor(hdiff))    // whole-hour offset only
                                {
                                    if (vi.v.System.EDSMID <= 0)    // if we don't have a valid EDMSID..
                                    {
                                        vi.v.System.EDSMID = 0;
                                        hl.FillEDSM(vi.v);
                                    }

                                    if (vi.v.System.EDSMID <= 0 || vi.v.System.EDSMID == jfsd.EdsmID)
                                    {
                                        index = vi.i;
                                        break;
                                    }
                                }
                            }
                        }
                    }

                    if (index < 0)      // its not a duplicate, add to db
                    {
                        toadd.Add(jfsd);
                    }
                    else
                    {
                        // it is a duplicate, check if the first discovery flag is set right
                        JournalFSDJump existingfsd = hlfsdlist[index].journalEntry as JournalFSDJump;

                        if (existingfsd != null && existingfsd.EDSMFirstDiscover != jfsd.EDSMFirstDiscover)   // if we have a FSD one, and first discover is different
                        {
                            existingfsd.UpdateFirstDiscover(jfsd.EDSMFirstDiscover);
                        }

                        previdx = index;
                    }
                }

                if (toadd.Count > 0)    // if we have any, we can add
                {
                    Trace.WriteLine($"Adding EDSM logs count {toadd.Count}");

                    TravelLogUnit tlu = new TravelLogUnit();    // need a tlu for it
                    tlu.type = 2;                               // EDSM
                    tlu.Name = "EDSM-" + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
                    tlu.Size = 0;
                    tlu.Path = "EDSM";
                    tlu.CommanderId = EDCommander.CurrentCmdrID;
                    tlu.Add();  // Add to Database

                    using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
                    {
                        foreach (JournalFSDJump jfsd in toadd)
                        {
                            System.Diagnostics.Trace.WriteLine(string.Format("Add {0} {1}", jfsd.EventTimeUTC, jfsd.StarSystem));
                            jfsd.SetTLUCommander(tlu.id, jfsd.CommanderId);     // update its TLU id to the TLU made above
                            jfsd.Add(jfsd.CreateFSDJournalEntryJson(), cn);     // add it to the db with the JSON created
                        }
                    }

                    LogLine($"Retrieved {toadd.Count} log entries from EDSM, from {logstarttime.ToLocalTime().ToString()} to {logendtime.ToLocalTime().ToString()}");

                    if (logendtime > LastEventTime || logstarttime <= GammaStart)
                    {
                        if (OnDownloadedSystems != null)
                        {
                            OnDownloadedSystems();
                        }
                    }
                }
            }

            // widen the known fetched window
            if (logstarttime < FirstEventTime)
            {
                FirstEventTime = logstarttime;
            }

            if (logendtime > LastEventTime)
            {
                LastEventTime = logendtime;
            }
        }
    }
}
// Primary method to fill historylist
// Get All journals matching parameters.
// commander == -999 means all commanders; startdateutc/enddateutc bound EventTime; ids
// restricts to those event types, except that entries at/after allidsafterutc are always
// included. Rows are read in chunks of 'chunksize' per DB lock so other DB users can
// interleave.
// if callback set, then each JE is passed back thru callback and not accumulated. Callback is in a thread. Callback can stop the accumulation if it returns false
static public List<JournalEntry> GetAll(int commander = -999, DateTime? startdateutc = null, DateTime? enddateutc = null,
                                        JournalTypeEnum[] ids = null, DateTime? allidsafterutc = null,
                                        Func<JournalEntry, Object, bool> callback = null, Object callbackobj = null,
                                        int chunksize = 1000)
{
    var tluslist = TravelLogUnit.GetAll();
    Dictionary<long, TravelLogUnit> tlus = tluslist.ToDictionary(t => t.ID);

    DbCommand cmd = null;
    DbDataReader reader = null;
    List<JournalEntry> entries = new List<JournalEntry>();

    try
    {
        cmd = UserDatabase.Instance.ExecuteWithDatabase(cn => cn.Connection.CreateCommand("select Id,TravelLogId,CommanderId,EventData,Synced from JournalEntries"));

        reader = UserDatabase.Instance.ExecuteWithDatabase(cn =>
        {
            // build the where clause from the optional filters
            string cnd = "";
            if (commander != -999)
            {
                cnd = cnd.AppendPrePad("CommanderID = @commander", " and ");
                cmd.AddParameterWithValue("@commander", commander);
            }
            if (startdateutc != null)
            {
                cnd = cnd.AppendPrePad("EventTime >= @after", " and ");
                cmd.AddParameterWithValue("@after", startdateutc.Value);
            }
            if (enddateutc != null)
            {
                cnd = cnd.AppendPrePad("EventTime <= @before", " and ");
                cmd.AddParameterWithValue("@before", enddateutc.Value);
            }
            if (ids != null)
            {
                int[] array = Array.ConvertAll(ids, x => (int)x);
                if (allidsafterutc != null)
                {
                    // all event types are wanted at/after this time, the id filter only
                    // applies to older entries
                    cmd.AddParameterWithValue("@idafter", allidsafterutc.Value);
                    cnd = cnd.AppendPrePad("(EventTypeId in (" + string.Join(",", array) + ") Or EventTime>=@idafter)", " and ");
                }
                else
                {
                    cnd = cnd.AppendPrePad("EventTypeId in (" + string.Join(",", array) + ")", " and ");
                }
            }

            if (cnd.HasChars())
            {
                cmd.CommandText += " where " + cnd;
            }

            cmd.CommandText += " Order By EventTime,Id ASC";

            return(cmd.ExecuteReader());
        });

        List<JournalEntry> retlist = null;

#if TIMESCAN
        Dictionary<string, List<long>> times = new Dictionary<string, List<long>>();
        Stopwatch sw = new Stopwatch();
        sw.Start();
#endif

        do
        {
            // experiments state that reading the DL is not the time sink, its creating the journal entries
            retlist = UserDatabase.Instance.ExecuteWithDatabase(cn =>       // split into smaller chunks to allow other things access..
            {
                List<JournalEntry> list = new List<JournalEntry>();

                while (list.Count < chunksize && reader.Read())
                {
#if TIMESCAN
                    long t = sw.ElapsedTicks;
#endif
                    JournalEntry je = JournalEntry.CreateJournalEntryFixedPos(reader);
                    list.Add(je);
#if TIMESCAN
                    long tw = sw.ElapsedTicks - t;
                    if (!times.TryGetValue(sys.EventTypeStr, out var x))
                    {
                        times[sys.EventTypeStr] = new List<long>();
                    }
                    times[sys.EventTypeStr].Add(tw);
#endif
                }

                return(list);
            });

            if (callback != null)       // collated, now process them, if callback, feed them thru callback procedure
            {
                foreach (var e in retlist)
                {
                    if (!callback.Invoke(e, callbackobj))   // if indicate stop
                    {
                        retlist = null;
                        break;
                    }
                }
            }
            else
            {
                entries.AddRange(retlist);
            }
        } while (retlist != null && retlist.Count != 0);

#if TIMESCAN
        List<Results> res = new List<Results>();

        foreach (var kvp in times)
        {
            Results r = new Results();
            r.name = kvp.Key;
            r.avg = kvp.Value.Average();
            r.min = kvp.Value.Min();
            r.max = kvp.Value.Max();
            r.total = kvp.Value.Sum();
            r.avgtime = ((double)r.avg / Stopwatch.Frequency * 1000);
            r.sumtime = ((double)r.total / Stopwatch.Frequency * 1000);
            r.count = kvp.Value.Count;
            res.Add(r);
        }

        //res.Sort(delegate (Results l, Results r) { return l.sumtime.CompareTo(r.sumtime); });
        res.Sort(delegate(Results l, Results r) { return(l.avgtime.CompareTo(r.avgtime)); });

        string rs = "";
        foreach (var r in res)
        {
            rs = rs + Environment.NewLine + string.Format("Time {0} min {1} max {2} avg {3} ms count {4} totaltime {5} ms", r.name, r.min, r.max, r.avgtime.ToString("#.#########"), r.count, r.sumtime.ToString("#.#######"));
        }

        System.Diagnostics.Trace.WriteLine(rs);
        //File.WriteAllText(@"c:\code\times.txt", rs);
#endif
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("Getall Exception " + ex);
    }
    finally
    {
        if (reader != null || cmd != null)
        {
            UserDatabase.Instance.ExecuteWithDatabase(cn =>
            {
                reader?.Close();
                cmd?.Dispose();
            });
        }
    }

    return(entries);
}
// Scan WatcherFolder for all journal files, register any new ones as TravelLogUnits, and write any
// journal entries not already in the DB. Called on startup / refresh.
// updateProgress is fed (percent, tlu-name) during the DB update pass and (-1, "") when finished.
// NOTE(review): cancelRequested is accepted but never consulted in this body — confirm whether
// cancellation was intended here.
public void ParseJournalFiles(Func <bool> cancelRequested, Action <int, string> updateProgress, bool forceReload = false)
{
    System.Diagnostics.Trace.WriteLine("Scanned " + WatcherFolder);

    // All journal-type TLUs ((type & 0xFF) == 3), deduplicated by Name (GroupBy/First guards against
    // duplicate rows in the DB).
    Dictionary <string, TravelLogUnit> m_travelogUnits = TravelLogUnit.GetAll().Where(t => (t.type & 0xFF) == 3).GroupBy(t => t.Name).Select(g => g.First()).ToDictionary(t => t.Name);

    // order by file write time so we end up on the last one written
    FileInfo[] allFiles = Directory.EnumerateFiles(WatcherFolder, journalfilematch, SearchOption.AllDirectories).Select(f => new FileInfo(f)).OrderBy(p => p.LastWriteTime).ToArray();

    List <EDJournalReader> readersToUpdate = new List <EDJournalReader>();

    // Pass 1: open a reader per file, registering unseen files in the DB and the reader cache.
    for (int i = 0; i < allFiles.Length; i++)
    {
        FileInfo fi = allFiles[i];

        var reader = OpenFileReader(fi, m_travelogUnits);       // open it (overload not shown here — presumably reuses an existing TLU from the dictionary)

        if (!m_travelogUnits.ContainsKey(reader.TravelLogUnit.Name))
        {
            // New file: record it as a journal TLU and persist it.
            m_travelogUnits[reader.TravelLogUnit.Name] = reader.TravelLogUnit;
            reader.TravelLogUnit.type = 3;
            reader.TravelLogUnit.Add();
        }

        if (!netlogreaders.ContainsKey(reader.TravelLogUnit.Name))
        {
            netlogreaders[reader.TravelLogUnit.Name] = reader;
        }

        if (forceReload)
        {
            // Force a reload of the travel log
            reader.TravelLogUnit.Size = 0;
        }

        if (reader.filePos != fi.Length || i == allFiles.Length - 1)        // File not already in DB, or is the last one
        {
            readersToUpdate.Add(reader);
        }
    }

    // Pass 2: read each flagged file and insert entries the DB does not already contain.
    for (int i = 0; i < readersToUpdate.Count; i++)
    {
        using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
        {
            EDJournalReader reader = readersToUpdate[i];
            updateProgress(i * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            List <JournalReaderEntry> entries = reader.ReadJournalLog(true).ToList();      // this may create new commanders, and may write to the TLU db

            // Existing entries for this TLU keyed by timestamp, so duplicates can be skipped below.
            ILookup <DateTime, JournalEntry> existing = JournalEntry.GetAllByTLU(reader.TravelLogUnit.id).ToLookup(e => e.EventTimeUTC);

            using (DbTransaction tn = cn.BeginTransaction())    // batch inserts into one transaction
            {
                foreach (JournalReaderEntry jre in entries)
                {
                    // Only insert if no same-timestamp entry matches (AreSameEntry compares content).
                    if (!existing[jre.JournalEntry.EventTimeUTC].Any(e => JournalEntry.AreSameEntry(jre.JournalEntry, e, ent1jo: jre.Json)))
                    {
                        jre.JournalEntry.Add(jre.Json, cn, tn);
                        System.Diagnostics.Trace.WriteLine(string.Format("Write Journal to db {0} {1}", jre.JournalEntry.EventTimeUTC, jre.JournalEntry.EventTypeStr));
                    }
                }

                tn.Commit();
            }

            reader.TravelLogUnit.Update(cn);    // persist new file position/size

            updateProgress((i + 1) * 100 / readersToUpdate.Count, reader.TravelLogUnit.Name);

            lastnfi = reader;   // leave the live scanner pointing at the last file processed
        }
    }

    updateProgress(-1, "");     // -1 signals completion to the progress consumer
}
// Called by EDJournalClass periodically to scan for journal entries public List <JournalEntry> ScanForNewEntries() { var entries = new List <JournalEntry>(); try { string filename = null; if (lastnfi != null) // always give old file another go, even if we are going to change { if (!File.Exists(lastnfi.FileName)) // if its been removed, null { lastnfi = null; } else { ScanReader(lastnfi, entries); if (entries.Count > 0) { ticksNoActivity = 0; return(entries); // feed back now don't change file } } } if (m_netLogFileQueue.TryDequeue(out filename)) // if a new one queued, we swap to using it { lastnfi = OpenFileReader(new FileInfo(filename)); System.Diagnostics.Debug.WriteLine(string.Format("Change to scan {0}", lastnfi.FileName)); if (lastnfi != null) { ScanReader(lastnfi, entries); // scan new one } } // every few goes, if its not there or filepos is greater equal to length (so only done when fully up to date) else if (ticksNoActivity >= 30 && (lastnfi == null || lastnfi.filePos >= new FileInfo(lastnfi.FileName).Length)) { HashSet <string> tlunames = new HashSet <string>(TravelLogUnit.GetAllNames()); string[] filenames = Directory.EnumerateFiles(WatcherFolder, journalfilematch, SearchOption.AllDirectories) .Select(s => new { name = Path.GetFileName(s), fullname = s }) .Where(s => !tlunames.Contains(s.name)) // find any new ones.. .OrderBy(s => s.name) .Select(s => s.fullname) .ToArray(); foreach (var name in filenames) // for any new filenames.. { System.Diagnostics.Debug.WriteLine("No Activity but found new file " + name); lastnfi = OpenFileReader(new FileInfo(name)); break; // stop on first found } if (lastnfi != null) { ScanReader(lastnfi, entries); // scan new one } ticksNoActivity = 0; } ticksNoActivity++; return(entries); } catch (Exception ex) { System.Diagnostics.Trace.WriteLine("Net tick exception : " + ex.Message); System.Diagnostics.Trace.WriteLine(ex.StackTrace); return(new List <JournalEntry>()); } }
// Called by EDScanner periodically to scan for journal entries.
// Returns (journal entries, UI events) read this tick. Same strategy as the older List-returning
// variant: drain the current file first; else switch to a queued file; else, after ~30 idle ticks,
// look for folder files whose full path is not yet a known TLU name.
public Tuple <List <JournalEntry>, List <UIEvent> > ScanForNewEntries()
{
    var entries = new List <JournalEntry>();
    var uientries = new List <UIEvent>();

    string filename = null;

    if (lastnfi != null)            // always give old file another go, even if we are going to change
    {
        if (!File.Exists(lastnfi.FullName))         // if its been removed, null
        {
            lastnfi = null;
        }
        else
        {
            //var notdone = new FileInfo(lastnfi.FullName).Length != lastnfi.Pos ? "**********" : ""; System.Diagnostics.Debug.WriteLine($"Scan last nfi {lastnfi.FullName} from {lastnfi.Pos} Length file is {new FileInfo(lastnfi.FullName).Length} {notdone} ");

            ScanReader(entries, uientries);

            if (entries.Count > 0 || uientries.Count > 0)
            {
                // System.Diagnostics.Debug.WriteLine("ScanFornew read " + entries.Count() + " ui " + uientries.Count());
                ticksNoActivity = 0;
                return(new Tuple <List <JournalEntry>, List <UIEvent> >(entries, uientries));       // feed back now don't change file
            }
        }
    }

    if (m_netLogFileQueue.TryDequeue(out filename))             // if a new one queued, we swap to using it
    {
        lastnfi = OpenFileReader(filename);
        System.Diagnostics.Debug.WriteLine(string.Format("Change to scan {0}", lastnfi.FullName));
        ScanReader(entries, uientries);
    }
    else if (ticksNoActivity >= 30 && (lastnfi == null || lastnfi.Pos >= new FileInfo(lastnfi.FullName).Length))
    {
        // every few goes, if its not there or filepos is greater equal to length (so only done when fully up to date)
        // scan all files in the folder, pick out any new logs, and process the first that is found.
        try
        {
            HashSet <string> tlunames = new HashSet <string>(TravelLogUnit.GetAllNames());

            // NOTE(review): filter compares s.fullname against TLU names (unlike the older variant
            // which compares s.name) — presumably TLU names hold full paths here; confirm.
            string[] filenames = Directory.EnumerateFiles(WatcherFolder, journalfilematch, SearchOption.AllDirectories)
                                 .Select(s => new { name = Path.GetFileName(s), fullname = s })
                                 .Where(s => !tlunames.Contains(s.fullname))            // find any new ones..
                                 .Where(g => new FileInfo(g.fullname).LastWriteTime >= mindateutc)      // ignore files older than the cutoff
                                 .OrderBy(s => s.name)
                                 .Select(s => s.fullname)
                                 .ToArray();

            if (filenames.Length > 0)
            {
                lastnfi = OpenFileReader(filenames[0]);         // open first one
                System.Diagnostics.Debug.WriteLine(string.Format("Found new file {0}", lastnfi.FullName));
                ScanReader(entries, uientries);
            }
        }
        catch (Exception ex)
        {
            // best-effort scan - a transient FS error here must not kill the poll loop
            System.Diagnostics.Debug.WriteLine("Exception during monitor watch file found " + ex);
        }
        finally
        {
            ticksNoActivity = 0;        // reset the idle counter whether or not the scan succeeded
        }
    }

    ticksNoActivity++;

    return(new Tuple <List <JournalEntry>, List <UIEvent> >(entries, uientries));       // feed back now don't change file
}
public EDJournalReader(TravelLogUnit tlu) : base(tlu) { }
public LogReaderBase(TravelLogUnit tlu) { this.TravelLogUnit = tlu; }
// Parse one raw journal line into a JournalReaderEntry, maintaining per-file commander state and
// suppressing rapid repeats of shipyard/outfitting/market style events.
// Returns null for blank, unparseable, training, or duplicate lines; if resetOnError is set,
// parse failures rethrow instead of being skipped.
// Side effects: may update TravelLogUnit (beta flag, CommanderId) and may create/rename commanders
// on a LoadGame event.
private JournalReaderEntry ProcessLine(string line, bool resetOnError)
{
    int cmdrid = -2;        //-1 is hidden, -2 is never shown

    if (TravelLogUnit.CommanderId.HasValue)
    {
        cmdrid = TravelLogUnit.CommanderId.Value;
        // System.Diagnostics.Trace.WriteLine(string.Format("TLU says commander {0} at {1}", cmdrid, TravelLogUnit.Name));
    }

    if (line.Length == 0)
    {
        return(null);
    }

    JObject jo = null;
    JournalEntry je = null;

    try
    {
        jo = JObject.Parse(line);
        je = JournalEntry.CreateJournalEntry(jo);
    }
    catch
    {
        System.Diagnostics.Trace.WriteLine($"Bad journal line:\n{line}");

        if (resetOnError)
        {
            throw;          // caller wants to reset the read on a parse failure
        }
        else
        {
            return(null);   // otherwise skip the bad line
        }
    }

    if (je == null)         // parsed JSON but not a recognisable event
    {
        System.Diagnostics.Trace.WriteLine($"Bad journal line:\n{line}");
        return(null);
    }

    bool toosoon = false;

    if (je.EventTypeID == JournalTypeEnum.Fileheader)
    {
        JournalEvents.JournalFileheader header = (JournalEvents.JournalFileheader)je;

        if ((header.Beta && !EliteConfigInstance.InstanceOptions.DisableBetaCommanderCheck) || EliteConfigInstance.InstanceOptions.ForceBetaOnCommander)     // if beta, and not disabled, or force beta
        {
            TravelLogUnit.type |= 0x8000;       // mark TLU as beta
        }

        if (header.Part > 1)
        {
            JournalEvents.JournalContinued contd = JournalEntry.GetLast <JournalEvents.JournalContinued>(je.EventTimeUTC.AddSeconds(1), e => e.Part == header.Part);

            // Carry commander over from previous log if it ends with a Continued event.
            if (contd != null && Math.Abs(header.EventTimeUTC.Subtract(contd.EventTimeUTC).TotalSeconds) < 5 && contd.CommanderId >= 0)
            {
                TravelLogUnit.CommanderId = contd.CommanderId;
            }
        }
    }
    else if (je.EventTypeID == JournalTypeEnum.LoadGame)
    {
        string newname = (je as JournalEvents.JournalLoadGame).LoadGameCommander;

        if ((TravelLogUnit.type & 0x8000) == 0x8000)    // beta TLUs get a distinguishing prefix
        {
            newname = "[BETA] " + newname;
        }

        EDCommander commander = EDCommander.GetCommander(newname);

        if (commander == null)
        {
            commander = EDCommander.GetListCommanders().FirstOrDefault();

            if (EDCommander.NumberOfCommanders == 1 && commander != null && commander.Name == "Jameson (Default)")
            {
                // Only the placeholder commander exists - claim it rather than creating a new one.
                commander.Name = newname;
                commander.EdsmName = newname;
                EDCommander.Update(new List <EDCommander> { commander }, false);
            }
            else
            {
                // Create a new commander; record a journal dir only if it differs from the default.
                commander = EDCommander.Create(newname, null, EDJournalClass.GetDefaultJournalDir().Equals(TravelLogUnit.Path) ? "" : TravelLogUnit.Path);
            }
        }

        cmdrid = commander.Nr;

        if (!TravelLogUnit.CommanderId.HasValue)        // first LoadGame in this file pins the commander
        {
            TravelLogUnit.CommanderId = cmdrid;
            TravelLogUnit.Update();
            System.Diagnostics.Trace.WriteLine(string.Format("TLU {0} updated with commander {1}", TravelLogUnit.Path, cmdrid));
        }
    }
    else if (je is ISystemStationEntry && ((ISystemStationEntry)je).IsTrainingEvent)
    {
        System.Diagnostics.Trace.WriteLine($"Training detected:\n{line}");
        return(null);       // training-session events are not recorded
    }

    if (je is IAdditionalFiles)
    {
        if ((je as IAdditionalFiles).ReadAdditionalFiles(Path.GetDirectoryName(FileName), ref jo) == false)     // if failed
        {
            return(null);
        }
    }

    // Dedup of rapidly-repeated station data events: remember the last of each kind and flag
    // an identical follow-up as too soon.
    if (je is JournalEvents.JournalShipyard)                    // when going into shipyard
    {
        toosoon = lastshipyard != null && lastshipyard.Yard.Equals((je as JournalEvents.JournalShipyard).Yard);
        lastshipyard = je as JournalEvents.JournalShipyard;
    }
    else if (je is JournalEvents.JournalStoredShips)            // when going into shipyard
    {
        toosoon = laststoredships != null && CollectionStaticHelpers.Equals(laststoredships.ShipsHere, (je as JournalEvents.JournalStoredShips).ShipsHere) &&
                  CollectionStaticHelpers.Equals(laststoredships.ShipsRemote, (je as JournalEvents.JournalStoredShips).ShipsRemote);
        laststoredships = je as JournalEvents.JournalStoredShips;
    }
    else if (je is JournalEvents.JournalStoredModules)          // when going into outfitting
    {
        toosoon = laststoredmodules != null && CollectionStaticHelpers.Equals(laststoredmodules.ModuleItems, (je as JournalEvents.JournalStoredModules).ModuleItems);
        laststoredmodules = je as JournalEvents.JournalStoredModules;
    }
    else if (je is JournalEvents.JournalOutfitting)             // when doing into outfitting
    {
        toosoon = lastoutfitting != null && lastoutfitting.ItemList.Equals((je as JournalEvents.JournalOutfitting).ItemList);
        lastoutfitting = je as JournalEvents.JournalOutfitting;
    }
    else if (je is JournalEvents.JournalMarket)
    {
        toosoon = lastmarket != null && lastmarket.Equals(je as JournalEvents.JournalMarket);
        lastmarket = je as JournalEvents.JournalMarket;
    }
    else if (je is JournalEvents.JournalUndocked || je is JournalEvents.JournalLoadGame)    // undocked, Load Game, repeats are cleared
    {
        lastshipyard = null;
        laststoredmodules = null;
        lastoutfitting = null;
        lastmarket = null;          // BUGFIX: was a duplicate 'laststoredmodules = null;' - market cache was never cleared on undock/loadgame
        laststoredships = null;
    }

    if (toosoon)                    // if seeing repeats, remove
    {
        System.Diagnostics.Debug.WriteLine("**** Remove as dup " + je.EventTypeStr);
        return(null);
    }

    je.TLUId = (int)TravelLogUnit.id;
    je.CommanderId = cmdrid;

    return(new JournalReaderEntry {
        JournalEntry = je, Json = jo
    });
}
public List <JournalEntry> ScanForNewEntries() { var entries = new List <JournalEntry>(); EDJournalReader nfi = null; try { string filename = null; if (lastnfi != null) { ScanReader(lastnfi, entries); } if (entries.Count != 0) { return(entries); } if (m_netLogFileQueue.TryDequeue(out filename)) // if a new one queued, we swap to using it { nfi = OpenFileReader(new FileInfo(filename)); lastnfi = nfi; System.Diagnostics.Trace.WriteLine(string.Format("Change in file, scan {0}", lastnfi.FileName)); } else if (ticksNoActivity >= 30 && (lastnfi == null || (!File.Exists(lastnfi.FileName) || lastnfi.filePos >= new FileInfo(lastnfi.FileName).Length))) { if (lastnfi == null) { Trace.Write($"No last file - scanning for journals"); } else if (!File.Exists(lastnfi.FileName)) { Trace.WriteLine($"File {lastnfi.FileName} not found - scanning for journals"); } else { // Trace.WriteLine($"No activity on {lastnfi.FileName} for 60 seconds ({lastnfi.filePos} >= {new FileInfo(lastnfi.FileName).Length} - scanning for new journals"); } HashSet <string> tlunames = new HashSet <string>(TravelLogUnit.GetAllNames()); string[] filenames = Directory.EnumerateFiles(m_watcherfolder, "Journal.*.log", SearchOption.AllDirectories) .Select(s => new { name = Path.GetFileName(s), fullname = s }) .Where(s => !tlunames.Contains(s.name)) .OrderBy(s => s.name) .Select(s => s.fullname) .ToArray(); ticksNoActivity = 0; foreach (var name in filenames) { nfi = OpenFileReader(new FileInfo(name)); lastnfi = nfi; break; } } else { nfi = lastnfi; } ticksNoActivity++; if (nfi != null) { ScanReader(nfi, entries); } return(entries); } catch (Exception ex) { System.Diagnostics.Trace.WriteLine("Net tick exception : " + ex.Message); System.Diagnostics.Trace.WriteLine(ex.StackTrace); return(new List <JournalEntry>()); } }
// Background thread: periodically fetch flight logs from EDSM for the current commander and merge
// any entries we don't already hold into the local DB as synthetic FSD journal entries.
// Runs until ExitRequested is signalled; honours EDSM rate limiting with an escalating backoff.
private void FetcherThreadProc()
{
    Trace.WriteLine($"EDSM Thread logs start");

    bool jupdate = false;

    int waittime = 2000;        // Max 1 request every 2 seconds, with a backoff if the rate limit is hit

    if (EDSMRequestBackoffTime > DateTime.UtcNow)
    {
        // Still backing off: wait out the remainder (capped by the backoff interval and max log age).
        waittime = (int)Math.Min(EDSMMaxLogAgeMinutes * 60000, Math.Min(BackoffInterval.TotalSeconds * 1000, EDSMRequestBackoffTime.Subtract(DateTime.UtcNow).TotalSeconds * 1000));
    }

    // get them as of now.. since we are searching back in time it should be okay. On a refresh we would start again!
    List <HistoryEntry> hlfsdlist = JournalEntry.GetAll(Commander.Nr).OfType <JournalLocOrJump>().OrderBy(je => je.EventTimeUTC).Select(je => HistoryEntry.FromJournalEntry(je, null, false, out jupdate)).ToList();

    while (!ExitRequested.WaitOne(waittime))
    {
        EDSMClass edsm = new EDSMClass {
            apiKey = Commander.APIKey, commanderName = Commander.EdsmName
        };

        List <HistoryEntry> edsmlogs = null;
        DateTime logstarttime = DateTime.MinValue;
        DateTime logendtime = DateTime.MinValue;
        int res = -1;           // -1 = no request made this pass

        if (edsm.IsApiKeySet && Commander.SyncFromEdsm && DateTime.UtcNow > EDSMRequestBackoffTime)
        {
            if (DateTime.UtcNow.Subtract(LastEventTime).TotalMinutes >= EDSMMaxLogAgeMinutes)
            {
                // Our newest known event is stale - fetch forward from it.
                //Trace.WriteLine($"Retrieving EDSM logs starting {LastEventTime}");
                res = edsm.GetLogs(LastEventTime, null, out edsmlogs, out logstarttime, out logendtime);
            }
            else if (FirstEventTime > GammaStart)
            {
                // Otherwise fetch backwards towards the start of live (Gamma) data.
                //Trace.WriteLine($"Retrieving EDSM logs ending {FirstEventTime}");
                res = edsm.GetLogs(null, FirstEventTime, out edsmlogs, out logstarttime, out logendtime);
            }
        }

        if (ExitRequested.WaitOne(0))       // bail promptly if asked to stop while fetching
        {
            return;
        }

        if (res == 429)         // Rate Limit Exceeded
        {
            Trace.WriteLine($"EDSM Log request rate limit hit - backing off for {BackoffInterval.TotalSeconds}s");
            EDSMRequestBackoffTime = DateTime.UtcNow + BackoffInterval;
            BackoffInterval = BackoffInterval + TimeSpan.FromSeconds(60);       // escalate backoff by a minute each time
        }
        else if (logstarttime > LastEventTime && logendtime < FirstEventTime)
        {
            // Returned window lies entirely outside our known range - treat as a bad response.
            Trace.WriteLine($"Bad start and/or end times returned by EDSM - backing off for {BackoffInterval.TotalSeconds}s");
            EDSMRequestBackoffTime = DateTime.UtcNow + BackoffInterval;
            BackoffInterval = BackoffInterval + TimeSpan.FromSeconds(60);
        }
        else if (res == 100 && edsmlogs != null)        // 100 = EDSM success
        {
            if (edsmlogs.Count > 0)         // if anything to process..
            {
                //Trace.WriteLine($"Retrieving EDSM logs count {edsmlogs.Count}");

                BackoffInterval = TimeSpan.FromSeconds(60);     // successful fetch resets the backoff escalation

                if (logendtime > DateTime.UtcNow)
                {
                    logendtime = DateTime.UtcNow;       // clamp a future end time to now
                }

                HistoryList hl = new HistoryList(hlfsdlist);
                List <DateTime> hlfsdtimes = hlfsdlist.Select(he => he.EventTimeUTC).ToList();

                List <HistoryEntry> toadd = new List <HistoryEntry>();

                int previdx = -1;

                foreach (HistoryEntry he in edsmlogs)           // find out list of ones not present
                {
                    // Exact match: same system name (case-insensitive) and identical timestamp.
                    int index = hlfsdlist.FindIndex(x => x.System.name.Equals(he.System.name, StringComparison.InvariantCultureIgnoreCase) && x.EventTimeUTC.Ticks == he.EventTimeUTC.Ticks);

                    if (index < 0)
                    {
                        // Look for any entries where DST may have thrown off the time
                        foreach (var vi in hlfsdlist.Select((v, i) => new { v = v, i = i }).Where(vi => vi.v.System.name.Equals(he.System.name, StringComparison.InvariantCultureIgnoreCase)))
                        {
                            if (vi.i > previdx)         // only consider entries after the last match, keeping order
                            {
                                double hdiff = vi.v.EventTimeUTC.Subtract(he.EventTimeUTC).TotalHours;

                                // Whole-hour offset within +/-2h looks like a timezone/DST skew.
                                if (hdiff >= -2 && hdiff <= 2 && hdiff == Math.Floor(hdiff))
                                {
                                    if (vi.v.System.id_edsm <= 0)
                                    {
                                        vi.v.System.id_edsm = 0;        // force a lookup
                                        hl.FillEDSM(vi.v);
                                    }

                                    if (vi.v.System.id_edsm <= 0 || vi.v.System.id_edsm == he.System.id_edsm)
                                    {
                                        index = vi.i;
                                        break;
                                    }
                                }
                            }
                        }
                    }

                    if (index < 0)
                    {
                        toadd.Add(he);          // genuinely not present locally
                    }
                    else
                    {
                        HistoryEntry lhe = hlfsdlist[index];

                        // Propagate EDSM first-discovery flag onto our matching local entry.
                        if (he.IsEDSMFirstDiscover && !lhe.IsEDSMFirstDiscover)
                        {
                            lhe.SetFirstDiscover();
                        }

                        previdx = index;
                    }
                }

                if (toadd.Count > 0)            // if we have any, we can add
                {
                    Trace.WriteLine($"Adding EDSM logs count {toadd.Count}");

                    TravelLogUnit tlu = new TravelLogUnit();    // need a tlu for it
                    tlu.type = 2;                               // EDSM
                    tlu.Name = "EDSM-" + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
                    tlu.Size = 0;
                    tlu.Path = "EDSM";
                    tlu.CommanderId = EDCommander.CurrentCmdrID;
                    tlu.Add();                                  // Add to Database

                    using (SQLiteConnectionUser cn = new SQLiteConnectionUser(utc: true))
                    {
                        foreach (HistoryEntry he in toadd)
                        {
                            // Synthesize an FSD-jump journal entry from the EDSM log record.
                            JObject jo = JournalEntry.CreateFSDJournalEntryJson(he.EventTimeUTC, he.System.name, he.System.x, he.System.y, he.System.z, EliteConfigInstance.InstanceConfig.DefaultMapColour);
                            JournalEntry je = JournalEntry.CreateFSDJournalEntry(tlu.id, tlu.CommanderId.Value, (int)SyncFlags.EDSM, jo);
                            System.Diagnostics.Trace.WriteLine(string.Format("Add {0} {1}", je.EventTimeUTC, he.System.name));
                            je.Add(jo, cn);
                        }
                    }

                    LogLine($"Retrieved {toadd.Count} log entries from EDSM, from {logstarttime.ToLocalTime().ToString()} to {logendtime.ToLocalTime().ToString()}");

                    // Notify listeners if we extended the known range at either end.
                    if (logendtime > LastEventTime || logstarttime <= GammaStart)
                    {
                        if (OnDownloadedSystems != null)
                        {
                            OnDownloadedSystems();
                        }
                    }
                }
            }

            // Widen the fetched-range bookkeeping to cover this response.
            if (logstarttime < FirstEventTime)
            {
                FirstEventTime = logstarttime;
            }

            if (logendtime > LastEventTime)
            {
                LastEventTime = logendtime;
            }
        }
    }
}