/// <summary>
/// Polls the file(s) matched by <paramref name="fileToWatch"/> with Log Parser and emits each
/// newly appended line as a JSON event until <c>Stop</c> is set or <c>CancelToken</c> fires.
/// Per-file read positions are persisted through <c>LogsFileDatabase</c>, so log rolls and
/// process restarts resume from the correct record.
/// </summary>
/// <param name="fileToWatch">Log Parser FROM-clause (file path or wildcard) to monitor.</param>
private void FileWatcher(string fileToWatch)
{
    var iFmt = new TextLineInputFormat()
    {
        iCodepage = _arguments.CodePage,
        splitLongLines = _arguments.SplitLongLines,
        recurse = _arguments.Recurse
    };

    // Files already reported as missing, so each FileNotFoundException is warned only once.
    Dictionary<string, string> _fnfmap = new Dictionary<string, string>();

    using (var syncHandle = new ManualResetEventSlim())
    {
        // Execute the query: one discovery pass plus one tail pass per polling interval.
        while (!Stop)
        {
            var oLogQuery = new LogQuery();
            if (!CancelToken.IsCancellationRequested)
            {
                try
                {
                    // Pass 1: resolve the concrete file names behind the watch expression
                    // and detect rolled/new files before tailing.
                    var qfiles = string.Format("SELECT Distinct [LogFilename] FROM {0}", fileToWatch);
                    var rsfiles = oLogQuery.Execute(qfiles, iFmt);
                    for (; !rsfiles.atEnd(); rsfiles.moveNext())
                    {
                        var record = rsfiles.getRecord();
                        string logName = record.getValue("LogFilename") as string;
                        FileInfo fi = new FileInfo(logName);
                        var dbe = LogsFileDatabase.LookupLogFile(logName);
                        SaveVisitedFileName(dbe.FileName);
                        DateTime creationTime = fi.CreationTimeUtc;
                        // Rolled when the file is new, was recreated (creation time changed),
                        // or shrank below the last persisted read position.
                        bool logHasRolled = dbe.NewFile || (creationTime != dbe.CreationTimeUtc || fi.Length < dbe.LastPosition);
                        if (logHasRolled)
                        {
                            LogManager.GetCurrentClassLogger().Info("Log {0} has rolled", logName);
                            LogsFileDatabase.Roll(dbe);
                        }
                        // Log has rolled or this is a new file, or we haven't processed yet:
                        // rewind the persisted position so the whole file is re-read.
                        bool processWholeFile = logHasRolled || !dbe.ProcessedFile;
                        if (processWholeFile)
                            LogsFileDatabase.Update(dbe, true, 0);
                    }
                    rsfiles.close();

                    // Pass 2: tail every tracked file from its last persisted record number.
                    foreach (string fileName in Files.ToList())
                    {
                        var dbe = LogsFileDatabase.LookupLogFile(fileName);
                        var lastRecordNumber = dbe.LastPosition;
                        var query = string.Format("SELECT * FROM {0} where Index > {1}", fileName, lastRecordNumber);
                        var rs = oLogQuery.Execute(query, iFmt);

                        // Map column names to ordinals once per recordset.
                        Dictionary<string, int> colMap = new Dictionary<string, int>();
                        for (int col = 0; col < rs.getColumnCount(); col++)
                        {
                            string colName = rs.getColumnName(col);
                            colMap[colName] = col;
                        }

                        // Browse the recordset
                        for (; !rs.atEnd(); rs.moveNext())
                        {
                            var record = rs.getRecord();
                            var json = new JObject();
                            foreach (var field in _arguments.Fields)
                            {
                                // Skip configured fields the recordset doesn't contain.
                                if (!colMap.ContainsKey(field.Name)) continue;
                                // Stamp the event's origin once per record: an explicit
                                // LogSource argument wins over the file name.
                                if (json["logSource"] == null)
                                {
                                    if (string.IsNullOrEmpty(_arguments.LogSource))
                                        json.Add(new JProperty("logSource", fileName));
                                    else
                                        json.Add(new JProperty("logSource", _arguments.LogSource));
                                }
                                object v = record.getValue(field.Name);
                                if (field.DataType == typeof(DateTime))
                                {
                                    DateTime dt = DateTime.Parse(v.ToString());
                                    json.Add(new JProperty(field.Name, dt));
                                }
                                else
                                    json.Add(new JProperty(field.Name, v));
                            }

                            // "Text" may be absent when the field list doesn't populate it;
                            // guard instead of dereferencing null, which previously aborted
                            // the entire polling pass through the generic catch below.
                            var textToken = json["Text"];
                            string msg = textToken == null ? null : textToken.ToString();
                            if (!string.IsNullOrEmpty(msg))
                            {
                                if (_codecArguments != null && _codecArguments.Type == CodecArguments.CodecType.multiline)
                                {
                                    // Multiline codec buffers/joins lines before emitting.
                                    _codec.Apply(msg, this);
                                    _receivedMessages++;
                                    dbe.IncrementLineCount();
                                }
                                else
                                {
                                    ProcessJson(json);
                                    dbe.IncrementLineCount();
                                    _receivedMessages++;
                                }
                            }

                            // Persist the high-water mark after every record so a crash
                            // re-reads at most one record.
                            var lrn = (Int64)record.getValueEx("Index");
                            LogsFileDatabase.Update(dbe, true, lrn);
                            // NOTE: the per-record GC.Collect() that used to live here was
                            // removed — forcing a full collection per line is a severe
                            // throughput killer and the GC reclaims these objects anyway.
                        }
                        colMap.Clear();
                        // Close the recordset
                        rs.close();
                    }
                }
                catch (FileNotFoundException fnfex)
                {
                    // Warn only once per missing file to avoid log spam.
                    string fn = fnfex.FileName;
                    if (!string.IsNullOrEmpty(fn) && !_fnfmap.ContainsKey(fn))
                    {
                        LogManager.GetCurrentClassLogger().Warn(fnfex.Message);
                        _fnfmap[fn] = fn;
                    }
                }
                catch (OperationCanceledException)
                {
                    // Shutdown requested mid-pass: leave the polling loop promptly.
                    break;
                }
                catch (Exception ex)
                {
                    LogManager.GetCurrentClassLogger().Error(ex);
                }
                finally
                {
                    try
                    {
                        oLogQuery = null;
                        // Sleep: the handle is never set, so this is a polling-interval
                        // wait that CancelToken can interrupt immediately.
                        if (!Stop)
                            syncHandle.Wait(TimeSpan.FromSeconds(_pollingIntervalInSeconds), CancelToken);
                    }
                    catch (OperationCanceledException) { }
                    catch (Exception ex1)
                    {
                        LogManager.GetCurrentClassLogger().Warn(ex1);
                    }
                }
            }
        }
        Finished();
    }
}
/// <summary>
/// Legacy polling watcher: tracks per-file high-water marks in in-memory dictionaries
/// (no persistence across restarts) and emits each newly appended line as a JSON event
/// until <c>CancelToken</c> is cancelled.
/// </summary>
/// <param name="fileToWatch">Log Parser FROM-clause (file path or wildcard) to monitor.</param>
private void FileWatcher(string fileToWatch)
{
    var iFmt = new TextLineInputFormat()
    {
        iCodepage = _arguments.CodePage,
        splitLongLines = _arguments.SplitLongLines,
        recurse = _arguments.Recurse
    };

    // Execute the query
    while (!CancelToken.IsCancellationRequested)
    {
        var oLogQuery = new LogQuery();
        try
        {
            // Lower priority while scanning so tailing doesn't starve the host process.
            Thread.CurrentThread.Priority = ThreadPriority.BelowNormal;

            // Pass 1: resolve the concrete files behind the watch expression and refresh
            // per-file bookkeeping (creation time, size, high-water mark).
            var qfiles = string.Format("SELECT Distinct [LogFilename] FROM {0}", fileToWatch);
            var rsfiles = oLogQuery.Execute(qfiles, iFmt);
            for (; !rsfiles.atEnd(); rsfiles.moveNext())
            {
                var record = rsfiles.getRecord();
                string logName = record.getValue("LogFilename") as string;
                FileInfo fi = new FileInfo(logName);
                if (!fi.Exists)
                {
                    // File disappeared: forget its state so a reappearance is treated as new.
                    _logFileCreationTimes.Remove(logName);
                    _logFileMaxRecords.Remove(logName);
                    _logFileSizes.Remove(logName);
                }
                _logFileSampleTimes[logName] = DateTime.UtcNow;
                DateTime creationTime = fi.CreationTimeUtc;
                // Rolled when recreated (newer creation time) or truncated (shrank).
                bool logHasRolled =
                    (_logFileCreationTimes.ContainsKey(logName) && creationTime > _logFileCreationTimes[logName]) ||
                    (_logFileSizes.ContainsKey(logName) && fi.Length < _logFileSizes[logName]);
                if (!_logFileMaxRecords.ContainsKey(logName) || logHasRolled)
                {
                    _logFileCreationTimes[logName] = creationTime;
                    _logFileSizes[logName] = fi.Length;
                    // Seed the high-water mark at the current end of file so only lines
                    // appended from now on are emitted; after a roll, start from 0 so the
                    // whole new file is read.
                    var qcount = string.Format("SELECT max(Index) as MaxRecordNumber FROM {0}", logName);
                    var rcount = oLogQuery.Execute(qcount, iFmt);
                    var qr = rcount.getRecord();
                    var lrn = (Int64)qr.getValueEx("MaxRecordNumber");
                    if (logHasRolled)
                    {
                        LogManager.GetCurrentClassLogger().Info("Log {0} has rolled", logName);
                        lrn = 0;
                    }
                    _logFileMaxRecords[logName] = lrn;
                }
                _logFileSizes[logName] = fi.Length;
            }
            rsfiles.close();

            // Pass 2: tail every tracked file past its recorded high-water mark.
            foreach (string fileName in _logFileMaxRecords.Keys.ToList())
            {
                var lastRecordNumber = _logFileMaxRecords[fileName];
                var query = string.Format("SELECT * FROM {0} where Index > {1}", fileName, lastRecordNumber);
                var rs = oLogQuery.Execute(query, iFmt);

                // Map column names to ordinals once per recordset.
                Dictionary<string, int> colMap = new Dictionary<string, int>();
                for (int col = 0; col < rs.getColumnCount(); col++)
                {
                    string colName = rs.getColumnName(col);
                    colMap[colName] = col;
                }

                // Browse the recordset
                for (; !rs.atEnd(); rs.moveNext())
                {
                    var record = rs.getRecord();
                    var json = new JObject();
                    foreach (var field in _arguments.Fields)
                    {
                        // Skip configured fields the recordset doesn't contain.
                        if (!colMap.ContainsKey(field.Name))
                        {
                            continue;
                        }
                        object v = record.getValue(field.Name);
                        if (field.DataType == typeof(DateTime))
                        {
                            DateTime dt = DateTime.Parse(v.ToString());
                            json.Add(new JProperty(field.Name, dt));
                        }
                        else
                        {
                            json.Add(new JProperty(field.Name, v));
                        }
                    }

                    // "Text" may be absent when the field list doesn't populate it; guard
                    // instead of dereferencing null, which previously aborted the whole
                    // polling pass through the generic catch below.
                    var textToken = json["Text"];
                    string msg = textToken == null ? null : textToken.ToString();
                    if (!string.IsNullOrEmpty(msg))
                    {
                        ProcessJson(json);
                        _receivedMessages++;
                    }
                    // Advance the in-memory high-water mark after each record.
                    var lrn = (Int64)record.getValueEx("Index");
                    _logFileMaxRecords[fileName] = lrn;
                    // NOTE: the per-record GC.Collect() that used to live here was removed —
                    // forcing a full collection per line is a severe throughput killer.
                }
                colMap.Clear();
                // Close the recordset
                rs.close();
            }
        }
        catch (OperationCanceledException)
        {
            // Shutdown requested mid-pass: exit instead of logging it as an error.
            break;
        }
        catch (Exception ex)
        {
            LogManager.GetCurrentClassLogger().Error(ex);
        }
        finally
        {
            oLogQuery = null;
        }
        Thread.CurrentThread.Priority = ThreadPriority.Normal;
        // Cancellation-aware sleep: returns immediately when CancelToken fires, instead of
        // blocking in Thread.Sleep for the full polling interval during shutdown.
        CancelToken.WaitHandle.WaitOne(TimeSpan.FromSeconds(_pollingIntervalInSeconds));
    }
    Finished();
}
/// <summary>
/// Polls the file(s) matched by <paramref name="fileToWatch"/> with Log Parser and emits each
/// newly appended line as a JSON event until <c>Stop</c> is set. Per-file read positions are
/// persisted through <c>LogsFileDatabase</c>, so log rolls and restarts resume correctly.
/// </summary>
/// <param name="fileToWatch">Log Parser FROM-clause (file path or wildcard) to monitor.</param>
private void FileWatcher(string fileToWatch)
{
    var iFmt = new TextLineInputFormat()
    {
        iCodepage = _arguments.CodePage,
        splitLongLines = _arguments.SplitLongLines,
        recurse = _arguments.Recurse
    };
    // Files already reported as missing, so each FileNotFoundException is warned only once.
    Dictionary<string, string> _fnfmap = new Dictionary<string, string>();
    using (var syncHandle = new ManualResetEventSlim())
    {
        // Execute the query: one discovery pass plus one tail pass per polling interval.
        while (!Stop)
        {
            var oLogQuery = new LogQuery();
            if (!CancelToken.IsCancellationRequested)
            {
                try
                {
                    // Pass 1: resolve the concrete file names behind the watch expression
                    // and detect rolled/new files before tailing.
                    var qfiles = string.Format("SELECT Distinct [LogFilename] FROM {0}", fileToWatch);
                    var rsfiles = oLogQuery.Execute(qfiles, iFmt);
                    for (; !rsfiles.atEnd(); rsfiles.moveNext())
                    {
                        var record = rsfiles.getRecord();
                        string logName = record.getValue("LogFilename") as string;
                        FileInfo fi = new FileInfo(logName);
                        var dbe = LogsFileDatabase.LookupLogFile(logName);
                        SaveVisitedFileName(dbe.FileName);
                        DateTime creationTime = fi.CreationTimeUtc;
                        // Rolled when the file is new, was recreated (creation time changed),
                        // or shrank below the last persisted read position.
                        bool logHasRolled = dbe.NewFile || (creationTime != dbe.CreationTimeUtc || fi.Length < dbe.LastPosition);
                        if (logHasRolled)
                        {
                            LogManager.GetCurrentClassLogger().Info("Log {0} has rolled", logName);
                            LogsFileDatabase.Roll(dbe);
                        }
                        // Log has rolled or this is a new file, or we haven't processed yet.
                        bool processWholeFile = logHasRolled || !dbe.ProcessedFile;
                        if (processWholeFile)
                        {
                            // Rewind the persisted position so the whole file is re-read.
                            LogsFileDatabase.Update(dbe, true, 0);
                        }
                    }
                    rsfiles.close();
                    // Pass 2: tail every tracked file from its last persisted record number.
                    foreach (string fileName in Files.ToList())
                    {
                        var dbe = LogsFileDatabase.LookupLogFile(fileName);
                        var lastRecordNumber = dbe.LastPosition;
                        var query = string.Format("SELECT * FROM {0} where Index > {1}", fileName, lastRecordNumber);
                        var rs = oLogQuery.Execute(query, iFmt);
                        // Map column names to ordinals once per recordset.
                        Dictionary<string, int> colMap = new Dictionary<string, int>();
                        for (int col = 0; col < rs.getColumnCount(); col++)
                        {
                            string colName = rs.getColumnName(col);
                            colMap[colName] = col;
                        }
                        // Browse the recordset
                        for (; !rs.atEnd(); rs.moveNext())
                        {
                            var record = rs.getRecord();
                            var json = new JObject();
                            foreach (var field in _arguments.Fields)
                            {
                                // Skip configured fields the recordset doesn't contain.
                                if (!colMap.ContainsKey(field.Name))
                                {
                                    continue;
                                }
                                // Stamp the event's origin once per record: an explicit
                                // LogSource argument wins over the file name.
                                if (json["logSource"] == null)
                                {
                                    if (string.IsNullOrEmpty(_arguments.LogSource))
                                    {
                                        json.Add(new JProperty("logSource", fileName));
                                    }
                                    else
                                    {
                                        json.Add(new JProperty("logSource", _arguments.LogSource));
                                    }
                                }
                                object v = record.getValue(field.Name);
                                if (field.DataType == typeof(DateTime))
                                {
                                    DateTime dt = DateTime.Parse(v.ToString());
                                    json.Add(new JProperty(field.Name, dt));
                                }
                                else
                                {
                                    json.Add(new JProperty(field.Name, v));
                                }
                            }
                            // NOTE(review): throws NullReferenceException if no "Text"
                            // property was populated above — presumably "Text" is always
                            // among the configured fields; confirm against _arguments.Fields.
                            string msg = json["Text"].ToString();
                            if (!string.IsNullOrEmpty(msg))
                            {
                                if (_codecArguments != null && _codecArguments.Type == CodecArguments.CodecType.multiline)
                                {
                                    // Multiline codec buffers/joins lines before emitting.
                                    _codec.Apply(msg, this);
                                    _receivedMessages++;
                                    dbe.IncrementLineCount();
                                }
                                else
                                {
                                    ProcessJson(json);
                                    dbe.IncrementLineCount();
                                    _receivedMessages++;
                                }
                            }
                            // Persist the high-water mark after every record so a crash
                            // re-reads at most one record.
                            var lrn = (Int64)record.getValueEx("Index");
                            LogsFileDatabase.Update(dbe, true, lrn);
                            // NOTE(review): forced full GC per record is very costly.
                            GC.Collect();
                        }
                        colMap.Clear();
                        // Close the recordset
                        rs.close();
                        rs = null;
                        GC.Collect();
                    }
                }
                catch (FileNotFoundException fnfex)
                {
                    // Warn only once per missing file to avoid log spam.
                    string fn = fnfex.FileName;
                    if (!string.IsNullOrEmpty(fn) && !_fnfmap.ContainsKey(fn))
                    {
                        LogManager.GetCurrentClassLogger().Warn(fnfex.Message);
                        _fnfmap[fn] = fn;
                    }
                }
                catch (OperationCanceledException)
                {
                    // Shutdown requested mid-pass: leave the polling loop.
                    break;
                }
                catch (Exception ex)
                {
                    LogManager.GetCurrentClassLogger().Error(ex);
                }
                finally
                {
                    try
                    {
                        oLogQuery = null;
                        // Sleep: syncHandle is never set, so this is a polling-interval
                        // wait that CancelToken can interrupt immediately.
                        if (!Stop)
                        {
                            syncHandle.Wait(TimeSpan.FromSeconds(_pollingIntervalInSeconds), CancelToken);
                        }
                    }
                    catch (OperationCanceledException) { }
                    catch (Exception ex1)
                    {
                        LogManager.GetCurrentClassLogger().Warn(ex1);
                    }
                }
            }
        }
        Finished();
    }
}