public override JObject ToJson()
{
    JObject json = new JObject(
        new JProperty("log",
            new JObject(
                new JProperty("messages", _receivedMessages),
                new JProperty("type", InputType),
                new JProperty("location", _arguments.Location),
                new JProperty("logSource", _arguments.LogSource),
                new JProperty("codepage", _arguments.CodePage),
                new JProperty("splitLongLines", _arguments.SplitLongLines),
                new JProperty("recurse", _arguments.Recurse),
                new JProperty("filedb",
                    new JArray(from f in Files.ToList()
                               select JObject.FromObject(LogsFileDatabase.LookupLogFile(f)))),
                new JProperty("files",
                    new JArray(from f in Files.ToList()
                               select new JValue(f))))));

    if (_codecArguments != null)
    {
        var cp = new JProperty("codec",
            new JArray(
                new JObject(
                    new JProperty("type", _codecArguments.Type.ToString()),
                    new JProperty("what", _codecArguments.What.ToString()),
                    new JProperty("negate", _codecArguments.Negate),
                    new JProperty("multilineTag", _codecArguments.MultilineTag),
                    new JProperty("pattern", _codecArguments.Pattern))));
        json.Add(cp);
    }

    return json;
}
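// Illustrative shape of the JSON this method emits (the values below are
// assumed for the example, not taken from a real run; "codec" appears only
// when _codecArguments is set):
//
//   {
//     "log": {
//       "messages": 42, "type": "Log", "location": "C:\\logs",
//       "logSource": null, "codepage": 0, "splitLongLines": false,
//       "recurse": 0,
//       "filedb": [ { /* serialized LogsFileDatabase entry per file */ } ],
//       "files": [ "C:\\logs\\app.log" ]
//     },
//     "codec": [ { "type": "multiline", "what": "previous", "negate": false,
//                  "multilineTag": "", "pattern": "^\\s+" } ]
//   }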
// One thread for each kind of file to watch, i.e. "*.log,*.txt" would be
// two separate threads.
private void TailFileWatcher(string fileToWatch)
{
    var fnfMap = new Dictionary<string, string>();

    using (var syncHandle = new ManualResetEventSlim())
    {
        // Poll until asked to stop or cancelled.
        while (!Stop && !CancelToken.IsCancellationRequested)
        {
            try
            {
                if (!CancelToken.IsCancellationRequested)
                {
                    // 'fileToWatch' may be a literal name (foo.log) or a wildcard filter (*.log).
                    var isWildcardPattern = fileToWatch.Contains("*");
                    string path = Path.GetDirectoryName(fileToWatch);
                    string name = Path.GetFileName(fileToWatch);
                    if (string.IsNullOrEmpty(path))
                    {
                        path = ".";
                    }

                    LogManager.GetCurrentClassLogger().Trace(":{0} Tailing File: {1}",
                        Thread.CurrentThread.ManagedThreadId, Path.Combine(path, name));

                    SearchOption so = SearchOption.TopDirectoryOnly;
                    if (_arguments.Recurse == -1)
                    {
                        so = SearchOption.AllDirectories;
                    }

                    foreach (string fileName in Directory.GetFiles(path, name, so))
                    {
                        var dbe = LogsFileDatabase.LookupLogFile(fileName);

                        // Spin up one tailing thread per file we haven't yet seen.
                        if (isWildcardPattern && !HaveSeenFile(fileName) && dbe.NewFile)
                        {
                            LogManager.GetCurrentClassLogger().Debug(":{0} Starting Thread Tailing File: {1}",
                                Thread.CurrentThread.ManagedThreadId, dbe.FileName);
                            LogsFileDatabase.Update(dbe, false, dbe.LastPosition);
                            Task.Factory.StartNew(() => TailFileWatcher(fileName));
                        }
                        else if (!isWildcardPattern)
                        {
                            FileInfo fi = new FileInfo(dbe.FileName);
                            SaveVisitedFileName(fileName);
                            //LogManager.GetCurrentClassLogger().Info("Located File: {0}, New: {1}", dbe.FileName, dbe.NewFile);

                            // A shrinking file or a changed creation time means the log has rolled.
                            bool logHasRolled = false;
                            if (fi.Length < dbe.LastPosition || fi.CreationTimeUtc != dbe.CreationTimeUtc)
                            {
                                LogManager.GetCurrentClassLogger().Info("{0}: Log has Rolled: {1}",
                                    Thread.CurrentThread.ManagedThreadId, dbe.FileName);
                                logHasRolled = true;
                                LogsFileDatabase.Roll(dbe);
                            }

                            // The log has rolled, or this is a file we are seeing for the first time.
                            bool processWholeFile = logHasRolled || !dbe.ProcessedFile || dbe.NewFile;
                            if (processWholeFile)
                            {
                                LogsFileDatabase.Update(dbe, true, 0);
                                LogManager.GetCurrentClassLogger().Debug("{0}: Process Whole File: {1}",
                                    Thread.CurrentThread.ManagedThreadId, dbe.FileName);
                                TailFileContents(dbe.FileName, 0, dbe);
                            }
                            else
                            {
                                TailFileContents(dbe.FileName, dbe.LastPosition, dbe);
                            }
                        }
                    }
                }
            }
            catch (FileNotFoundException fnfex)
            {
                // Warn only the first time a given file goes missing.
                string fn = fnfex.FileName;
                if (!string.IsNullOrEmpty(fn))
                {
                    if (!fnfMap.ContainsKey(fn))
                    {
                        LogManager.GetCurrentClassLogger().Warn(fnfex.Message);
                    }
                    fnfMap[fn] = fn;
                }
            }
            catch (IOException ioex)
            {
                LogManager.GetCurrentClassLogger().Debug("Log has rolled: {0}", ioex.Message);
            }
            catch (OperationCanceledException)
            {
                break;
            }
            catch (Exception ex)
            {
                LogManager.GetCurrentClassLogger().Error(ex);
            }
            finally
            {
                try
                {
                    // Cancellable sleep between polls.
                    if (!Stop)
                    {
                        syncHandle.Wait(TimeSpan.FromSeconds(_pollingIntervalInSeconds), CancelToken);
                    }
                }
                catch (OperationCanceledException)
                {
                    Stop = true;
                }
                catch (Exception ex1)
                {
                    LogManager.GetCurrentClassLogger().Warn(ex1);
                }
            }
        }
    }

    Finished();
}
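// Illustrative sketch only (the actual wiring lives outside this method, and
// the Split call below is an assumption about how patterns are supplied):
// each comma-separated pattern gets its own polling loop, so "*.log,*.txt"
// yields two independent TailFileWatcher tasks, and each wildcard match then
// spawns its own per-file tail via the dbe.NewFile branch above.
//
//   foreach (var pattern in "*.log,*.txt".Split(','))
//   {
//       Task.Factory.StartNew(() => TailFileWatcher(pattern));
//   }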
private void FileWatcher(string fileToWatch)
{
    var iFmt = new TextLineInputFormat()
    {
        iCodepage = _arguments.CodePage,
        splitLongLines = _arguments.SplitLongLines,
        recurse = _arguments.Recurse
    };

    var fnfMap = new Dictionary<string, string>();

    using (var syncHandle = new ManualResetEventSlim())
    {
        // Poll until asked to stop.
        while (!Stop)
        {
            var oLogQuery = new LogQuery();
            if (!CancelToken.IsCancellationRequested)
            {
                try
                {
                    // First pass: enumerate the files behind the pattern and
                    // detect rolls, so we know which files to re-read from the start.
                    var qfiles = string.Format("SELECT Distinct [LogFilename] FROM {0}", fileToWatch);
                    var rsfiles = oLogQuery.Execute(qfiles, iFmt);
                    for (; !rsfiles.atEnd(); rsfiles.moveNext())
                    {
                        var record = rsfiles.getRecord();
                        string logName = record.getValue("LogFilename") as string;
                        FileInfo fi = new FileInfo(logName);
                        var dbe = LogsFileDatabase.LookupLogFile(logName);
                        SaveVisitedFileName(dbe.FileName);

                        DateTime creationTime = fi.CreationTimeUtc;
                        bool logHasRolled = dbe.NewFile
                            || creationTime != dbe.CreationTimeUtc
                            || fi.Length < dbe.LastPosition;
                        if (logHasRolled)
                        {
                            LogManager.GetCurrentClassLogger().Info("Log {0} has rolled", logName);
                            LogsFileDatabase.Roll(dbe);
                        }

                        // The log has rolled, is new, or hasn't been processed yet.
                        bool processWholeFile = logHasRolled || !dbe.ProcessedFile;
                        if (processWholeFile)
                        {
                            LogsFileDatabase.Update(dbe, true, 0);
                        }
                    }
                    rsfiles.close();

                    // Second pass: read each file incrementally from its checkpoint.
                    // In this mode LastPosition stores the last consumed record number.
                    foreach (string fileName in Files.ToList())
                    {
                        var dbe = LogsFileDatabase.LookupLogFile(fileName);
                        var lastRecordNumber = dbe.LastPosition;
                        var query = string.Format("SELECT * FROM {0} WHERE Index > {1}", fileName, lastRecordNumber);
                        var rs = oLogQuery.Execute(query, iFmt);

                        // Map column names to ordinals once per recordset.
                        Dictionary<string, int> colMap = new Dictionary<string, int>();
                        for (int col = 0; col < rs.getColumnCount(); col++)
                        {
                            string colName = rs.getColumnName(col);
                            colMap[colName] = col;
                        }

                        // Browse the recordset.
                        for (; !rs.atEnd(); rs.moveNext())
                        {
                            var record = rs.getRecord();
                            var json = new JObject();
                            foreach (var field in _arguments.Fields)
                            {
                                if (!colMap.ContainsKey(field.Name))
                                {
                                    continue;
                                }

                                // Add logSource once, preferring the configured value
                                // and falling back to the file name.
                                if (json["logSource"] == null)
                                {
                                    if (string.IsNullOrEmpty(_arguments.LogSource))
                                    {
                                        json.Add(new JProperty("logSource", fileName));
                                    }
                                    else
                                    {
                                        json.Add(new JProperty("logSource", _arguments.LogSource));
                                    }
                                }

                                object v = record.getValue(field.Name);
                                if (field.DataType == typeof(DateTime))
                                {
                                    DateTime dt = DateTime.Parse(v.ToString());
                                    json.Add(new JProperty(field.Name, dt));
                                }
                                else
                                {
                                    json.Add(new JProperty(field.Name, v));
                                }
                            }

                            // Guard against records that carry no Text column.
                            var textToken = json["Text"];
                            string msg = textToken != null ? textToken.ToString() : null;
                            if (!string.IsNullOrEmpty(msg))
                            {
                                if (_codecArguments != null && _codecArguments.Type == CodecArguments.CodecType.multiline)
                                {
                                    _codec.Apply(msg, this);
                                    _receivedMessages++;
                                    dbe.IncrementLineCount();
                                }
                                else
                                {
                                    ProcessJson(json);
                                    dbe.IncrementLineCount();
                                    _receivedMessages++;
                                }
                            }

                            // Checkpoint the last record number we consumed.
                            var lrn = (Int64)record.getValueEx("Index");
                            LogsFileDatabase.Update(dbe, true, lrn);

                            // Prompt collection of the per-record COM wrappers.
                            GC.Collect();
                        }

                        colMap.Clear();

                        // Close the recordset and release it.
                        rs.close();
                        rs = null;
                        GC.Collect();
                    }
                }
                catch (FileNotFoundException fnfex)
                {
                    // Warn only the first time a given file goes missing.
                    string fn = fnfex.FileName;
                    if (!string.IsNullOrEmpty(fn) && !fnfMap.ContainsKey(fn))
                    {
                        LogManager.GetCurrentClassLogger().Warn(fnfex.Message);
                        fnfMap[fn] = fn;
                    }
                }
                catch (OperationCanceledException)
                {
                    break;
                }
                catch (Exception ex)
                {
                    LogManager.GetCurrentClassLogger().Error(ex);
                }
                finally
                {
                    try
                    {
                        oLogQuery = null;

                        // Cancellable sleep between polls.
                        if (!Stop)
                        {
                            syncHandle.Wait(TimeSpan.FromSeconds(_pollingIntervalInSeconds), CancelToken);
                        }
                    }
                    catch (OperationCanceledException)
                    {
                    }
                    catch (Exception ex1)
                    {
                        LogManager.GetCurrentClassLogger().Warn(ex1);
                    }
                }
            }
        }
    }

    Finished();
}
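// The polling pass above issues Log Parser (LogQuery COM) SQL of this shape
// (the file names and record number are illustrative):
//
//   SELECT Distinct [LogFilename] FROM C:\logs\*.log
//   SELECT * FROM C:\logs\app.log WHERE Index > 1024
//
// Because LastPosition stores the last consumed record number in this mode,
// each pass returns only records past the previous checkpoint.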
private void TailFileContents(string fileName, long offset, LogsFileDatabaseEntry dbe)
{
    const int bufSize = 16535; // ~16 KB read buffer
    long prevLen = offset;

    FileInfo fi = new FileInfo(fileName);
    if (!fi.Exists)
    {
        return;
    }

    LogManager.GetCurrentClassLogger().Trace(":{0} Tailing File: {1} at Pos: {2}",
        Thread.CurrentThread.ManagedThreadId, fileName, prevLen);

    using (var stream = new FileStream(fi.FullName, FileMode.Open, FileAccess.Read,
        FileShare.Delete | FileShare.ReadWrite))
    {
        stream.Seek(prevLen, SeekOrigin.Begin);

        char[] buffer = new char[bufSize];
        StringBuilder current = new StringBuilder();

        using (StreamReader sr = new StreamReader(stream))
        {
            int nRead;
            do
            {
                // Read a buffered block of characters.
                nRead = sr.ReadBlock(buffer, 0, bufSize);
                for (int i = 0; i < nRead; ++i)
                {
                    // CR or LF terminates a line; the empty segment between a
                    // CR/LF pair is skipped by the length check below.
                    if (buffer[i] == '\n' || buffer[i] == '\r')
                    {
                        if (current.Length > 0)
                        {
                            // Prepend any partial line carried over from the last pass.
                            string line = string.Concat(dbe.Previous, current);

                            var json = new JObject();
                            // Prefer the configured logSource; fall back to the file name.
                            json.Add(new JProperty("logSource",
                                string.IsNullOrEmpty(_arguments.LogSource) ? fileName : _arguments.LogSource));
                            //LogManager.GetCurrentClassLogger().Debug(":{0} File: {1}:{2} {3}", Thread.CurrentThread.ManagedThreadId, fileName, dbe.LinesProcessed, line);

                            // The partial input has now been consumed.
                            dbe.Previous = "";
                            json["Text"] = line;
                            json["Index"] = dbe.LinesProcessed;
                            json["LogFileName"] = fileName;

                            if (_codecArguments != null && _codecArguments.Type == CodecArguments.CodecType.multiline)
                            {
                                try
                                {
                                    _codec.Apply(line, this);
                                    Interlocked.Increment(ref _receivedMessages);
                                    dbe.IncrementLineCount();
                                }
                                catch (Exception ex)
                                {
                                    Interlocked.Increment(ref _errorCount);
                                    LogManager.GetCurrentClassLogger().ErrorException("Filter Error", ex);
                                }
                            }
                            else
                            {
                                try
                                {
                                    ProcessJson(json);
                                    dbe.IncrementLineCount();
                                    Interlocked.Increment(ref _receivedMessages);
                                    LogsFileDatabase.Update(dbe, true, sr.BaseStream.Position);
                                }
                                catch (Exception ex)
                                {
                                    Interlocked.Increment(ref _errorCount);
                                    LogManager.GetCurrentClassLogger().ErrorException("Process Error", ex);
                                }
                            }
                        }
                        current.Clear();
                    }
                    else
                    {
                        // Accumulate characters until a terminator arrives.
                        current.Append(buffer[i]);
                    }
                }
            } while (nRead > 0);

            // No trailing newline yet: save the partial line for the next pass.
            if (current.Length > 0)
            {
                dbe.Previous = current.ToString();
            }
        }
    }
}
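// Checkpointing caveat: sr.BaseStream.Position reports how far the StreamReader
// has read into its internal buffer, which can run ahead of the characters
// actually consumed above, so after a crash LastPosition may point past lines
// that were buffered but never processed. A byte-accurate checkpoint would need
// to count consumed bytes according to the stream's encoding, e.g. (sketch
// under the assumption of a fixed single-byte encoding, not what this method does):
//
//   long consumed = offset;
//   // ... after processing each complete line of n characters plus its terminator:
//   consumed += n + 1;
//   LogsFileDatabase.Update(dbe, true, consumed);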