/// <summary>
/// Removes all outdated log entry files, at most once per day.
/// </summary>
internal void CleanupLogEntries()
{
    // Check time - skip entirely until the next scheduled cleanup is due
    if (DateTime.UtcNow < m_nextLogCleanup)
    {
        return;
    }

    // Not again for now - schedule the next run one day from now
    m_nextLogCleanup = DateTime.UtcNow.AddDays(1);

    // For cleanup: the oldest file name (date-formatted) that is still kept.
    // NOTE(review): this uses DateTime.Now while the scheduling above uses
    // UtcNow - confirm the log file names are written in local time.
    // NOTE(review): the factor 7 suggests LogLifeTime is configured in weeks - verify.
    var firstValid = DateTime.Now.Date.AddDays(-7 * VCRConfiguration.Current.LogLifeTime).ToString(LogEntryDateFormat);

    // Load all jobs - file names start with a date stamp, so an ordinal
    // string comparison against the cut-off identifies outdated entries
    foreach (var file in LogDirectory.GetFiles("*" + VCRRecordingInfo.FileSuffix))
    {
        if (file.Name.CompareTo(firstValid) < 0)
        {
            try
            {
                // Delete the log entry
                file.Delete();
            }
            catch (Exception e)
            {
                // Report error (cleanup is best-effort; one failure must not abort the loop)
                VCRServer.Log(e);
            }
        }
    }
}
public async Task CheckResumption(int count)
{
    var directoryPath = GetLogDirectoryPath();
    var logDirectory = new LogDirectory(directoryPath);

    // Write 'count' numbered lines to the log.
    using (var writer = logDirectory.GetWriter())
    {
        var payloads = Enumerable.Range(0, count)
            .Select(n => $"THIS IS LINE NUMBER {n}")
            .Select(Encoding.UTF8.GetBytes);

        await writer.WriteManyAsync(payloads);
    }

    new DirectoryInfo(directoryPath).DumpDirectoryContentsToConsole();

    // read events in a very inefficient way, checking that we can resume at each single line:
    // create a fresh reader per event, resuming from the previously recorded position
    var resumeFileNumber = -1;
    var resumeBytePosition = -1;

    for (var counter = 0; counter < count; counter++)
    {
        var expectedText = $"THIS IS LINE NUMBER {counter}";

        var eventData = logDirectory.GetReader().Read(resumeFileNumber, resumeBytePosition).FirstOrDefault();
        var actualText = Encoding.UTF8.GetString(eventData.Data);

        Assert.That(actualText, Is.EqualTo(expectedText));

        resumeFileNumber = eventData.FileNumber;
        resumeBytePosition = eventData.BytePosition;
    }
}
public async Task CompareSingleVersusMany(bool single, int count)
{
    SetLogLevel(LogEventLevel.Information);

    var messages = Enumerable.Range(0, count)
        .Select(n => $"THIS IS A STRING MESSAGE EVENT/{n}")
        .ToList();

    var logDirectory = new LogDirectory(GetLogDirectoryPath());
    var stopwatch = Stopwatch.StartNew();

    using (var writer = logDirectory.GetWriter())
    {
        Log.Information("Writing");

        // Either one write call per message, or one batched call for all of them.
        if (!single)
        {
            await writer.WriteManyAsync(messages.Select(Encoding.UTF8.GetBytes));
        }
        else
        {
            foreach (var message in messages)
            {
                await writer.WriteAsync(Encoding.UTF8.GetBytes(message));
            }
        }

        Log.Information("Done writing!");

        await Task.Delay(TimeSpan.FromSeconds(0.1));
    }

    var elapsedSeconds = stopwatch.Elapsed.TotalSeconds;

    Console.WriteLine($"Wrote {count} msgs in {elapsedSeconds:0.0} s - that's {count / elapsedSeconds:0.0} msg/s");
}
// Creates a log file entry that is backed by a file inside a 7-Zip archive.
public ArchiveLogFile(LogDirectory directory, string path, SevenZipArchiveFile archiveFile) : base(directory)
{
    this.archiveFile = archiveFile;
    this.filePath = path;
    // NOTE(review): Cancel is set to true immediately on construction -
    // presumably this stops any in-flight archive operation; confirm against
    // the SevenZipArchiveFile contract.
    this.archiveFile.Cancel = true;
}
public async Task CanCreateFilesOfDifferentSize(int approxFileLength)
{
    var logDirectoryPath = GetLogDirectoryPath();
    var settings = new Settings(approximateMaximumFileLength: approxFileLength);
    var logDirectory = new LogDirectory(logDirectoryPath, settings);

    // Write enough lines to force the writer to roll over several files.
    using (var writer = logDirectory.GetWriter())
    {
        var payloads = Enumerable.Range(0, 1000)
            .Select(n => $"THIS IS LINE NUMBER {n} OUT OF A LOT")
            .Select(Encoding.UTF8.GetBytes);

        await writer.WriteManyAsync(payloads);
    }

    var directory = new DirectoryInfo(logDirectoryPath);

    directory.DumpDirectoryContentsToConsole();

    // No data file may exceed the approximate maximum by more than 10%.
    foreach (var dataFile in directory.GetFiles("*.dat").OrderBy(f => f.FullName))
    {
        Assert.That(dataFile.Length, Is.LessThan(1.1 * approxFileLength));
    }
}
/// <summary>
/// Appends one W3C-extended-log-format line for the given request to the
/// current day's log file, writing the standard header block when the file
/// is new. Logging is best-effort: all I/O failures are swallowed.
/// </summary>
/// <param name="request">The request to log.</param>
public override void Post(Request request)
{
    if (String.IsNullOrEmpty(LogDirectory) || !Enabled)
    {
        return;
    }

    StreamWriter writer = null;

    lock (LockObject)
    {
        try
        {
            var now = DateTime.UtcNow;
            var logDir = LogDirectory.EndsWith("\\") ? LogDirectory : LogDirectory + "\\";
            // NOTE(review): "yyyy-dd-MM" puts the day before the month - if that is
            // unintentional it should be "yyyy-MM-dd", but changing it would orphan
            // existing log files, so it is preserved here.
            var logFile = logDir + now.ToString("yyyy-dd-MM");

            // BUGFIX: previously this opened 'logDir' (the directory path) instead
            // of the computed 'logFile', which failed and left 'logFile' unused.
            writer = new StreamWriter(File.Open(logFile, FileMode.Append, FileAccess.Write, FileShare.Read), Encoding.ASCII);

            if (writer.BaseStream.Position == 0)
            {
                // New file: write the W3C extended log format header.
                writer.WriteLine("#Software: " + Version.FullName);
                writer.WriteLine("#Version: 1.0"); // W3C Log version
                writer.WriteLine("#Date: " + DateTime.UtcNow.ToShortDateString());
                writer.WriteLine("#Fields: date time c-ip cs-method cs-uri-stem sc-status");
            }

            writer.WriteLine("{0:d} {0:T} {1} {2} {3} {4}", now, RemoteIP, request.Verb, request.Url, request.ResponseStatus);
        }
        catch
        {
            // Best-effort logging: never let a logging failure break request handling.
        }
        finally
        {
            try
            {
                if (writer != null)
                {
                    writer.Close();
                }
            }
            catch
            {
                // Closing may fail if the open itself failed; ignore.
            }
        }
    }
}
/// <summary>
/// Validates the configured paths, creates any missing directories and files,
/// and reports whether everything required now exists.
/// </summary>
/// <returns>True when all required directories and files exist.</returns>
/// <exception cref="ArgumentException">Thrown when any configured path is empty or invalid.</exception>
public static bool Initialize()
{
    try
    {
        if (string.IsNullOrWhiteSpace(OutputPath) || OutputDirectory == null)
        {
            throw new ArgumentException("An empty or otherwise invalid path was detected for the Output Directory.");
        }

        if (string.IsNullOrWhiteSpace(WorkingPath) || WorkingDirectory == null)
        {
            throw new ArgumentException("An empty or otherwise invalid path was detected for the Working Directory.");
        }

        if (string.IsNullOrWhiteSpace(LogPath))
        {
            throw new ArgumentException("An empty or otherwise invalid path was detected for the Log directory.");
        }

        if (LogFile == null)
        {
            throw new ArgumentException("An empty or otherwise invalid path was detected for the Output Log file.");
        }

        if (ErrorFile == null)
        {
            throw new ArgumentException("An empty or otherwise invalid path was detected for the Output Error file.");
        }

        if (!OutputDirectory.Exists)
        {
            OutputDirectory.Create();
        }

        if (!WorkingDirectory.Exists)
        {
            WorkingDirectory.Create();
        }

        if (!LogDirectory.Exists)
        {
            LogDirectory.Create();
        }

        if (!LogFile.Exists)
        {
            // BUGFIX: FileInfo.Create() returns an open FileStream - dispose it so
            // the file is not left locked by this process.
            LogFile.Create().Dispose();
        }

        if (!ErrorFile.Exists)
        {
            ErrorFile.Create().Dispose();
        }

        // BUGFIX: FileSystemInfo caches Exists; refresh so the checks below see
        // the entries that were just created instead of stale 'false' values.
        OutputDirectory.Refresh();
        WorkingDirectory.Refresh();
        LogDirectory.Refresh();
        LogFile.Refresh();
        ErrorFile.Refresh();

        return (OutputDirectory.Exists && WorkingDirectory.Exists && LogFile.Exists && ErrorFile.Exists);
    }
    catch (Exception e)
    {
        Console.WriteLine($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
// Long-running consumer worker for a single topic: repeatedly resumes reading
// from the stored position, dispatches every event, and retries after errors
// until cancellation is requested.
void PumpTopic(string topic)
{
    var cancellationToken = _cancellationTokenSource.Token;

    _logger.Info("Starting consumer worker for topic {topic}", topic);

    try
    {
        var topicDirectoryPath = Path.Combine(_directoryPath, topic);
        var logDirectory = new LogDirectory(topicDirectoryPath, new Settings(logger: new KafkaesqueToToposLogger(_logger)));
        var reader = logDirectory.GetReader();

        while (!cancellationToken.IsCancellationRequested)
        {
            try
            {
                // NOTE(review): .Result blocks this worker thread on the async
                // position lookup - acceptable here since the worker owns its
                // thread, but confirm no sync-context deadlock risk.
                var resumePosition = _positionManager.Get(topic, 0).Result;
                var(fileNumber, bytePosition) = resumePosition.ToKafkaesquePosition();

                _logger.Debug("Resuming consumer from file {fileNumber} byte {bytePosition}", fileNumber, bytePosition);

                foreach (var eventData in reader.Read(fileNumber, bytePosition, cancellationToken: cancellationToken))
                {
                    var transportMessage = JsonConvert.DeserializeObject <TransportMessage>(Encoding.UTF8.GetString(eventData.Data));
                    var kafkaesqueEventPosition = new KafkaesquePosition(eventData.FileNumber, eventData.BytePosition);
                    // Topic is mapped to a single partition (0).
                    var eventPosition = kafkaesqueEventPosition.ToPosition(topic, partition: 0);
                    var receivedTransportMessage = new ReceivedTransportMessage(eventPosition, transportMessage.Headers, transportMessage.Body);

                    _logger.Debug("Received event {position}", eventPosition);

                    _consumerDispatcher.Dispatch(receivedTransportMessage);
                }
            }
            catch (Exception exception)
            {
                // Back off before retrying, but wake immediately on cancellation.
                _logger.Warn(exception, "Error in consumer worker for topic {topic} - waiting 10 s", topic);

                Task.Delay(TimeSpan.FromSeconds(10), cancellationToken)
                .Wait(cancellationToken);
            }
        }
    }
    catch (OperationCanceledException) when(cancellationToken.IsCancellationRequested)
    {
        // we're done
    }
    catch (Exception exception)
    {
        _logger.Error(exception, "Unhandled exception in consumer worker for topic {topic}", topic);
    }
    finally
    {
        _logger.Info("Stopped consumer worker for topic {topic}", topic);
    }
}
public void StartsOutEmpty()
{
    // A reader over a brand-new log directory must yield nothing.
    var log = new LogDirectory(GetLogDirectoryPath());

    var events = log.GetReader()
        .Read(cancellationToken: CancelAfter(TimeSpan.FromSeconds(3)))
        .ToList();

    Assert.That(events.Count(), Is.EqualTo(0));
}
public async Task Writer()
{
    var logDirectory = new LogDirectory(@"C:\data\kafkaesque");

    // hold on to this bad boy until your application shuts down
    using (var logWriter = logDirectory.GetWriter())
    {
        var payload = new byte[] { 1, 2, 3 };

        await logWriter.WriteAsync(payload);
    }
}
// Ensures the configured log directory exists on disk, creating it on demand.
private static void VerifyTargetDirectory()
{
    // Nothing to do when no log directory has been configured.
    if (LogDirectory == null)
    {
        return;
    }

    // Re-query the file system before checking, then create if missing.
    LogDirectory.Refresh();

    if (!LogDirectory.Exists)
    {
        LogDirectory.Create();
    }
}
public void CannotCreateMoreThanOneWriter()
{
    // The write lock must keep a second writer from being created.
    var settings = new Settings(writeLockAcquisitionTimeoutSeconds: 3);
    var log = new LogDirectory(GetLogDirectoryPath(), settings);

    Using(log.GetWriter());

    var exception = Assert.Throws <TimeoutException>(() => log.GetWriter());

    Console.WriteLine(exception);
}
public async Task ReadingInitiallyReturnsPrettyQuickly()
{
    // Reading an empty directory up to EOF should yield no events.
    var log = new LogDirectory(GetLogDirectoryPath());
    var reader = log.GetReader();

    var events = reader.ReadEof()
        .TakeWhile(e => e != LogReader.EOF)
        .ToList();

    Assert.That(events.Count, Is.EqualTo(0));
}
// Lazily builds the Kafkaesque writer for the given topic's subdirectory.
Lazy <LogWriter> CreateWriter(string topic)
{
    var topicDirectoryPath = Path.Combine(_directoryPath, topic);

    LogWriter InitializeWriter()
    {
        var logDirectory = new LogDirectory(topicDirectoryPath, new Settings(logger: new KafkaesqueToToposLogger(_logger)));

        _logger.Debug("Initializing new Kafkaesque writer with path {directoryPath}", topicDirectoryPath);

        return logDirectory.GetWriter();
    }

    return new Lazy <LogWriter>(InitializeWriter);
}
public async Task Reader()
{
    var logDirectory = new LogDirectory(@"C:\data\kafkaesque");

    var logReader = logDirectory.GetReader();

    // Iterate all events currently in the log.
    foreach (var logEvent in logReader.Read())
    {
        var bytes = logEvent.Data;

        // process the bytes here
    }
}
// Verifies that a concurrent reader picks up events one by one while the
// writer emits them slowly (one event per second).
public async Task CheckBehaviorWhenWriterIsSlow()
{
    var logDirectoryPath = GetLogDirectoryPath();
    var logDirectory = new LogDirectory(logDirectoryPath, new Settings(logger: new SerilogLogger()));
    var writer = Using(logDirectory.GetWriter());
    var readEvents = new ConcurrentQueue <string>();

    // Background reader loop: collects every event's text until the test's
    // disposal cancels the token.
    ThreadPool.QueueUserWorkItem(_ =>
    {
        var cancellationToken = CancelOnDisposal();

        try
        {
            var reader = logDirectory.GetReader();

            foreach (var evt in reader.Read(cancellationToken: cancellationToken, throwWhenCancelled: true))
            {
                var text = Encoding.UTF8.GetString(evt.Data);
                Console.WriteLine($"Reader loop read text: {text}");
                readEvents.Enqueue(text);
            }
        }
        catch (OperationCanceledException) when(cancellationToken.IsCancellationRequested)
        {
            // Expected on test teardown.
            Console.WriteLine("Reader loop exited");
        }
    });

    // Helper: write one UTF-8 encoded text event.
    async Task Write(string text)
    {
        Console.WriteLine($"Writing text: {text}");
        await writer.WriteAsync(Encoding.UTF8.GetBytes(text));
    }

    // Slowly emit five events, one second apart.
    await Task.Run(async() =>
    {
        await Write("HEJ");
        await Task.Delay(TimeSpan.FromSeconds(1));
        await Write("MED");
        await Task.Delay(TimeSpan.FromSeconds(1));
        await Write("DIG");
        await Task.Delay(TimeSpan.FromSeconds(1));
        await Write("MIN");
        await Task.Delay(TimeSpan.FromSeconds(1));
        await Write("VÆÆÆÆN");
    });

    // Wait until exactly five events have arrived; the invariant guards
    // against the reader ever observing more events than were written.
    await readEvents.WaitFor(q => q.Count == 5, invariantExpression : q => q.Count >= 0 && q.Count <= 5, timeoutSeconds : 50);
}
/// <summary>
/// Retrieves all log entries for a given period of time.
/// </summary>
/// <param name="firstDate">First day to take into account.</param>
/// <param name="lastDate">Last day to take into account.</param>
/// <param name="profile">Profile whose log entries should be read; null matches all profiles.</param>
/// <returns>List of all log entries for the requested period, sorted by start time.</returns>
internal List <VCRRecordingInfo> FindLogEntries(DateTime firstDate, DateTime lastDate, ProfileState profile)
{
    // Create list
    var logs = new List <VCRRecordingInfo>();

    // Create search patterns - file names start with a date stamp, so the
    // half-open range [first, last) can be checked by string comparison
    var last = lastDate.AddDays(1).ToString(LogEntryDateFormat);
    var first = firstDate.ToString(LogEntryDateFormat);

    // Load all jobs
    foreach (var file in LogDirectory.GetFiles("*" + VCRRecordingInfo.FileSuffix))
    {
        // Skip files outside the requested date range
        if (file.Name.CompareTo(first) < 0)
        {
            continue;
        }
        if (file.Name.CompareTo(last) >= 0)
        {
            continue;
        }

        // Load item - unreadable files are silently skipped
        var logEntry = SerializationTools.Load <VCRRecordingInfo>(file);
        if (logEntry == null)
        {
            continue;
        }

        // Check whether the entry belongs to the requested profile (if any)
        if (profile != null)
        {
            if (!profile.IsResponsibleFor(logEntry.Source))
            {
                continue;
            }
        }

        // Attach the name so the entry can be located again later
        logEntry.LogIdentifier = file.Name.ToLower();

        // Remember
        logs.Add(logEntry);
    }

    // Sort by start time
    logs.Sort(VCRRecordingInfo.ComparerByStarted);

    // Report
    return(logs);
}
public async Task CheckResumption_Eof(int count)
{
    var directoryPath = GetLogDirectoryPath();
    var settings = new Settings(approximateMaximumFileLength: 4096, numberOfFilesToKeep: int.MaxValue);
    var logDirectory = new LogDirectory(directoryPath, settings);

    // Write 'count' numbered lines, spread over several small files.
    using (var writer = logDirectory.GetWriter())
    {
        var payloads = Enumerable.Range(0, count)
            .Select(n => $"THIS IS LINE NUMBER {n}")
            .Select(Encoding.UTF8.GetBytes);

        await writer.WriteManyAsync(payloads);
    }

    new DirectoryInfo(directoryPath).DumpDirectoryContentsToConsole();

    // read events in a very inefficient way, checking that we can resume at each single line
    var resumeFileNumber = -1;
    var resumeBytePosition = -1;
    var linesRead = 0;

    for (var counter = 0; counter < count; counter++)
    {
        var expectedText = $"THIS IS LINE NUMBER {counter}";

        var data = logDirectory.GetReader().ReadEof(resumeFileNumber, resumeBytePosition).FirstOrDefault();

        if (data == LogReader.EOF)
        {
            break;
        }

        if (data is LogEvent eventData)
        {
            var actualText = Encoding.UTF8.GetString(eventData.Data);

            Assert.That(actualText, Is.EqualTo(expectedText));

            resumeFileNumber = eventData.FileNumber;
            resumeBytePosition = eventData.BytePosition;
            linesRead++;
        }
    }

    Assert.That(linesRead, Is.EqualTo(count));
}
public async Task Reader_Cancellation()
{
    // Abort the (otherwise endless) read after three seconds.
    var cancellationToken = CancelAfter(TimeSpan.FromSeconds(3));

    var logDirectory = new LogDirectory(@"C:\data\kafkaesque");
    var logReader = logDirectory.GetReader();

    foreach (var logEvent in logReader.Read(cancellationToken: cancellationToken))
    {
        var bytes = logEvent.Data;

        // process the bytes here
    }
}
// Opens the log directory in the system's file explorer, if it exists.
public static async Task OpenExplorer(LogDirectory directory)
{
    if (!directory.DirInfo.Exists)
    {
        return;
    }

    using (var process = new Process())
    {
        // UseShellExecute lets the shell resolve the directory path to the
        // platform's file explorer.
        process.StartInfo = new ProcessStartInfo(directory.DirInfo.FullName)
        {
            UseShellExecute = true
        };

        process.Start();
    }
}
public async Task CanReadSomeEvents()
{
    var log = new LogDirectory(GetLogDirectoryPath());
    var reader = log.GetReader();

    using var writer = log.GetWriter();

    // Write three identical events ...
    for (var counter = 0; counter < 3; counter++)
    {
        await writer.WriteAsync(new byte[] { 1, 2, 3 });
    }

    // ... and expect exactly three back when reading to EOF.
    var events = reader.ReadEof().TakeWhile(e => e != LogReader.EOF).ToList();

    Assert.That(events.Count, Is.EqualTo(3));
}
// Removes the oldest log files if the total number of files is greater than intended
public void CleanDirectory()
{
    var files = LogDirectory.GetFiles();
    var excess = files.Length - BackLogDays;

    if (excess <= 0)
    {
        return;
    }

    var suffixLength = FileExt.Length;

    // File names are date stamps followed by the extension, so stripping the
    // extension and parsing yields each file's date.
    DateTime StampOf(FileInfo file) =>
        DateTime.Parse(file.Name.Substring(0, file.Name.Length - suffixLength));

    // Oldest first, then drop the surplus from the front.
    Array.Sort(files, (left, right) => StampOf(left).CompareTo(StampOf(right)));

    for (var index = 0; index < excess; index++)
    {
        files[index].Delete();
    }
}
public async Task WhatHappensIfWeWriteALot(int iterations, bool parallel)
{
    SetLogLevel(LogEventLevel.Information);

    var logDirectoryPath = GetLogDirectoryPath();
    var logWriter = new LogDirectory(logDirectoryPath).GetWriter();

    Using(logWriter);

    // One 20 kB payload, reused for every write.
    var bytes = Enumerable.Range(0, 20000)
        .Select(o => (byte)(iterations % 256))
        .ToArray();

    var stopwatch = Stopwatch.StartNew();

    if (!parallel)
    {
        for (var counter = 0; counter < iterations; counter++)
        {
            await logWriter.WriteAsync(bytes);
        }
    }
    else
    {
        var pendingWrites = Enumerable.Range(0, iterations)
            .Select(i => logWriter.WriteAsync(bytes));

        await Task.WhenAll(pendingWrites);
    }

    var directoryInfo = new DirectoryInfo(logDirectoryPath);

    directoryInfo.DumpDirectoryContentsToConsole();

    var elapsedSeconds = stopwatch.Elapsed.TotalSeconds;
    var totalBytesWritten = directoryInfo.GetFiles().Sum(a => a.Length);

    Console.WriteLine($"Wrote {totalBytesWritten.FormatAsHumanReadableSize()} in {elapsedSeconds:0.0} s - that's {((long)(totalBytesWritten / elapsedSeconds)).FormatAsHumanReadableSize()}/s");
}
public async Task CanDeleteOldFiles()
{
    // Keep only the newest ten small data files.
    var directoryInfo = new DirectoryInfo(GetLogDirectoryPath());
    var logDirectory = new LogDirectory(directoryInfo, new Settings(numberOfFilesToKeep: 10, approximateMaximumFileLength: 32768));

    using (var writer = logDirectory.GetWriter())
    {
        var payloads = Enumerable.Range(0, 10000)
            .Select(n => $"THIS IS LINE NUMBER {n} OUT OF QUITE A FEW")
            .Select(Encoding.UTF8.GetBytes);

        await writer.WriteManyAsync(payloads);
    }

    directoryInfo.DumpDirectoryContentsToConsole();

    var dataFileCount = directoryInfo.GetFiles("*.dat").ToList().Count;

    Assert.That(dataFileCount, Is.EqualTo(10));
}
public void GetPositionFromLogEvent()
{
    var cancellationToken = CancelAfter(TimeSpan.FromSeconds(3));

    var logDirectory = new LogDirectory(@"C:\data\kafkaesque");
    var logReader = logDirectory.GetReader();

    // (-1, -1) means "start from the very beginning" - i.e. this assumes we
    // haven't read anything before.
    var currentFileNumber = -1;
    var currentBytePosition = -1;

    foreach (var logEvent in logReader.Read(fileNumber: currentFileNumber, bytePosition: currentBytePosition, cancellationToken: cancellationToken))
    {
        var bytes = logEvent.Data;

        currentFileNumber = logEvent.FileNumber;
        currentBytePosition = logEvent.BytePosition;

        // store the file number and the byte position in your database
    }
}
public async Task CanWriteAndReadItBack()
{
    var logDirectory = new LogDirectory(GetLogDirectoryPath());
    var logWriter = logDirectory.GetWriter();

    Using(logWriter);

    // Write a single event ...
    await logWriter.WriteAsync(new byte[] { 1, 2, 3 }, CancelAfter(TimeSpan.FromSeconds(3)));

    // ... and read it back unchanged.
    var logEvents = logDirectory.GetReader()
        .Read(cancellationToken: CancelAfter(TimeSpan.FromSeconds(3)))
        .ToList();

    Assert.That(logEvents.Count, Is.EqualTo(1));
    Assert.That(logEvents.First().Data, Is.EqualTo(new byte[] { 1, 2, 3 }));
}
// Entry point: streams lines from the input log directory into the publisher,
// optionally deleting each source file after it has been consumed.
private static void Main(string[] args)
{
    var options = new Options();

    if (Parser.Default.ParseArgumentsStrict(args, options))
    {
        var stateInputPath = options.StateInputPath;
        var inputPath = options.InputPath;
        var stateOutputPath = options.StateOutputPath;
        var outputPath = options.OutputPath;
        var publisher = new Publisher(stateInputPath, stateOutputPath, outputPath);

        // Publish() makes the observable connectable so both subscriptions
        // below see the same sequence; nothing flows until Connect().
        var logFiles = new LogDirectory(inputPath).Publish();

        // First subscription: forward every line to the publisher, dispose it
        // when the file stream completes.
        logFiles
        .Subscribe(logFile =>
        {
            logFile.Subscribe(line =>
            {
                publisher.Add(line);
            });
        },
                   () =>
        {
            publisher.Dispose();
        });

        // Second subscription: optionally delete each source file.
        // NOTE(review): deletion races with line consumption above - confirm
        // the per-file observable has completed before the file is deleted.
        logFiles
        .Where(logFile => options.Delete)
        .Subscribe(logFile =>
        {
            File.Delete(logFile.Path);
        });

        // Start the flow only after all subscriptions are in place.
        logFiles.Connect();
    }
    else
    {
        throw new ArgumentException();
    }
}
public async Task CanReadAndResumeAfterExperiencingEof()
{
    var log = new LogDirectory(GetLogDirectoryPath());
    var reader = log.GetReader();

    using var writer = log.GetWriter();

    // First batch: three events.
    for (var counter = 0; counter < 3; counter++)
    {
        await writer.WriteAsync(new byte[] { 1, 2, 3 });
    }

    var firstList = reader.ReadEof().TakeWhile(e => e != LogReader.EOF).Cast <LogEvent>().ToList();

    // Second batch: five more events.
    for (var counter = 0; counter < 5; counter++)
    {
        await writer.WriteAsync(new byte[] { 1, 2, 3 });
    }

    // Resume from the position of the last event read before EOF.
    var lastEvent = firstList.Last();

    var secondList = reader.ReadEof(lastEvent.FileNumber, lastEvent.BytePosition)
        .TakeWhile(e => e != LogReader.EOF)
        .ToList();

    Assert.That(firstList.Count, Is.EqualTo(3));
    Assert.That(secondList.Count, Is.EqualTo(5));
}
static async Task Main()
{
    Log.Logger = new LoggerConfiguration()
                 .WriteTo.ColoredConsole()
                 .MinimumLevel.Verbose()
                 .CreateLogger();

    const int count = 100;

    var dataPath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "data");
    var logDirectory = new LogDirectory(dataPath);

    var stopwatch = Stopwatch.StartNew();

    using (var writer = logDirectory.GetWriter())
    {
        var payloads = Enumerable.Range(0, count)
            .Select(n => $"THIS IS MESSAGE NUMBER {n}")
            .Select(Encoding.UTF8.GetBytes);

        await writer.WriteManyAsync(payloads);
        //await Task.Delay(TimeSpan.FromSeconds(.1));
    }

    var elapsedSeconds = stopwatch.Elapsed.TotalSeconds;

    Console.WriteLine($"Wrote {count} messages in {elapsedSeconds:0.0} s - that's {count/elapsedSeconds:0.0} msg/s");
}
/// <summary>
/// Creates a new management instance and loads the current list of jobs.
/// </summary>
/// <param name="rootDirectory">The directory below which all jobs and
/// logs are created.</param>
/// <param name="server">The VCR.NET instance this manager belongs to.</param>
internal JobManager(DirectoryInfo rootDirectory, VCRServer server)
{
    // Remember
    RootDirectory = rootDirectory;
    Server = server;

    // Create root directory
    RootDirectory.Create();

    // Create working directories
    CollectorDirectory.Create();
    ArchiveDirectory.Create();
    JobDirectory.Create();
    LogDirectory.Create();

    // Load all jobs - only jobs with a unique identifier are indexed
    foreach (var job in VCRJob.Load(JobDirectory))
    {
        if (job.UniqueID.HasValue)
        {
            m_Jobs[job.UniqueID.Value] = job;
        }
    }
}