/// <summary>
/// Writes <paramref name="count"/> numbered lines, then re-reads them one at a time,
/// creating a fresh reader for every single event to verify that reading can be
/// resumed from any (fileNumber, bytePosition) pair.
/// </summary>
public async Task CheckResumption(int count)
{
    var directoryPath = GetLogDirectoryPath();
    var logDirectory = new LogDirectory(directoryPath);

    // write all of the lines up front, disposing the writer before reading starts
    using (var writer = logDirectory.GetWriter())
    {
        var data = Enumerable.Range(0, count)
            .Select(n => $"THIS IS LINE NUMBER {n}")
            .Select(Encoding.UTF8.GetBytes);

        await writer.WriteManyAsync(data);
    }

    new DirectoryInfo(directoryPath).DumpDirectoryContentsToConsole();

    // read events in a very inefficient way, checking that we can resume at each single line;
    // (-1, -1) means "start from the very beginning"
    var fileNumber = -1;
    var bytePosition = -1;

    for (var counter = 0; counter < count; counter++)
    {
        var expectedText = $"THIS IS LINE NUMBER {counter}";
        var eventData = logDirectory.GetReader().Read(fileNumber, bytePosition).FirstOrDefault();

        // FIX: fail with a descriptive assertion instead of crashing with a
        // NullReferenceException if the log yields fewer events than were written
        Assert.That(eventData, Is.Not.Null,
            $"Got no event when resuming from fileNumber = {fileNumber}, bytePosition = {bytePosition} - expected '{expectedText}'");

        var actualText = Encoding.UTF8.GetString(eventData.Data);

        Assert.That(actualText, Is.EqualTo(expectedText));

        // remember the position of the event just read, so the next iteration resumes after it
        fileNumber = eventData.FileNumber;
        bytePosition = eventData.BytePosition;
    }
}
/// <summary>
/// Long-running worker loop for one topic: repeatedly resumes from the stored position,
/// reads events from the topic's log directory, and dispatches each one to the consumer.
/// Runs until the shared cancellation token is cancelled; transient errors trigger a
/// 10 s back-off and a retry from the last stored position.
/// </summary>
void PumpTopic(string topic)
{
    var cancellationToken = _cancellationTokenSource.Token;

    _logger.Info("Starting consumer worker for topic {topic}", topic);

    try
    {
        // each topic gets its own subdirectory of logs
        var topicDirectoryPath = Path.Combine(_directoryPath, topic);
        var logDirectory = new LogDirectory(topicDirectoryPath, new Settings(logger: new KafkaesqueToToposLogger(_logger)));
        var reader = logDirectory.GetReader();

        while (!cancellationToken.IsCancellationRequested)
        {
            try
            {
                // FIX: GetAwaiter().GetResult() instead of .Result, so a failed position
                // lookup surfaces as the original exception instead of being wrapped
                // in an AggregateException (this worker runs on a dedicated thread,
                // hence the deliberate synchronous wait)
                var resumePosition = _positionManager.Get(topic, 0).GetAwaiter().GetResult();
                var (fileNumber, bytePosition) = resumePosition.ToKafkaesquePosition();

                _logger.Debug("Resuming consumer from file {fileNumber} byte {bytePosition}", fileNumber, bytePosition);

                foreach (var eventData in reader.Read(fileNumber, bytePosition, cancellationToken: cancellationToken))
                {
                    var transportMessage = JsonConvert.DeserializeObject<TransportMessage>(Encoding.UTF8.GetString(eventData.Data));

                    // translate the kafkaesque (file, byte) position into a Topos position for this topic/partition
                    var kafkaesqueEventPosition = new KafkaesquePosition(eventData.FileNumber, eventData.BytePosition);
                    var eventPosition = kafkaesqueEventPosition.ToPosition(topic, partition: 0);
                    var receivedTransportMessage = new ReceivedTransportMessage(eventPosition, transportMessage.Headers, transportMessage.Body);

                    _logger.Debug("Received event {position}", eventPosition);

                    _consumerDispatcher.Dispatch(receivedTransportMessage);
                }
            }
            // FIX: filter so cancellation-induced exceptions are NOT treated as transient
            // errors (previously they were logged as warnings followed by a pointless wait);
            // they now propagate to the OperationCanceledException handler below
            catch (Exception exception) when (!cancellationToken.IsCancellationRequested)
            {
                _logger.Warn(exception, "Error in consumer worker for topic {topic} - waiting 10 s", topic);

                // back off before retrying; the token aborts the wait on shutdown
                Task.Delay(TimeSpan.FromSeconds(10), cancellationToken)
                    .Wait(cancellationToken);
            }
        }
    }
    catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
    {
        // we're done
    }
    catch (Exception exception)
    {
        _logger.Error(exception, "Unhandled exception in consumer worker for topic {topic}", topic);
    }
    finally
    {
        _logger.Info("Stopped consumer worker for topic {topic}", topic);
    }
}
/// <summary>
/// Verifies that reading from a brand-new log directory yields no events.
/// The 3 s cancellation bounds the read so the test cannot hang.
/// </summary>
public void StartsOutEmpty()
{
    var logDirectoryPath = GetLogDirectoryPath();
    var log = new LogDirectory(logDirectoryPath);
    var reader = log.GetReader();

    var result = reader.Read(cancellationToken: CancelAfter(TimeSpan.FromSeconds(3))).ToList();

    // FIX: use the List<T>.Count property instead of the LINQ Count() extension
    Assert.That(result.Count, Is.EqualTo(0));
}
/// <summary>
/// Sample: iterates over every event currently present in the log directory.
/// </summary>
public async Task Reader()
{
    var directory = new LogDirectory(@"C:\data\kafkaesque");
    var reader = directory.GetReader();

    foreach (var evt in reader.Read())
    {
        var payload = evt.Data;

        // this is where the raw bytes of each event would be processed
    }
}
/// <summary>
/// Verifies that ReadEof on a fresh log directory hits the EOF marker immediately,
/// i.e. no events are yielded before it.
/// </summary>
public async Task ReadingInitiallyReturnsPrettyQuickly()
{
    var path = GetLogDirectoryPath();
    var directory = new LogDirectory(path);
    var reader = directory.GetReader();

    var eventsBeforeEof = reader.ReadEof()
        .TakeWhile(item => item != LogReader.EOF)
        .ToList();

    Assert.That(eventsBeforeEof.Count, Is.EqualTo(0));
}
/// <summary>
/// Verifies that a reader blocked on a tailing Read(...) picks up events as a slow
/// writer produces them one per second: five writes must eventually show up as five
/// read events, and at no point may the reader see more than five.
/// </summary>
public async Task CheckBehaviorWhenWriterIsSlow()
{
    var logDirectoryPath = GetLogDirectoryPath();
    var logDirectory = new LogDirectory(logDirectoryPath, new Settings(logger: new SerilogLogger()));

    // Using(...) registers the writer for disposal when the test fixture is torn down
    var writer = Using(logDirectory.GetWriter());

    // events observed by the background reader loop, in arrival order
    var readEvents = new ConcurrentQueue<string>();

    // background reader: tails the log until the token from CancelOnDisposal() fires
    // at fixture teardown; throwWhenCancelled makes cancellation surface as an
    // OperationCanceledException, which the filter below treats as a clean exit
    ThreadPool.QueueUserWorkItem(_ =>
    {
        var cancellationToken = CancelOnDisposal();

        try
        {
            var reader = logDirectory.GetReader();

            foreach (var evt in reader.Read(cancellationToken: cancellationToken, throwWhenCancelled: true))
            {
                var text = Encoding.UTF8.GetString(evt.Data);

                Console.WriteLine($"Reader loop read text: {text}");

                readEvents.Enqueue(text);
            }
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            Console.WriteLine("Reader loop exited");
        }
    });

    // local helper: write one UTF-8-encoded text event
    async Task Write(string text)
    {
        Console.WriteLine($"Writing text: {text}");

        await writer.WriteAsync(Encoding.UTF8.GetBytes(text));
    }

    // simulate a slow writer: five events, one second apart
    await Task.Run(async () =>
    {
        await Write("HEJ");
        await Task.Delay(TimeSpan.FromSeconds(1));
        await Write("MED");
        await Task.Delay(TimeSpan.FromSeconds(1));
        await Write("DIG");
        await Task.Delay(TimeSpan.FromSeconds(1));
        await Write("MIN");
        await Task.Delay(TimeSpan.FromSeconds(1));
        await Write("VÆÆÆÆN");
    });

    // wait (up to 50 s) until exactly five events were read; the invariant asserts
    // that the count never overshoots while we are waiting
    await readEvents.WaitFor(q => q.Count == 5,
        invariantExpression: q => q.Count >= 0 && q.Count <= 5,
        timeoutSeconds: 50);
}
/// <summary>
/// Writes <paramref name="count"/> numbered lines across many small files (4 kB max
/// per file, keeping all files), then re-reads them one event at a time via ReadEof,
/// resuming from the previous event's position on every iteration. Verifies that all
/// lines are read back in order before EOF is encountered.
/// </summary>
public async Task CheckResumption_Eof(int count)
{
    var directoryPath = GetLogDirectoryPath();
    var logDirectory = new LogDirectory(directoryPath,
        new Settings(approximateMaximumFileLength: 4096, numberOfFilesToKeep: int.MaxValue));

    // produce all of the input lines before any reading happens
    using (var writer = logDirectory.GetWriter())
    {
        var lines = Enumerable.Range(0, count)
            .Select(number => $"THIS IS LINE NUMBER {number}")
            .Select(Encoding.UTF8.GetBytes);

        await writer.WriteManyAsync(lines);
    }

    new DirectoryInfo(directoryPath).DumpDirectoryContentsToConsole();

    // resume position: (-1, -1) means "start from the very beginning"
    var fileNumber = -1;
    var bytePosition = -1;
    var linesRead = 0;

    for (var counter = 0; counter < count; counter++)
    {
        var expectedText = $"THIS IS LINE NUMBER {counter}";

        // deliberately inefficient: a brand-new reader per event, resumed at the last position
        var data = logDirectory.GetReader().ReadEof(fileNumber, bytePosition).FirstOrDefault();

        if (data == LogReader.EOF)
        {
            break;
        }

        if (data is LogEvent logEvent)
        {
            Assert.That(Encoding.UTF8.GetString(logEvent.Data), Is.EqualTo(expectedText));

            fileNumber = logEvent.FileNumber;
            bytePosition = logEvent.BytePosition;
            linesRead++;
        }
    }

    Assert.That(linesRead, Is.EqualTo(count));
}
/// <summary>
/// Sample: reads events from the log directory, bailing out via the cancellation
/// token after three seconds.
/// </summary>
public async Task Reader_Cancellation()
{
    // stop the read loop after three seconds
    var cancellationToken = CancelAfter(TimeSpan.FromSeconds(3));

    var directory = new LogDirectory(@"C:\data\kafkaesque");
    var reader = directory.GetReader();

    foreach (var evt in reader.Read(cancellationToken: cancellationToken))
    {
        var payload = evt.Data;

        // this is where the raw bytes of each event would be processed
    }
}
/// <summary>
/// Verifies that three written events come back as exactly three events when
/// reading up to the EOF marker.
/// </summary>
public async Task CanReadSomeEvents()
{
    var path = GetLogDirectoryPath();
    var directory = new LogDirectory(path);
    var reader = directory.GetReader();

    using var writer = directory.GetWriter();

    // write the same three-byte payload three times
    for (var i = 0; i < 3; i++)
    {
        await writer.WriteAsync(new byte[] { 1, 2, 3 });
    }

    var eventsBeforeEof = reader.ReadEof()
        .TakeWhile(item => item != LogReader.EOF)
        .ToList();

    Assert.That(eventsBeforeEof.Count, Is.EqualTo(3));
}
/// <summary>
/// Sample: shows how to track the (fileNumber, bytePosition) of each event read,
/// so that reading can later be resumed from the last processed event.
/// </summary>
public void GetPositionFromLogEvent()
{
    var cancellationToken = CancelAfter(TimeSpan.FromSeconds(3));
    var logDirectory = new LogDirectory(@"C:\data\kafkaesque");
    var logReader = logDirectory.GetReader();

    // (-1, -1) assumes nothing was read before, i.e. start from the very beginning
    var fileNumber = -1;
    var bytePosition = -1;

    foreach (var logEvent in logReader.Read(fileNumber: fileNumber, bytePosition: bytePosition, cancellationToken: cancellationToken))
    {
        var bytes = logEvent.Data;

        (fileNumber, bytePosition) = (logEvent.FileNumber, logEvent.BytePosition);

        // store the file number and the byte position in your database
    }
}
/// <summary>
/// Round-trip test: a single written event must be read back with the same payload.
/// Both the write and the read are bounded by a 3 s cancellation.
/// </summary>
public async Task CanWriteAndReadItBack()
{
    var directoryPath = GetLogDirectoryPath();
    var directory = new LogDirectory(directoryPath);

    var writer = directory.GetWriter();
    Using(writer);

    await writer.WriteAsync(new byte[] { 1, 2, 3 }, CancelAfter(TimeSpan.FromSeconds(3)));

    var events = directory.GetReader()
        .Read(cancellationToken: CancelAfter(TimeSpan.FromSeconds(3)))
        .ToList();

    Assert.That(events.Count, Is.EqualTo(1));
    Assert.That(events[0].Data, Is.EqualTo(new byte[] { 1, 2, 3 }));
}
/// <summary>
/// Verifies that after a ReadEof pass has hit EOF, reading can be resumed from the
/// position of the last event seen, picking up exactly the events written afterwards.
/// </summary>
public async Task CanReadAndResumeAfterExperiencingEof()
{
    var path = GetLogDirectoryPath();
    var log = new LogDirectory(path);
    var reader = log.GetReader();

    using var writer = log.GetWriter();

    // local helper: write the same three-byte payload the given number of times
    async Task WriteEvents(int howMany)
    {
        for (var i = 0; i < howMany; i++)
        {
            await writer.WriteAsync(new byte[] { 1, 2, 3 });
        }
    }

    await WriteEvents(3);

    // first pass: consume everything up to the EOF marker
    var firstBatch = reader.ReadEof()
        .TakeWhile(item => item != LogReader.EOF)
        .Cast<LogEvent>()
        .ToList();

    await WriteEvents(5);

    // second pass: resume from the position of the last event of the first pass
    var lastEvent = firstBatch.Last();

    var secondBatch = reader.ReadEof(lastEvent.FileNumber, lastEvent.BytePosition)
        .TakeWhile(item => item != LogReader.EOF)
        .ToList();

    Assert.That(firstBatch.Count, Is.EqualTo(3));
    Assert.That(secondBatch.Count, Is.EqualTo(5));
}
/// <summary>
/// Writes <paramref name="count"/> deliberately long messages (so they spill across
/// multiple log files), then reads everything back, verifying that the messages
/// arrive complete, in order, and without gaps. Also prints write/read throughput.
/// </summary>
public async Task CanReadBackEventsSpreadOverMultipleFiles_WritingEverythingInAdvance(int count)
{
    SetLogLevel(LogEventLevel.Verbose);

    // each message is "<sequence number>/<long padding text>" so ordering can be checked on read-back
    var messages = Enumerable.Range(0, count)
        .Select(n => $"{n}/This is a pretty long string message, whose purpose is solely to take up a lot of space, meaning that the events will eventually need to be placed in more than one file.");

    var logDirectoryPath = GetLogDirectoryPath();
    var directoryInfo = new DirectoryInfo(logDirectoryPath);
    var logDirectory = new LogDirectory(directoryInfo);

    var writeStopwatch = Stopwatch.StartNew();

    // write everything
    var writer = logDirectory.GetWriter();
    Using(writer);
    await writer.WriteManyAsync(messages.Select(Encoding.UTF8.GetBytes));

    var elapsedSecondsWriting = writeStopwatch.Elapsed.TotalSeconds;

    Console.WriteLine($"Wrote {count} messages in {elapsedSecondsWriting:0.0} s - that's {count/elapsedSecondsWriting:0.0} msg/s");

    directoryInfo.DumpDirectoryContentsToConsole();

    // read it back
    var reader = logDirectory.GetReader();
    var readStopwatch = Stopwatch.StartNew();
    var expectedMessageNumber = 0;

    foreach (var message in reader.Read(cancellationToken: CancelAfter(TimeSpan.FromSeconds(20))).Take(count))
    {
        var text = Encoding.UTF8.GetString(message.Data);
        var parts = text.Split('/');

        // validate "<number>/<text>" shape and the sequence number; any failure is
        // wrapped below with the event's exact file/byte position for diagnosis
        try
        {
            if (parts.Length != 2)
            {
                throw new FormatException(
                    $"The text '{text}' could not be parsed - expected a number and a slash, followed by some text");
            }

            if (!int.TryParse(parts.First(), out var actualMessageNumber))
            {
                throw new FormatException(
                    $"Could not parse the token '{parts.First()}' from the message '{text}' into an integer");
            }

            if (actualMessageNumber != expectedMessageNumber)
            {
                throw new AssertionException(
                    $"The message number {actualMessageNumber} did not match the expected: {expectedMessageNumber}");
            }
        }
        catch (Exception exception)
        {
            throw new ApplicationException($"Error processing event with fileNumber = {message.FileNumber}, bytePosition = {message.BytePosition}", exception);
        }

        expectedMessageNumber++;
    }

    var elapsedSeconds = readStopwatch.Elapsed.TotalSeconds;

    Console.WriteLine($"Read {count} messages in {elapsedSeconds:0.0} s - that's {count/elapsedSeconds:0.0} msg/s");

    // expectedMessageNumber ends up equal to the number of messages successfully verified
    Assert.That(expectedMessageNumber, Is.EqualTo(count));
}
/// <summary>
/// Like the "writing everything in advance" variant, but the reader runs concurrently
/// with a background writer thread: <paramref name="count"/> long messages are written
/// on the thread pool while this test reads them back, verifying completeness and order.
/// </summary>
public async Task CanReadBackEventsSpreadOverMultipleFiles_ReadingWhileWriting(int count)
{
    SetLogLevel(LogEventLevel.Verbose);

    // each message is "<sequence number>/<long padding text>" so ordering can be checked on read-back
    var messages = Enumerable.Range(0, count)
        .Select(n => $"{n}/This is a pretty long string message, whose purpose is solely to take up a lot of space, meaning that the events will eventually need to be placed in more than one file.");

    var logDirectoryPath = GetLogDirectoryPath();
    var directoryInfo = new DirectoryInfo(logDirectoryPath);
    var logDirectory = new LogDirectory(directoryInfo);

    // signaled by the background thread when all writes have completed (or failed)
    var doneWriting = Using(new ManualResetEvent(false));
    var writer = logDirectory.GetWriter();

    // ensure that the background thread has finished writing before we dispose the writer
    Using(new DisposableCallback(() =>
    {
        using (writer)
        {
            var timeout = TimeSpan.FromMinutes(1);

            if (!doneWriting.WaitOne(timeout))
            {
                Console.WriteLine($"WARNING: WRITE OPERATION WAS NOT COMPLETED WITHIN {timeout} TIMEOUT");
            }
        }
    }));

    // background writer: produces all messages while the foreground loop below reads
    ThreadPool.QueueUserWorkItem(async _ =>
    {
        try
        {
            await writer.WriteManyAsync(messages.Select(Encoding.UTF8.GetBytes));

            directoryInfo.DumpDirectoryContentsToConsole();
        }
        finally
        {
            doneWriting.Set();
        }
    });

    var reader = logDirectory.GetReader();
    var expectedMessageNumber = 0;
    var stopwatch = Stopwatch.StartNew();

    foreach (var message in reader.Read(cancellationToken: CancelAfter(TimeSpan.FromSeconds(20))).Take(count))
    {
        var text = Encoding.UTF8.GetString(message.Data);
        var parts = text.Split('/');

        // validate "<number>/<text>" shape and the sequence number; any failure is
        // wrapped below with the event's exact file/byte position for diagnosis
        try
        {
            if (parts.Length != 2)
            {
                throw new FormatException(
                    $"The text '{text}' could not be parsed - expected a number and a slash, followed by some text");
            }

            if (!int.TryParse(parts.First(), out var actualMessageNumber))
            {
                throw new FormatException(
                    $"Could not parse the token '{parts.First()}' from the message '{text}' into an integer");
            }

            if (actualMessageNumber != expectedMessageNumber)
            {
                throw new AssertionException(
                    $"The message number {actualMessageNumber} did not match the expected: {expectedMessageNumber}");
            }
        }
        catch (Exception exception)
        {
            throw new ApplicationException($"Error processing event with fileNumber = {message.FileNumber}, bytePosition = {message.BytePosition}", exception);
        }

        expectedMessageNumber++;
    }

    var elapsedSeconds = stopwatch.Elapsed.TotalSeconds;

    Console.WriteLine($"Read {count} messages in {elapsedSeconds:0.0} s - that's {count/elapsedSeconds:0.0} msg/s");

    // expectedMessageNumber ends up equal to the number of messages successfully verified
    Assert.That(expectedMessageNumber, Is.EqualTo(count));
}
async Task RunTest(int count, int readerCount) { SetLogLevel(LogEventLevel.Information); var logDirectoryPath = GetLogDirectoryPath(); var logDirectory = new LogDirectory(logDirectoryPath); var messagesToWrite = Enumerable.Range(0, count) .Select(n => $"THIS IS A STRING MESSAGE/{n}") .ToConcurrentQueue(); var cancellationTokenSource = new CancellationTokenSource(); Using(cancellationTokenSource); var cancellationToken = cancellationTokenSource.Token; var writer = logDirectory.GetWriter(); Using(writer); var readerThreads = Enumerable .Range(0, readerCount) .Select(n => { var logReader = logDirectory.GetReader(); var readMessages = new ConcurrentQueue <string>(); return(new { Thread = new Thread(() => { try { foreach (var logEvent in logReader.Read(cancellationToken: cancellationToken)) { readMessages.Enqueue(Encoding.UTF8.GetString(logEvent.Data)); } } catch (OperationCanceledException) when(cancellationToken.IsCancellationRequested) { Console.WriteLine($"Thread {n} was cancelled"); } catch (Exception exception) { Console.WriteLine(exception); } }) { IsBackground = true, Name = $"Reader thread {n}" }, Messages = readMessages }); }) .ToList(); readerThreads.ForEach(a => a.Thread.Start()); var writerThreads = Enumerable.Range(0, 10) .Select(n => new Thread(() => { while (messagesToWrite.TryDequeue(out var message)) { writer.WriteAsync(Encoding.UTF8.GetBytes(message), cancellationToken); } }) { Name = $"Writer thread {n}" })