private void Invalidate(int currentSourceIndex)
{
	// Pops indices off the tail for as long as they lie at or beyond the
	// invalidated source index, rewinding the running log-entry index along
	// the way, then notifies listeners of how many entries were dropped.
	var removedCount = 0;
	lock (_indices)
	{
		while (_indices.Count > 0)
		{
			var last = _indices.Count - 1;
			var sourceIndex = _indices[last];
			if (sourceIndex < currentSourceIndex)
				break; // everything before the invalidated region stays valid

			// If we recorded the log-entry index that was current before this
			// line was added, restore it; otherwise leave the counter alone.
			int previousLogEntryIndex;
			if (_logEntryIndices.TryGetValue(sourceIndex, out previousLogEntryIndex))
			{
				_currentLogEntryIndex = previousLogEntryIndex;
			}

			_logEntryIndices.Remove(sourceIndex);
			_indices.RemoveAt(last);
			++removedCount;
		}
	}

	// NOTE(review): _indices.Count is read again outside the lock here, same
	// as the original — assumes no concurrent writer between unlock and here.
	Listeners.Invalidate(_indices.Count, removedCount);
}
/// <summary>
///     Removes the last entry from <see cref="_entries"/> and notifies
///     listeners that the entry at that index has been invalidated.
/// </summary>
private void RemoveLast()
{
	// Fix: the index was previously computed from _entries.Count BEFORE taking
	// _syncRoot, so a concurrent mutation could make it stale (removing the
	// wrong element or throwing ArgumentOutOfRangeException). Compute it under
	// the same lock that guards the removal instead.
	int index;
	lock (_syncRoot)
	{
		index = _entries.Count - 1;
		_entries.RemoveAt(index);
	}

	// Listener notification stays outside the lock (consistent with the rest
	// of this file) to avoid running callbacks while holding _syncRoot.
	Listeners.Invalidate(index, 1);
}
/// <summary>
///     Handles an invalidation of a region of the source: shrinks the known
///     source section, rewinds <c>_currentSourceIndex</c> if lines we already
///     indexed were invalidated, trims <c>_indices</c> accordingly and
///     forwards the invalidation to listeners.
/// </summary>
private void Invalidate(LogFileSection sectionToInvalidate)
{
	// Clamp the invalidated region to what we actually know about the source,
	// so we never invalidate past the last line we've been told exists.
	var firstInvalidIndex = LogLineIndex.Min(_fullSourceSection.LastIndex, sectionToInvalidate.Index);
	var lastInvalidIndex = LogLineIndex.Min(_fullSourceSection.LastIndex, sectionToInvalidate.LastIndex);
	var invalidateCount = lastInvalidIndex - firstInvalidIndex + 1;
	var previousSourceIndex = _currentSourceIndex;

	// Everything from firstInvalidIndex onward is gone: the source now ends there.
	_fullSourceSection = new LogFileSection(0, (int)firstInvalidIndex);
	if (_fullSourceSection.Count > 0)
	{
		// It's possible (likely) that we've received an invalidation for a region of the source
		// that we've already processed (i.e. created indices for). If that's the case, then we need
		// to rewind the index. Otherwise nothing needs to be done...
		var newIndex = _fullSourceSection.LastIndex + 1;
		if (newIndex < _currentSourceIndex)
		{
			_currentSourceIndex = newIndex;
		}
	}
	else
	{
		// The entire source was invalidated — start processing from scratch.
		_currentSourceIndex = 0;
	}

	lock (_syncRoot)
	{
		// NOTE(review): toRemove is computed from lastInvalidIndex but the
		// removal starts at firstInvalidIndex — if the two differ, this removes
		// fewer entries than the invalidated span covers. Looks inconsistent
		// with the second RemoveRange below; confirm against the test suite.
		var toRemove = _indices.Count - lastInvalidIndex;
		if (toRemove > 0)
		{
			_indices.RemoveRange((int)firstInvalidIndex, toRemove);
			// Rewind the current log entry to just before the invalidated region.
			_currentLogEntry = new LogEntryInfo(firstInvalidIndex - 1, 0);
		}
		if (previousSourceIndex != _currentSourceIndex)
		{
			// The source index was rewound above — drop any indices at or
			// beyond the new position so _indices matches _currentSourceIndex.
			_indices.RemoveRange((int)_currentSourceIndex, _indices.Count - _currentSourceIndex);
		}
	}

	// Sanity check: there should be exactly one index per processed source line.
	if (_indices.Count != _currentSourceIndex)
	{
		Log.ErrorFormat("Inconsistency detected: We have {0} indices for {1} lines", _indices.Count, _currentSourceIndex);
	}

	Listeners.Invalidate((int)firstInvalidIndex, invalidateCount);

	// NOTE(review): _fullSourceSection was already assigned (0, firstInvalidIndex)
	// above, so Count > firstInvalidIndex should be unreachable — presumably a
	// leftover defensive re-clamp; verify whether it can be removed.
	if (_fullSourceSection.Count > firstInvalidIndex)
	{
		_fullSourceSection = new LogFileSection(0, firstInvalidIndex.Value);
	}
}
/// <summary>
///     Forwards each pending change to the listener collection, mapping the
///     section's flags onto the matching listener callback.
/// </summary>
private void NotifyListeners(IEnumerable <LogFileSection> changes)
{
	foreach (var change in changes)
	{
		// Check order matters: invalidations first, then resets, and anything
		// else is treated as newly appended lines.
		if (change.IsInvalidate)
			Listeners.Invalidate((int)change.Index, change.Count);
		else if (change.IsReset)
			Listeners.Reset();
		else
			Listeners.OnRead((int)(change.Index + change.Count));
	}
}
/// <summary>
///     Called when a line is inserted somewhere other than the end: everything
///     from the insertion point onward is invalidated towards listeners and,
///     unless the new line continues the immediately preceding entry, the
///     merged log-entry index of all following lines is shifted up by one.
/// </summary>
private void InvalidateOnward(int insertionIndex, ILogFile source, LogLine newLogLine)
{
	// Tell listeners to drop everything from the insertion point on; the
	// region will be re-announced once the insert has been performed.
	var affected = _indices.Count - insertionIndex;
	Listeners.Invalidate(insertionIndex, affected);

	// The merged log-entry index of every following line MAY increase by one.
	// It does NOT when the inserted line merely continues the entry of the
	// line directly before it (same source file, same original entry index).
	var continuesPreviousEntry = false;
	if (insertionIndex > 0)
	{
		var predecessor = _indices[insertionIndex - 1];
		var predecessorSource = _sources[predecessor.LogFileIndex];
		continuesPreviousEntry = predecessorSource == source &&
		                         predecessor.OriginalLogEntryIndex == newLogLine.LogEntryIndex;
	}

	if (!continuesPreviousEntry)
	{
		// _indices holds value types: read, bump, write back.
		for (var offset = 0; offset < affected; ++offset)
		{
			var entry = _indices[insertionIndex + offset];
			entry.MergedLogEntryIndex++;
			_indices[insertionIndex + offset] = entry;
		}
	}
}
/// <inheritdoc />
protected override TimeSpan RunOnce(CancellationToken token)
{
	// Snapshot of the line count before this pass; used further down to
	// detect when a multi-line entry retroactively absorbs already-read lines.
	var lastCount = _fullSourceSection.Count;

	// Phase 1: drain the queue of pending source modifications.
	bool performedWork = false;
	LogFileSection section;
	while (_pendingModifications.TryDequeue(out section) && !token.IsCancellationRequested)
	{
		if (section.IsReset)
		{
			Clear();
		}
		else if (section.IsInvalidate)
		{
			Invalidate(section);
		}
		else
		{
			// An append: grow the known source section to cover it.
			_fullSourceSection = LogFileSection.MinimumBoundingLine(_fullSourceSection, section);
		}
		performedWork = true;
	}

	// Phase 2: process up to MaximumBatchSize not-yet-indexed source lines.
	if (!_fullSourceSection.IsEndOfSection(_currentSourceIndex))
	{
		var remaining = Math.Min(_fullSourceSection.Count - _currentSourceIndex, MaximumBatchSize);
		var buffer = new LogLine[remaining];
		_source.GetSection(new LogFileSection(_currentSourceIndex, remaining), buffer);

		// First previously-published line that got merged into an earlier
		// entry during this batch (null if none) — triggers re-publication.
		LogLineIndex? resetIndex = null;
		lock (_syncRoot)
		{
			for (var i = 0; i < remaining; ++i)
			{
				var line = buffer[i];
				// A line starts a NEW entry when there is no current entry yet,
				// when it carries its own log level, or when the current entry
				// has no level to continue; otherwise it joins the current entry.
				if (_currentLogEntry.EntryIndex.IsInvalid ||
				    line.Level != LevelFlags.None ||
				    _currentLogEntryLevel == LevelFlags.None)
				{
					_currentLogEntry = _currentLogEntry.NextEntry(line.LineIndex);
					_currentLogEntryLevel = line.Level;
				}
				else if (_currentLogEntry.FirstLineIndex < lastCount && resetIndex == null)
				{
					// The continuation reaches back into lines that listeners
					// already saw — remember where the affected entry starts so
					// we can invalidate from there after releasing the lock.
					var index = _currentLogEntry.FirstLineIndex;
					resetIndex = index;
					_currentLogEntryLevel = _source.GetLine((int)index).Level;
				}
				_indices.Add(_currentLogEntry);
			}
		}

		// Outside the lock: invalidate the previously-published lines that were
		// retroactively merged, so listeners re-read them with the new grouping.
		if (resetIndex != null)
		{
			var resetCount = lastCount - resetIndex.Value;
			if (resetCount > 0)
			{
				Listeners.Invalidate((int)resetIndex.Value, resetCount);
			}
		}

		_currentSourceIndex += remaining;
	}

	// Now we can perform a block-copy of all properties.
	_source.GetValues(_properties);
	_maxCharactersPerLine = _source.MaxCharactersPerLine;

	// Sanity check: exactly one index per processed source line.
	if (_indices.Count != _currentSourceIndex)
	{
		Log.ErrorFormat("Inconsistency detected: We have {0} indices for {1} lines", _indices.Count, _currentSourceIndex);
	}

	Listeners.OnRead((int)_currentSourceIndex);

	if (_source.EndOfSourceReached && _fullSourceSection.IsEndOfSection(_currentSourceIndex))
	{
		SetEndOfSourceReached();
	}

	// Re-run immediately while there is work; otherwise back off.
	if (performedWork)
	{
		return (TimeSpan.Zero);
	}

	return (_maximumWaitTime);
}