Example #1
0
        /// <summary>
        /// Updates the index for the log: removes index entries for files that no longer
        /// exist on disk and (re-)indexes files that are new or have changed since the
        /// last indexing run.
        /// </summary>
        /// <param name="progress">Action to report indexing progress as a percentage (0 to 100); may be null</param>
        /// <param name="cancellationToken">CancellationToken for cancelling the index update</param>
        /// <returns><c>true</c> if the index was modified (entries removed or files re-indexed); otherwise <c>false</c></returns>
        public bool Update(Action<double> progress, CancellationToken cancellationToken)
        {
            var sw = Stopwatch.StartNew();

            var updated = false;

            IsUpdating = true;

            try
            {
                // find all files of the log on disk
                var files = FindFiles();

                // remove indexed files which are no longer present on disk
                foreach (var file in _index.Files.Where(f => !files.Any(p => p.Item1 == f.Item1 && p.Item2 == f.Item2)))
                {
                    _index.Remove(file.Item1, file.Item2);
                    updated = true;
                }

                // find the files which were changed since the last indexing (newer Item3
                // timestamp) or which are not present in the index at all
                var indexFiles   = _index.Files;
                var changedFiles = files
                                   .GroupJoin(indexFiles, f => f.Item1 + f.Item2, f => f.Item1 + f.Item2, (file, index) => new { file, index = index.FirstOrDefault() })
                                   .Where(a => a.index == null || a.file.Item3 > a.index.Item3)
                                   .Select(a => a.file)
                                   .ToList();

                // index all changed files in parallel
                if (changedFiles.Count > 0)
                {
                    // the maximum progress value is the total size (Item4) of all files to index
                    _progressMaximum = changedFiles.Sum(f => f.Item4);

                    // update the index with the changed files; progressCount accumulates the
                    // bytes read across all parallel workers via Interlocked.Add
                    var progressCount = 0L;
                    Parallel.ForEach(changedFiles,
                                     element =>
                    {
                        // create the indexer state for the file
                        var state = _indexers.Select(i => i.Initialize(element.Item5, element.Item1, element.Item2, element.Item4, element.Item3, false)).ToArray();

                        // create a stream for the file or archive member
                        var length = 0L;
                        var stream = OpenFileStream(element.Item1, element.Item2, out length);

                        // read and tokenize the file
                        CountingReader streamreader = null;
                        TokenReader tokenreader     = null;

                        var sw2 = Stopwatch.StartNew();
                        try
                        {
                            var buffer   = new Token[1024];
                            streamreader = new CountingReader(stream, p => progress?.Invoke(Interlocked.Add(ref progressCount, p) * 100.0 / _progressMaximum), cancellationToken);
                            tokenreader  = new TokenReader(streamreader, element.Item1, element.Item2, 0, streamreader.CurrentEncoding);

                            var count = 0;
                            while ((count = tokenreader.Read(buffer, 0, buffer.Length)) > 0)
                            {
                                for (var i = 0; i < _indexers.Length; i++)
                                {
                                    _indexers[i].Update(state[i], buffer, count);
                                }
                            }
                        }
                        finally
                        {
                            // dispose only the outermost reader that was successfully created;
                            // NOTE(review): assumes TokenReader disposes the wrapped CountingReader — confirm
                            if (tokenreader != null)
                            {
                                tokenreader.Dispose();
                            }
                            else if (streamreader != null)
                            {
                                streamreader.Dispose();
                            }
                            sw2.Stop();
                            _logger.Info($"Log::Read(): Reading {element.Item1}:{element.Item2} completed in {sw2.ElapsedMilliseconds}ms");
                        }

                        // complete the file on the indexers
                        for (var i = 0; i < _indexers.Length; i++)
                        {
                            _indexers[i].Complete(state[i]);
                        }
                    });

                    // complete the indexers
                    for (var i = 0; i < _indexers.Length; i++)
                    {
                        _indexers[i].Complete();
                    }

                    updated = true;
                }

                // send the collection changed event
                if (updated)
                {
                    CollectionChanged?.Invoke(this, new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Reset));
                }
            }
            finally
            {
                sw.Stop();

                // always signal the end of the update, even when indexing failed or was cancelled
                // (previously an exception left IsUpdating stuck at true)
                IsUpdating = false;
            }

            // raise property changes
            if (updated)
            {
                _logger.Info($"Log::Update(): Updating index completed in {sw.ElapsedMilliseconds}ms");
                PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(nameof(Files)));
                PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(nameof(Count)));
                PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(nameof(Tokens)));
            }
            else
            {
                _logger.Info("Log::Update(): Index is up to date");
            }

            return updated;
        }