/// <summary>
/// Performs a full index rebuild: resets the index, then rebuilds it from scratch,
/// logging start/end of both phases. The whole operation is serialized via the
/// full-rebuild lock object so only one full rebuild can run at a time.
/// </summary>
/// <param name="indexingOptions">Options consulted to decide whether indexing should run.</param>
/// <param name="cancellationToken">Token forwarded to DoRebuild to cancel the rebuild.</param>
protected override void PerformRebuild(IndexingOptions indexingOptions, CancellationToken cancellationToken)
{
    CrawlingLog.Log.Debug($"{LogPreffix} {Name} PerformRebuild()");
#if (SITECORE8)
    // Extra diagnostics compiled only for Sitecore 8 builds.
    CrawlingLog.Log.Debug($"PerformRebuild - Disposed - {isDisposed}", null);
    CrawlingLog.Log.Debug($"PerformRebuild - Initialized - {initialized}", null);
#endif
    if (!base.ShouldStartIndexing(indexingOptions))
    {
        return;
    }

    // Serialize full rebuilds: reset and rebuild share one update context.
    lock (this.GetFullRebuildLockObject())
    {
        using (IProviderUpdateContext providerUpdateContext = this.CreateFullRebuildContext())
        {
            CrawlingLog.Log.Warn($"[Index={this.Name}] Reset Started", null);
            this.DoReset(providerUpdateContext);
            CrawlingLog.Log.Warn($"[Index={this.Name}] Reset Ended", null);
            CrawlingLog.Log.Warn($"[Index={this.Name}] Full Rebuild Started", null);
            this.DoRebuild(providerUpdateContext, indexingOptions, cancellationToken);
            CrawlingLog.Log.Warn($"[Index={this.Name}] Full Rebuild Ended", null);
        }
    }
}
/// <summary>
/// Refreshes the index from the given starting point using every registered crawler,
/// committing once and optimizing only when indexing has not been stopped.
/// </summary>
/// <param name="indexableStartingPoint">Root indexable from which the refresh begins.</param>
/// <param name="indexingOptions">Options consulted to decide whether indexing should run.</param>
/// <param name="cancellationToken">Token used to cancel the crawler refresh.</param>
protected override void PerformRefresh(IIndexable indexableStartingPoint, IndexingOptions indexingOptions, CancellationToken cancellationToken)
{
    this.VerifyNotDisposed();

    if (!this.ShouldStartIndexing(indexingOptions))
    {
        return;
    }

    // Nothing to do when no crawler reports pending work.
    if (!this.Crawlers.Any(c => c.HasItemsToIndex()))
    {
        return;
    }

    using (var context = this.CreateUpdateContext())
    {
        foreach (var crawler in this.Crawlers)
        {
            // BUG FIX: the caller's cancellation token was previously discarded
            // (CancellationToken.None was passed), making the refresh non-cancellable.
            crawler.RefreshFromRoot(context, indexableStartingPoint, indexingOptions, cancellationToken);
        }

        context.Commit();

        // Skip the potentially expensive optimize step when indexing was stopped.
        if ((this.IndexingState & IndexingState.Stopped) == IndexingState.Stopped)
        {
            return;
        }

        context.Optimize();
    }
}
/// <summary>
/// Indexes all documents of the requested document type, optionally deleting the
/// existing index first, and reports progress through the callback.
/// </summary>
/// <param name="options">Indexing options; DocumentType is required and BatchSize must be positive.</param>
/// <param name="progressCallback">Optional callback receiving progress updates.</param>
/// <param name="cancellationToken">Token used to cancel the indexation.</param>
/// <exception cref="ArgumentNullException">When options or options.DocumentType is missing.</exception>
/// <exception cref="ArgumentException">When options.BatchSize is less than 1.</exception>
public virtual async Task IndexAsync(IndexingOptions options, Action<IndexingProgress> progressCallback, CancellationToken cancellationToken)
{
    if (options == null)
    {
        throw new ArgumentNullException(nameof(options));
    }

    if (string.IsNullOrEmpty(options.DocumentType))
    {
        throw new ArgumentNullException($"{nameof(options)}.{nameof(options.DocumentType)}");
    }

    if (options.BatchSize < 1)
    {
        // FIX: corrected "cannon" -> "cannot" typo in the exception message.
        throw new ArgumentException(@"Batch size cannot be less than 1", $"{nameof(options)}.{nameof(options.BatchSize)}");
    }

    cancellationToken.ThrowIfCancellationRequested();

    var documentType = options.DocumentType;

    if (options.DeleteExistingIndex)
    {
        await DeleteIndexAsync(documentType, progressCallback, cancellationToken);
    }

    // Run every configuration registered for this document type, in order.
    var configs = _configs.Where(c => c.DocumentType.EqualsInvariant(documentType)).ToArray();

    foreach (var config in configs)
    {
        await ProcessConfigurationAsync(config, options, progressCallback, cancellationToken);
    }
}
/// <summary>
/// Performs a rebuild. With SwitchOnRebuild enabled, rebuilds into the secondary cloud
/// index, raises a remote rebuild-end event, waits, then cleans up the old index;
/// otherwise resets the current index in place and rebuilds it.
/// </summary>
/// <param name="indexingOptions">Options consulted to decide whether indexing should run.</param>
/// <param name="cancellationToken">NOTE(review): never consulted in this implementation - confirm intended.</param>
protected override void PerformRebuild(IndexingOptions indexingOptions, CancellationToken cancellationToken)
{
    if (!ShouldStartIndexing(indexingOptions))
    {
        return;
    }

    if (SwitchOnRebuild)
    {
        DoRebuild(indexingOptions);
        ISearchService searchService = SearchService;
        // Notify remote instances that the rebuild finished and the indexes swapped.
        EventRaiser.RaiseRebuildEndEvent(new SwitchOnRebuildEventRemote()
        {
            IndexName = Name,
            SearchCloudIndexName = SearchCloudIndexName,
            RebuildCloudIndexName = RebuildCloudIndexName
        });
        // Give consumers time to move off the old index before deleting it.
        Thread.Sleep(OldIndexCleanUpDelay);
        searchService.Cleanup();
    }
    else
    {
        Reset();
        DoRebuild(indexingOptions);
    }
}
/// <summary>
/// Rebuilds the Azure index from scratch: deletes the remote index, forces the schema
/// to be rebuilt, re-crawls every crawler root, then records the elapsed rebuild time
/// unless indexing was stopped.
/// </summary>
/// <param name="indexingOptions">Options forwarded to each crawler.</param>
/// <param name="cancellationToken">Token used to cancel the crawler rebuild.</param>
protected override void PerformRebuild(IndexingOptions indexingOptions, CancellationToken cancellationToken)
{
    EnsureInitialized();

    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    // Drop the remote index and force the schema to be recreated on next use.
    AzureServiceClient.Indexes.Delete(this.Name);
    AzureSchema.AzureSchemaBuilt = false;

    using (IProviderUpdateContext updateContext = CreateUpdateContext())
    {
        foreach (var crawler in this.Crawlers)
        {
            // BUG FIX: propagate the caller's cancellation token instead of CancellationToken.None.
            crawler.RebuildFromRoot(updateContext, indexingOptions, cancellationToken);
        }

        updateContext.Commit();

        // BUG FIX: Optimize() was previously invoked twice (conditionally before Commit
        // and unconditionally after it, even when stopped). Optimize once, post-commit,
        // and only while indexing is still running.
        if ((this.IndexingState & IndexingState.Stopped) != IndexingState.Stopped)
        {
            updateContext.Optimize();
        }

        stopwatch.Stop();

        if ((this.IndexingState & IndexingState.Stopped) == IndexingState.Stopped)
        {
            return;
        }

        this.PropertyStore.Set(IndexProperties.RebuildTime, stopwatch.ElapsedMilliseconds.ToString((IFormatProvider)CultureInfo.InvariantCulture));
    }
}
/// <summary>
/// Swaps the active and backup indices for the options' document type, but only when a
/// rebuild was requested and the configured search provider supports index swapping.
/// </summary>
/// <param name="options">Indexing options; DeleteExistingIndex gates the swap.</param>
protected virtual async Task SwapIndices(IndexingOptions options)
{
    if (!options.DeleteExistingIndex)
    {
        return;
    }

    if (_searchProvider is ISupportIndexSwap swapCapableProvider)
    {
        await swapCapableProvider.SwapIndexAsync(options.DocumentType);
    }
}
/// <summary>
/// Rebuilds the Azure index: builds the index schema, re-crawls every crawler root,
/// then records the elapsed rebuild time unless indexing was stopped.
/// </summary>
/// <param name="indexingOptions">Options forwarded to each crawler.</param>
public override void Rebuild(IndexingOptions indexingOptions)
{
    //TODO: Build the Azure Index
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    BuildAzureIndex();

    using (IProviderUpdateContext updateContext = CreateUpdateContext())
    {
        foreach (var crawler in this.Crawlers)
        {
            crawler.RebuildFromRoot(updateContext, indexingOptions, CancellationToken.None);
        }

        updateContext.Commit();

        // BUG FIX: Optimize() was previously invoked twice (conditionally before Commit
        // and unconditionally after it, even when stopped). Optimize once, post-commit,
        // and only while indexing is still running.
        if ((this.IndexingState & IndexingState.Stopped) != IndexingState.Stopped)
        {
            updateContext.Optimize();
        }

        stopwatch.Stop();

        if ((this.IndexingState & IndexingState.Stopped) == IndexingState.Stopped)
        {
            return;
        }

        this.PropertyStore.Set(IndexProperties.RebuildTime, stopwatch.ElapsedMilliseconds.ToString((IFormatProvider)CultureInfo.InvariantCulture));
    }
}
/// <summary>
/// Wires up the registration controller's dependencies from the DI container.
/// </summary>
public RegistrationController(IDataStore dataStore, IMemoryCache memoryCache, Fido2 lib, ElasticClient elasticClient, IOptions<IndexingOptions> indexOptions)
{
    _dataStore = dataStore;
    _memoryCache = memoryCache;
    _lib = lib;
    _elasticClient = elasticClient;
    // Unwrap the options monad once at construction time.
    _indexOptions = indexOptions.Value;
}
/// <summary>
/// Wires up the authentication controller's dependencies from the DI container.
/// </summary>
public AuthenticationController(IMemoryCache memoryCache, IDataStore dataStorage, Fido2 lib, IOptions<IndexingOptions> indexOptions, ElasticClient elasticClient)
{
    _dataStore = dataStorage;
    _memoryCache = memoryCache;
    _lib = lib;
    _elasticClient = elasticClient;
    // Unwrap the options monad once at construction time.
    _indexOptions = indexOptions.Value;
}
/// <summary>
/// Swaps the active and backup search indices for the given document type, when the
/// configured search provider supports swapping; always responds 200 OK.
/// </summary>
public async Task<ActionResult> SwapIndexAsync([FromBody] IndexingOptions option)
{
    if (_searchProvider is ISupportIndexSwap swapProvider)
    {
        await swapProvider.SwapIndexAsync(option.DocumentType);
    }

    return Ok();
}
/// <summary>
/// Adds the currently selected paths to the index asynchronously,
/// normalizing each to its full path first.
/// </summary>
private void OnAddToIndex()
{
    var fullPaths = SelectedPaths
        .Select(selected => Path.GetFullPath(selected.Path))
        .ToArray();

    var indexingOptions = new IndexingOptions { IsAsync = true };
    index.Add(fullPaths, indexingOptions);
}
/// <summary>
/// Rebuilds the Azure Search indexes. Currently only the combined index is rebuilt;
/// the course and location indexes are superseded by it.
/// </summary>
public async Task RebuildIndexes(IndexingOptions options)
{
    var serviceClient = CreateSearchServiceClient(_configuration.AzureSearchConfiguration);

    //await RebuildCourseIndex(serviceClient);
    //await RebuildLocationIndex(serviceClient);
    await RebuildCombinedIndex(serviceClient, options);
}
/// <summary>
/// Indexes every document of the options' document type, then records the indexation
/// date so already-processed changes are not indexed again on the next run.
/// </summary>
private async Task IndexAllDocumentsAsync(IndexingOptions options, ICancellationToken cancellationToken)
{
    var previousIndexationDate = GetLastIndexationDate(options.DocumentType);
    var currentIndexationDate = DateTime.UtcNow;

    await _indexingManager.IndexAsync(options, _progressHandler.Progress, cancellationToken);

    // Save indexation date to prevent changes from being indexed again
    SetLastIndexationDate(options.DocumentType, previousIndexationDate, currentIndexationDate);
}
/// <summary>
/// Deletes the given indexable from the index through every registered crawler,
/// committing once all crawlers have processed the deletion.
/// </summary>
public override void Delete(IIndexableId indexableId, IndexingOptions indexingOptions)
{
    using (var updateContext = this.CreateUpdateContext())
    {
        foreach (var crawler in this.Crawlers)
        {
            crawler.Delete(updateContext, indexableId, indexingOptions);
        }

        updateContext.Commit();
    }
}
/// <summary>
/// Refreshes the index from the given starting point through every registered crawler,
/// then optimizes and commits the shared update context.
/// </summary>
public override void Refresh(IIndexable indexableStartingPoint, IndexingOptions indexingOptions)
{
    using (var updateContext = this.CreateUpdateContext())
    {
        foreach (var crawler in this.Crawlers)
        {
            crawler.RefreshFromRoot(updateContext, indexableStartingPoint, indexingOptions);
        }

        // This implementation optimizes before committing.
        updateContext.Optimize();
        updateContext.Commit();
    }
}
/// <summary>
/// Indexes document changes within the options' date window, then records the
/// indexation date to serve as the start date for the next incremental run.
/// </summary>
private async Task IndexChangesAsync(IndexingOptions options, ICancellationToken cancellationToken)
{
    var previousIndexationDate = options.StartDate;
    var currentIndexationDate = DateTime.UtcNow;

    // Only bound the change window when a previous indexation date exists.
    options.EndDate = previousIndexationDate == null ? null : (DateTime?)currentIndexationDate;

    await _indexingManager.IndexAsync(options, _progressHandler.Progress, cancellationToken);

    // Save indexation date. It will be used as a start date for the next indexation
    SetLastIndexationDate(options.DocumentType, previousIndexationDate, currentIndexationDate);
}
/// <summary>
/// Verifies that a full rebuild or an incremental update indexes all documents and
/// reports the expected progress sequence, errors, and indexed fields.
/// </summary>
public async Task CanIndexAllDocuments(string operation, int batchSize, params string[] sourceNames)
{
    // Arrange: a rebuild deletes the existing index first; an update indexes a date range.
    var rebuild = operation == Rebuild;
    var searchProvider = new SearchProvider();
    var documentSources = GetDocumentSources(sourceNames);
    var manager = GetIndexingManager(searchProvider, documentSources);
    var progress = new List<IndexingProgress>();
    var cancellationTokenSource = new CancellationTokenSource();

    var options = new IndexingOptions
    {
        DocumentType = DocumentType,
        DeleteExistingIndex = rebuild,
        StartDate = rebuild ? null : (DateTime?)new DateTime(1, 1, 1),
        EndDate = rebuild ? null : (DateTime?)new DateTime(1, 1, 9),
        BatchSize = batchSize,
    };

    // Act
    await manager.IndexAsync(options, p => progress.Add(p), cancellationTokenSource.Token);

    // Assert: progress is [optional delete] + "calculating" + one item per batch + "finished".
    var expectedBatchesCount = GetExpectedBatchesCount(rebuild, documentSources, batchSize);
    var expectedProgressItemsCount = (rebuild ? 1 : 0) + 1 + expectedBatchesCount + 1;
    Assert.Equal(expectedProgressItemsCount, progress.Count);

    var i = 0;

    if (rebuild)
    {
        Assert.Equal($"{DocumentType}: deleting index", progress[i++].Description);
    }

    Assert.Equal($"{DocumentType}: calculating total count", progress[i++].Description);

    for (var batch = 0; batch < expectedBatchesCount; batch++)
    {
        var progressItem = progress[i++];
        Assert.Equal($"{DocumentType}: {progressItem.ProcessedCount} of {progressItem.TotalCount} have been indexed", progressItem.Description);
    }

    Assert.Equal($"{DocumentType}: indexation finished", progress[i].Description);

    // The "bad1" document is expected to fail and surface as an indexing error.
    ValidateErrors(progress, "bad1");

    // Every indexed document gets one field per source plus the indexation date field.
    var expectedFieldNames = new List<string>(sourceNames) { KnownDocumentFields.IndexationDate };
    ValidateIndexedDocuments(searchProvider.IndexedDocuments.Values, expectedFieldNames, "good2", "good3");
}
//TODO: Could be removed, hasn't changed from what is being overridden, just wanted to see
/// <summary>
/// Rebuilds the index starting at this crawler's root item, raising the
/// indexing:addingrecursive / indexing:addedrecursive events around the traversal.
/// </summary>
public override void RebuildFromRoot(IProviderUpdateContext context, IndexingOptions indexingOptions, CancellationToken cancellationToken)
{
    Assert.ArgumentNotNull(context, "context");

    if (!ShouldStartIndexing(indexingOptions))
    {
        return;
    }

    var indexableRoot = GetIndexableRoot();
    Assert.IsNotNull(indexableRoot, "RebuildFromRoot: Unable to retrieve root item");
    Assert.IsNotNull(DocumentOptions, "DocumentOptions");

    context.Index.Locator.GetInstance<IEvent>().RaiseEvent("indexing:addingrecursive", context.Index.Name, indexableRoot.UniqueId, indexableRoot.AbsolutePath);

    // NOTE(review): this uses the lowercase 'index' field's configuration rather than
    // context.Index.Configuration - confirm both refer to the same index instance.
    AddHierarchicalRecursive(indexableRoot, context, index.Configuration, cancellationToken);

    context.Index.Locator.GetInstance<IEvent>().RaiseEvent("indexing:addedrecursive", context.Index.Name, indexableRoot.UniqueId, indexableRoot.AbsolutePath);
}
/// <summary>
/// Builds the set of change feeds used to enumerate documents for indexation:
/// an in-memory feed when specific document ids were requested, otherwise one feed
/// per document source (including related sources during incremental runs).
/// </summary>
/// <param name="configuration">Document source configuration for the document type.</param>
/// <param name="options">Indexing options (document ids, date window, batch size).</param>
/// <returns>One change feed per participating document source.</returns>
protected virtual async Task<IIndexDocumentChangeFeed[]> GetChangeFeeds(IndexDocumentConfiguration configuration, IndexingOptions options)
{
    // Return in-memory change feed for specific set of document ids.
    if (options.DocumentIds != null)
    {
        return (new IIndexDocumentChangeFeed[]
        {
            new InMemoryIndexDocumentChangeFeed(options.DocumentIds.ToArray(), IndexDocumentChangeType.Modified, options.BatchSize ?? 50)
        });
    }

    // Support old ChangesProvider: wrap it in the feed-factory adapter (mutates the config).
    if (configuration.DocumentSource.ChangeFeedFactory == null)
    {
        configuration.DocumentSource.ChangeFeedFactory = new IndexDocumentChangeFeedFactoryAdapter(configuration.DocumentSource.ChangesProvider);
    }

    var factories = new List<IIndexDocumentChangeFeedFactory>
    {
        configuration.DocumentSource.ChangeFeedFactory
    };

    // In case of 'full' re-index we don't want to include the related sources,
    // because that would double the indexation work.
    // E.g. All products would get indexed for the primary document source
    // and afterwards all products would get re-indexed for all the prices as well.
    if (options.StartDate != null || options.EndDate != null)
    {
        foreach (var related in configuration.RelatedSources ?? Enumerable.Empty<IndexDocumentSource>())
        {
            // Support old ChangesProvider.
            if (related.ChangeFeedFactory == null)
            {
                related.ChangeFeedFactory = new IndexDocumentChangeFeedFactoryAdapter(related.ChangesProvider);
            }

            factories.Add(related.ChangeFeedFactory);
        }
    }

    // Create all feeds concurrently with the same date window and batch size.
    return (await Task.WhenAll(factories.Select(x => x.CreateFeed(options.StartDate, options.EndDate, options.BatchSize ?? 50))));
}
/// <summary>
/// Rebuilds the index from each crawler's root, optimizes and commits, and records
/// the total elapsed rebuild time in the property store.
/// </summary>
protected virtual void DoRebuild(IndexingOptions indexingOptions)
{
    var stopwatch = Stopwatch.StartNew();

    using (var updateContext = this.CreateUpdateContext())
    {
        foreach (var crawler in this.Crawlers)
        {
            crawler.RebuildFromRoot(updateContext, indexingOptions);
        }

        updateContext.Optimize();
        updateContext.Commit();
    }

    stopwatch.Stop();
    this.PropertyStore.Set(IndexProperties.RebuildTime, stopwatch.ElapsedMilliseconds.ToString(CultureInfo.InvariantCulture));
}
/// <summary>
/// Rebuilds the combined Azure Search index: drops any existing index, recreates it,
/// optionally wires up course synonyms, then uploads all items.
/// </summary>
public async Task RebuildCombinedIndex(ISearchServiceClient serviceClient, IndexingOptions options)
{
    var indexName = SearchConstants.CombinedIndexName;

    await DeleteIndexIfExists(indexName, serviceClient);
    //await CreateCustomAnalyzers(serviceClient);
    await CreateCombinedIndex(serviceClient);

    if (options.HasFlag(IndexingOptions.UseSynonyms))
    {
        await CreateCourseSynonymMap(serviceClient);
        await EnableSynonymsInCombinedIndex(serviceClient);
    }

    var indexClient = serviceClient.Indexes.GetClient(indexName);
    await UploadCombinedIndexItems(indexClient);
}
/// <summary>
/// Deletes the given indexable through every registered crawler and commits the change.
/// Bails out early when the index is disposed or indexing should not start.
/// </summary>
public override void Delete(IIndexableId indexableId, IndexingOptions indexingOptions)
{
    this.VerifyNotDisposed();

    if (!this.ShouldStartIndexing(indexingOptions))
    {
        return;
    }

    using (var updateContext = this.CreateUpdateContext())
    {
        foreach (var crawler in this.Crawlers)
        {
            crawler.Delete(updateContext, indexableId, indexingOptions);
        }

        updateContext.Commit();
    }
}
/// <summary>
/// Verifies that indexing an explicit id list (including a failing and an unknown id)
/// reports the expected progress sequence, errors, and indexed fields.
/// </summary>
public async Task CanIndexSpecificDocuments(int batchSize, params string[] sourceNames)
{
    // Arrange: "bad1" is expected to fail, "non-existent-id" to be skipped.
    var searchProvider = new SearchProvider();
    var documentSources = GetDocumentSources(sourceNames);
    var manager = GetIndexingManager(searchProvider, documentSources);
    var progress = new List<IndexingProgress>();
    var cancellationTokenSource = new CancellationTokenSource();

    var options = new IndexingOptions
    {
        DocumentType = DocumentType,
        DocumentIds = new[] { "bad1", "good3", "non-existent-id" },
        BatchSize = batchSize,
    };

    // Act
    await manager.IndexAsync(options, p => progress.Add(p), cancellationTokenSource.Token);

    // Assert: progress is "calculating" + one item per batch + "finished".
    var expectedBatchesCount = GetBatchesCount(options.DocumentIds.Count, batchSize);
    var expectedProgressItemsCount = 1 + expectedBatchesCount + 1;
    Assert.Equal(expectedProgressItemsCount, progress.Count);

    var i = 0;
    Assert.Equal($"{DocumentType}: calculating total count", progress[i++].Description);

    for (var batch = 0; batch < expectedBatchesCount; batch++)
    {
        var progressItem = progress[i++];
        Assert.Equal($"{DocumentType}: {progressItem.ProcessedCount} of {progressItem.TotalCount} have been indexed", progressItem.Description);
    }

    Assert.Equal($"{DocumentType}: indexation finished", progress[i].Description);

    // "bad1" should surface as an indexing error.
    ValidateErrors(progress, "bad1");

    // Only "good3" ends up indexed, with one field per source plus the indexation date field.
    var expectedFieldNames = new List<string>(sourceNames) { KnownDocumentFields.IndexationDate };
    ValidateIndexedDocuments(searchProvider.IndexedDocuments.Values, expectedFieldNames, "good3");
}
/// <summary>
/// Indexes all documents of the requested document type, defaulting the batch size
/// from settings, optionally deleting the existing index first, and reporting
/// progress through the callback.
/// </summary>
/// <param name="options">Indexing options; DocumentType is required.</param>
/// <param name="progressCallback">Optional callback receiving progress updates.</param>
/// <param name="cancellationToken">Token used to cancel the indexation.</param>
/// <exception cref="ArgumentNullException">When options or options.DocumentType is missing.</exception>
/// <exception cref="ArgumentException">When the effective batch size is less than 1.</exception>
public virtual async Task IndexAsync(IndexingOptions options, Action<IndexingProgress> progressCallback, ICancellationToken cancellationToken)
{
    if (options == null)
    {
        throw new ArgumentNullException(nameof(options));
    }

    if (string.IsNullOrEmpty(options.DocumentType))
    {
        throw new ArgumentNullException($"{nameof(options)}.{nameof(options.DocumentType)}");
    }

    // Default the batch size from settings when not explicitly provided.
    if (options.BatchSize == null)
    {
        options.BatchSize = _settingsManager?.GetValue(ModuleConstants.Settings.General.IndexPartitionSize.Name, 50) ?? 50;
    }

    if (options.BatchSize < 1)
    {
        // FIX: repaired the malformed interpolated-string prefix (@$ ") and the
        // "cannon" -> "cannot" typo in the exception message.
        throw new ArgumentException($"{nameof(options.BatchSize)} {options.BatchSize} cannot be less than 1", $"{nameof(options)}");
    }

    cancellationToken.ThrowIfCancellationRequested();

    var documentType = options.DocumentType;

    // each Search Engine implementation has its own way of handing index rebuild
    if (options.DeleteExistingIndex)
    {
        progressCallback?.Invoke(new IndexingProgress($"{documentType}: deleting index", documentType));
        await _searchProvider.DeleteIndexAsync(documentType);
    }

    // Run every configuration registered for this document type, in order.
    var configs = _configs.Where(c => c.DocumentType.EqualsInvariant(documentType)).ToArray();

    foreach (var config in configs)
    {
        await ProcessConfigurationAsync(config, options, progressCallback, cancellationToken);
    }
}
/// <summary>
/// Rebuilds the index from each crawler's root inside a fresh full-rebuild context
/// and records the elapsed rebuild time unless indexing was stopped.
/// </summary>
/// <param name="context">NOTE(review): this parameter is never used - a new full-rebuild
/// context is created below instead. Confirm whether the caller's context should be reused.</param>
/// <param name="indexingOptions">Options forwarded to each crawler.</param>
/// <param name="cancellationToken">Token forwarded to each crawler rebuild.</param>
protected virtual void DoRebuild(IProviderUpdateContext context, IndexingOptions indexingOptions, CancellationToken cancellationToken)
{
    var stopwatch = new Stopwatch();
    stopwatch.Start();

    using (IProviderUpdateContext providerUpdateContext = this.CreateFullRebuildContext())
    {
        foreach (IProviderCrawler current in base.Crawlers)
        {
            current.RebuildFromRoot(providerUpdateContext, indexingOptions, cancellationToken);
        }

        // Skip the optimize step when indexing has been stopped mid-rebuild.
        if ((base.IndexingState & IndexingState.Stopped) != IndexingState.Stopped)
        {
            providerUpdateContext.Optimize();
        }

        providerUpdateContext.Commit();
    }

    stopwatch.Stop();

    // Only record the rebuild time for a run that completed without being stopped.
    if ((base.IndexingState & IndexingState.Stopped) != IndexingState.Stopped)
    {
        this.PropertyStore.Set(IndexProperties.RebuildTime, stopwatch.ElapsedMilliseconds.ToString(CultureInfo.InvariantCulture));
    }
}
/// <summary>
/// Indexes every document under <paramref name="folder"/>, deleting any existing index
/// at <paramref name="target"/> first, and prints the time spent to the console.
/// </summary>
public void Index(string folder, string target)
{
    if (!Directory.Exists(folder))
    {
        Console.WriteLine("Document directory '" + folder + "' does not exist or is not readable, " + "please check the path");
        return;
    }

    if (Directory.Exists(target))
    {
        Directory.Delete(target, true);
    }
    //Console.WriteLine("Cannot save index to '" + target + "' directory, please delete it first");
    //return;

    var startedAt = DateTime.Now;

    try
    {
        var indexer = _indexerFactory.GetInstance(IndexerType.Manual);
        var indexingOptions = new IndexingOptions { Folder = folder };
        indexer.Index(indexingOptions);

        var finishedAt = DateTime.Now;
        var elapsed = finishedAt - startedAt;
        Console.WriteLine("Time spent: , {0:dd\\.hh\\:mm\\:ss\\.fffff}", elapsed);
    }
    catch (IOException e)
    {
        Console.WriteLine(" caught a " + e.GetType() + "\n with message: " + e.Message);
    }
}
/// <summary>Asynchronous rebuild is not supported by this implementation.</summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public Task RebuildAsync(IndexingOptions indexingOptions, CancellationToken cancellationToken) =>
    throw new NotImplementedException();
// Intentionally a no-op: this implementation ignores batch update requests.
public void Update(IEnumerable<IIndexableUniqueId> indexableUniqueIds, IndexingOptions indexingOptions) { }
/// <summary>
/// Deletes the given indexable from the index via every registered crawler, then commits.
/// Does nothing when the index is disposed or indexing should not start.
/// </summary>
public override void Delete(IIndexableId indexableId, IndexingOptions indexingOptions)
{
    this.VerifyNotDisposed();

    if (!this.ShouldStartIndexing(indexingOptions))
    {
        return;
    }

    using (var deleteContext = this.CreateUpdateContext())
    {
        foreach (var crawler in this.Crawlers)
        {
            crawler.Delete(deleteContext, indexableId, indexingOptions);
        }

        deleteContext.Commit();
    }
}
// Intentionally a no-op: deletions are ignored by this implementation.
public void Delete(IIndexableId indexableId, IndexingOptions indexingOptions) { }
/// <summary>Synchronous rebuild entry point; delegates to PerformRebuild without cancellation.</summary>
public override void Rebuild(IndexingOptions indexingOptions) =>
    PerformRebuild(indexingOptions, CancellationToken.None);
/// <summary>Asynchronous refresh is not supported by this implementation.</summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public Task RefreshAsync(IIndexable indexableStartingPoint, IndexingOptions indexingOptions, CancellationToken cancellationToken) =>
    throw new NotImplementedException();
/// <summary>Refresh is not supported by this index implementation.</summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
protected override void PerformRefresh(IIndexable indexableStartingPoint, IndexingOptions indexingOptions, CancellationToken cancellationToken) =>
    throw new NotImplementedException();
/// <summary>
/// Updates a batch of indexables: raises start events locally and to remote instances,
/// runs crawler updates (in parallel when the context supports it), commits, then
/// raises end events. Stop requests are honored between items and cause an early commit.
/// </summary>
private void PerformUpdate(IEnumerable<IIndexableUniqueId> indexableUniqueIds, IndexingOptions indexingOptions)
{
    if (!this.ShouldStartIndexing(indexingOptions))
        return;

    // Announce the (non-full-rebuild) update locally and queue the remote event.
    var instance1 = this.Locator.GetInstance<IEvent>();
    instance1.RaiseEvent("indexing:start", new object[2] { this.Name, false });
    var instance2 = this.Locator.GetInstance<IEventManager>();
    var indexingStartedEvent1 = new IndexingStartedEvent();
    indexingStartedEvent1.IndexName = this.Name;
    indexingStartedEvent1.FullRebuild = false;
    var indexingStartedEvent2 = indexingStartedEvent1;
    instance2.QueueEvent<IndexingStartedEvent>(indexingStartedEvent2);

    var context = this.CreateUpdateContext();
    try
    {
        if (context.IsParallel)
        {
            Parallel.ForEach<IIndexableUniqueId>(indexableUniqueIds, context.ParallelOptions, (Action<IIndexableUniqueId>)(uniqueId =>
            {
                // Re-check inside the loop so a pause/stop request takes effect mid-batch.
                if (!this.ShouldStartIndexing(indexingOptions))
                    return;
                foreach (var providerCrawler in (IEnumerable<IProviderCrawler>)this.Crawlers)
                    providerCrawler.Update(context, uniqueId, indexingOptions);
            }));
            // Commit whatever completed before indexing was stopped, then bail out.
            if (!this.ShouldStartIndexing(indexingOptions))
            {
                context.Commit();
                return;
            }
        }
        else
        {
            foreach (var indexableUniqueId in indexableUniqueIds)
            {
                // Commit progress so far and bail out when indexing was stopped.
                if (!this.ShouldStartIndexing(indexingOptions))
                {
                    context.Commit();
                    return;
                }
                foreach (IProviderCrawler providerCrawler in (IEnumerable<IProviderCrawler>)this.Crawlers)
                    providerCrawler.Update(context, indexableUniqueId, indexingOptions);
            }
        }
        context.Commit();
    }
    finally
    {
        if (context != null)
            context.Dispose();
    }

    // NOTE(review): the end events below are skipped by the early returns above - confirm intended.
    instance1.RaiseEvent("indexing:end", new object[2] { this.Name, false });
    var instance3 = this.Locator.GetInstance<IEventManager>();
    var indexingFinishedEvent1 = new IndexingFinishedEvent();
    indexingFinishedEvent1.IndexName = this.Name;
    indexingFinishedEvent1.FullRebuild = false;
    var indexingFinishedEvent2 = indexingFinishedEvent1;
    instance3.QueueEvent<IndexingFinishedEvent>(indexingFinishedEvent2);
}
/// <summary>Deletion by unique id is not supported by this implementation.</summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public void Delete(IIndexableUniqueId indexableUniqueId, IndexingOptions indexingOptions) =>
    throw new NotImplementedException();
/// <summary>
/// Refreshes the index from the given starting point using every registered crawler,
/// committing once and optimizing only when indexing has not been stopped.
/// </summary>
/// <param name="indexableStartingPoint">Root indexable from which the refresh begins.</param>
/// <param name="indexingOptions">Options consulted to decide whether indexing should run.</param>
/// <param name="cancellationToken">Token used to cancel the crawler refresh.</param>
protected override void PerformRefresh(IIndexable indexableStartingPoint, IndexingOptions indexingOptions, CancellationToken cancellationToken)
{
    this.VerifyNotDisposed();

    if (!this.ShouldStartIndexing(indexingOptions))
        return;

    // Skip the refresh entirely when no crawler has pending work.
    if (!this.Crawlers.Any(c => c.HasItemsToIndex()))
        return;

    using (var context = this.CreateUpdateContext())
    {
        foreach (var crawler in this.Crawlers)
        {
            // BUG FIX: forward the caller's cancellation token; CancellationToken.None
            // was previously passed, making the refresh impossible to cancel.
            crawler.RefreshFromRoot(context, indexableStartingPoint, indexingOptions, cancellationToken);
        }

        context.Commit();

        // Do not optimize when indexing was stopped during the refresh.
        if ((this.IndexingState & IndexingState.Stopped) == IndexingState.Stopped)
            return;

        context.Optimize();
    }
}
/// <summary>
/// Updates a single indexable by delegating to the batch update overload.
/// </summary>
public override void Update(IIndexableUniqueId indexableUniqueId, IndexingOptions indexingOptions)
{
    // BUG FIX: the supplied indexingOptions were previously discarded and
    // IndexingOptions.Default was always passed to the batch overload.
    Update(new List<IIndexableUniqueId> { indexableUniqueId }, indexingOptions);
}
// Intentionally a no-op: single-item updates are ignored by this implementation.
public override void Update(IIndexableUniqueId indexableUniqueId, IndexingOptions indexingOptions) { }
/// <summary>Refresh is not supported by this index implementation.</summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public override void Refresh(IIndexable indexableStartingPoint, IndexingOptions indexingOptions) =>
    throw new NotImplementedException();
/// <summary>Rebuild is not supported by this index implementation.</summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
protected override void PerformRebuild(IndexingOptions indexingOptions, CancellationToken cancellationToken) =>
    throw new NotImplementedException();
// Intentionally a no-op: deletions are ignored by this implementation.
public override void Delete(IIndexableId indexableId, IndexingOptions indexingOptions) { }
/// <summary>Rebuild is not supported by this implementation.</summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public void Rebuild(IndexingOptions indexingOptions) =>
    throw new NotImplementedException();
/// <summary>No-op rebuild; completes immediately.</summary>
public Task RebuildIndexes(IndexingOptions options)
{
    // IDIOM: Task.CompletedTask avoids the needless Task<int> allocation of Task.FromResult(0).
    return Task.CompletedTask;
}
/// <summary>
/// Runs the synchronous Rebuild on the thread pool; the token only cancels scheduling,
/// not a rebuild already in progress.
/// </summary>
public override Task RebuildAsync(IndexingOptions indexingOptions, CancellationToken cancellationToken) =>
    Task.Run(() => Rebuild(indexingOptions), cancellationToken);
// Intentionally a no-op until the Azure refresh implementation lands.
public override void Refresh(IIndexable indexableStartingPoint, IndexingOptions indexingOptions)
{
    //TODO: Refresh Azure Indexes
}
/// <summary>Batch update is not supported by this index implementation.</summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public override void Update(IEnumerable<IIndexableUniqueId> indexableUniqueIds, IndexingOptions indexingOptions) =>
    throw new NotImplementedException();
/// <summary>Synchronous refresh entry point; delegates to PerformRefresh without cancellation.</summary>
public override void Refresh(IIndexable indexableStartingPoint, IndexingOptions indexingOptions) =>
    PerformRefresh(indexableStartingPoint, indexingOptions, CancellationToken.None);
/// <summary>
/// Runs the synchronous Refresh on the thread pool; the token only cancels scheduling,
/// not a refresh already in progress.
/// </summary>
public override Task RefreshAsync(IIndexable indexableStartingPoint, IndexingOptions indexingOptions, CancellationToken cancellationToken) =>
    Task.Run(() => Refresh(indexableStartingPoint, indexingOptions), cancellationToken);
/// <summary>Batch update entry point; delegates to PerformUpdate.</summary>
public override void Update(IEnumerable<IIndexableUniqueId> indexableUniqueIds, IndexingOptions indexingOptions) =>
    PerformUpdate(indexableUniqueIds, indexingOptions);
/// <summary>
/// Indexes all pending changes for one document configuration: validates the
/// configuration, enumerates its change feeds batch by batch, indexes each batch
/// directly (or hands the ids to the background worker), and reports progress
/// after every batch plus a final summary.
/// </summary>
/// <param name="configuration">Document source configuration to process; fully validated below.</param>
/// <param name="options">Indexing options (document type, batch size, date window).</param>
/// <param name="progressCallback">Optional callback receiving progress updates.</param>
/// <param name="cancellationToken">Token used to cancel the indexation.</param>
protected virtual async Task ProcessConfigurationAsync(IndexDocumentConfiguration configuration, IndexingOptions options, Action<IndexingProgress> progressCallback, ICancellationToken cancellationToken)
{
    if (configuration == null)
    {
        throw new ArgumentNullException(nameof(configuration));
    }

    if (string.IsNullOrEmpty(configuration.DocumentType))
    {
        throw new ArgumentNullException($"{nameof(configuration)}.{nameof(configuration.DocumentType)}");
    }

    if (configuration.DocumentSource == null)
    {
        throw new ArgumentNullException($"{nameof(configuration)}.{nameof(configuration.DocumentSource)}");
    }

    if (configuration.DocumentSource.ChangesProvider == null)
    {
        throw new ArgumentNullException($"{nameof(configuration)}.{nameof(configuration.DocumentSource)}.{nameof(configuration.DocumentSource.ChangesProvider)}");
    }

    if (configuration.DocumentSource.DocumentBuilder == null)
    {
        throw new ArgumentNullException($"{nameof(configuration)}.{nameof(configuration.DocumentSource)}.{nameof(configuration.DocumentSource.DocumentBuilder)}");
    }

    cancellationToken.ThrowIfCancellationRequested();

    var documentType = options.DocumentType;

    progressCallback?.Invoke(new IndexingProgress($"{documentType}: calculating total count", documentType));

    // The primary builder produces the base document; secondary builders enrich it.
    var batchOptions = new BatchIndexingOptions
    {
        DocumentType = options.DocumentType,
        PrimaryDocumentBuilder = configuration.DocumentSource.DocumentBuilder,
        SecondaryDocumentBuilders = configuration.RelatedSources
            ?.Where(s => s.DocumentBuilder != null)
            .Select(s => s.DocumentBuilder)
            .ToList(),
    };

    var feeds = await GetChangeFeeds(configuration, options);

    // Try to get total count to indicate progress. Some feeds don't have a total count.
    var totalCount = feeds.Any(x => x.TotalCount == null)
        ? (long?)null
        : feeds.Sum(x => x.TotalCount ?? 0);

    long processedCount = 0;

    var changes = await GetNextChangesAsync(feeds);

    while (changes.Any())
    {
        IList<string> errors = null;

        if (_backgroundWorker == null)
        {
            // Index the batch inline and collect any per-document errors.
            var indexingResult = await ProcessChangesAsync(changes, batchOptions, cancellationToken);
            errors = GetIndexingErrors(indexingResult);
        }
        else
        {
            // We're executing a job to index all documents or the changes since a specific time.
            // Priority for this indexation work should be quite low.
            var documentIds = changes
                .Select(x => x.DocumentId)
                .Distinct()
                .ToArray();

            _backgroundWorker.IndexDocuments(configuration.DocumentType, documentIds, IndexingPriority.Background);
        }

        processedCount += changes.Count;

        // Report either "x of y" (when a total is known) or a running count.
        var description = totalCount != null
            ? $"{documentType}: {processedCount} of {totalCount} have been indexed"
            : $"{documentType}: {processedCount} have been indexed";

        progressCallback?.Invoke(new IndexingProgress(description, documentType, totalCount, processedCount, errors));

        changes = await GetNextChangesAsync(feeds);
    }

    progressCallback?.Invoke(new IndexingProgress($"{documentType}: indexation finished", documentType, totalCount ?? processedCount, processedCount));
}
/// <summary>Refresh is not supported by this implementation.</summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public void Refresh(IIndexable indexableStartingPoint, IndexingOptions indexingOptions) =>
    throw new NotImplementedException();