public void UpdateStructure_UpdatesSelfAndChildren()
{
    // Arrange: a page whose three descendants are resolvable through the content loader.
    var page = Factory.GetPageData();
    var children = new[] { Factory.GetPageData(), Factory.GetPageData(), Factory.GetPageData() };
    var childrenLinks = children.Select(x => x.ContentLink).ToArray();

    _fixture.ServiceLocationMock.ContentLoaderMock
        .Setup(m => m.GetDescendents(page.ContentLink))
        .Returns(childrenLinks);
    _fixture.ServiceLocationMock.ContentLoaderMock
        .Setup(m => m.GetItems(childrenLinks, It.IsAny<CultureInfo>()))
        .Returns(children);
    _fixture.ServiceLocationMock.CoreIndexerMock.Invocations.Clear();

    // Act
    IndexingStatus result = _indexer.UpdateStructure(page);

    // Assert: the page itself and each descendant were indexed exactly once.
    var expectedIds = new[]
    {
        page.ContentLink.ID,
        childrenLinks[0].ID,
        childrenLinks[1].ID,
        childrenLinks[2].ID
    };

    foreach (int id in expectedIds)
    {
        _fixture.ServiceLocationMock.CoreIndexerMock.Verify(
            m => m.Update(id.ToString(), It.IsAny<object>(), It.IsAny<string>(), It.IsAny<Type>()),
            Times.Once);
    }
}
// Rebuilds the named index from all content providers.
// Returns true when documents were written, false when there was nothing to index.
private bool UpdateIndex(string indexName, string settingsFilename, IndexSettings indexSettings)
{
    logger.Value.Info("Rebuilding index");
    indexingStatus = IndexingStatus.Rebuilding;

    // Collect fresh documents from every registered content provider.
    var documents = new List<IDocumentIndex>();
    foreach (var provider in contentProviders)
    {
        documents.AddRange(provider.GetDocuments(id => indexProvider.New(id)));
    }

    // Persist the current state of the index before writing.
    indexSettings.LastIndexedUtc = DateTime.UtcNow;
    appDataFolder.CreateFile(settingsFilename, indexSettings.ToXml());

    if (documents.Count == 0)
    {
        // Nothing more to do.
        indexingStatus = IndexingStatus.Idle;
        return false;
    }

    // Save new and updated documents to the index.
    indexProvider.Store(indexName, documents);
    logger.Value.InfoFormat("Added documents to index: {0}", documents.Count);
    return true;
}
// Serializes this database summary into its dynamic JSON wire representation.
public DynamicJsonValue ToJson()
{
    // Nested size object is built first for readability.
    var totalSize = new DynamicJsonValue
    {
        [nameof(Size.HumaneSize)] = TotalSize.HumaneSize,
        [nameof(Size.SizeInBytes)] = TotalSize.SizeInBytes
    };

    var json = new DynamicJsonValue
    {
        [nameof(Name)] = Name,
        [nameof(Disabled)] = Disabled,
        [nameof(TotalSize)] = totalSize,
        [nameof(IsAdmin)] = IsAdmin,
        [nameof(IsEncrypted)] = IsEncrypted,
        [nameof(UpTime)] = UpTime?.ToString(),
        [nameof(BackupInfo)] = BackupInfo?.ToJson(),
        [nameof(Alerts)] = Alerts,
        // NOTE(review): RejectClients is hard-coded to false here — confirm intended.
        [nameof(RejectClients)] = false,
        [nameof(IndexingErrors)] = IndexingErrors,
        [nameof(DocumentsCount)] = DocumentsCount,
        [nameof(HasRevisionsConfiguration)] = HasRevisionsConfiguration,
        [nameof(HasExpirationConfiguration)] = HasExpirationConfiguration,
        [nameof(IndexesCount)] = IndexesCount,
        [nameof(IndexingStatus)] = IndexingStatus.ToString(),
        [nameof(NodesTopology)] = NodesTopology?.ToJson(),
        [nameof(ReplicationFactor)] = ReplicationFactor,
        [nameof(DynamicNodesDistribution)] = DynamicNodesDistribution,
        [nameof(DeletionInProgress)] = DynamicJsonValue.Convert(DeletionInProgress)
    };

    return json;
}
// Re-indexes a single content item (optionally with its descendants) and
// reports the resulting indexing status as JSON.
public JsonResult UpdateItem(string id, bool recursive = false)
{
    try
    {
        if (!_contentLoader.TryGet(ContentReference.Parse(id), out IContent content))
        {
            // Unknown content reference.
            return Json(new { status = nameof(IndexingStatus.Error) });
        }

        string indexName = null;

        // Point catalog content to correct index
        if (Constants.CommerceProviderName.Equals(content.ContentLink.ProviderName))
        {
            string lang = _indexer.GetLanguage(content);
            indexName = _settings.GetCustomIndexName($"{_settings.Index}-{Constants.CommerceProviderName}", lang);
        }

        IndexingStatus status = recursive
            ? _indexer.UpdateStructure(content, indexName)
            : _indexer.Update(content, indexName);

        return Json(new { status = status.ToString() });
    }
    catch (Exception ex)
    {
        Logger.Error("Error updating item with id '" + id + "'", ex);
        return Json(new { status = nameof(IndexingStatus.Error), error = ex.Message });
    }
}
// Raises the StateChanged event with a two-element state description.
// FIX: the original had a stray empty statement between the if-block's closing
// brace and `else` ("} ; else"), which detaches the else and does not compile.
private void OnStateChanged(IndexingStatus status)
{
    // No listeners: nothing to report.
    if (StateChanged == null)
    {
        return;
    }

    // NotRunning is reported as a synthetic "IDLE" state; otherwise query live state.
    string[] state_info;
    if (status == IndexingStatus.NotRunning)
    {
        state_info = new string[] { "IDLE", "" };
    }
    else
    {
        state_info = GetState();
    }

    if (Debug)
    {
        Console.Error.WriteLine("StateChanged -- {0} - {1}", state_info[0], state_info[1]);
    }

    StateChanged(state_info);
}
public void Update_IncludesShortcutTypeNormal()
{
    // A regular page (shortcut type Normal) must be pushed to the core indexer.
    TestPage input = Factory.GetTestPage();

    IndexingStatus status = _indexer.Update(input);

    Assert.Equal(IndexingStatus.Ok, status);
    _coreIndexerMock.Verify(
        m => m.Update(It.IsAny<string>(), It.IsAny<object>(), It.IsAny<string>(), It.IsAny<Type>()),
        Times.Once);
}
public void Update_TypeDecoratedWithExcludeAttribute_IsNotIndexed()
{
    // Types marked with the exclude attribute must never reach the core indexer.
    var input = new TypeWithExcludeAttribute();

    IndexingStatus status = _indexer.Update(input);

    Assert.Equal(IndexingStatus.ExcludedByConvention, status);
    _coreIndexerMock.Verify(
        m => m.Update(It.IsAny<string>(), It.IsAny<object>(), It.IsAny<string>(), null),
        Times.Never());
}
public void Update_TypeWithHideFromSearchProperty_IsNotIndexed()
{
    // A HideFromSearch property on the type must keep it out of the index.
    var input = new TypeWithHideFromSearchProperty();

    IndexingStatus status = _indexer.Update(input);

    Assert.Equal(IndexingStatus.HideFromSearchProperty, status);
    _coreIndexerMock.Verify(
        m => m.Update(It.IsAny<string>(), It.IsAny<object>(), It.IsAny<string>(), null),
        Times.Never());
}
public void Update_ContentWithParentExcludedByRoot_IsNotIndexed()
{
    // Arrange: register the parent id as an excluded root, then build a child page under it.
    var parentId = Factory.GetInteger();
    Epinova.ElasticSearch.Core.Conventions.Indexing.Instance.ExcludeRoot(parentId);
    var childPage = Factory.GetTestPage(parentId: parentId);

    // Act
    IndexingStatus status = _indexer.Update(childPage);

    // Assert: content under an excluded root is skipped by convention.
    Assert.Equal(IndexingStatus.ExcludedByConvention, status);
}
public void Update_SkipsShortcutTypesOtherThanNormal(PageShortcutType type)
{
    // Arrange: a page whose shortcut/link type is anything but Normal.
    PageData input = Factory.GetPageData();
    input.LinkType = type;

    // Act
    IndexingStatus status = _indexer.Update(input);

    // Assert: shortcut pages are treated as hidden and never indexed.
    Assert.Equal(IndexingStatus.HideFromSearchProperty, status);
    _coreIndexerMock.Verify(
        m => m.Update(It.IsAny<string>(), It.IsAny<object>(), It.IsAny<string>(), It.IsAny<Type>()),
        Times.Never);
}
// Creates an empty indexer with default settings; indexing runs on a background thread.
// FIX: the original assigned mStatus = IndexingStatus.Empty twice — the duplicate is removed.
public DirectoryIndexer()
{
    mStatus = IndexingStatus.Empty;
    mKnownFiles = new ArrayList();
    mIndexingThread = new Thread(new ThreadStart(IndexingThreadFunc));

    // Default settings.
    mProjectRoot = ".";
    mMaxIndexSize = 20000;
    mMaxSearchResults = 50;
    mSearchWholePathString = false;
}
/// <summary>
/// Constructs a new instance of the Editor Search Engine and
/// binds it to the specified model.
/// </summary>
/// <param name="model">The model to which the search engine will be bound.</param>
public EditorSearchEngine(T model)
{
    // Engine starts out Ready; indexing work flips the status later.
    this.status = IndexingStatus.Ready;
    // Pending per-object operations, drained by the background indexing thread.
    this.indexingQueue = new Dictionary<ModelObject, IndexingOperation>();
    // Signaled whenever work is enqueued; reset when the queue drains.
    this.queueResetEvent = new ManualResetEvent(false);
    // Captures the creating thread's synchronization context for async callbacks.
    this.asyncOperation = AsyncOperationManager.CreateOperation(null);
    this.registeredObjects = new Dictionary<ModelObject, bool>();
    this.collectionOwners = new Dictionary<INotifyingCollection, ModelObject>();
    this.searchItems = new Dictionary<ModelObject, Dictionary<PropertyDescriptor, EditorSearchItem>>();
    // Start the background indexer before attaching the model —
    // NOTE(review): presumably the Model setter enqueues registration work
    // that the already-running indexer then processes; confirm.
    this.BeginIndexing();
    this.Model = model;
}
public void Update_HasDeletedStatus_IsExcluded()
{
    // Arrange: mark the page as deleted via its PageDeleted property.
    TestPage input = Factory.GetTestPage();
    input.Property["PageDeleted"] = new PropertyBoolean { Value = true };

    // Act
    IndexingStatus status = _indexer.Update(input);

    // Assert: deleted content is excluded and never sent to the core indexer.
    Assert.Equal(IndexingStatus.HideFromSearchProperty, status);
    _coreIndexerMock.Verify(
        m => m.Update(It.IsAny<string>(), It.IsAny<object>(), It.IsAny<string>(), It.IsAny<Type>()),
        Times.Never);
}
// Shows an informational banner while indexing is running; hides it otherwise.
private void OnIndexingStatusEvent(IndexingStatus status)
{
    // Any state other than Running means the banner is no longer needed.
    if (status != IndexingStatus.Running)
    {
        notification_area.Hide();
        return;
    }

    var message = new NotificationMessage();
    message.Icon = Gtk.Stock.DialogInfo;
    message.Title = Catalog.GetString("Your data is being indexed");
    message.Message = Catalog.GetString("The search service is in the process of indexing your data. Search results may be incomplete until indexing has finished.");
    notification_area.Display(message);
}
public void Update_HasExpired_IsExcluded()
{
    // Arrange: set a stop-publish date 30 days in the past so the page is expired.
    TestPage input = Factory.GetTestPage();
    input.Property["PageStopPublish"] = new PropertyDate { Value = DateTime.Now.AddDays(-30) };

    // Act
    IndexingStatus status = _indexer.Update(input);

    // Assert: expired content is excluded and never indexed.
    Assert.Equal(IndexingStatus.HideFromSearchProperty, status);
    _coreIndexerMock.Verify(
        m => m.Update(It.IsAny<string>(), It.IsAny<object>(), It.IsAny<string>(), It.IsAny<Type>()),
        Times.Never);
}
public void Update_HasHideFromSearch_IsExcluded()
{
    // Arrange: flag the page's HidefromSearch property and reset recorded mock calls.
    TestPage input = Factory.GetTestPage();
    input.Property["HidefromSearch"] = new PropertyBoolean { Value = true };
    _fixture.ServiceLocationMock.CoreIndexerMock.Invocations.Clear();

    // Act
    IndexingStatus status = _indexer.Update(input);

    // Assert: hidden content is excluded and never sent to the core indexer.
    Assert.Equal(IndexingStatus.HideFromSearchProperty, status);
    _fixture.ServiceLocationMock.CoreIndexerMock.Verify(
        m => m.Update(It.IsAny<string>(), It.IsAny<object>(), It.IsAny<string>(), It.IsAny<Type>()),
        Times.Never);
}
public void FormsUpload_IsNotIndexed()
{
    // Arrange: a media asset owned by a page living under the forms-upload namespace.
    TestMedia media = Factory.GetMediaData("foo", "jpg");
    TestPage owner = Factory.GetTestPage();
    Indexer.FormsUploadNamespace = "TestData.ITestPage";

    var assetHelperMock = new Mock<ContentAssetHelper>();
    assetHelperMock
        .Setup(m => m.GetAssetOwner(media.ContentLink))
        .Returns(owner);
    _serviceLocationMock.ServiceLocatorMock
        .Setup(m => m.GetInstance<ContentAssetHelper>())
        .Returns(assetHelperMock.Object);

    // Act
    IndexingStatus status = _indexer.Update(media);

    // Assert: forms uploads are treated as hidden content.
    Assert.Equal(IndexingStatus.HideFromSearchProperty, status);
}
// Documentation sample: demonstrates starting/stopping indexing and reading
// the indexing status back through the admin commands. The #region markers
// delimit the snippets that are embedded into the docs.
public StartStopIndexingAndGetIndexingStatus()
{
    using (var store = new DocumentStore())
    {
        #region start_indexing_2
        store.DatabaseCommands.Admin.StartIndexing();
        #endregion

        #region stop_indexing_2
        store.DatabaseCommands.Admin.StopIndexing();
        #endregion

        #region get_indexing_status_2
        store.DatabaseCommands.Admin.StopIndexing();
        IndexingStatus status = store.DatabaseCommands.Admin.GetIndexingStatus(); // "Paused"

        store.DatabaseCommands.Admin.StartIndexing();
        status = store.DatabaseCommands.Admin.GetIndexingStatus(); // "Indexing"
        #endregion
    }
}
// Queues (or cancels) an indexing operation for a model object and wakes the
// background indexing thread.
private void EnqueueIndexingOperation(ModelObject modelObject, IndexingOperation indexingOperation)
{
    lock (this.indexingQueue)
    {
        IndexingOperation pendingOperation;
        if (this.indexingQueue.TryGetValue(modelObject, out pendingOperation))
        {
            // A different operation is already pending for this object: the two
            // cancel out, so the pending entry is removed and the new operation
            // is NOT enqueued. NOTE(review): if "replace" semantics were intended
            // instead of "cancel", this is a bug — confirm against callers.
            if (indexingOperation != pendingOperation)
            {
                this.indexingQueue.Remove(modelObject);
            }
        }
        else
        {
            this.indexingQueue.Add(modelObject, indexingOperation);
        }
    }

    // Mark the engine busy and release the worker waiting on the queue event.
    this.Status = IndexingStatus.Indexing;
    this.queueResetEvent.Set();
}
public void FormsUpload_IsNotIndexed()
{
    // Arrange: a media asset whose owner is resolved through a stubbed asset helper.
    TestMedia media = Factory.GetMediaData("foo", "jpg");
    TestPage owner = Factory.GetTestPage();

    var assetHelperMock = new Mock<ContentAssetHelper>();
    assetHelperMock
        .Setup(m => m.GetAssetOwner(media.ContentLink))
        .Returns(owner);

    // Build an indexer wired to the fixture mocks plus the stubbed helper.
    _indexer = new Indexer(
        _fixture.ServiceLocationMock.CoreIndexerMock.Object,
        _fixture.ServiceLocationMock.SettingsMock.Object,
        new Mock<ISiteDefinitionRepository>().Object,
        _fixture.ServiceLocationMock.ContentLoaderMock.Object,
        assetHelperMock.Object);

    // Act
    IndexingStatus status = _indexer.Update(media);

    // Assert: the upload is treated as hidden content.
    Assert.Equal(IndexingStatus.HideFromSearchProperty, status);
}
/// <summary>
/// Indexes a batch of content items.
/// </summary>
/// <param name="indexName">Name of the index to write to.</param>
/// <param name="settingsFilename">File the indexing checkpoint is persisted to.</param>
/// <param name="indexSettings">Current indexing state (mode and checkpoints).</param>
/// <returns>
/// <c>true</c> if there are more items to process; otherwise, <c>false</c>.
/// </returns>
private bool BatchIndex(string indexName, string settingsFilename, IndexSettings indexSettings)
{
    var addToIndex = new List<IDocumentIndex>();
    var deleteFromIndex = new List<int>();
    bool loop = false;

    // Rebuilding the index?
    if (indexSettings.Mode == IndexingMode.Rebuild)
    {
        Logger.Information("Rebuilding index");
        _indexingStatus = IndexingStatus.Rebuilding;

        do
        {
            loop = true;

            // Load the next page of latest content item versions past the checkpoint.
            var contentItems = _contentRepository
                .Table.Where(versionRecord => versionRecord.Latest && versionRecord.Id > indexSettings.LastContentId)
                .OrderBy(versionRecord => versionRecord.Id)
                .Take(ContentItemsPerLoop)
                .ToList()
                .Select(versionRecord => _contentManager.Get(versionRecord.ContentItemRecord.Id, VersionOptions.VersionRecord(versionRecord.Id)))
                .Distinct()
                .ToList();

            // If no more elements to index, switch to update mode.
            if (contentItems.Count == 0)
            {
                indexSettings.Mode = IndexingMode.Update;
            }

            foreach (var item in contentItems)
            {
                try
                {
                    var settings = GetTypeIndexingSettings(item);

                    // Skip items from types which are not indexed.
                    if (settings.List.Contains(indexName))
                    {
                        if (item.HasPublished())
                        {
                            var published = _contentManager.Get(item.Id, VersionOptions.Published);
                            IDocumentIndex documentIndex = ExtractDocumentIndex(published);

                            if (documentIndex != null && documentIndex.IsDirty)
                            {
                                addToIndex.Add(documentIndex);
                            }
                        }
                    }
                    else if (settings.List.Contains(indexName + ":latest"))
                    {
                        IDocumentIndex documentIndex = ExtractDocumentIndex(item);

                        if (documentIndex != null && documentIndex.IsDirty)
                        {
                            addToIndex.Add(documentIndex);
                        }
                    }

                    indexSettings.LastContentId = item.VersionRecord.Id;
                }
                catch (Exception ex)
                {
                    Logger.Warning(ex, "Unable to index content item #{0} during rebuild", item.Id);
                }
            }

            if (contentItems.Count < ContentItemsPerLoop)
            {
                loop = false;
            }
            else
            {
                _transactionManager.RequireNew();
            }
        } while (loop);
    }

    // Update mode: replay pending indexing tasks since the last checkpoint.
    if (indexSettings.Mode == IndexingMode.Update)
    {
        Logger.Information("Updating index");
        _indexingStatus = IndexingStatus.Updating;

        do
        {
            // Collapse multiple tasks per content item to the most recent one.
            var indexingTasks = _taskRepository
                .Table.Where(x => x.Id > indexSettings.LastIndexedId)
                .OrderBy(x => x.Id)
                .Take(ContentItemsPerLoop)
                .ToList()
                .GroupBy(x => x.ContentItemRecord.Id)
                .Select(group => new
                {
                    TaskId = group.Max(task => task.Id),
                    Delete = group.Last().Action == IndexingTaskRecord.Delete,
                    Id = group.Key,
                    ContentItem = _contentManager.Get(group.Key, VersionOptions.Latest)
                })
                .OrderBy(x => x.TaskId)
                .ToArray();

            foreach (var item in indexingTasks)
            {
                try
                {
                    IDocumentIndex documentIndex = null;

                    // item.ContentItem can be null if the content item has been deleted.
                    if (item.ContentItem != null)
                    {
                        // Skip items from types which are not indexed.
                        var settings = GetTypeIndexingSettings(item.ContentItem);
                        if (settings.List.Contains(indexName))
                        {
                            if (item.ContentItem.HasPublished())
                            {
                                var published = _contentManager.Get(item.Id, VersionOptions.Published);
                                documentIndex = ExtractDocumentIndex(published);
                            }
                        }
                        else if (settings.List.Contains(indexName + ":latest"))
                        {
                            var latest = _contentManager.Get(item.Id, VersionOptions.Latest);
                            documentIndex = ExtractDocumentIndex(latest);
                        }
                    }

                    // No document (deleted/unpublished) or an explicit delete task: remove it.
                    if (documentIndex == null || item.Delete)
                    {
                        deleteFromIndex.Add(item.Id);
                    }
                    else if (documentIndex.IsDirty)
                    {
                        addToIndex.Add(documentIndex);
                    }

                    indexSettings.LastIndexedId = item.TaskId;
                }
                catch (Exception ex)
                {
                    Logger.Warning(ex, "Unable to index content item #{0} during update", item.Id);
                }
            }

            if (indexingTasks.Length < ContentItemsPerLoop)
            {
                loop = false;
            }
            else
            {
                _transactionManager.RequireNew();
            }
        } while (loop);
    }

    // Save the current state of the index.
    indexSettings.LastIndexedUtc = _clock.UtcNow;
    _appDataFolder.CreateFile(settingsFilename, indexSettings.ToXml());

    if (deleteFromIndex.Count == 0 && addToIndex.Count == 0)
    {
        // Nothing more to do.
        _indexingStatus = IndexingStatus.Idle;
        return false;
    }

    // Save new and updated documents to the index.
    try
    {
        if (addToIndex.Count > 0)
        {
            _indexProvider.Store(indexName, addToIndex);
            Logger.Information("Added content items to index: {0}", addToIndex.Count);
        }
    }
    catch (Exception ex)
    {
        Logger.Warning(ex, "An error occured while adding a document to the index");
    }

    // Removing documents from the index.
    try
    {
        if (deleteFromIndex.Count > 0)
        {
            _indexProvider.Delete(indexName, deleteFromIndex);
            // FIX: previously logged "Added content items to index" with addToIndex.Count
            // in the delete branch (copy-paste error).
            Logger.Information("Removed content items from index: {0}", deleteFromIndex.Count);
        }
    }
    catch (Exception ex)
    {
        Logger.Warning(ex, "An error occured while removing a document from the index");
    }

    return true;
}
/// <summary>
/// Indexes a batch of content items.
/// </summary>
/// <param name="indexName">Name of the index to write to.</param>
/// <param name="settingsFilename">File the indexing checkpoint is persisted to.</param>
/// <param name="indexSettings">Current indexing state (mode and checkpoints).</param>
/// <returns>
/// <c>true</c> if there are more items to process; otherwise, <c>false</c>.
/// </returns>
private bool BatchIndex(string indexName, string settingsFilename, IndexSettings indexSettings)
{
    var addToIndex = new List<IDocumentIndex>();
    var deleteFromIndex = new List<int>();
    bool loop = false;

    // Rebuilding the index?
    if (indexSettings.Mode == IndexingMode.Rebuild)
    {
        Logger.Information("Rebuilding index");
        _indexingStatus = IndexingStatus.Rebuilding;

        do
        {
            loop = true;

            // Load the next page of latest content item versions past the checkpoint.
            var contentItems = _contentRepository
                .Table.Where(versionRecord => versionRecord.Latest && versionRecord.Id > indexSettings.LastContentId)
                .OrderBy(versionRecord => versionRecord.Id)
                .Take(ContentItemsPerLoop)
                .ToList()
                .Select(versionRecord => _contentManager.Get(versionRecord.ContentItemRecord.Id, VersionOptions.VersionRecord(versionRecord.Id)))
                .Distinct()
                .ToList();

            // If no more elements to index, switch to update mode.
            if (contentItems.Count == 0)
            {
                indexSettings.Mode = IndexingMode.Update;
            }

            foreach (var item in contentItems)
            {
                try
                {
                    var settings = GetTypeIndexingSettings(item);

                    // Skip items from types which are not indexed.
                    if (settings.List.Contains(indexName))
                    {
                        if (item.HasPublished())
                        {
                            var published = _contentManager.Get(item.Id, VersionOptions.Published);
                            IDocumentIndex documentIndex = ExtractDocumentIndex(published);

                            if (documentIndex != null && documentIndex.IsDirty)
                            {
                                addToIndex.Add(documentIndex);
                            }
                        }
                    }
                    else if (settings.List.Contains(indexName + ":latest"))
                    {
                        IDocumentIndex documentIndex = ExtractDocumentIndex(item);

                        if (documentIndex != null && documentIndex.IsDirty)
                        {
                            addToIndex.Add(documentIndex);
                        }
                    }

                    indexSettings.LastContentId = item.VersionRecord.Id;
                }
                catch (Exception ex)
                {
                    Logger.Warning(ex, "Unable to index content item #{0} during rebuild", item.Id);
                }
            }

            if (contentItems.Count < ContentItemsPerLoop)
            {
                loop = false;
            }
            else
            {
                _transactionManager.RequireNew();
            }
        } while (loop);
    }

    // Update mode: replay pending indexing tasks since the last checkpoint.
    if (indexSettings.Mode == IndexingMode.Update)
    {
        Logger.Information("Updating index");
        _indexingStatus = IndexingStatus.Updating;

        do
        {
            // Collapse multiple tasks per content item to the most recent one.
            var indexingTasks = _taskRepository
                .Table.Where(x => x.Id > indexSettings.LastIndexedId)
                .OrderBy(x => x.Id)
                .Take(ContentItemsPerLoop)
                .ToList()
                .GroupBy(x => x.ContentItemRecord.Id)
                .Select(group => new
                {
                    TaskId = group.Max(task => task.Id),
                    Delete = group.Last().Action == IndexingTaskRecord.Delete,
                    Id = group.Key,
                    ContentItem = _contentManager.Get(group.Key, VersionOptions.Latest)
                })
                .OrderBy(x => x.TaskId)
                .ToArray();

            foreach (var item in indexingTasks)
            {
                try
                {
                    IDocumentIndex documentIndex = null;

                    // item.ContentItem can be null if the content item has been deleted.
                    if (item.ContentItem != null)
                    {
                        // Skip items from types which are not indexed.
                        var settings = GetTypeIndexingSettings(item.ContentItem);
                        if (settings.List.Contains(indexName))
                        {
                            documentIndex = ExtractDocumentIndex(item.ContentItem);
                        }
                        else if (settings.List.Contains(indexName + ":latest"))
                        {
                            var latest = _contentManager.Get(item.Id, VersionOptions.Latest);
                            documentIndex = ExtractDocumentIndex(latest);
                        }
                    }

                    // No document (deleted) or an explicit delete task: remove it.
                    if (documentIndex == null || item.Delete)
                    {
                        deleteFromIndex.Add(item.Id);
                    }
                    else if (documentIndex.IsDirty)
                    {
                        addToIndex.Add(documentIndex);
                    }

                    indexSettings.LastIndexedId = item.TaskId;
                }
                catch (Exception ex)
                {
                    Logger.Warning(ex, "Unable to index content item #{0} during update", item.Id);
                }
            }

            if (indexingTasks.Length < ContentItemsPerLoop)
            {
                loop = false;
            }
            else
            {
                _transactionManager.RequireNew();
            }
        } while (loop);
    }

    // Save the current state of the index.
    indexSettings.LastIndexedUtc = _clock.UtcNow;
    _appDataFolder.CreateFile(settingsFilename, indexSettings.ToXml());

    if (deleteFromIndex.Count == 0 && addToIndex.Count == 0)
    {
        // Nothing more to do.
        _indexingStatus = IndexingStatus.Idle;
        return false;
    }

    // Save new and updated documents to the index.
    try
    {
        if (addToIndex.Count > 0)
        {
            _indexProvider.Store(indexName, addToIndex);
            Logger.Information("Added content items to index: {0}", addToIndex.Count);
        }
    }
    catch (Exception ex)
    {
        Logger.Warning(ex, "An error occured while adding a document to the index");
    }

    // Removing documents from the index.
    try
    {
        if (deleteFromIndex.Count > 0)
        {
            _indexProvider.Delete(indexName, deleteFromIndex);
            // FIX: previously logged "Added content items to index" with addToIndex.Count
            // in the delete branch (copy-paste error).
            Logger.Information("Removed content items from index: {0}", deleteFromIndex.Count);
        }
    }
    catch (Exception ex)
    {
        Logger.Warning(ex, "An error occured while removing a document from the index");
    }

    return true;
}
// Shows an informational banner while indexing is running; hides it otherwise.
private void OnIndexingStatusEvent(IndexingStatus status)
{
    if (status != IndexingStatus.Running)
    {
        // Indexing finished (or never started): remove the banner.
        notification_area.Hide();
        return;
    }

    NotificationMessage banner = new NotificationMessage();
    banner.Icon = Gtk.Stock.DialogInfo;
    banner.Title = Catalog.GetString("Your data is being indexed");
    banner.Message = Catalog.GetString("The search service is in the process of indexing your data. Search results may be incomplete until indexing has finished.");
    notification_area.Display(banner);
}
// Records an indexing status transition and pushes it to the client asynchronously.
private void SendIndexingStatusResponse(IndexingStatus status)
{
    // Capture the previous value before overwriting so the transition can be logged.
    IndexingStatus previous = this.crawl_status;
    Log.Debug("Sending indexing status change from {0} to {1}", previous, status);

    this.crawl_status = status;
    this.SendAsyncResponse(new IndexingStatusResponse(status));
}
// Background-thread entry point: builds the index, then marks completion.
private void IndexingThreadFunc()
{
    mStatus = IndexingStatus.Working;
    // NOTE(review): "*" is presumably a match-all file pattern — confirm against BuildIndex.
    BuildIndex(mProjectRoot, "*");
    mStatus = IndexingStatus.Done;
}
// Creates a snapshot of repository state: package count and indexing status.
public RepositoryInfo(int totalPackages, IndexingStatus indexingStatus)
{
    // NOTE(review): mixes assignment styles — TotalPackages is a property,
    // indexingStatus a field; presumably intentional, but worth confirming.
    TotalPackages = totalPackages;
    this.indexingStatus = indexingStatus;
}
// Starts a new thread that continuously indexes new search items
// as they become available.
private void BeginIndexing()
{
    // An anonymous method that loops and does the
    // indexing when appropriate.
    ThreadStart index = delegate
    {
        while (true)
        {
            // Wait if there is no work to do.
            this.queueResetEvent.WaitOne();

            // Get the next indexing operation, if one is available.
            ModelObject modelObject = null;
            IndexingOperation indexingOperation = IndexingOperation.Register;

            // All queue access happens under the queue's lock; the actual
            // register/unregister work below runs outside it.
            lock (this.indexingQueue)
            {
                // If there are no indexing operations available, set
                // the status to ready and wait for more work.
                if (this.indexingQueue.Count == 0)
                {
                    this.Status = IndexingStatus.Ready;
                    this.queueResetEvent.Reset();
                    continue;
                }
                // Otherwise, dequeue the next work item.
                else
                {
                    // Dictionaries have no Dequeue; take the first entry the enumerator yields.
                    foreach (KeyValuePair<ModelObject, IndexingOperation> entry in this.indexingQueue)
                    {
                        modelObject = entry.Key;
                        indexingOperation = entry.Value;
                        break;
                    }

                    if (modelObject != null)
                    {
                        this.indexingQueue.Remove(modelObject);
                    }
                }
            }

            // Process the indexing operation.
            if (modelObject != null)
            {
                if (indexingOperation == IndexingOperation.Register)
                {
                    this.RegisterModelObject(modelObject);
                }
                else if (indexingOperation == IndexingOperation.Unregister)
                {
                    this.UnregisterModelObject(modelObject);
                }
            }
        }
    };

    // Create a new thread and start the indexing loop.
    // Background + below-normal priority: never blocks process exit or the UI.
    Thread thread = new Thread(index);
    thread.Name = "Search Engine Indexer";
    thread.IsBackground = true;
    thread.Priority = ThreadPriority.BelowNormal;
    thread.Start();
}
// Wraps an indexing status value for transmission back to the caller.
public IndexingStatusResponse(IndexingStatus status)
{
    Status = status;
}
// Response message carrying the current indexing status.
public IndexingStatusResponse(IndexingStatus status)
{
    Status = status;
}
// Raises the StateChanged event with a two-element state description.
private void OnStateChanged(IndexingStatus status)
{
    // Nothing to do when no listeners are attached.
    if (StateChanged == null)
    {
        return;
    }

    // NotRunning is reported as a synthetic "IDLE" state; otherwise query live state.
    string[] state_info = (status == IndexingStatus.NotRunning)
        ? new string[] { "IDLE", "" }
        : GetState();

    if (Debug)
    {
        Console.Error.WriteLine("StateChanged -- {0} - {1}", state_info[0], state_info[1]);
    }

    StateChanged(state_info);
}
// Logs the status transition, records it, and notifies the client asynchronously.
private void SendIndexingStatusResponse(IndexingStatus status)
{
    Log.Debug("Sending indexing status change from {0} to {1}", this.crawl_status, status);
    this.crawl_status = status;

    var response = new IndexingStatusResponse(status);
    this.SendAsyncResponse(response);
}