/// <summary>
/// Writes the indexer's transcript onto the document's
/// "Kramerica Video Transcript" field via the Object Manager API.
/// Failures are logged and swallowed (best effort).
/// </summary>
private async Task WriteValuesToDocumentObject(IndexJob indexJob, VideoIndexResult videoIndexResult)
{
    try
    {
        using (IObjectManager objectManager = this.Helper.GetServicesManager().CreateProxy<IObjectManager>(Relativity.API.ExecutionIdentity.System))
        {
            var documentRef = new RelativityObjectRef { ArtifactID = indexJob.DocumentArtifactID };
            var transcriptField = new FieldValue("Kramerica Video Transcript", videoIndexResult.Transcript);
            var request = new UpdateRequest
            {
                Object = documentRef,
                FieldValues = new List<FieldRefValuePair> { transcriptField }
            };
            await objectManager.UpdateAsync(indexJob.WorkspaceArtifactID, request);
        }
    }
    catch (Exception ex)
    {
        // Best-effort update: log and continue rather than fail the agent.
        LogError(ex);
    }
}
/// <summary>
/// Builds a dtSearch job that creates a fresh index at <paramref name="indexPath"/>
/// and adds the supplied data source to it in a single pass.
/// </summary>
public IndexJob GetCreateIndexJob(string indexPath, dtSearch.Engine.DataSource dtSource)
{
    var job = new IndexJob
    {
        DataSourceToIndex = dtSource,
        IndexPath = indexPath,
        ActionCreate = true,
        ActionAdd = true,
        ActionCompress = false,
        CreateRelativePaths = false,
        ExcludeFilters = null,
        IncludeFilters = null,
        IndexingFlags = 0,
        // Spill temp files to the system temp dir; cap memory use per job.
        TempFileDir = Path.GetTempPath(),
        MaxMemToUseMB = 512,
        AutoCommitIntervalMB = 512
    };

    // Optional properties
    //StringCollection stored = new StringCollection();
    //stored.Add("E-table_Docid");
    //job.StoredFields = stored;

    return job;
}
/// <summary>
/// Persists the indexer's identifying results (video id, file name and
/// control number) onto the job object via the Object Manager API.
/// Failures are logged and swallowed (best effort).
/// </summary>
private async Task WriteValuesToIndexJob(IndexJob indexJob, VideoIndexResult videoIndexResult)
{
    try
    {
        using (IObjectManager objectManager = this.Helper.GetServicesManager().CreateProxy<IObjectManager>(Relativity.API.ExecutionIdentity.System))
        {
            var jobRef = new RelativityObjectRef { ArtifactID = indexJob.JobArtifactID };
            var fieldValues = new FieldValue[]
            {
                new FieldValue("VideoID", videoIndexResult.VideoID),
                new FieldValue("Video File Name", videoIndexResult.VideoName),
                new FieldValue("DocumentControlNumber", videoIndexResult.ControlNumber)
            };
            var request = new UpdateRequest
            {
                Object = jobRef,
                FieldValues = fieldValues
            };
            await objectManager.UpdateAsync(indexJob.WorkspaceArtifactID, request);
        }
    }
    catch (Exception ex)
    {
        // Best-effort update: log and continue rather than fail the agent.
        LogError(ex);
    }
}
/// <summary>
/// Writes <paramref name="status"/> to the job object's "Status" field via
/// the Object Manager API. Failures are logged and swallowed (best effort).
/// </summary>
private async Task UpdateStatus(IndexJob indexJob, string status)
{
    try
    {
        using (IObjectManager objectManager = this.Helper.GetServicesManager().CreateProxy<IObjectManager>(Relativity.API.ExecutionIdentity.System))
        {
            var jobRef = new RelativityObjectRef { ArtifactID = indexJob.JobArtifactID };
            var statusField = new FieldValue("Status", status);
            var request = new UpdateRequest
            {
                Object = jobRef,
                FieldValues = new List<FieldRefValuePair> { statusField }
            };
            await objectManager.UpdateAsync(indexJob.WorkspaceArtifactID, request);
        }
    }
    catch (Exception ex)
    {
        // Best-effort update: log and continue rather than fail the agent.
        LogError(ex);
    }
}
// Drains the pending job queue. Every job first deletes any existing Lucene
// document with the same id (so an Add acts as an upsert and a Remove is
// complete at that point); Add jobs then re-index the customer entity.
private void ProcessJobs(IndexWriter writer)
{
    while (jobs.Count != 0)
    {
        IndexJob job = jobs.Dequeue();
        // Delete first: makes Add an upsert, and fully handles Remove jobs.
        writer.DeleteDocuments(new Term("number", job.Id));
        // If this is an "add article" job, (re-)add the document.
        if (job.JobType == JobType.Add)
        {
            CustomerEntity art = service.GetEntity(job.Id);
            if (art == null) // the entity may have been deleted right after it was added
            {
                continue;
            }
            string EnCode = art.EnCode.ToString();
            string FullName = art.FullName;
            string CreateDate = art.CreateDate.ToDateString();
            string CreateUserName = art.CreateUserName;
            Document document = new Document();
            // Only fields that need full-text search are ANALYZED.
            document.Add(new Field("number", job.Id.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
            document.Add(new Field("FullName", FullName, Field.Store.YES, Field.Index.ANALYZED, Lucene.Net.Documents.Field.TermVector.WITH_POSITIONS_OFFSETS));
            document.Add(new Field("EnCode", EnCode, Field.Store.YES, Field.Index.NOT_ANALYZED));
            document.Add(new Field("CreateDate", CreateDate, Field.Store.YES, Field.Index.NOT_ANALYZED));
            document.Add(new Field("CreateUserName", CreateUserName, Field.Store.YES, Field.Index.ANALYZED, Lucene.Net.Documents.Field.TermVector.WITH_POSITIONS_OFFSETS));
            writer.AddDocument(document);
            Logger.Debug("索引" + job.Id + "完毕");
        }
    }
}
/// <summary>
/// Queues a removal job for the given customer id; the indexing worker
/// drains the queue later in ProcessJobs.
/// </summary>
public void RemoveCustomer(string customerId)
{
    var removal = new IndexJob
    {
        JobType = JobType.Remove,
        Id = customerId
    };
    Logger.Debug(customerId + "加入删除任务列表");
    // Hand the job off to the shared queue for the indexing thread.
    jobs.Enqueue(removal);
}
// Removes documents flagged as deleted from the dtSearch index at indexPath
// for the given organization/reference type. Ids are written to a "to remove"
// list file which a dtSearch IndexJob (ActionRemoveListed) then consumes;
// afterwards the DeletedIndexItems rows themselves are purged.
private void RemoveOldIndexItems(LoginUser loginUser, string indexPath, Organization organization, ReferenceType referenceType, string deletedIndexItemsFileName)
{
    LogVerbose("Removing deleted items: " + referenceType.ToString());
    if (!Directory.Exists(indexPath))
    {
        // No index on disk -> nothing to remove from.
        Logs.WriteEvent("Path does not exist: " + indexPath);
        return;
    }
    DeletedIndexItems items = new DeletedIndexItems(loginUser);
    LogVerbose(string.Format("Retrieving deleted items: RefType: {0}, OrgID: {1}", referenceType.ToString(), organization.OrganizationID.ToString()));
    items.LoadByReferenceType(referenceType, organization.OrganizationID);
    if (items.IsEmpty)
    {
        LogVerbose("No Items to delete");
        return;
    }
    // Build the delete-list file contents: one RefID per line.
    StringBuilder builder = new StringBuilder();
    foreach (DeletedIndexItem item in items)
    {
        builder.AppendLine(item.RefID.ToString());
    }
    string fileName = Path.Combine(indexPath, deletedIndexItemsFileName);
    // Replace any stale delete list left over from a previous run.
    if (File.Exists(fileName))
    {
        File.Delete(fileName);
    }
    using (StreamWriter writer = new StreamWriter(fileName))
    {
        LogVerbose("Adding IDs to delete file: " + builder.ToString());
        writer.Write(builder.ToString());
    }
    LogVerbose("Deleting Items");
    // Run a dtSearch job that only removes the listed documents.
    using (IndexJob job = new IndexJob())
    {
        job.IndexPath = indexPath;
        job.ActionCreate = false;
        job.ActionAdd = false;
        job.ActionRemoveListed = true;
        job.ToRemoveListName = fileName;
        job.CreateRelativePaths = false;
        job.Execute();
    }
    LogVerbose("Items deleted");
    UpdateHealth();
    // Purge the DeletedIndexItems rows now that the index reflects them.
    items.DeleteAll();
    items.Save();
    LogVerbose("Finished Removing Old Indexes - OrgID = " + organization.OrganizationID + " - " + referenceType.ToString());
}
/// <summary>
/// An index "exists" only when its directory is present and dtSearch reports
/// a non-zero size for it.
/// </summary>
private bool IndexExists(string indexDir)
{
    if (!Directory.Exists(indexDir))
    {
        return false;
    }

    IndexInfo info = IndexJob.GetIndexInfo(indexDir);
    return info.IndexSize != 0;
}
/// <summary>
/// Runs a dtSearch index job on a worker thread, disabling the Index button
/// for the duration so the job cannot be started twice.
/// </summary>
private void ExecuteIndexJob(IndexJob job)
{
    _buttonIndexIt.Enabled = false;

    // Create the index only when one is not already on disk, then execute
    // on a background thread and pump progress into the status text box.
    job.ActionCreate = !IndexExists(job.IndexPath);
    job.ExecuteInThread();
    DoExecution(job, _textBoxIndexStatus);

    _buttonIndexIt.Enabled = true;
}
/// <summary>
/// Agent entry point: claims a single queued job, runs its document through
/// the video indexer, writes the results back to the job and document
/// objects, and removes the queue row. Returns false only when an
/// unexpected exception is thrown.
/// </summary>
private async Task<bool> ExecuteAsync()
{
    bool executedSuccessfully = true;
    try
    {
        if (GetConfigurationValues())
        {
            _indexerApiUrl = "https://api.videoindexer.ai";
            //Retrieve _indexerApiKey from secret store
            _indexerApiKey = ""; //Add your own credentials here

            // Claim a single queued job for this agent.
            // (The originals' `new IndexJob()` / `new DocProperty()` were dead
            // allocations immediately overwritten; removed.)
            IndexJob indexJob = GetSingleJob();
            if (indexJob.Success)
            {
                await UpdateStatus(indexJob, "In Progress");

                // Resolve the path/filename details of the document to index.
                DocProperty doc = await GetDocProperty(indexJob.WorkspaceArtifactID, indexJob.DocumentArtifactID);
                if (doc.Success)
                {
                    //Launch video for indexing
                    VideoIndex videoIndex = new VideoIndex(doc.Path, doc.Filename, doc.Begdoc, _indexerApiUrl, _indexerApiKey, _logger, Helper);
                    VideoIndexResult videoIndexResult = await videoIndex.Sample();

                    //Update job with index details. We will use these details later in the custom page.
                    await WriteValuesToIndexJob(indexJob, videoIndexResult);
                    await WriteValuesToDocumentObject(indexJob, videoIndexResult); //Write transcript to document field
                }

                // NOTE(review): the queue row is removed and the job is marked
                // "Complete" even when GetDocProperty fails — confirm this is
                // the intended handling for unresolvable documents.
                CleanupQueue(indexJob);
                await UpdateStatus(indexJob, "Complete");
            }
        }

        const int maxMessageLevel = 10;
        RaiseMessage("Completed.", maxMessageLevel);
    }
    catch (Exception ex)
    {
        LogError(ex);
        executedSuccessfully = false;
    }
    return executedSuccessfully;
}
/// <summary>
/// Claims the next unassigned queue row for this agent (Status 0 -> 1,
/// stamped with our AgentID) and returns its identifiers. Success is true
/// only when all three artifact IDs were read and are positive; on any
/// error the defaulted (failed) job is returned.
/// </summary>
private IndexJob GetSingleJob()
{
    IndexJob indexJob = new IndexJob();
    indexJob.DocumentArtifactID = 0;
    indexJob.JobArtifactID = 0;
    indexJob.WorkspaceArtifactID = 0;
    try
    {
        //Tag single doc to process
        string tagSql = @"UPDATE TOP (1) [eddsdbo].[KramericaVideoIndexQueue] SET [AgentID] = @agentIdParam, [Status] = 1 WHERE [AgentID] IS NULL AND [Status] = 0";
        SqlParameter agentIdParam = new SqlParameter("@agentIdParam", System.Data.SqlDbType.Int);
        agentIdParam.Value = AgentID;
        Helper.GetDBContext(-1).ExecuteNonQuerySQLStatement(tagSql, new SqlParameter[] { agentIdParam });

        // Read back the row we just claimed.
        string getJobSql = @"SELECT TOP 1 * FROM [eddsdbo].[KramericaVideoIndexQueue] WHERE [AgentID] = @agentIdParam AND [Status] = 1";
        DataTable tbl = Helper.GetDBContext(-1).ExecuteSqlStatementAsDataTable(getJobSql, new SqlParameter[] { agentIdParam });
        if (tbl.Rows.Count > 0)
        {
            DataRow row = tbl.Rows[0];
            // FIX: the original "Convert.ToInt32(...) as int? ?? 0" could
            // never reach its 0 fallback (Convert.ToInt32 returns a
            // non-nullable int) and threw on NULL/empty cells instead.
            // int.TryParse genuinely defaults to 0 for NULL/DBNull/garbage.
            indexJob.DocumentArtifactID = ParseIdOrZero(row["DocumentArtifactID"]);
            indexJob.JobArtifactID = ParseIdOrZero(row["JobArtifactID"]);
            indexJob.WorkspaceArtifactID = ParseIdOrZero(row["WorkspaceArtifactID"]);
        }
        indexJob.Success = (indexJob.DocumentArtifactID > 0 && indexJob.JobArtifactID > 0 && indexJob.WorkspaceArtifactID > 0);
    }
    catch (Exception ex)
    {
        LogError(ex);
    }
    return indexJob;
}

// Converts a queue-table cell to an int, defaulting to 0 for NULL/DBNull or
// non-numeric content instead of throwing.
private static int ParseIdOrZero(object cell)
{
    return int.TryParse(cell?.ToString(), out int value) ? value : 0;
}
/// <summary>
/// Deletes the processed job's row from the queue table. Failures are
/// logged and swallowed so cleanup never crashes the agent.
/// </summary>
private void CleanupQueue(IndexJob indexJob)
{
    try
    {
        string sql = @"DELETE FROM [eddsdbo].[KramericaVideoIndexQueue] WHERE [WorkspaceArtifactID] = @workspaceArtifactIDParam AND [JobArtifactID] = @jobArtifactIDParam";
        var parameters = new SqlParameter[]
        {
            new SqlParameter("@workspaceArtifactIDParam", SqlDbType.Int) { Value = indexJob.WorkspaceArtifactID },
            new SqlParameter("@jobArtifactIDParam", SqlDbType.Int) { Value = indexJob.JobArtifactID }
        };
        Helper.GetDBContext(-1).ExecuteNonQuerySQLStatement(sql, parameters);
    }
    catch (Exception ex)
    {
        LogError(ex);
    }
}
/// <summary>
/// Merges every sub-index under the chosen root directory into a single
/// dtSearch index, optionally compressing it and deleting the merged
/// source directories afterwards.
/// </summary>
private void OnMergeIndex(object sender, EventArgs e)
{
    string indexRoot = _textBoxMergeRoot.Text;
    StringCollection indexDirs = GetIndexDirectories(indexRoot, false);
    if (indexDirs == null)
    {
        return;
    }

    bool compressIndex = _checkBoxCompressIndex.Checked;
    _buttonMergeIndex.Enabled = false;
    StartWatchIt("Merge, " + _targetTbl);
    {
        using (IndexJob mergeJob = new IndexJob())
        {
            mergeJob.IndexPath = indexRoot;
            mergeJob.CreateRelativePaths = false;
            mergeJob.IndexesToMerge = indexDirs;
            mergeJob.ActionMerge = true;
            mergeJob.ActionCompress = compressIndex;
            mergeJob.ExecuteInThread();
            DoExecution(mergeJob, _textBoxMergeStatus);
        }

        // Optionally delete the now-merged source index directories.
        if (_checkBoxCleanUp.Checked)
        {
            foreach (string mergedDir in indexDirs)
            {
                Directory.Delete(mergedDir, true);
            }
        }
    }
    StopWatchIt("Merge, " + _targetTbl);
    _buttonMergeIndex.Enabled = true;
}
// Loads the on-disk index for the given collection by replaying its stored
// documents through a write session. Only the "*.docs" file whose name
// matches the requested collection is processed (the loop breaks once found).
private void LoadIndex(string dir, ulong collection)
{
    var timer = new Stopwatch();
    var batchTimer = new Stopwatch();
    timer.Start();

    var files = Directory.GetFiles(dir, "*.docs");

    _log.Log(string.Format("index scan found {0} document files", files.Length));

    foreach (var docFileName in files)
    {
        // File names are "<collectionId>.docs"; parse the id from the name.
        var name = Path.GetFileNameWithoutExtension(docFileName)
                   .Split(".", StringSplitOptions.RemoveEmptyEntries);

        var collectionId = ulong.Parse(name[0]);

        if (collectionId == collection)
        {
            // FIX: batchTimer was previously never started, so the logged
            // batch duration was always zero.
            batchTimer.Restart();

            using (var readSession = new DocumentReadSession(collectionId, _sessionFactory))
            {
                var docs = readSession.ReadDocs();
                var job = new IndexJob(collectionId, docs);

                using (var writeSession = _sessionFactory.CreateWriteSession(collectionId))
                {
                    writeSession.WriteToIndex(job);
                }

                _log.Log(string.Format("loaded batch into {0} in {1}", collectionId, batchTimer.Elapsed));
            }
            break;
        }
    }

    // NOTE(review): files.Length counts all document files found, even though
    // at most one collection is loaded here — confirm the wording is intended.
    _log.Log(string.Format("loaded {0} indexes in {1}", files.Length, timer.Elapsed));
}
/// <summary>
/// Console host entry: reads the export path and the configured search and
/// sample paths from app configuration, then runs the film index job and
/// reports elapsed time.
/// </summary>
public void Start()
{
    Console.WriteLine("FilmIndexHost Starting...");
    var appDataPath = ConfigurationManager.AppSettings["ExportPath"];
    Console.WriteLine($"AppDataPath: {appDataPath}");

    // FIX: a missing config section returns null from GetSection, which
    // previously caused a NullReferenceException; treat it as empty instead.
    var filmPaths = new List<string>();
    var searchPaths = ConfigurationManager.GetSection("indexInfo/searchPath") as NameValueCollection;
    if (searchPaths != null)
    {
        for (int i = 0; i < searchPaths.Count; i++)
        {
            filmPaths.Add(searchPaths[i]);
            Console.WriteLine($"SearchPath: {searchPaths[i]}");
        }
    }

    var paths = new List<string>();
    var samplePaths = ConfigurationManager.GetSection("indexInfo/samplePath") as NameValueCollection;
    if (samplePaths != null)
    {
        for (int i = 0; i < samplePaths.Count; i++)
        {
            paths.Add(samplePaths[i]);
            Console.WriteLine($"SamplePath: {samplePaths[i]}");
        }
    }

    var indexJob = new IndexJob
    {
        AppDataPath = appDataPath,
        FilmPaths = filmPaths,
        SamplePaths = paths
    };

    Console.WriteLine($"Start Indexing...");
    var sw = new Stopwatch();
    sw.Start();
    indexJob.Execute();
    sw.Stop();
    Console.WriteLine($"End Indexing...");
    Console.WriteLine($"elapse time: {sw.ElapsedMilliseconds}");
    Console.ReadKey();
}
/// <summary>
/// Splits the rows of <paramref name="tableName"/> into batch-sized dtSearch
/// index jobs, one per [__RowNumber] window. The first batch indexes into
/// <paramref name="indexFilePath"/> itself; later batches each get a
/// sub-directory named after their starting row.
/// </summary>
private List<IndexJob> BuildIndexJobs(string tableName, long startRow, long batchSize, long endRow, string indexFilePath)
{
    const string BatchFmt = "{0} WHERE [__RowNumber] >= {1} AND [__RowNumber] < {2} ";

    StringBuilder selectSqlBase = new StringBuilder();
    selectSqlBase.AppendFormat("SELECT TOP {0} * FROM [{1}] WITH (NOLOCK)", endRow, tableName);

    // Window the base SELECT only when more than one batch is needed.
    bool useBatch = (endRow > batchSize);
    List<IndexJob> jobs = new List<IndexJob>();

    while (startRow < endRow)
    {
        long windowEnd = startRow + batchSize > endRow ? endRow : startRow + batchSize;
        string selectSql = useBatch
            ? string.Format(BatchFmt, selectSqlBase.ToString(), startRow, windowEnd)
            : selectSqlBase.ToString();

        DTTableSource dataSource = new DTTableSource(null);
        dataSource.TableInfo = new TableInfo(tableName, selectSql, null);

        string indexPath = startRow > 1 ? Path.Combine(indexFilePath, startRow.ToString()) : indexFilePath;
        jobs.Add(_factory.GetCreateIndexJob(indexPath, dataSource));

        startRow += batchSize;
    }
    return jobs;
}
// Rebuilds the index for every "*.docs" document file found under dir,
// re-feeding the stored documents through write sessions in batches of
// batchSize documents each.
private static void Reindex(string dir, int batchSize)
{
    var timer = new Stopwatch();
    var batchTimer = new Stopwatch();
    timer.Start();

    var files = Directory.GetFiles(dir, "*.docs");

    Console.WriteLine("re-indexing process found {0} document files", files.Length);

    foreach (var docFileName in files)
    {
        // File names are "<collectionId>.docs"; the id is the first segment.
        var name = Path.GetFileNameWithoutExtension(docFileName)
                   .Split(".", StringSplitOptions.RemoveEmptyEntries);

        var collectionId = ulong.Parse(name[0]);

        using (var readSession = new DocumentReadSession(collectionId, new LocalStorageSessionFactory(dir, new LatinTokenizer())))
        {
            foreach (var batch in readSession.ReadDocs().Batch(batchSize))
            {
                batchTimer.Restart();

                // NOTE(review): a new LocalStorageSessionFactory is built for
                // every batch (and none of the factories are disposed here) —
                // confirm the factory is cheap/stateless and needs no Dispose.
                using (var writeSession = new LocalStorageSessionFactory(dir, new LatinTokenizer()).CreateWriteSession(collectionId))
                {
                    var job = new IndexJob(collectionId, batch);
                    writeSession.WriteToIndex(job);
                }

                Console.WriteLine("wrote batch to {0} in {1}", collectionId, batchTimer.Elapsed);
            }
        }
    }

    Console.WriteLine("rebuilt {0} indexes in {1}", files.Length, timer.Elapsed);
}
// Initializes the CRUD wrapper with a fresh dtSearch index job.
public DtCrud()
{
    indexjob = new IndexJob();
}
/// <summary>
/// Builds or updates the dtSearch index for one organization and reference
/// type: selects the per-type data source and index location, removes
/// deleted items, runs the dtSearch job (optionally compressing on
/// weekends), then either updates item flags or swaps in the rebuilt index.
/// </summary>
private void ProcessIndex(Organization organization, ReferenceType referenceType, bool isRebuilder)
{
    if (IsStopped)
    {
        return;
    }

    string indexPath = string.Empty;
    string deletedIndexItemsFileName = string.Empty;
    string storedFields = string.Empty;
    string tableName = string.Empty;
    string primaryKeyName = string.Empty;
    IndexDataSource indexDataSource = null;
    int maxRecords = Settings.ReadInt("Max Records", 1000);

    // Pick the per-type index directory, delete-list file name, stored
    // fields, backing table and data source.
    switch (referenceType)
    {
        case ReferenceType.Tickets:
            indexPath = "\\Tickets";
            deletedIndexItemsFileName = "DeletedTickets.txt";
            storedFields = "TicketID OrganizationID TicketNumber Name IsKnowledgeBase Status Severity DateModified DateCreated DateClosed SlaViolationDate SlaWarningDate";
            tableName = "Tickets";
            primaryKeyName = "TicketID";
            indexDataSource = new TicketIndexDataSource(LoginUser, maxRecords, organization.OrganizationID, tableName, isRebuilder, Logs);
            break;
        case ReferenceType.Wikis:
            indexPath = "\\Wikis";
            deletedIndexItemsFileName = "DeletedWikis.txt";
            storedFields = "OrganizationID Creator Modifier";
            tableName = "WikiArticles";
            primaryKeyName = "ArticleID";
            indexDataSource = new WikiIndexDataSource(LoginUser, maxRecords, organization.OrganizationID, tableName, isRebuilder, Logs);
            break;
        case ReferenceType.Notes:
            indexPath = "\\Notes";
            deletedIndexItemsFileName = "DeletedNotes.txt";
            tableName = "Notes";
            primaryKeyName = "NoteID";
            indexDataSource = new NoteIndexDataSource(LoginUser, maxRecords, organization.OrganizationID, tableName, isRebuilder, Logs);
            break;
        case ReferenceType.ProductVersions:
            indexPath = "\\ProductVersions";
            deletedIndexItemsFileName = "DeletedProductVersions.txt";
            tableName = "ProductVersions";
            primaryKeyName = "ProductVersionID";
            indexDataSource = new ProductVersionIndexDataSource(LoginUser, maxRecords, organization.OrganizationID, tableName, isRebuilder, Logs);
            break;
        case ReferenceType.WaterCooler:
            indexPath = "\\WaterCooler";
            deletedIndexItemsFileName = "DeletedWaterCoolerMessages.txt";
            tableName = "WatercoolerMsg";
            primaryKeyName = "MessageID";
            indexDataSource = new WaterCoolerIndexDataSource(LoginUser, maxRecords, organization.OrganizationID, tableName, isRebuilder, Logs);
            break;
        case ReferenceType.Organizations:
            indexPath = "\\Customers";
            deletedIndexItemsFileName = "DeletedCustomers.txt";
            storedFields = "Name JSON";
            tableName = "Organizations";
            primaryKeyName = "OrganizationID";
            indexDataSource = new CustomerIndexDataSource(LoginUser, maxRecords, organization.OrganizationID, tableName, isRebuilder, Logs);
            break;
        case ReferenceType.Contacts:
            indexPath = "\\Contacts";
            deletedIndexItemsFileName = "DeletedContacts.txt";
            storedFields = "Name JSON";
            tableName = "Users";
            primaryKeyName = "UserID";
            indexDataSource = new ContactIndexDataSource(LoginUser, maxRecords, organization.OrganizationID, tableName, isRebuilder, Logs);
            break;
        case ReferenceType.Assets:
            indexPath = "\\Assets";
            deletedIndexItemsFileName = "DeletedAssets.txt";
            storedFields = "Name JSON";
            tableName = "Assets";
            primaryKeyName = "AssetID";
            indexDataSource = new AssetIndexDataSource(LoginUser, maxRecords, organization.OrganizationID, tableName, isRebuilder, Logs);
            break;
        case ReferenceType.Products:
            indexPath = "\\Products";
            deletedIndexItemsFileName = "DeletedProducts.txt";
            storedFields = "Name JSON";
            tableName = "Products";
            primaryKeyName = "ProductID";
            indexDataSource = new ProductIndexDataSource(LoginUser, maxRecords, organization.OrganizationID, tableName, isRebuilder, Logs);
            break;
        case ReferenceType.Tasks:
            indexPath = "\\Tasks";
            deletedIndexItemsFileName = "DeletedTasks.txt";
            storedFields = "Name JSON";
            tableName = "Tasks";
            primaryKeyName = "TaskID";
            indexDataSource = new TaskIndexDataSource(LoginUser, maxRecords, organization.OrganizationID, tableName, isRebuilder, Logs);
            break;
        default:
            throw new System.ArgumentException("ReferenceType " + referenceType.ToString() + " is not supported by indexer.");
    }

    string root = Settings.ReadString("Tickets Index Path", "c:\\Indexes");
    // The "main" path is where the live index ends up; rebuilds work in a
    // "\Rebuild..." sibling and are moved over when finished.
    string mainIndexPath = Path.Combine(root, organization.OrganizationID.ToString() + indexPath);
    if (isRebuilder)
    {
        indexPath = "\\Rebuild" + indexPath;
    }
    string path = Path.Combine(Settings.ReadString("Tickets Index Path", "c:\\Indexes"), organization.OrganizationID.ToString() + indexPath);
    LogVerbose("Path: " + path);
    bool isNew = !System.IO.Directory.Exists(path);
    if (isNew)
    {
        Directory.CreateDirectory(path);
        LogVerbose("Creating path: " + path);
    }
    if (isRebuilder)
    {
        DeleteIndex(path);
    }
    try
    {
        // Deleted items only need pruning on incremental (non-rebuild) runs.
        if (!isRebuilder && !organization.IsRebuildingIndex)
        {
            RemoveOldIndexItems(LoginUser, path, organization, referenceType, deletedIndexItemsFileName);
        }
    }
    catch (Exception ex)
    {
        Logs.WriteException(ex);
        ExceptionLogs.LogException(LoginUser, ex, "Indexer.RemoveOldIndexItems - " + referenceType.ToString() + " - " + organization.OrganizationID.ToString());
    }

    // dtSearch needs a noise-word file; create an empty one if missing.
    string noiseFile = Path.Combine(root, "noise.dat");
    if (!File.Exists(noiseFile))
    {
        File.Create(noiseFile).Dispose();
    }
    Options options = new Options();
    options.TextFlags = TextFlags.dtsoTfRecognizeDates;
    options.NoiseWordFile = noiseFile;
    options.Save();

    LogVerbose("Processing " + tableName);
    using (IndexJob job = new IndexJob())
    {
        job.DataSourceToIndex = indexDataSource;
        job.IndexPath = path;
        job.ActionCreate = isNew || isRebuilder;
        job.ActionAdd = true;
        job.CreateRelativePaths = false;
        job.StoredFields = Server.Tokenize(storedFields);
        job.IndexingFlags = IndexingFlags.dtsAlwaysAdd;
        //string tempPath = Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory, "TempIndexFiles" + indexPath);
        //if (!Directory.Exists(tempPath)) Directory.CreateDirectory(tempPath);
        //job.TempFileDir = tempPath;

        // Compress only on weekends, and only on every other worker thread.
        bool doCompress = false;
        if (_threadPosition % 2 == 0 && (DateTime.Now.DayOfWeek == DayOfWeek.Saturday || DateTime.Now.DayOfWeek == DayOfWeek.Sunday))
        {
            IndexInfo info = IndexJob.GetIndexInfo(path);
            LogVerbose("Info - Doc Count:" + info.DocCount.ToString());
            LogVerbose("Info - Obsolete:" + info.ObsoleteCount.ToString());
            // FIX: the original "(info.ObsoleteCount / info.DocCount) > 0.2"
            // used integer division, which truncates to 0 unless the obsolete
            // count reaches the doc count — so compression effectively never
            // triggered. Cast to double, and guard against an empty index.
            doCompress = info.DocCount > 0 && ((double)info.ObsoleteCount / info.DocCount) > 0.2;
            if (doCompress)
            {
                job.ActionCompress = true;
                job.ActionVerify = true;
                LogVerbose("Compressing");
            }
        }
        try
        {
            job.ExecuteInThread();

            // Monitor the job execution thread as it progresses.
            IndexProgressInfo status = new IndexProgressInfo();
            while (job.IsThreadDone(1000, status) == false)
            {
                if (IsStopped)
                {
                    job.AbortThread();
                }
            }
        }
        catch (Exception ex)
        {
            ExceptionLogs.LogException(LoginUser, ex, "Index Job Processor - " + referenceType.ToString() + " - " + organization.OrganizationID.ToString());
            Logs.WriteException(ex);
            throw;
        }
        if (doCompress)
        {
            IndexInfo info = IndexJob.GetIndexInfo(path);
            LogVerbose("Compressed");
            LogVerbose("Info - Doc Count:" + info.DocCount.ToString());
            LogVerbose("Info - Obsolete:" + info.ObsoleteCount.ToString());
        }
        if (!IsStopped)
        {
            if (!isRebuilder)
            {
                // Re-check the rebuild flag before marking items as indexed.
                Organization tempOrg = Organizations.GetOrganization(_loginUser, organization.OrganizationID);
                if (!tempOrg.IsRebuildingIndex)
                {
                    UpdateItems(indexDataSource, tableName, primaryKeyName);
                }
            }
            else
            {
                // Swap the freshly rebuilt index into the live location.
                MoveRebuiltIndex(organization.OrganizationID, mainIndexPath, path);
            }
        }
    }
}
/// <summary>
/// Pumps the UI while a dtSearch job runs on its worker thread: mirrors the
/// current indexing step into <paramref name="textBoxStatus"/>, forwards
/// stop requests to the data source, and appends any job errors to the
/// status text when the job finishes.
/// </summary>
private void DoExecution(IndexJob job, TextBox textBoxStatus)
{
    // Monitor the job execution thread as it progresses
    IndexProgressInfo status = new IndexProgressInfo();
    while (job.IsThreadDone(500, status) == false)
    {
        // Set the status text based on the current indexing step
        switch (status.Step)
        {
            case IndexingStep.ixStepBegin:
                textBoxStatus.Text = "Opening index";
                break;
            case IndexingStep.ixStepCheckingFiles:
                textBoxStatus.Text = "Checking files";
                break;
            case IndexingStep.ixStepCompressing:
                textBoxStatus.Text = "Compressing index";
                break;
            case IndexingStep.ixStepCreatingIndex:
                textBoxStatus.Text = "Creating index";
                break;
            case IndexingStep.ixStepDone:
                textBoxStatus.Text = "Indexing Complete";
                break;
            case IndexingStep.ixStepMerging:
                textBoxStatus.Text = "Merging words into index";
                break;
            case IndexingStep.ixStepNone:
                textBoxStatus.Text = string.Empty;
                break;
            case IndexingStep.ixStepReadingFiles:
                textBoxStatus.Text = status.File.Name;
                break;
            case IndexingStep.ixStepStoringWords:
                textBoxStatus.Text = status.File.Name + " (storing words)";
                break;
            default:
                textBoxStatus.Text = string.Empty;
                break;
        }

        // Let other form events be handled while we're looping
        Application.DoEvents();

        // FIX: the original direct cast "(DTTableSource)job.DataSourceToIndex"
        // throws InvalidCastException when the job carries a different data
        // source type; "as" yields null instead, which the existing null
        // check already handles (e.g. merge jobs have no table source).
        DTTableSource ds = job.DataSourceToIndex as DTTableSource;
        if (ds != null) // Only applies to Indexing job
        {
            _textBoxProcessed.Text = ds.RecordProcessed.ToString();
            if (_stopRequested)
            {
                ds.StopRequested = true;
            }
        }
    }

    // If there were errors, display the errors as additions to the
    // status text
    JobErrorInfo err = job.Errors;
    for (int i = 0; i < err.Count; i++)
    {
        textBoxStatus.Text = textBoxStatus.Text + " " + err.Message(i);
    }
}