[SetUp] public void SetUp()
{
    // Remove leftover index files ("_*") from previous runs so every test
    // starts against a clean working directory; any I/O failure here means
    // the fixture cannot run reliably, so fail fast.
    try
    {
        foreach (string staleFile in System.IO.Directory.GetFiles(".", "_*"))
        {
            System.IO.File.Delete(staleFile);
        }
    }
    catch (Exception ex)
    {
        Assert.Fail(ex.Message);
    }

    OMEnv.DataDir = ".\\Data";
    InitStorage();

    // One async processor per slot, all routed to the shared test
    // exception handler; 'false' presumably delays thread start — TODO confirm.
    for (int slot = 0; slot < _processors.Length; ++slot)
    {
        _processors[slot] = new AsyncProcessor(new AsyncExceptionHandler(ExceptionHandler), false);
    }

    // Wire the mock plugin environment and core properties around the
    // freshly initialized storage.
    MockPluginEnvironment env = new MockPluginEnvironment(_storage);
    env.SetCoreProps(new MockCoreProps());

    indexer = new FullTextIndexer();
    indexer.Initialize();

    Core.ResourceStore.ResourceTypes.Register("TestType", "TestType", "Name", ResourceTypeFlags.Normal);
}
/// <summary>
/// Verifies that in-memory index accessors merge correctly: two small
/// batches of overlapping documents followed by a large batch (~50000
/// unique terms) must leave every unique term individually queryable
/// with exactly one hit.
/// </summary>
[Test] public void TestMemoryAccessorsMerge()
{
    // Constructed solely for its registration side effects on _storage.
    // (The original code kept the instance in a local and self-assigned it
    // to silence the unused-variable warning; a bare construction statement
    // achieves the same without the dead assignment.)
    new MockPluginEnvironment(_storage);
    FullTextIndexer indexer = new FullTextIndexer();

    // First batch: two documents sharing the token "fourplay".
    indexer.AddDocumentFragment(1, "one two\n\nthree fourplay");
    indexer.AddDocumentFragment(2, "fourplay.\n\nthreeplay. ");
    indexer.EndBatchUpdate();

    // Second, much larger batch: one unique term per document.
    for (int i = 3; i < 50000; i++)
    {
        indexer.AddDocumentFragment(i, "Term" + i);
    }
    Console.WriteLine("Finished addition");
    indexer.EndBatchUpdate();
    Console.WriteLine("Finished Linking");

    // After the merge, every unique term must resolve to exactly one entry.
    Entry[] aentry_Result;
    for (int i = 3; i < 50000; i++)
    {
        aentry_Result = indexer.ProcessQueryInternal("Term" + i);
        Assert.IsTrue(aentry_Result.Length == 1, "We must find exactly one instance of every term");
    }
    Console.WriteLine("Finished quering");

    indexer.DiscardTextIndex();
}
/// <summary>
/// Checks that the index stays consistent across successive batch updates,
/// document deletions, a very large filler batch, and re-addition of
/// previously indexed documents: the hit count for "token1" must track
/// every add/delete exactly.
/// </summary>
[Test] public void ConsistencyAfterUpdates()
{
    // Constructed solely for its registration side effects on _storage.
    // (The original self-assignment "env = env;" was dead code used only to
    // silence the unused-variable warning.)
    new MockPluginEnvironment(_storage);
    FullTextIndexer indexer = new FullTextIndexer();

    //-----------------------------------------------------------------
    // Two documents with token1 -> 2 hits.
    indexer.AddDocumentFragment(100, "token1 token2 token3");
    indexer.AddDocumentFragment(200, "token1 token2 token3");
    indexer.EndBatchUpdate();
    Entry[] aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.IsTrue(aentry_Result.Length == 2, "Failed to find all documents");

    //-----------------------------------------------------------------
    // Two more -> 4 hits.
    indexer.AddDocumentFragment(300, "token1 token2 token3");
    indexer.AddDocumentFragment(400, "token1 token4 token5");
    indexer.EndBatchUpdate();
    aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.IsTrue(aentry_Result.Length == 4, "Failed to find all documents");

    //-----------------------------------------------------------------
    // Two more -> 6 hits.
    indexer.AddDocumentFragment(500, "token1 token2 token3");
    indexer.AddDocumentFragment(600, "token1 token4 token5");
    indexer.EndBatchUpdate();
    aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.IsTrue(aentry_Result.Length == 6, "Failed to find all documents");

    //-----------------------------------------------------------------
    // Deleting documents must immediately reduce the hit count.
    indexer.DeleteDocument(100);
    aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.IsTrue(aentry_Result.Length == 5, "Failed to find all documents");
    indexer.DeleteDocument(600);
    aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.IsTrue(aentry_Result.Length == 4, "Failed to find all documents");

    //-----------------------------------------------------------------
    // Large filler batch; exactly two of the documents (i == 0 and
    // i == 100000) also receive "Token1", so the count becomes 4 + 2 = 6.
    for (int i = 0; i < 200000; i++)
    {
        indexer.AddDocumentFragment(i + 1000, "Term" + i);
        if (i % 100000 == 0)
        {
            indexer.AddDocumentFragment(i + 1000, "Token1");
            Console.Write(".");
        }
    }
    indexer.EndBatchUpdate();
    aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.IsTrue(aentry_Result.Length == 6, "Failed to find all documents");

    //-----------------------------------------------------------------
    // Re-adding fragments for existing ids 300/400 after the big batch
    // must raise the count to 8 — presumably each re-add counts as a new
    // entry for the token; verify against FullTextIndexer semantics.
    indexer.AddDocumentFragment(300, "token1 token2 token3");
    indexer.AddDocumentFragment(400, "token1 token4 token5");
    indexer.EndBatchUpdate();
    aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.IsTrue(aentry_Result.Length == 8, "Failed to find all documents");

    indexer.DiscardTextIndex();
}
// Builds the text-index manager on top of the base async processor
// ('false' is forwarded to the base ctor — presumably "do not start the
// worker thread yet"; confirm against AsyncProcessor).
// NOTE(review): statement order matters here — property-type cleanup,
// provider registration and queue setup all mutate global Core state.
internal TextIndexManager() : base(false)
{
    _processPendingDocsDelegate = new DelegateJob("Indexing documents", new MethodInvoker(ProcessPendingDocs), new object[] {});

    // Remove the "QueuedForIndexing" property type if it is still
    // registered (leftover from an earlier version of the store).
    if (Core.ResourceStore.PropTypes.Exist("QueuedForIndexing"))
    {
        Core.ResourceStore.PropTypes.Delete(Core.ResourceStore.PropTypes["QueuedForIndexing"].Id);
    }

    _statusWriter = Core.UIManager.GetStatusWriter(typeof(FullTextIndexer), StatusPane.UI);
    _isJobTraceSuppressed = Core.SettingStore.ReadBool("TextIndexing", "SuppressJobTraces", false);

    _textIndexer = new FullTextIndexer();
    _textIndexer.IndexLoaded += IndexLoadedNotification;

    // Dedicated, non-reenterable indexing thread running below normal
    // priority so indexing does not compete with the UI.
    Reenterable = false;
    ThreadName = "TextIndex AsyncProcessor";
    ThreadPriority = System.Threading.ThreadPriority.BelowNormal;
    ThreadStarted += TextIndexProcessor_ThreadStarted;

    // Text providers registered for all resource types (null type filter).
    Core.PluginLoader.RegisterResourceTextProvider(null, new AnnotationTextIndexProvider());
    Core.PluginLoader.RegisterResourceTextProvider(null, new TitleTextIndexProvider());

    // Register predefined search providers
    CurrentSearchProvider = new OmeaGlobalSearchProvider();
    RegisterSearchProvider(CurrentSearchProvider, "Omea Search", _cStandardProvidersGroupName);
    RegisterSearchProvider(new OmeaQuickSearchProvider(), "Local Search", _cStandardProvidersGroupName);

    // Give the job classes a back-reference to this manager, then set up
    // the defragmentation queue and the idle-mode switch job.
    DefragmentIndexJob._textIndexManager = IndexingJob._textIndexManager = this;
    SetupDefragmentationQueue();

    _switchToIdleJob = new SwitchToIdleModeJob(this);
    QueueSwitchToIdleModeJob();

    // Status-bar indicator light: idle/busy/stuck icons, 30 presumably the
    // stuck-detection interval — TODO confirm units against UIManager.
    Core.UIManager.RegisterIndicatorLight("Text Index Manager", this, 30,
        MainFrame.LoadIconFromAssembly("textindex_idle.ico"),
        MainFrame.LoadIconFromAssembly("textindex_busy.ico"),
        MainFrame.LoadIconFromAssembly("textindex_stuck.ico"));
}