[Test] public void ConsistencyAfterUpdates()
{
    // Verifies that query results stay consistent across successive batch
    // updates, document deletions, and a large bulk insert.
    //
    // The environment object is needed only for its constructor side effects;
    // the original code assigned it to a local and self-assigned ("env = env;",
    // compiler warning CS1717) merely to silence the unused-variable warning.
    new MockPluginEnvironment(_storage);
    FullTextIndexer indexer = new FullTextIndexer();

    //-----------------------------------------------------------------
    // Batch 1: two documents, both containing "token1".
    indexer.AddDocumentFragment(100, "token1 token2 token3");
    indexer.AddDocumentFragment(200, "token1 token2 token3");
    indexer.EndBatchUpdate();
    Entry[] aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.AreEqual(2, aentry_Result.Length, "Failed to find all documents");

    //-----------------------------------------------------------------
    // Batch 2: two more documents with "token1".
    indexer.AddDocumentFragment(300, "token1 token2 token3");
    indexer.AddDocumentFragment(400, "token1 token4 token5");
    indexer.EndBatchUpdate();
    aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.AreEqual(4, aentry_Result.Length, "Failed to find all documents");

    //-----------------------------------------------------------------
    // Batch 3: two more.
    indexer.AddDocumentFragment(500, "token1 token2 token3");
    indexer.AddDocumentFragment(600, "token1 token4 token5");
    indexer.EndBatchUpdate();
    aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.AreEqual(6, aentry_Result.Length, "Failed to find all documents");

    //-----------------------------------------------------------------
    // Deletions must be reflected immediately in query results.
    indexer.DeleteDocument(100);
    aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.AreEqual(5, aentry_Result.Length, "Failed to find all documents");

    indexer.DeleteDocument(600);
    aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.AreEqual(4, aentry_Result.Length, "Failed to find all documents");

    //-----------------------------------------------------------------
    // Bulk insert of 200000 unique terms; "Token1" is added twice
    // (at i == 0 and i == 100000), raising the "token1" hit count
    // from 4 back to 6.
    for (int i = 0; i < 200000; i++)
    {
        indexer.AddDocumentFragment(i + 1000, "Term" + i);
        if (i % 100000 == 0)
        {
            indexer.AddDocumentFragment(i + 1000, "Token1");
            Console.Write(".");
        }
    }
    indexer.EndBatchUpdate();
    aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.AreEqual(6, aentry_Result.Length, "Failed to find all documents");

    //-----------------------------------------------------------------
    // Re-adding fragments for documents 300 and 400 after the bulk insert
    // must still be picked up by the query (expected count grows to 8).
    indexer.AddDocumentFragment(300, "token1 token2 token3");
    indexer.AddDocumentFragment(400, "token1 token4 token5");
    indexer.EndBatchUpdate();
    aentry_Result = indexer.ProcessQueryInternal("token1");
    Assert.AreEqual(8, aentry_Result.Length, "Failed to find all documents");

    indexer.DiscardTextIndex();
}
[Test] public void TestMemoryAccessorsMerge()
{
    // Exercises merging of in-memory index accessors: two small documents,
    // then ~50000 single-term documents, after which every unique term must
    // be found exactly once.
    //
    // The environment object is created only for its constructor side effects;
    // the original held it in a local and self-assigned ("env = env;",
    // compiler warning CS1717) just to silence the unused-variable warning.
    new MockPluginEnvironment(_storage);
    FullTextIndexer indexer = new FullTextIndexer();

    indexer.AddDocumentFragment(1, "one two\n\nthree fourplay");
    indexer.AddDocumentFragment(2, "fourplay.\n\nthreeplay. ");
    indexer.EndBatchUpdate();

    // Bulk-add unique terms; a second EndBatchUpdate forces the merge.
    for (int i = 3; i < 50000; i++)
    {
        indexer.AddDocumentFragment(i, "Term" + i);
    }
    Console.WriteLine("Finished addition");
    indexer.EndBatchUpdate();
    Console.WriteLine("Finished Linking");

    Entry[] aentry_Result;
    for (int i = 3; i < 50000; i++)
    {
        aentry_Result = indexer.ProcessQueryInternal("Term" + i);
        Assert.AreEqual(1, aentry_Result.Length, "We must find exactly one instance of every term");
    }
    // Fixed typo in the original progress message ("quering").
    Console.WriteLine("Finished querying");

    indexer.DiscardTextIndex();
}
[Test] public void TestSimpleSequence()
{
    // This is the main goal of this testing: run one resource through the
    // indexing pipeline and let Handler_SimpleSequenceTest verify the
    // ResourceProcessed notification.
    indexer.ResourceProcessed += Handler_SimpleSequenceTest;
    try
    {
        IResource newRes = Core.ResourceStore.NewResource("TestType");
        _savedID = newRes.Id;
        indexer.AddDocumentFragment(newRes.Id, "token1.token2. token3 token4.token5. ");
        indexer.EndBatchUpdate();
    }
    finally
    {
        // Always unsubscribe: the original only detached the handler on the
        // success path, so an exception above would leak the subscription
        // into subsequent tests.
        indexer.ResourceProcessed -= Handler_SimpleSequenceTest;
    }
}
// Completes a pending batch of document additions by delegating to the
// low-level text indexer, then reports success to listeners.
//
// Error handling:
//  - FormatException is treated as index corruption: the exception is
//    reported (with log attached) and the whole text index is rebuilt.
//  - IOException is treated as an environment failure: the user is notified
//    and indexing is suspended until the next application start.
// On either failure the method returns early, skipping the success-path
// notifications below.
internal void EndBatchUpdate()
{
    // Reset the per-batch counter before attempting the merge.
    _documentsIndexed = 0;
    try
    {
        _textIndexer.EndBatchUpdate();
    }
    catch (FormatException ex)
    {
        // NOTE(review): corrupted index structures appear to surface as
        // FormatException from the low-level indexer — confirm; a full
        // rebuild is the recovery path here.
        Core.ReportException(ex, ExceptionReportFlags.AttachLog);
        RebuildIndex();
        return;
    }
    catch (System.IO.IOException)
    {
        Core.UIManager.ShowSimpleMessageBox("Text Index Operation Failed", "System encountered a serious I/O error while constructing text index." + " Indexing operation will be suspended until next start of the Omea.");
        SuspendIndexingByError();
        return;
    }
    // Success path: clear any transient status text (the writer may not be
    // initialized) and notify subscribers that the index is loaded.
    if (_statusWriter != null)
    {
        _statusWriter.ClearStatus();
    }
    if (IndexLoaded != null)
    {
        IndexLoaded(this, EventArgs.Empty);
    }
    // Presumably checks remaining disk space for the index — TODO confirm.
    AnalyzeFreeSpace();
}
[Test] public void TestDotDelimiters()
{
    // Tokens separated by '.' (with and without trailing spaces) must each be
    // indexed as an individual searchable term.
    IResource newRes = Core.ResourceStore.NewResource("TestType");
    indexer.AddDocumentFragment(newRes.Id, "token1.token2. token3 token4.token5. ");
    indexer.EndBatchUpdate();

    // Query each of the five tokens in turn; every one must hit exactly
    // the single indexed document.
    for (int tokenIndex = 1; tokenIndex <= 5; tokenIndex++)
    {
        Entry[] result = indexer.ProcessQueryInternal("token" + tokenIndex);
        Console.WriteLine(result[0].Offsets[0].Sentence);
        AssertIfTrue("We must find one token from the instance of the document", result.Length == 1);
    }
}
[Test] public void EmptyIndex()
{
    // An empty batch update must leave the index in a valid, empty state:
    // no document may be reported as present.
    indexer.EndBatchUpdate();
    // Assert.IsFalse(cond) replaces the original's less readable
    // Assert.IsTrue(!cond); the pass/fail behavior is identical.
    Assert.IsFalse(indexer.IsDocumentPresentInternal(100));
}