/// <summary>
/// Asserts that the completion results match <paramref name="expected"/> (scores stripped);
/// on mismatch, prints a two-column Expected/Result table and fails the test.
/// </summary>
private void AssertMatchEquals(IEnumerable<FSTCompletion.Completion> res, params string[] expected)
{
    // Materialize once; the original called res.Count()/ElementAt(i) in a loop,
    // re-enumerating the sequence O(n^2) times. Also fixes Java-style toString()/append().
    string[] result = res.Select(c => c.ToString()).ToArray();

    if (!ArrayEqualityComparer<string>.OneDimensional.Equals(StripScore(expected), StripScore(result)))
    {
        int colLen = Math.Max(MaxLen(expected), MaxLen(result));
        StringBuilder b = new StringBuilder();
        string format = "{0," + colLen + "} {1," + colLen + "}\n";
        b.Append(string.Format(CultureInfo.InvariantCulture, format, "Expected", "Result"));
        for (int i = 0; i < Math.Max(result.Length, expected.Length); i++)
        {
            b.Append(string.Format(CultureInfo.InvariantCulture, format,
                i < expected.Length ? expected[i] : "--",
                i < result.Length ? result[i] : "--"));
        }
        Console.WriteLine(b.ToString());
        fail("Expected different output:\n" + b.ToString());
    }
}
/// <summary>
/// Runs <paramref name="numIter"/> iterations; each iteration builds a random set of up to
/// <paramref name="maxNumWords"/> unique terms for both input modes (0 and 1) and calls DoTest.
/// </summary>
private void TestRandomWords(int maxNumWords, int numIter)
{
    Random random = new Random(Random().Next());
    for (int iter = 0; iter < numIter; iter++)
    {
        if (VERBOSE)
        {
            Console.WriteLine("\nTEST: iter " + iter);
        }
        for (int inputMode = 0; inputMode < 2; inputMode++)
        {
            // Fixed Java-style random.nextInt()/termsSet.size(); removed the unused
            // Int32sRef[] terms array the original allocated but never read or wrote.
            int numWords = random.Next(maxNumWords + 1);
            ISet<Int32sRef> termsSet = new HashSet<Int32sRef>();
            while (termsSet.Count < numWords)
            {
                string term = FSTTester<object>.GetRandomString(random);
                termsSet.Add(FSTTester<object>.ToIntsRef(term, inputMode));
            }
            DoTest(inputMode, termsSet.ToArray());
        }
    }
}
public override IndexInputSlicer CreateSlicer(string name, IOContext context)
{
    lock (this)
    {
        EnsureOpen();
        if (VERBOSE)
        {
            Console.WriteLine("nrtdir.openInput name=" + name);
        }
        // Prefer the cached copy of the file when one exists; otherwise fall
        // through to the wrapped directory.
#pragma warning disable 612, 618
        bool isCached = cache.FileExists(name);
#pragma warning restore 612, 618
        if (!isCached)
        {
            return @delegate.CreateSlicer(name, context);
        }
        if (VERBOSE)
        {
            Console.WriteLine("  from cache");
        }
        return cache.CreateSlicer(name, context);
    }
}
public static void Benchmarks()
{
    AssumeTrue("Turn VERBOSE on or otherwise you won't see the results.", VERBOSE);

    // Fill an array with random values to feed the queue.
    int maxSize = AtLeast(100000);
    PriorityQueue<int?> queue = new IntegerQueue(maxSize);
    int?[] elements = new int?[maxSize];
    for (int idx = 0; idx < maxSize; idx++)
    {
        elements[idx] = Random().Next();
    }

    // Pass 1: elements in random order.
    Console.WriteLine("Random list of elements...");
    TimedAddAndPop<int?>(queue, elements);
    queue.Clear();

    // Pass 2: elements in ascending order.
    Console.WriteLine("\nSorted list of elements...");
    queue = new IntegerQueue(maxSize);
    ArrayUtil.IntroSort(elements, new Less());
    TimedAddAndPop<int?>(queue, elements);
    queue.Clear();

    // Pass 3: elements in descending order.
    Console.WriteLine("\nReverse sorted list of elements...");
    queue = new IntegerQueue(maxSize);
    ArrayUtil.IntroSort(elements, new Greater());
    TimedAddAndPop<int?>(queue, elements);
    queue.Clear();
}
/// <summary>
/// Dumps each group's value followed by its unique values to the console;
/// BytesRef values are decoded to UTF-8 strings, everything else via ToString.
/// </summary>
private void PrintGroups(List<AbstractDistinctValuesCollector.IGroupCount<IComparable>> results)
{
    // Fixed Java-style results.size() -> Count (List<T> has no size() in C#).
    for (int i = 0; i < results.Count; i++)
    {
        var group = results[i];
        object gv = group.GroupValue;
        if (gv is BytesRef)
        {
            Console.WriteLine(i + ": groupValue=" + ((BytesRef)gv).Utf8ToString());
        }
        else
        {
            Console.WriteLine(i + ": groupValue=" + gv);
        }
        foreach (object o in group.UniqueValues)
        {
            if (o is BytesRef)
            {
                Console.WriteLine("  " + ((BytesRef)o).Utf8ToString());
            }
            else
            {
                Console.WriteLine("  " + o);
            }
        }
    }
}
private void DoRandomForceMerge()
{
    // Guard clause: merging is optional for this writer.
    if (!DoRandomForceMerge_Renamed)
    {
        return;
    }

    int segCount = w.SegmentCount;
    if (r.NextBoolean() || segCount == 0)
    {
        // full forceMerge
        if (LuceneTestCase.VERBOSE)
        {
            Console.WriteLine("RIW: doRandomForceMerge(1)");
        }
        w.ForceMerge(1);
    }
    else
    {
        // partial forceMerge: pick a random segment budget in [1, segCount]
        int limit = TestUtil.NextInt(r, 1, segCount);
        if (LuceneTestCase.VERBOSE)
        {
            Console.WriteLine("RIW: doRandomForceMerge(" + limit + ")");
        }
        w.ForceMerge(limit);
        Debug.Assert(!DoRandomForceMergeAssert_Renamed || w.SegmentCount <= limit, "limit=" + limit + " actual=" + w.SegmentCount);
    }
}
// Simply write to console - chosen to be independent of log4j etc
private void Log(string message)
{
    if (!VERBOSE)
    {
        return;
    }
    Console.WriteLine(message);
}
// Sets newSuffixStart to the index (within the common prefix region of br1/br2)
// of the start of the last whole UTF-8 character at or before the first byte
// where br1 and br2 differ; if no byte differs within the shorter length, the
// shorter length itself is used.
private void SetNewSuffixStart(BytesRef br1, BytesRef br2)
{
    int limit = Math.Min(br1.Length, br2.Length);
    int lastStart = 0;
    for (int i = 0; i < limit; i++)
    {
        // (b & 0xc0) == 0xc0: UTF-8 lead byte of a multi-byte sequence;
        // (b & 0x80) == 0: single-byte (ASCII) character.
        // Either way byte i begins a new character, so remember the boundary.
        if ((br1.Bytes[br1.Offset + i] & 0xc0) == 0xc0 || (br1.Bytes[br1.Offset + i] & 0x80) == 0)
        {
            lastStart = i;
        }
        if (br1.Bytes[br1.Offset + i] != br2.Bytes[br2.Offset + i])
        {
            // First mismatching byte: back up to the start of the character containing it.
            newSuffixStart = lastStart;
            if (DEBUG_SURROGATES)
            {
                Console.WriteLine("    set newSuffixStart=" + newSuffixStart);
            }
            return;
        }
    }
    // One ref is a byte-prefix of the other: suffix starts at the common length.
    newSuffixStart = limit;
    if (DEBUG_SURROGATES)
    {
        Console.WriteLine("    set newSuffixStart=" + newSuffixStart);
    }
}
private void _DoRandomForceMerge() // LUCENENET specific - added leading underscore to keep this from colliding with the DoRandomForceMerge property
{
    // Nothing to do unless random force-merging is enabled.
    if (!doRandomForceMerge)
    {
        return;
    }

    int segmentCount = IndexWriter.SegmentCount;
    if (r.NextBoolean() || segmentCount == 0)
    {
        // Full force-merge: collapse the index into one segment.
        if (LuceneTestCase.VERBOSE)
        {
            Console.WriteLine("RIW: doRandomForceMerge(1)");
        }
        IndexWriter.ForceMerge(1);
    }
    else
    {
        // Partial force-merge down to a random segment budget in [1, segmentCount].
        int maxSegments = TestUtil.NextInt32(r, 1, segmentCount);
        if (LuceneTestCase.VERBOSE)
        {
            Console.WriteLine("RIW: doRandomForceMerge(" + maxSegments + ")");
        }
        IndexWriter.ForceMerge(maxSegments);
        Debug.Assert(!doRandomForceMergeAssert || IndexWriter.SegmentCount <= maxSegments, "limit=" + maxSegments + " actual=" + IndexWriter.SegmentCount);
    }
}
public virtual void TestPayloadSpanUtil()
{
    Directory dir = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(
        Random, dir,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer(this)).SetSimilarity(similarity));

    // Index a single doc containing a handful of terms in the payload field.
    Document doc = new Document();
    doc.Add(NewTextField(PayloadHelper.FIELD, "xx rr yy mm pp", Field.Store.YES));
    writer.AddDocument(doc);

    IndexReader reader = writer.GetReader();
    writer.Dispose();

    IndexSearcher searcher = NewSearcher(reader);
    PayloadSpanUtil util = new PayloadSpanUtil(searcher.TopReaderContext);
    var payloads = util.GetPayloadsForQuery(new TermQuery(new Term(PayloadHelper.FIELD, "rr")));
    if (VERBOSE)
    {
        Console.WriteLine("Num payloads:" + payloads.Count);
        foreach (var payload in payloads)
        {
            Console.WriteLine(Encoding.UTF8.GetString(payload));
        }
    }

    reader.Dispose();
    dir.Dispose();
}
private void CheckSpans(Spans spans, int numSpans, int[] numPayloads)
{
    int seen = 0;
    while (spans.Next())
    {
        if (VERBOSE)
        {
            Console.WriteLine("\nSpans Dump --");
        }
        if (!spans.IsPayloadAvailable)
        {
            // No payload on this span: only acceptable when none was expected.
            Assert.IsFalse(numPayloads.Length > 0 && numPayloads[seen] > 0, "Expected spans:" + numPayloads[seen] + " found: 0");
        }
        else
        {
            var payload = spans.GetPayload();
            if (VERBOSE)
            {
                Console.WriteLine("payloads for span:" + payload.Count);
                foreach (var bytes in payload)
                {
                    Console.WriteLine("doc:" + spans.Doc + " s:" + spans.Start + " e:" + spans.End + " " + Encoding.UTF8.GetString(bytes));
                }
            }
            Assert.AreEqual(numPayloads[seen], payload.Count);
        }
        seen++;
    }
    // Every expected span must have been enumerated.
    Assert.AreEqual(numSpans, seen);
}
public void TestTrying()
{
    BytesRef lake = new BytesRef("lake");
    BytesRef star = new BytesRef("star");
    BytesRef ret = new BytesRef("ret");

    // Weighted inputs; each carries one of the BytesRef payloads above.
    Input[] inputs =
    {
        new Input("top of the lake", 15, lake),
        new Input("star wars: episode v - the empire strikes back", 12, star),
        new Input("the returned", 10, ret),
    };

    DirectoryInfo tempDir = CreateTempDir("BlendedInfixSuggesterTest");
    Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT, CharArraySet.EMPTY_SET);

    // if factor is small, we don't get the expected element
    BlendedInfixSuggester suggester = new BlendedInfixSuggester(
        TEST_VERSION_CURRENT, NewFSDirectory(tempDir), analyzer, analyzer,
        AnalyzingInfixSuggester.DEFAULT_MIN_PREFIX_CHARS,
        BlendedInfixSuggester.BlenderType.POSITION_RECIPROCAL,
        BlendedInfixSuggester.DEFAULT_NUM_FACTOR);
    suggester.Build(new InputArrayIterator(inputs));

    IList<Lookup.LookupResult> responses = suggester.DoLookup("the", null, 4, true, false);
    foreach (Lookup.LookupResult response in responses)
    {
        Console.WriteLine(response);
    }

    suggester.Dispose();
}
public override void Run()
{
    try
    {
        while (Run_Renamed)
        {
            //int n = random.nextInt(2);
            if (Type == 0)
            {
                // Adder: index a fresh document with the next sequence id.
                int docId = OuterInstance.Seq.AddAndGet(1);
                Document doc = DocHelper.CreateDocument(docId, "index1", 10);
                Writer.AddDocument(doc);
                AddCount++;
            }
            else if (Type == 1)
            {
                // we may or may not delete because the term may not exist,
                // however we're opening and closing the reader rapidly
                IndexReader reader = Writer.GetReader();
                int candidate = r.Next(OuterInstance.Seq);
                Term term = new Term("id", Convert.ToString(candidate));
                int matched = TestIndexWriterReader.Count(term, reader);
                Writer.DeleteDocuments(term);
                reader.Dispose();
                DelCount += matched;
            }
        }
    }
    catch (Exception ex)
    {
        // Remember the failure and stop the loop for this thread.
        Console.WriteLine(ex.StackTrace);
        this.Ex = ex;
        Run_Renamed = false;
    }
}
public override void Run()
{
    try
    {
        long lastRamSize = 0;
        // Keep indexing until the shared pending-docs counter is exhausted.
        while (PendingDocs.DecrementAndGet() > -1)
        {
            Writer.AddDocument(Docs.NextDoc());
            long currentRamSize = Writer.RamSizeInBytes();
            if (currentRamSize != lastRamSize)
            {
                lastRamSize = currentRamSize;
            }
            // Occasionally commit mid-stream when random commits are enabled.
            if (DoRandomCommit && Rarely())
            {
                Writer.Commit();
            }
        }
        Writer.Commit();
    }
    catch (Exception ex)
    {
        Console.WriteLine("FAILED exc:");
        Console.WriteLine(ex.StackTrace);
        throw new Exception(ex.Message, ex);
    }
}
public override void SetUp()
{
    base.SetUp();
    Dir = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(
        Random, Dir,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));

    // Index 1000 docs; the single field holds the zero-padded doc number ("000".."999").
    Document doc = new Document();
    Field field = NewStringField("field", "", Field.Store.NO);
    doc.Add(field);
    for (int docNum = 0; docNum < 1000; docNum++)
    {
        field.SetStringValue(docNum.ToString("D3"));
        writer.AddDocument(doc);
    }

    Reader = writer.GetReader();
    Searcher = NewSearcher(Reader);
    writer.Dispose();

    if (VERBOSE)
    {
        Console.WriteLine("TEST: setUp searcher=" + Searcher);
    }
}
public override void Run()
{
    long deadline = Environment.TickCount + (long)RUN_TIME_MSEC;
    Count = 0;
    try
    {
        // Work until the deadline passes or another thread has reported an error.
        do
        {
            if (AnyErrors())
            {
                break;
            }
            DoWork();
            Count++;
        } while (Environment.TickCount < deadline);
    }
    catch (Exception e)
    {
        Console.WriteLine(Thread.CurrentThread.Name + ": exc");
        Console.WriteLine(e.StackTrace);
        Failed = true;
    }
}
private void DoTestZeroPosIncrSloppy(Query q, int nExpected)
{
    Directory dir = NewDirectory(); // random dir
    IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, null);
    IndexWriter writer = new IndexWriter(dir, cfg);

    // Single doc built from the canned zero-position-increment token stream.
    Document doc = new Document();
    doc.Add(new TextField("field", new CannedTokenStream(INCR_0_DOC_TOKENS)));
    writer.AddDocument(doc);

    IndexReader r = DirectoryReader.Open(writer, false);
    writer.Dispose();
    IndexSearcher s = NewSearcher(r);

    if (VERBOSE)
    {
        Console.WriteLine("QUERY=" + q);
    }

    TopDocs hits = s.Search(q, 1);
    Assert.AreEqual(nExpected, hits.TotalHits, "wrong number of results");
    if (VERBOSE)
    {
        for (int hit = 0; hit < hits.TotalHits; hit++)
        {
            ScoreDoc sd = hits.ScoreDocs[hit];
            Console.WriteLine("  hit doc=" + sd.Doc + " score=" + sd.Score);
        }
    }

    r.Dispose();
    dir.Dispose();
}
public override void Run()
{
    while (!stop)
    {
        try
        {
            // Sleep for up to 20 msec:
            Thread.Sleep(Random.Next(20));
            if (VERBOSE)
            {
                Console.WriteLine("TEST: reopen");
            }
            // Refresh the searcher manager so new searchers see recent changes.
            mgr.MaybeRefresh();
            if (VERBOSE)
            {
                Console.WriteLine("TEST: reopen done");
            }
        }
        catch (Exception cause)
        {
            // Escalate: a failed reopen means the test cannot continue meaningfully.
            throw new Exception(cause.Message, cause);
        }
    }
}
public override void Run()
{
    try
    {
        for (int round = 0; round < NUM_ITER2; round++)
        {
            WriterFinal.ForceMerge(1, false);

            // Add a batch of docs keyed by iter/thread/round/ordinal.
            for (int ord = 0; ord < 17 * (1 + IFinal); ord++)
            {
                Document d = new Document();
                d.Add(NewField("id", IterFinal + "_" + IFinal + "_" + round + "_" + ord, CustomType));
                d.Add(NewField("contents", English.Int32ToEnglish(IFinal + ord), CustomType));
                WriterFinal.AddDocument(d);
            }

            // Delete a prefix of the ids just added.
            for (int ord = 0; ord < 9 * (1 + IFinal); ord++)
            {
                WriterFinal.DeleteDocuments(new Term("id", IterFinal + "_" + IFinal + "_" + round + "_" + ord));
            }

            WriterFinal.ForceMerge(1);
        }
    }
    catch (Exception t)
    {
        OuterInstance.SetFailed();
        Console.WriteLine(Thread.CurrentThread.Name + ": hit exception");
        Console.WriteLine(t.StackTrace);
    }
}
/// <summary>
/// Parses <c>queryText</c>, expecting a ParseException. If parsing unexpectedly succeeds,
/// appends the query text and its parsed form to <paramref name="failQueries"/>.
/// </summary>
public void DoTest(StringBuilder failQueries)
{
    bool pass = false;
    SrndQuery lq = null;
    try
    {
        lq = Parser.QueryParser.Parse(queryText);
        if (verbose)
        {
            Console.WriteLine("Query: " + queryText + "\nParsed as: " + lq.ToString());
        }
    }
    catch (ParseException e)
    {
        // A parse failure is the expected outcome for these queries.
        if (verbose)
        {
            Console.WriteLine("Parse exception for query:\n" + queryText + "\n" + e.Message);
        }
        pass = true;
    }
    if (!pass)
    {
        // Fixed Java-style append()/toString() to C# Append()/ToString().
        failQueries.Append(queryText);
        failQueries.Append("\nParsed as: ");
        failQueries.Append(lq.ToString());
        failQueries.Append("\n");
    }
}
/// <summary>
/// Adds all <paramref name="elements"/> to <paramref name="pq"/>, then pops them all,
/// printing total and average per-operation timings for each phase.
/// </summary>
private static void TimedAddAndPop<T>(PriorityQueue<T> pq, T[] elements)
{
    int size = elements.Length; // was Java-style elements.size() with a redundant cast

    // Stopwatch is the correct tool for interval timing (DateTime.Now has coarse resolution).
    Stopwatch watch = Stopwatch.StartNew();
    AddElements(pq, elements);
    watch.Stop();
    // BUGFIX: the original printed TimeSpan.Milliseconds, which is only the 0-999 ms
    // component of the interval; TotalMilliseconds is the full duration.
    Console.WriteLine("Total adding time: {0} ticks or {1}ms", watch.Elapsed.Ticks, watch.Elapsed.TotalMilliseconds);
    Console.WriteLine("Average time per add: {0} ticks", watch.Elapsed.Ticks / size);

    watch.Restart();
    PopElements(pq);
    watch.Stop();
    Console.WriteLine("Total popping time: {0} ticks or {1}ms", watch.Elapsed.Ticks, watch.Elapsed.TotalMilliseconds);
    Console.WriteLine("Average time per pop: {0} ticks", watch.Elapsed.Ticks / size);
}
public virtual void TestNoExtraFiles()
{
    Directory directory = NewDirectory();
    IndexWriter writer = new IndexWriter(
        directory,
        (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2));

    for (int iter = 0; iter < 7; iter++)
    {
        if (VERBOSE)
        {
            Console.WriteLine("TEST: iter=" + iter);
        }

        // Add a batch of small docs, then close and verify no stray files remain.
        for (int docNum = 0; docNum < 21; docNum++)
        {
            Document doc = new Document();
            doc.Add(NewTextField("content", "a b c", Field.Store.NO));
            writer.AddDocument(doc);
        }

        writer.Dispose();
        TestIndexWriter.AssertNoUnreferencedFiles(directory, "testNoExtraFiles");

        // Reopen
        writer = new IndexWriter(
            directory,
            (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(2));
    }

    writer.Dispose();
    directory.Dispose();
}
// Thread body: constructs an IndexWriter, signals the coordinator that construction
// finished, waits for the shared start gate, then adds a single document.
public override void Run()
{
    try
    {
        Document doc = new Document();
        Field field = NewTextField("field", "testData", Field.Store.YES);
        doc.Add(field);
        using (IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
            OuterInstance,
#endif
            TEST_VERSION_CURRENT, new MockAnalyzer(Random))))
        {
            // Signal writer construction, guarding against an already-drained countdown.
            if (IwConstructed.CurrentCount > 0)
            {
                IwConstructed.Signal();
            }
            // Block until the test releases all indexing threads at once.
            StartIndexing_Renamed.Wait();
            writer.AddDocument(doc);
        }
    }
    catch (Exception e)
    {
        // Record the failure for the main test thread to inspect later.
        Failed = true;
        Failure = e;
        Console.WriteLine(e.ToString());
        return;
    }
}
// Print debug info to the console, but only when VERBOSE is enabled.
private static void log(string s)
{
    if (!VERBOSE)
    {
        return;
    }
    Console.WriteLine(s);
}
// For each dictionary triple in tests[], opens the zip, loads the Hunspell
// dictionary from its .dic/.aff entries, and prints a RAM-usage breakdown.
public virtual void Test()
{
    // tests[] is laid out in triples: { zip file name, .dic entry name, .aff entry name }.
    for (int i = 0; i < tests.Length; i += 3)
    {
        FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i]));
        Debug.Assert(f.Exists);
        using (Stream fileStream = f.OpenRead())
        {
            using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8))
            {
                ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]);
                Debug.Assert(dicEntry != null);
                ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]);
                Debug.Assert(affEntry != null);
                using (Stream dictionary = dicEntry.Open())
                {
                    using (Stream affix = affEntry.Open())
                    {
                        // Build the dictionary, then report estimated sizes of its components.
                        Dictionary dic = new Dictionary(affix, dictionary);
                        Console.WriteLine(tests[i] + "\t" + RamUsageEstimator.HumanSizeOf(dic) + "\t(" + "words=" + RamUsageEstimator.HumanSizeOf(dic.words) + ", " + "flags=" + RamUsageEstimator.HumanSizeOf(dic.flagLookup) + ", " + "strips=" + RamUsageEstimator.HumanSizeOf(dic.stripData) + ", " + "conditions=" + RamUsageEstimator.HumanSizeOf(dic.patterns) + ", " + "affixData=" + RamUsageEstimator.HumanSizeOf(dic.affixData) + ", " + "prefixes=" + RamUsageEstimator.HumanSizeOf(dic.prefixes) + ", " + "suffixes=" + RamUsageEstimator.HumanSizeOf(dic.suffixes) + ")");
                    }
                }
            }
        }
    }
}
public virtual void TestBadPrefixTreePrune()
{
    trie = new QuadPrefixTree(ctx, 12);
    TermQueryPrefixTreeStrategy strategy = new TermQueryPrefixTreeStrategy(trie, "geo");

    // Index a single rectangle shape.
    Document doc = new Document();
    doc.Add(new TextField("id", "1", Field.Store.YES));
    IShape area = ctx.MakeRectangle(-122.82, -122.78, 48.54, 48.56);
    Field[] fields = strategy.CreateIndexableFields(area, 0.025);
    foreach (Field field in fields)
    {
        doc.Add(field);
    }
    AddDocument(doc);

    // Search with a rectangle that intersects the indexed shape.
    IPoint upperleft = ctx.MakePoint(-122.88, 48.54);
    IPoint lowerright = ctx.MakePoint(-122.82, 48.62);
    Query query = strategy.MakeQuery(new SpatialArgs(SpatialOperation.Intersects, ctx.MakeRectangle(upperleft, lowerright)));
    Commit();

    TopDocs search = indexSearcher.Search(query, 10);
    foreach (ScoreDoc scoreDoc in search.ScoreDocs)
    {
        Console.WriteLine(indexSearcher.Doc(scoreDoc.Doc));
    }

    assertEquals(1, search.TotalHits);
}
public void DoTest()
{
    if (verbose)
    {
        Console.WriteLine("");
        Console.WriteLine("Query: " + queryText);
    }

    // Parse the surround query, then translate it to a Lucene query over fieldName.
    SrndQuery parsed = Parser.QueryParser.Parse(queryText);
    Search.Query query = parsed.MakeLuceneQueryField(fieldName, qf);

    // Run the query and let the collector count hits.
    TestCollector collector = new TestCollector(this);
    using (IndexReader reader = DirectoryReader.Open(dBase.Db))
    {
        IndexSearcher searcher = new IndexSearcher(reader);
        searcher.Search(query, collector);
    }
    collector.CheckNrHits();
}
/// <summary>
/// Run the task, record statistics.
/// </summary>
/// <param name="reportStats">Whether to record stats and optionally log the task start.</param>
/// <returns>Number of work items done by this task.</returns>
public int RunAndMaybeStats(bool reportStats)
{
    int count;
    if (!reportStats || ShouldNotRecordStats)
    {
        // Plain run: no task-stats bookkeeping.
        Setup();
        count = DoLogic();
        count = disableCounting ? 0 : count;
        TearDown();
        return count;
    }

    // reportStats is necessarily true past the guard above
    // (the original redundantly re-checked it here).
    if (depth <= maxDepthLogStart && !ShouldNeverLogAtStart)
    {
        Console.WriteLine("------------> starting task: " + GetName());
    }

    Setup();
    Points pnts = runData.Points;
    TaskStats ts = pnts.MarkTaskStart(this, runData.Config.RoundNumber);
    count = DoLogic();
    count = disableCounting ? 0 : count;
    pnts.MarkTaskEnd(ts, count);
    TearDown();
    return count;
}
private void AssertSumDocFreq(IndexReader ir)
{
    // compute sumDocFreq across all fields
    Fields fields = MultiFields.GetFields(ir);
    foreach (string fieldName in fields)
    {
        Terms terms = fields.GetTerms(fieldName);
        long reportedSum = terms.SumDocFreq;
        if (reportedSum == -1)
        {
            // The codec doesn't track this statistic for the field; nothing to verify.
            if (VERBOSE)
            {
                Console.WriteLine("skipping field: " + fieldName + ", codec does not support sumDocFreq");
            }
            continue;
        }

        // Re-derive the statistic by summing docFreq over every term in the field.
        long actualSum = 0;
        TermsEnum termsEnum = terms.GetIterator(null);
        while (termsEnum.Next() != null)
        {
            actualSum += termsEnum.DocFreq;
        }
        Assert.AreEqual(actualSum, reportedSum);
    }
}
/// <summary>
/// Creates the tracking writer, the two SearcherManagers (with and without deletes),
/// and starts a ControlledRealTimeReopenThread for each, using randomized reopen intervals.
/// </summary>
protected override void DoAfterWriter(TaskScheduler es)
{
    double minReopenSec = 0.01 + 0.05 * Random.NextDouble();
    double maxReopenSec = minReopenSec * (1.0 + 10 * Random.NextDouble());

    if (VERBOSE)
    {
        Console.WriteLine("TEST: make SearcherManager maxReopenSec=" + maxReopenSec + " minReopenSec=" + minReopenSec);
    }

    genWriter = new TrackingIndexWriter(m_writer);

    SearcherFactory sf = new SearcherFactoryAnonymousInnerClassHelper(this, es);
    nrtNoDeletes = new SearcherManager(m_writer, false, sf);
    nrtDeletes = new SearcherManager(m_writer, true, sf);

    // Local helper: identical naming/priority/background/start sequence was
    // duplicated verbatim for both reopen threads in the original.
    void StartReopenThread(ControlledRealTimeReopenThread<IndexSearcher> thread, string name)
    {
        thread.Name = name;
#if !NETSTANDARD1_6
        thread.Priority = (ThreadPriority)Math.Min((int)Thread.CurrentThread.Priority + 2, (int)ThreadPriority.Highest);
#endif
        thread.IsBackground = true;
        thread.Start();
    }

    nrtDeletesThread = new ControlledRealTimeReopenThread<IndexSearcher>(genWriter, nrtDeletes, maxReopenSec, minReopenSec);
    StartReopenThread(nrtDeletesThread, "NRTDeletes Reopen Thread");

    nrtNoDeletesThread = new ControlledRealTimeReopenThread<IndexSearcher>(genWriter, nrtNoDeletes, maxReopenSec, minReopenSec);
    StartReopenThread(nrtNoDeletesThread, "NRTNoDeletes Reopen Thread");
}