// Spawns one worker thread per input row; every RunnerThread2 shares the same
// BigIntArray target. NOTE(review): threads are started but never joined here —
// presumably the caller/harness waits elsewhere; confirm.
static void Time1(int[][] array)
{
    int workerCount = array.Length;
    // `max` is a field declared outside this view — TODO confirm its meaning (array capacity?).
    BigIntArray shared = new BigIntArray(max);
    Thread[] workers = new Thread[workerCount];
    RunnerThread2[] runners = new RunnerThread2[workerCount];
    for (int n = 0; n < workerCount; ++n)
    {
        runners[n] = new RunnerThread2(array[n], shared);
        workers[n] = new Thread(new ThreadStart(runners[n].Run));
    }
    for (int n = 0; n < workerCount; ++n)
    {
        workers[n].Start();
    }
}
/// <summary>
/// Builds five segments of 25 formatted int terms each, combines their
/// iterators, and verifies the merged facet:count output string.
/// </summary>
public void TestDefaultIntFacetIterator()
{
    const string format = "00";
    var iterators = new List<IntFacetIterator>();
    for (int segment = 0; segment < 5; segment++)
    {
        var termList = new TermIntList(format);
        const int limit = 25;
        var counts = new BigIntArray(limit);
        var terms = new string[limit];
        // Fill in reverse, then sort, mirroring the original setup exactly.
        for (int i = limit - 1; i >= 0; i--)
        {
            terms[i] = i.ToString(format);
        }
        Array.Sort(terms);
        for (int i = 0; i < limit; i++)
        {
            termList.Add(terms[i]);
            counts.Add(i, i);
        }
        termList.Seal();
        iterators.Add(new DefaultIntFacetIterator(termList, counts, limit, true));
    }
    var combined = new CombinedIntFacetIterator(iterators);
    string actual = "";
    while (combined.HasNext())
    {
        combined.Next();
        actual += (combined.Facet + ":" + combined.Count + " ");
    }
    string expected = "1:5 2:10 3:15 4:20 5:25 6:30 7:35 8:40 9:45 10:50 11:55 12:60 13:65 14:70 15:75 16:80 17:85 18:90 19:95 20:100 21:105 22:110 23:115 24:120 ";
    Assert.AreEqual(expected, actual);
}
/// <summary>
/// Combines two default facet iterators over identical term lists and
/// verifies the merged facet/count sequence.
/// </summary>
public void TestDefaultFacetIterator()
{
    var terms1 = new TermStringList();
    terms1.Add("i");
    terms1.Add("m");
    terms1.Seal();
    var counts1 = new BigIntArray(2);
    counts1.Add(0, 1);
    counts1.Add(1, 2);
    var first = new DefaultFacetIterator(terms1, counts1, 2, false);

    var terms2 = new TermStringList();
    terms2.Add("i");
    terms2.Add("m");
    terms2.Seal();
    var counts2 = new BigIntArray(2);
    counts2.Add(0, 1);
    counts2.Add(1, 5);
    var second = new DefaultFacetIterator(terms2, counts2, 2, true);

    var iterators = new List<FacetIterator> { first, second };
    var combined = new CombinedFacetIterator(iterators);
    string actual = "";
    while (combined.HasNext())
    {
        combined.Next();
        actual += combined.Facet;
        actual += combined.Count;
    }
    Assert.AreEqual("i1m7", actual, "result should be i1m7");
}
/// <summary>
/// Convenience overload: converts the collector's count distribution into a
/// plain int[] and delegates to the array-based overload.
/// </summary>
/// <param name="countHitCollector">Collector whose count distribution is analyzed.</param>
/// <param name="n">Passed through to the array-based overload unchanged.</param>
public virtual FacetCountStatistics GenerateStatistic(IFacetCountCollector countHitCollector, int n)
{
    var distribution = BigIntArray.ToArray(countHitCollector.GetCountDistribution());
    return GenerateStatistic(distribution, n);
}
/// <summary>
/// Collects counts for all documents at once by converting the cached
/// per-term frequency array into the count array.
/// </summary>
public override sealed void CollectAll()
{
    var cachedFreqs = _dataCache.Freqs;
    _count = BigIntArray.FromArray(cachedFreqs);
}
/// <summary>
/// Collects counts for all documents at once by converting the cached
/// per-term frequency array into the count array.
/// </summary>
public virtual void CollectAll()
{
    var cachedFreqs = _dataCache.Freqs;
    _count = BigIntArray.FromArray(cachedFreqs);
}
/// <summary>
/// Collects counts for all documents at once: copies the cached per-term
/// frequencies into the count array and records the number of entries.
/// </summary>
public void CollectAll()
{
    var cachedFreqs = _dataCache.Freqs;
    _count = BigIntArray.FromArray(cachedFreqs);
    _countLength = cachedFreqs.Length;
}
/// <summary>
/// Builds compact multi-value facet data for the reader: each document's
/// entry in <c>order</c> is an int bitset where bit (t-1) is set when the
/// document contains term number t of the field. The field may therefore
/// hold at most MAX_VAL_COUNT distinct values (presumably 32, the bits of
/// one int — TODO confirm the constant's value).
/// </summary>
/// <param name="reader">Index reader whose terms for _indexFieldName are scanned.</param>
/// <returns>A sealed FacetDataCache with per-term freq/min-doc/max-doc stats.</returns>
/// <exception cref="IOException">When the field exceeds MAX_VAL_COUNT distinct values.</exception>
public override FacetDataCache Load(BoboIndexReader reader)
{
    int maxDoc = reader.MaxDoc;
    // One int per document; each term contributes one bit, OR-ed in below.
    BigIntArray order = new BigIntArray(maxDoc);
    ITermValueList mterms = _termListFactory == null ? new TermStringList() : _termListFactory.CreateTermList();
    List<int> minIDList = new List<int>();
    List<int> maxIDList = new List<int>();
    List<int> freqList = new List<int>();
    TermDocs termDocs = null;
    TermEnum termEnum = null;
    int t = 0; // current term number
    // Slot 0 is the sentinel "no value" entry (null term, freq 0, ids -1).
    mterms.Add(null);
    minIDList.Add(-1);
    maxIDList.Add(-1);
    freqList.Add(0);
    t++;
    try
    {
        termDocs = reader.TermDocs();
        termEnum = reader.Terms(new Term(_indexFieldName, ""));
        do
        {
            if (termEnum == null)
            {
                break;
            }
            Term term = termEnum.Term;
            if (term == null || !_indexFieldName.Equals(term.Field))
            {
                break; // walked past the last term of this field
            }
            // store term text
            // we expect that there is at most one term per document
            if (t > MAX_VAL_COUNT)
            {
                throw new IOException("maximum number of value cannot exceed: " + MAX_VAL_COUNT);
            }
            string val = term.Text;
            mterms.Add(val);
            // Bit position representing this term in every document's bitset.
            int bit = (0x00000001 << (t - 1));
            termDocs.Seek(termEnum);
            //freqList.add(termEnum.docFreq()); // removed because the df doesn't take into account the num of deletedDocs
            int df = 0;
            int minID = -1;
            int maxID = -1;
            if (termDocs.Next())
            {
                // First posting fixes minID; loop continues to the last one for maxID.
                df++;
                int docid = termDocs.Doc;
                order.Add(docid, order.Get(docid) | bit);
                minID = docid;
                while (termDocs.Next())
                {
                    df++;
                    docid = termDocs.Doc;
                    order.Add(docid, order.Get(docid) | bit);
                }
                maxID = docid;
            }
            freqList.Add(df);
            minIDList.Add(minID);
            maxIDList.Add(maxID);
            t++;
        } while (termEnum.Next());
    }
    finally
    {
        // Dispose both enumerators even if disposing the first one throws.
        try
        {
            if (termDocs != null)
            {
                termDocs.Dispose();
            }
        }
        finally
        {
            if (termEnum != null)
            {
                termEnum.Dispose();
            }
        }
    }
    mterms.Seal();
    return(new FacetDataCache(order, mterms, freqList.ToArray(), minIDList.ToArray(), maxIDList.ToArray(), TermCountSize.Large));
}
/// <summary>
/// Captures this worker's input values and the shared output array.
/// </summary>
/// <param name="a">The int values this runner will process.</param>
/// <param name="b">The BigIntArray shared across all runner threads.</param>
public RunnerThread2(int[] a, BigIntArray b)
{
    bigarray = b;
    array = a;
}
/// <summary>
/// Loads facet data by fetching one value per document via _facetDataFetcher,
/// grouping doc ids by value in a sorted C5.TreeDictionary (comparator chosen
/// from the first fetched value's runtime type), then flattening the groups
/// into parallel order/freq/minID/maxID arrays. Slot 0 holds the documents
/// whose fetched value was null.
/// </summary>
/// <param name="reader">Index reader whose documents are scanned via TermDocs(null).</param>
/// <returns>A sealed FacetDataCache built from the grouped values.</returns>
public override FacetDataCache Load(BoboIndexReader reader)
{
    int doc = -1;
    C5.TreeDictionary<object, List<int>> dataMap = null;
    List<int> docList = null;
    // Null-value bucket statistics, stored in slot 0 of the output arrays.
    int nullMinId = -1;
    int nullMaxId = -1;
    int nullFreq = 0;
    TermDocs termDocs = reader.TermDocs(null);
    try
    {
        while (termDocs.Next())
        {
            doc = termDocs.Doc;
            object val = _facetDataFetcher.Fetch(reader, doc);
            if (val == null)
            {
                // Track min/max/count of null-valued docs separately.
                if (nullMinId < 0)
                {
                    nullMinId = doc;
                }
                nullMaxId = doc;
                ++nullFreq;
                continue;
            }
            if (dataMap == null)
            {
                // Initialize. The comparator is picked once, based on the first
                // non-null value's type; assumes all values share that type —
                // TODO confirm the fetcher guarantees this.
                if (val is long[])
                {
                    if (_termListFactory == null)
                    {
                        _termListFactory = new TermFixedLengthLongArrayListFactory(
                            ((long[])val).Length);
                    }
                    dataMap = new C5.TreeDictionary<object, List<int>>(new VirtualSimpleFacetHandlerLongArrayComparator());
                }
                else if (val is IComparable)
                {
                    // NOTE: In .NET 3.5, the default constructor doesn't work in this case. We therefore have a custom type
                    // that converts the objects to IComparable before comparing them, falling back to a string comparison
                    // if they don't convert. This differs from the Java implementation that uses the default constructor.
                    dataMap = new C5.TreeDictionary<object, List<int>>(new VirtualSimpleFacetHandlerComparableComparator());
                }
                else
                {
                    dataMap = new C5.TreeDictionary<object, List<int>>(new VirtualSimpleFacetHandlerObjectComparator());
                }
            }
            // Look up (or lazily create) this value's doc-id bucket.
            if (dataMap.Contains(val))
            {
                docList = dataMap[val];
            }
            else
            {
                docList = null;
            }
            if (docList == null)
            {
                docList = new List<int>();
                dataMap[val] = docList;
            }
            docList.Add(doc);
        }
    }
    finally
    {
        termDocs.Dispose();
    }
    _facetDataFetcher.Cleanup(reader);
    int maxDoc = reader.MaxDoc;
    // size includes the extra slot 0 for the null bucket.
    int size = dataMap == null ? 1 : (dataMap.Count + 1);
    BigSegmentedArray order = new BigIntArray(maxDoc);
    ITermValueList list = _termListFactory == null ?
        new TermStringList(size) : _termListFactory.CreateTermList(size);
    int[] freqs = new int[size];
    int[] minIDs = new int[size];
    int[] maxIDs = new int[size];
    // Slot 0: documents with no (null) value.
    list.Add(null);
    freqs[0] = nullFreq;
    minIDs[0] = nullMinId;
    maxIDs[0] = nullMaxId;
    if (dataMap != null)
    {
        int i = 1; // term index, starting after the null slot
        int? docId;
        foreach (var entry in dataMap)
        {
            list.Add(list.Format(entry.Key));
            docList = entry.Value;
            freqs[i] = docList.Count;
            // Get/Poll use int.MinValue as an empty sentinel; Poll drains the
            // bucket while assigning each doc its term index in `order`.
            minIDs[i] = docList.Get(0, int.MinValue);
            while ((docId = docList.Poll(int.MinValue)) != int.MinValue)
            {
                doc = (int)docId;
                order.Add(doc, i);
            }
            // `doc` still holds the last polled id — the bucket's max.
            maxIDs[i] = doc;
            ++i;
        }
    }
    list.Seal();
    FacetDataCache dataCache = new FacetDataCache(order, list, freqs, minIDs,
        maxIDs, TermCountSize.Large);
    return(dataCache);
}