/// <summary>
/// Spawns one worker thread per input row; each worker writes its row into a
/// shared <c>BigInt32Array</c>.
/// NOTE(review): reads a field <c>max</c> declared elsewhere in this class, and
/// the threads are started but never joined here — presumably the caller waits
/// or measures elapsed time externally; confirm.
/// </summary>
/// <param name="array">One row of input values per worker thread.</param>
static void Time1(int[][] array)
{
    int workerCount = array.Length;
    BigInt32Array shared = new BigInt32Array(max);
    Thread[] workers = new Thread[workerCount];
    RunnerThread[] states = new RunnerThread[workerCount];

    for (int idx = 0; idx < workerCount; idx++)
    {
        states[idx] = new RunnerThread(array[idx], shared);
        workers[idx] = new Thread(new ThreadStart(states[idx].Run));
    }

    foreach (Thread worker in workers)
    {
        worker.Start();
    }
}
/// <summary>
/// Builds five segment-local int facet iterators over the same 25 zero-padded
/// terms (term i carrying count i), combines them, and verifies that the
/// combined iterator sums counts across segments (each count appears 5x).
/// </summary>
public void TestDefaultIntFacetIterator()
{
    const string format = "00";
    const int termCount = 25;
    const int segmentCount = 5;

    var iterators = new List<Int32FacetIterator>();
    for (int segment = 0; segment < segmentCount; segment++)
    {
        var termList = new TermInt32List(format);
        var counts = new BigInt32Array(termCount);

        // Terms "00".."24"; lexicographic sort equals numeric order here
        // because every term is zero-padded to the same width.
        var terms = new string[termCount];
        for (int i = 0; i < termCount; i++)
        {
            terms[i] = i.ToString(format);
        }
        Array.Sort(terms);

        for (int i = 0; i < termCount; i++)
        {
            termList.Add(terms[i]);
            counts.Add(i, i);
        }
        termList.Seal();

        iterators.Add(new DefaultInt32FacetIterator(termList, counts, termCount, true));
    }

    var combined = new CombinedInt32FacetIterator(iterators);
    string result = "";
    while (combined.HasNext())
    {
        combined.Next();
        result += (combined.Facet + ":" + combined.Count + " ");
    }

    string expected = "1:5 2:10 3:15 4:20 5:25 6:30 7:35 8:40 9:45 10:50 11:55 12:60 13:65 14:70 15:75 16:80 17:85 18:90 19:95 20:100 21:105 22:110 23:115 24:120 ";
    Assert.AreEqual(expected, result);
}
/// <summary>
/// Combines two iterators over the same two terms ("i", "m") with different
/// counts and verifies the combined iterator sums per-term counts:
/// i: 1+1? no — i:1 (only counted once per expected "i1m7"), m: 2+5 = 7.
/// Expected concatenation of facet+count pairs is "i1m7".
/// </summary>
public void TestDefaultFacetIterator()
{
    // First segment: i -> 1, m -> 2.
    var firstTerms = new TermStringList();
    firstTerms.Add("i");
    firstTerms.Add("m");
    firstTerms.Seal();
    var firstCounts = new BigInt32Array(2);
    firstCounts.Add(0, 1);
    firstCounts.Add(1, 2);
    var first = new DefaultFacetIterator(firstTerms, firstCounts, 2, false);

    // Second segment: i -> 1, m -> 5.
    var secondTerms = new TermStringList();
    secondTerms.Add("i");
    secondTerms.Add("m");
    secondTerms.Seal();
    var secondCounts = new BigInt32Array(2);
    secondCounts.Add(0, 1);
    secondCounts.Add(1, 5);
    var second = new DefaultFacetIterator(secondTerms, secondCounts, 2, true);

    var iterators = new List<FacetIterator>();
    iterators.Add(first);
    iterators.Add(second);

    var combined = new CombinedFacetIterator(iterators);
    string result = "";
    while (combined.HasNext())
    {
        combined.Next();
        result += combined.Facet;
        result += combined.Count;
    }

    Assert.AreEqual("i1m7", result, "result should be i1m7");
}
/// <summary>
/// Replaces the current count array with this segment's per-term frequencies,
/// copied from the base class's data cache.
/// </summary>
public override void CollectAll()
{
    int[] freqs = base.m_dataCache.Freqs;
    m_count = BigInt32Array.FromArray(freqs);
}
/// <summary>
/// Replaces the current count array with this segment's per-term frequencies
/// taken from the data cache.
/// </summary>
public virtual void CollectAll()
{
    int[] freqs = m_dataCache.Freqs;
    m_count = BigInt32Array.FromArray(freqs);
}
/// <summary>
/// Replaces the current count array with this segment's per-term frequencies
/// taken from the data cache. Sealed: subclasses may not alter this behavior.
/// </summary>
public override sealed void CollectAll()
{
    int[] freqs = m_dataCache.Freqs;
    m_count = BigInt32Array.FromArray(freqs);
}
/// <summary>
/// Captures the worker's input row and the shared output array for use by the
/// thread body (Run, defined elsewhere in this class).
/// </summary>
/// <param name="a">The row of input values this worker will process.</param>
/// <param name="b">The shared array written by all workers.</param>
public RunnerThread(int[] a, BigInt32Array b) { array = a; bigarray = b; }
/// <summary>
/// Builds a FacetDataCache for a virtual facet: fetches one value per live
/// document, groups doc ids by value in a sorted map, then flattens the groups
/// into the ordinal/order arrays the cache expects. Term-list slot 0 is
/// reserved for the null (missing) value.
/// </summary>
/// <param name="reader">Segment reader supplying MaxDoc, live docs, and values.</param>
/// <returns>A populated cache with the order array, sealed term list, and per-term freq/min/max stats.</returns>
public override FacetDataCache Load(BoboSegmentReader reader)
{
    TreeDictionary<object, List<int>> dataMap = null;
    List<int> docList = null;
    // Stats for the reserved null slot (ordinal 0).
    int nullMinId = -1;
    int nullMaxId = -1;
    int nullFreq = 0;
    int doc = -1;
    IBits liveDocs = reader.LiveDocs;
    for (int i = 0; i < reader.MaxDoc; ++i)
    {
        // Skip deleted documents.
        if (liveDocs != null && !liveDocs.Get(i))
        {
            continue;
        }
        doc = i;
        object val = m_facetDataFetcher.Fetch(reader, doc);
        if (val == null)
        {
            // Docs with no value feed the null slot's min/max/frequency.
            if (nullMinId < 0)
            {
                nullMinId = doc;
            }
            nullMaxId = doc;
            ++nullFreq;
            continue;
        }
        if (dataMap == null)
        {
            // Initialize. The map's comparer is chosen from the type of the
            // first non-null value seen.
            if (val is long[])
            {
                if (m_termListFactory == null)
                {
                    m_termListFactory = new TermFixedLengthInt64ArrayListFactory(
                        ((long[])val).Length);
                }
                // NOTE(review): comparer type name says Int16Array but it is
                // applied to long[] values — verify the comparer handles long[].
                dataMap = new TreeDictionary<object, List<int>>(new VirtualSimpleFacetHandlerInt16ArrayComparer());
            }
            else if (val is IComparable)
            {
                // Natural ordering for comparable values.
                dataMap = new TreeDictionary<object, List<int>>();
            }
            else
            {
                dataMap = new TreeDictionary<object, List<int>>(new VirtualSimpleFacetHandlerObjectComparer());
            }
        }
        if (dataMap.Contains(val))
        {
            docList = dataMap[val];
        }
        else
        {
            docList = null;
        }
        if (docList == null)
        {
            // First doc for this value: start a new group.
            docList = new List<int>();
            dataMap[val] = docList;
        }
        docList.Add(doc);
    }
    m_facetDataFetcher.Cleanup(reader);
    int maxDoc = reader.MaxDoc;
    // +1 for the reserved null slot.
    int size = dataMap == null ? 1 : (dataMap.Count + 1);
    BigSegmentedArray order = new BigInt32Array(maxDoc);
    ITermValueList list = m_termListFactory == null ?
        new TermStringList(size) : m_termListFactory.CreateTermList(size);
    int[] freqs = new int[size];
    int[] minIDs = new int[size];
    int[] maxIDs = new int[size];
    // Slot 0: the null value.
    list.Add(null);
    freqs[0] = nullFreq;
    minIDs[0] = nullMinId;
    maxIDs[0] = nullMaxId;
    if (dataMap != null)
    {
        int i = 1;
        int? docId;
        // Map iteration is in comparer order, so ordinals follow term order.
        foreach (var entry in dataMap)
        {
            list.Add(list.Format(entry.Key));
            docList = entry.Value;
            freqs[i] = docList.Count;
            minIDs[i] = docList.Get(0, int.MinValue);
            // Drain the group, recording each doc's term ordinal in the order array.
            while ((docId = docList.Poll(int.MinValue)) != int.MinValue)
            {
                doc = (int)docId;
                order.Add(doc, i);
            }
            // Last polled doc is the group's max id (docs were added in ascending order).
            maxIDs[i] = doc;
            ++i;
        }
    }
    list.Seal();
    FacetDataCache dataCache = new FacetDataCache(order, list, freqs,
        minIDs, maxIDs, TermCountSize.Large);
    return (dataCache);
}
/// <summary>
/// Replaces the current count array with this segment's per-term frequencies
/// from the data cache, and records the number of terms counted.
/// </summary>
public void CollectAll()
{
    int[] freqs = m_dataCache.Freqs;
    m_countLength = freqs.Length;
    m_count = BigInt32Array.FromArray(freqs);
}
/// <summary>
/// Builds a FacetDataCache where each document's order-array entry is a bitset
/// of the terms it contains (term t maps to bit t-1). Slot 0 of the term list
/// is reserved for the null value. Term count is bounded by MAX_VAL_COUNT
/// because each term consumes one bit of an int.
/// </summary>
/// <param name="reader">Segment reader supplying terms and postings for the field.</param>
/// <returns>A populated cache with bitset order array, sealed term list, and per-term stats.</returns>
/// <exception cref="IOException">If the field has more than MAX_VAL_COUNT terms.</exception>
public override FacetDataCache Load(BoboSegmentReader reader)
{
    int maxDoc = reader.MaxDoc;
    BigInt32Array order = new BigInt32Array(maxDoc);
    ITermValueList mterms = m_termListFactory == null ? new TermStringList() : m_termListFactory.CreateTermList();
    List<int> minIDList = new List<int>();
    List<int> maxIDList = new List<int>();
    List<int> freqList = new List<int>();
    int t = 0; // current term number
    // Slot 0: the reserved null term with sentinel stats.
    mterms.Add(null);
    minIDList.Add(-1);
    maxIDList.Add(-1);
    freqList.Add(0);
    t++;
    Terms terms = reader.GetTerms(m_indexFieldName);
    if (terms != null)
    {
        TermsEnum termsEnum = terms.GetIterator(null);
        BytesRef text;
        while ((text = termsEnum.Next()) != null)
        {
            // store term text
            // we expect that there is at most one term per document
            if (t > MAX_VAL_COUNT)
            {
                throw new IOException("maximum number of value cannot exceed: " + MAX_VAL_COUNT);
            }
            string val = text.Utf8ToString();
            mterms.Add(val);
            // Each term owns one bit; term t maps to bit t-1.
            int bit = (0x00000001 << (t - 1));
            Term term = new Term(m_indexFieldName, val);
            DocsEnum docsEnum = reader.GetTermDocsEnum(term);
            //freqList.add(termEnum.docFreq()); // removed because the df doesn't take into account the
            // num of deletedDocs
            int df = 0;
            int minID = -1;
            int maxID = -1;
            int docID = -1;
            // Outer loop handles the first posting (sets minID); the inner loop
            // consumes the rest, OR-ing this term's bit into each doc's entry.
            while ((docID = docsEnum.NextDoc()) != DocsEnum.NO_MORE_DOCS)
            {
                df++;
                order.Add(docID, order.Get(docID) | bit);
                minID = docID;
                while (docsEnum.NextDoc() != DocsEnum.NO_MORE_DOCS)
                {
                    docID = docsEnum.DocID;
                    df++;
                    order.Add(docID, order.Get(docID) | bit);
                }
                maxID = docID;
            }
            freqList.Add(df);
            minIDList.Add(minID);
            maxIDList.Add(maxID);
            t++;
        }
    }
    mterms.Seal();
    return (new FacetDataCache(order, mterms, freqList.ToArray(), minIDList.ToArray(),
        maxIDList.ToArray(), TermCountSize.Large));
}