/// <summary>
/// Micro-benchmark: looks up every value of <c>array</c> in <c>bigarray</c>
/// and prints the elapsed wall-clock time in milliseconds.
/// </summary>
public void Run()
{
    // Stopwatch is monotonic and high-resolution. The original used
    // Environment.TickCount, a 32-bit counter with ~15 ms granularity that
    // wraps after ~24.9 days — assigning it to long does not prevent a
    // negative (end - start) across the wrap.
    var timer = System.Diagnostics.Stopwatch.StartNew();
    foreach (int val in array)
    {
        // Result intentionally discarded; we only want to time the lookup.
        bigarray.Get(val);
    }
    timer.Stop();
    Console.WriteLine("time: " + timer.ElapsedMilliseconds);
}
/// <summary>
/// Builds the facet data cache for a "compact multi-value" field: every
/// document's terms are packed into a single 32-bit bitmask in <c>order</c>
/// (bit t-1 set means the doc contains term t), so the field can hold at most
/// MAX_VAL_COUNT distinct terms. Slot 0 of the term list is a sentinel entry
/// (null term, freq 0, min/max doc -1) representing "no value".
/// </summary>
/// <param name="reader">Segment reader supplying the terms and postings for this field.</param>
/// <returns>A populated FacetDataCache (order array, term list, per-term freq/min/max doc ids).</returns>
/// <exception cref="IOException">When the field has more than MAX_VAL_COUNT distinct terms.</exception>
public override FacetDataCache Load(BoboSegmentReader reader)
{
    int maxDoc = reader.MaxDoc;
    // One 32-bit slot per document; bits are OR-ed in as terms are scanned.
    BigInt32Array order = new BigInt32Array(maxDoc);
    ITermValueList mterms = m_termListFactory == null ? new TermStringList() : m_termListFactory.CreateTermList();
    List <int> minIDList = new List <int>();
    List <int> maxIDList = new List <int>();
    List <int> freqList = new List <int>();
    int t = 0; // current term number
    // Sentinel entry at index 0 for documents that have no value for this field.
    mterms.Add(null);
    minIDList.Add(-1);
    maxIDList.Add(-1);
    freqList.Add(0);
    t++;
    Terms terms = reader.GetTerms(m_indexFieldName);
    if (terms != null)
    {
        TermsEnum termsEnum = terms.GetIterator(null);
        BytesRef text;
        while ((text = termsEnum.Next()) != null)
        {
            // store term text
            // we expect that there is at most one term per document
            // NOTE(review): check uses the index the NEW term would get; since each
            // term occupies one bit of an int, MAX_VAL_COUNT is presumably 32 — confirm.
            if (t > MAX_VAL_COUNT)
            {
                throw new IOException("maximum number of value cannot exceed: " + MAX_VAL_COUNT);
            }
            string val = text.Utf8ToString();
            mterms.Add(val);
            // Bit assigned to this term; for t == 32 this is the sign bit, which is
            // still a valid mask position in a 32-bit int.
            int bit = (0x00000001 << (t - 1));
            Term term = new Term(m_indexFieldName, val);
            DocsEnum docsEnum = reader.GetTermDocsEnum(term);
            //freqList.add(termEnum.docFreq()); // removed because the df doesn't take into account the
            // num of deletedDocs
            int df = 0;
            int minID = -1;
            int maxID = -1;
            int docID = -1;
            // Outer iteration captures the first (smallest) doc id as minID; the inner
            // loop drains the remaining postings, leaving docID at the last (largest)
            // doc, recorded as maxID. The outer condition then re-polls the exhausted
            // enum, returns NO_MORE_DOCS, and exits — so the outer body runs at most once.
            while ((docID = docsEnum.NextDoc()) != DocsEnum.NO_MORE_DOCS)
            {
                df++;
                // NOTE(review): BigInt32Array.Add(index, value) appears to act as an
                // indexed set (paired with Get to OR in this term's bit) — confirm.
                order.Add(docID, order.Get(docID) | bit);
                minID = docID;
                while (docsEnum.NextDoc() != DocsEnum.NO_MORE_DOCS)
                {
                    docID = docsEnum.DocID;
                    df++;
                    order.Add(docID, order.Get(docID) | bit);
                }
                maxID = docID;
            }
            freqList.Add(df);
            minIDList.Add(minID);
            maxIDList.Add(maxID);
            t++;
        }
    }
    // Seal the term list (implementation-defined; presumably makes it read-only
    // and/or trims storage — confirm against ITermValueList).
    mterms.Seal();
    return(new FacetDataCache(order, mterms, freqList.ToArray(), minIDList.ToArray(), maxIDList.ToArray(), TermCountSize.Large));
}