/// <summary>
/// Opens the compound-file container holding the legacy (4.0) doc-values
/// data and records this reader's initial shallow RAM footprint.
/// </summary>
internal Lucene40DocValuesReader(SegmentReadState state, string filename, string legacyKey)
{
    this.LegacyKey = legacyKey;
    this.State = state;
    // Start the accounting at the shallow size of this reader instance.
    RamBytesUsed_Renamed = new AtomicLong(RamUsageEstimator.ShallowSizeOf(this.GetType()));
    // All per-field doc-values files live inside a single CFS container.
    this.Dir = new CompoundFileDirectory(state.Directory, filename, state.Context, false);
}
/// <summary>
/// Reads all entries from the metadata file up front (validating its header
/// and, for newer versions, its checksum footer), then opens the data file
/// and verifies both files carry the same format version.
/// </summary>
internal DirectDocValuesProducer(SegmentReadState state, string dataCodec, string dataExtension, string metaCodec, string metaExtension)
{
    maxDoc = state.SegmentInfo.DocCount;
    string metaName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, metaExtension);
    // read in the entries from the metadata file.
    ChecksumIndexInput @in = state.Directory.OpenChecksumInput(metaName, state.Context);
    ramBytesUsed = new AtomicLong(RamUsageEstimator.ShallowSizeOfInstance(this.GetType()));
    bool success = false;
    try
    {
        version = CodecUtil.CheckHeader(@in, metaCodec, VERSION_START, VERSION_CURRENT);
        ReadFields(@in);
        // Segments at or after VERSION_CHECKSUM carry a checksum footer;
        // older segments simply end at EOF.
        if (version >= VERSION_CHECKSUM)
        {
            CodecUtil.CheckFooter(@in);
        }
        else
        {
            CodecUtil.CheckEOF(@in);
        }
        success = true;
    }
    finally
    {
        // The metadata input is always closed here; on failure, close it
        // without letting a secondary exception mask the original one.
        if (success)
        {
            IOUtils.Close(@in);
        }
        else
        {
            IOUtils.CloseWhileHandlingException(@in);
        }
    }
    success = false;
    try
    {
        string dataName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, dataExtension);
        data = state.Directory.OpenInput(dataName, state.Context);
        int version2 = CodecUtil.CheckHeader(data, dataCodec, VERSION_START, VERSION_CURRENT);
        if (version != version2)
        {
            throw new CorruptIndexException("Format versions mismatch");
        }
        success = true;
    }
    finally
    {
        // The data input stays open for this producer's lifetime;
        // only close it when construction failed.
        if (!success)
        {
            IOUtils.CloseWhileHandlingException(this.data);
        }
    }
}
/// <summary>
/// Creates an empty buffered-updates container: no pending term updates
/// and a byte-usage counter starting at zero.
/// </summary>
public BufferedUpdates()
{
    Terms = new Dictionary<Term, int?>();
    this.BytesUsed = new AtomicLong();
}
/// <summary>
/// expert: instantiates a new reader </summary>
/// <remarks>
/// Reads the per-field entries from the metadata file (validating its header
/// and, for newer versions, its checksum footer), then opens the data file and
/// verifies both files were written with the same format version. On any
/// failure the opened inputs are closed before the exception propagates.
/// </remarks>
protected internal Lucene45DocValuesProducer(SegmentReadState state, string dataCodec, string dataExtension, string metaCodec, string metaExtension)
{
    string metaName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, metaExtension);
    // read in the entries from the metadata file.
    ChecksumIndexInput @in = state.Directory.OpenChecksumInput(metaName, state.Context);
    this.MaxDoc = state.SegmentInfo.DocCount;
    bool success = false;
    try
    {
        Version = CodecUtil.CheckHeader(@in, metaCodec, Lucene45DocValuesFormat.VERSION_START, Lucene45DocValuesFormat.VERSION_CURRENT);
        Numerics = new Dictionary<int, NumericEntry>();
        Ords = new Dictionary<int, NumericEntry>();
        OrdIndexes = new Dictionary<int, NumericEntry>();
        Binaries = new Dictionary<int, BinaryEntry>();
        SortedSets = new Dictionary<int, SortedSetEntry>();
        ReadFields(@in, state.FieldInfos);
        // Segments at or after VERSION_CHECKSUM carry a checksum footer;
        // older segments simply end at EOF.
        if (Version >= Lucene45DocValuesFormat.VERSION_CHECKSUM)
        {
            CodecUtil.CheckFooter(@in);
        }
        else
        {
            CodecUtil.CheckEOF(@in);
        }
        success = true;
    }
    finally
    {
        if (success)
        {
            IOUtils.Close(@in);
        }
        else
        {
            // Don't let a close() failure mask the original exception.
            IOUtils.CloseWhileHandlingException(@in);
        }
    }
    success = false;
    try
    {
        string dataName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, dataExtension);
        Data = state.Directory.OpenInput(dataName, state.Context);
        int version2 = CodecUtil.CheckHeader(Data, dataCodec, Lucene45DocValuesFormat.VERSION_START, Lucene45DocValuesFormat.VERSION_CURRENT);
        if (Version != version2)
        {
            // BUGFIX: a meta/data version mismatch indicates a corrupt index, so
            // throw the specific CorruptIndexException (matching
            // DirectDocValuesProducer) rather than the generic System.Exception,
            // which callers cannot catch meaningfully.
            throw new CorruptIndexException("Format versions mismatch");
        }
        success = true;
    }
    finally
    {
        // Data stays open for this producer's lifetime; only close it on failure.
        if (!success)
        {
            IOUtils.CloseWhileHandlingException(this.Data);
        }
    }
    RamBytesUsed_Renamed = new AtomicLong(RamUsageEstimator.ShallowSizeOfInstance(this.GetType()));
}
// note: just like segmentreader in 3.x, we open up all the files here (including separate norms) up front.
// but we just don't do any seeks or reading yet.
public Lucene3xNormsProducer(Directory dir, SegmentInfo info, FieldInfos fields, IOContext context)
{
    Directory separateNormsDir = info.Dir; // separate norms are never inside CFS
    Maxdoc = info.DocCount;
    string segmentName = info.Name;
    bool success = false;
    try
    {
        long nextNormSeek = NORMS_HEADER.Length; //skip header (header unused for now)
        foreach (FieldInfo fi in fields)
        {
            if (fi.HasNorms())
            {
                string fileName = GetNormFilename(info, fi.Number);
                // Fields with separate norms read from separateNormsDir, not the segment dir.
                Directory d = HasSeparateNorms(info, fi.Number) ? separateNormsDir : dir;
                // singleNormFile means multiple norms share this file
                bool singleNormFile = IndexFileNames.MatchesExtension(fileName, NORMS_EXTENSION);
                IndexInput normInput = null;
                long normSeek;
                if (singleNormFile)
                {
                    // Shared .nrm file: each field's norms start at a running offset.
                    normSeek = nextNormSeek;
                    if (SingleNormStream == null)
                    {
                        SingleNormStream = d.OpenInput(fileName, context);
                        OpenFiles.Add(SingleNormStream);
                    }
                    // All norms in the .nrm file can share a single IndexInput since
                    // they are only used in a synchronized context.
                    // If this were to change in the future, a clone could be done here.
                    normInput = SingleNormStream;
                }
                else
                {
                    normInput = d.OpenInput(fileName, context);
                    OpenFiles.Add(normInput);
                    // if the segment was created in 3.2 or after, we wrote the header for sure,
                    // and don't need to do the sketchy file size check. otherwise, we check
                    // if the size is exactly equal to maxDoc to detect a headerless file.
                    // NOTE: remove this check in Lucene 5.0!
                    string version = info.Version;
                    bool isUnversioned = (version == null || StringHelper.VersionComparator.Compare(version, "3.2") < 0) && normInput.Length() == Maxdoc;
                    if (isUnversioned)
                    {
                        normSeek = 0;
                    }
                    else
                    {
                        normSeek = NORMS_HEADER.Length;
                    }
                }
                NormsDocValues norm = new NormsDocValues(this, normInput, normSeek);
                Norms[fi.Name] = norm;
                nextNormSeek += Maxdoc; // increment also if some norms are separate
            }
        }
        // TODO: change to a real check? see LUCENE-3619
        Debug.Assert(SingleNormStream == null || nextNormSeek == SingleNormStream.Length(), SingleNormStream != null ? "len: " + SingleNormStream.Length() + " expected: " + nextNormSeek : "null");
        success = true;
    }
    finally
    {
        if (!success)
        {
            // Close every input opened so far without masking the original exception.
            IOUtils.CloseWhileHandlingException(OpenFiles);
        }
    }
    ramBytesUsed = new AtomicLong();
}
/// <summary>
/// Indexes NUM_DOCS small random documents, then runs NUM_SEARCH_THREADS
/// concurrent search threads against the resulting index and reports the
/// total number of searches performed.
/// </summary>
public virtual void Test()
{
    Directory dir = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(Random(), dir);

    // BUGFIX: DateTime.Now.Millisecond is only the millisecond *component*
    // (0-999) of the current second, so "endTime - startTime" was meaningless
    // and could even be negative. Derive real wall-clock milliseconds from
    // Ticks instead.
    long startTime = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;

    // TODO: replace w/ the @nightly test data; make this
    // into an optional @nightly stress test
    Document doc = new Document();
    Field body = NewTextField("body", "", Field.Store.NO);
    doc.Add(body);
    StringBuilder sb = new StringBuilder();
    for (int docCount = 0; docCount < NUM_DOCS; docCount++)
    {
        int numTerms = Random().Next(10);
        for (int termCount = 0; termCount < numTerms; termCount++)
        {
            sb.Append(Random().NextBoolean() ? "aaa" : "bbb");
            sb.Append(' ');
        }
        body.StringValue = sb.ToString();
        w.AddDocument(doc);
        sb.Remove(0, sb.Length); // reuse the builder across documents
    }
    IndexReader r = w.Reader;
    w.Dispose();

    long endTime = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;
    if (VERBOSE)
    {
        Console.WriteLine("BUILD took " + (endTime - startTime));
    }

    IndexSearcher s = NewSearcher(r);

    // Shared across all search threads: first failure aborts, netSearch
    // accumulates the total search count.
    AtomicBoolean failed = new AtomicBoolean();
    AtomicLong netSearch = new AtomicLong();

    ThreadClass[] threads = new ThreadClass[NUM_SEARCH_THREADS];
    for (int threadID = 0; threadID < NUM_SEARCH_THREADS; threadID++)
    {
        threads[threadID] = new ThreadAnonymousInnerClassHelper(this, s, failed, netSearch);
        threads[threadID].SetDaemon(true);
    }

    foreach (ThreadClass t in threads)
    {
        t.Start();
    }
    foreach (ThreadClass t in threads)
    {
        t.Join();
    }

    if (VERBOSE)
    {
        Console.WriteLine(NUM_SEARCH_THREADS + " threads did " + netSearch.Get() + " searches");
    }

    r.Dispose();
    dir.Dispose();
}
/// <summary>
/// Captures the shared searcher plus the failure flag and search counter
/// shared with the other threads, and creates this thread's own
/// hit-count collector.
/// </summary>
public ThreadAnonymousInnerClassHelper(TestSearchWithThreads outerInstance, IndexSearcher s, AtomicBoolean failed, AtomicLong netSearch)
{
    this.OuterInstance = outerInstance;
    this.NetSearch = netSearch;
    this.Failed = failed;
    this.s = s;
    col = new TotalHitCountCollector();
}
/// <summary>
/// NRT stress test: runs concurrent writer threads (adds, deletes,
/// delete-by-query, hard/soft commits) and reader threads against the same
/// index, with randomized percentages controlling each operation mix, then
/// joins all threads and closes the writer, reader, and directory.
/// </summary>
public virtual void Test()
{
    // update variables
    int commitPercent = Random().Next(20);
    int softCommitPercent = Random().Next(100); // what percent of the commits are soft
    int deletePercent = Random().Next(50);
    int deleteByQueryPercent = Random().Next(25);
    int ndocs = AtLeast(50);
    int nWriteThreads = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5);
    int maxConcurrentCommits = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5); // number of committers at a time... needed if we want to avoid commit errors due to exceeding the max

    bool tombstones = Random().NextBoolean();

    // query variables
    AtomicLong operations = new AtomicLong(AtLeast(10000)); // number of query operations to perform in total

    int nReadThreads = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5);
    InitModel(ndocs);

    // Stored-only field type used by writers for tombstone/document payloads.
    FieldType storedOnlyType = new FieldType();
    storedOnlyType.Stored = true;

    if (VERBOSE)
    {
        Console.WriteLine("\n");
        Console.WriteLine("TEST: commitPercent=" + commitPercent);
        Console.WriteLine("TEST: softCommitPercent=" + softCommitPercent);
        Console.WriteLine("TEST: deletePercent=" + deletePercent);
        Console.WriteLine("TEST: deleteByQueryPercent=" + deleteByQueryPercent);
        Console.WriteLine("TEST: ndocs=" + ndocs);
        Console.WriteLine("TEST: nWriteThreads=" + nWriteThreads);
        Console.WriteLine("TEST: nReadThreads=" + nReadThreads);
        Console.WriteLine("TEST: maxConcurrentCommits=" + maxConcurrentCommits);
        Console.WriteLine("TEST: tombstones=" + tombstones);
        Console.WriteLine("TEST: operations=" + operations);
        Console.WriteLine("\n");
    }

    AtomicInteger numCommitting = new AtomicInteger();

    IList<ThreadClass> threads = new List<ThreadClass>();

    Directory dir = NewDirectory();

    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    writer.DoRandomForceMergeAssert = false;
    writer.Commit();
    Reader = DirectoryReader.Open(dir);

    for (int i = 0; i < nWriteThreads; i++)
    {
        ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, "WRITER" + i, commitPercent, softCommitPercent, deletePercent, deleteByQueryPercent, ndocs, maxConcurrentCommits, tombstones, operations, storedOnlyType, numCommitting, writer);
        threads.Add(thread);
    }

    for (int i = 0; i < nReadThreads; i++)
    {
        ThreadClass thread = new ThreadAnonymousInnerClassHelper2(this, "READER" + i, ndocs, tombstones, operations);
        threads.Add(thread);
    }

    foreach (ThreadClass thread in threads)
    {
        thread.Start();
    }

    // Threads run until the shared operations counter is exhausted.
    foreach (ThreadClass thread in threads)
    {
        thread.Join();
    }

    writer.Dispose();
    if (VERBOSE)
    {
        Console.WriteLine("TEST: close reader=" + Reader);
    }
    Reader.Dispose();
    dir.Dispose();
}
/// <summary>
/// Reader-thread setup: records the parameters shared with the writer
/// threads and seeds a private Random for this thread.
/// </summary>
public ThreadAnonymousInnerClassHelper2(TestStressNRT outerInstance, string str, int ndocs, bool tombstones, AtomicLong operations) : base(str)
{
    this.OuterInstance = outerInstance;
    this.Operations = operations;
    this.Tombstones = tombstones;
    this.Ndocs = ndocs;
    // Per-thread RNG, seeded from the test's master Random for reproducibility.
    rand = new Random(Random().Next());
}
/// <summary>
/// Writer-thread setup: records the operation-mix percentages, document
/// count, commit limits, and the shared writer/counters, and seeds a
/// private Random for this thread.
/// </summary>
public ThreadAnonymousInnerClassHelper(TestStressNRT outerInstance, string str, int commitPercent, int softCommitPercent, int deletePercent, int deleteByQueryPercent, int ndocs, int maxConcurrentCommits, bool tombstones, AtomicLong operations, FieldType storedOnlyType, AtomicInteger numCommitting, RandomIndexWriter writer) : base(str)
{
    this.OuterInstance = outerInstance;

    // Operation-mix percentages.
    this.CommitPercent = commitPercent;
    this.SoftCommitPercent = softCommitPercent;
    this.DeletePercent = deletePercent;
    this.DeleteByQueryPercent = deleteByQueryPercent;

    // Index shape and commit limits.
    this.Ndocs = ndocs;
    this.MaxConcurrentCommits = maxConcurrentCommits;
    this.Tombstones = tombstones;
    this.StoredOnlyType = storedOnlyType;

    // State shared across all writer/reader threads.
    this.Operations = operations;
    this.NumCommitting = numCommitting;
    this.Writer = writer;

    // Per-thread RNG, seeded from the test's master Random for reproducibility.
    rand = new Random(Random().Next());
}
/// <summary>
/// Creates an empty buffered-updates container with its byte-usage
/// counter starting at zero.
/// </summary>
public BufferedUpdates()
{
    this.BytesUsed = new AtomicLong();
}