/// <summary>
/// Positions this multi-segment source at <paramref name="docID"/> by locating the
/// sub-reader that owns the document and delegating with its segment-local doc id.
/// </summary>
public override void SetDocument(int docID)
{
    currentSubIndex = ReaderUtil.SubIndex(docID, docStarts);
    int localDocID = docID - docStarts[currentSubIndex];
    values[currentSubIndex].SetDocument(localDocID);
}
/// <summary>
/// Returns the value for <paramref name="docID"/> by resolving the owning sub-reader
/// and querying it with the segment-local doc id.
/// </summary>
public override long Get(int docID)
{
    int sub = ReaderUtil.SubIndex(docID, starts);
    int localDocID = docID - starts[sub];
    return values[sub].Get(localDocID);
}
/// <summary>
/// Fills <paramref name="result"/> with the bytes for <paramref name="docID"/> by
/// resolving the owning sub-reader and querying it with the segment-local doc id.
/// </summary>
public override void Get(int docID, BytesRef result)
{
    int sub = ReaderUtil.SubIndex(docID, starts);
    int localDocID = docID - starts[sub];
    values[sub].Get(localDocID, result);
}
/// <summary>
/// Verifies compressed-field handling for a 2.9-era index: checks that raw doc merging
/// is enabled/disabled as expected on each segment, that stored fields decompress to the
/// expected text/binary payloads, and that the size prefix recorded with each field
/// matches the compressed (pre-optimize) or plain (post-optimize) length.
/// </summary>
/// <param name="dir">Directory holding the index under test.</param>
/// <param name="shouldStillBeCompressed">
/// True when the index has not been optimized yet, so stored fields are still compressed.
/// </param>
private void assertCompressedFields29(Directory dir, bool shouldStillBeCompressed)
{
    int count = 0;
    // FieldSelectorResult.SIZE returns 2 * number_of_chars for String fields:
    int TEXT_PLAIN_LENGTH = TEXT_TO_COMPRESS.Length * 2;
    int BINARY_PLAIN_LENGTH = BINARY_TO_COMPRESS.Length;
    IndexReader reader = IndexReader.Open(dir, true);
    try
    {
        // Look into sub readers and check if raw merge is on/off.
        var readers = new System.Collections.Generic.List<IndexReader>();
        ReaderUtil.GatherSubReaders(readers, reader);
        foreach (IndexReader ir in readers)
        {
            FieldsReader fr = ((SegmentReader)ir).GetFieldsReader();
            Assert.IsTrue(shouldStillBeCompressed != fr.CanReadRawDocs(),
                          "for a 2.9 index, FieldsReader.canReadRawDocs() must be false and other way round for a trunk index");
        }

        // Test that decompression works correctly.
        for (int i = 0; i < reader.MaxDoc; i++)
        {
            if (reader.IsDeleted(i))
            {
                continue;
            }
            Document d = reader.Document(i);
            if (d.Get("content3") != null)
            {
                continue;
            }
            count++;
            IFieldable compressed = d.GetFieldable("compressed");
            if (int.Parse(d.Get("id")) % 2 == 0)
            {
                // Even ids were stored as compressed text.
                Assert.IsFalse(compressed.IsBinary);
                Assert.AreEqual(TEXT_TO_COMPRESS, compressed.StringValue, "incorrectly decompressed string");
            }
            else
            {
                // Odd ids were stored as compressed binary.
                Assert.IsTrue(compressed.IsBinary);
                Assert.IsTrue(BINARY_TO_COMPRESS.SequenceEqual(compressed.GetBinaryValue()),
                              "incorrectly decompressed binary");
            }
        }

        // Check if field was decompressed after optimize.
        for (int i = 0; i < reader.MaxDoc; i++)
        {
            if (reader.IsDeleted(i))
            {
                continue;
            }
            Document d = reader.Document(i, new AnonymousFieldSelector());
            if (d.Get("content3") != null)
            {
                continue;
            }
            count++;
            // The stored size is the first 4 bytes of the binary value, written by Java
            // as a big-endian int. Decode it with explicit shifts so the result does not
            // depend on the host's byte order — the previous Reverse() + BitConverter.ToInt32
            // approach was only correct on little-endian machines.
            byte[] raw = d.GetFieldable("compressed").GetBinaryValue();
            int actualSize = (raw[0] << 24) | (raw[1] << 16) | (raw[2] << 8) | raw[3];
            int compressedSize = int.Parse(d.Get("compressedSize"));
            bool binary = int.Parse(d.Get("id")) % 2 > 0;
            int shouldSize = shouldStillBeCompressed
                ? compressedSize
                : (binary ? BINARY_PLAIN_LENGTH : TEXT_PLAIN_LENGTH);
            Assert.AreEqual(shouldSize, actualSize, "size incorrect");
            if (!shouldStillBeCompressed)
            {
                Assert.IsFalse(compressedSize == actualSize,
                               "uncompressed field should have another size than recorded in index");
            }
        }
        Assert.AreEqual(34 * 2, count, "correct number of tests");
    }
    finally
    {
        reader.Dispose();
    }
}