        public virtual void TestSanity()
        {
            IFieldCache cache = FieldCache.DEFAULT;

            cache.PurgeAllCaches();

            cache.GetDoubles(ReaderA, "theDouble", false);
            cache.GetDoubles(ReaderA, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);
            cache.GetDoubles(ReaderAclone, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);
            cache.GetDoubles(ReaderB, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);

            cache.GetInts(ReaderX, "theInt", false);
            cache.GetInts(ReaderX, "theInt", FieldCache.DEFAULT_INT_PARSER, false);

            // // //

            Insanity[] insanity = FieldCacheSanityChecker.CheckSanity(cache.CacheEntries);

            if (0 < insanity.Length)
            {
                DumpArray(TestClass.Name + "#" + TestName + " INSANITY", insanity, Console.Error);
            }

            Assert.AreEqual(0, insanity.Length, "shouldn't be any cache insanity");
            cache.PurgeAllCaches();
        }
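
        // A hedged sketch (assumed; not part of the snippet above) of the kind of fixture
        // TestSanity relies on: ReaderA, ReaderAclone, ReaderB and ReaderX are AtomicReaders
        // over small indexes whose documents carry "theDouble" and "theInt" terms that the
        // default parsers can read back. Helper and field names below are illustrative only.
        private AtomicReader BuildSanityReader()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            for (int i = 0; i < 100; i++)
            {
                Document doc = new Document();
                doc.Add(NewStringField("theDouble", i.ToString(), Field.Store.NO));
                doc.Add(NewStringField("theInt", i.ToString(), Field.Store.NO));
                iw.AddDocument(doc);
            }
            // SlowCompositeReaderWrapper flattens the (possibly multi-segment) reader into a
            // single AtomicReader, which is what the field cache keys on.
            AtomicReader reader = SlowCompositeReaderWrapper.Wrap(iw.Reader);
            iw.Dispose();
            return reader;
        }
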
        public virtual void TestDocsWithField()
        {
            IFieldCache cache = FieldCache.DEFAULT;

            cache.PurgeAllCaches();
            Assert.AreEqual(0, cache.CacheEntries.Length);
            cache.GetDoubles(Reader, "theDouble", true);

            // The double[] takes two slots (one w/ null parser, one
            // w/ real parser), and docsWithField should also
            // have been populated:
            Assert.AreEqual(3, cache.CacheEntries.Length);
            Bits bits = cache.GetDocsWithField(Reader, "theDouble");

            // No new entries should appear:
            Assert.AreEqual(3, cache.CacheEntries.Length);
            Assert.IsTrue(bits is Bits_MatchAllBits);

            Ints ints = cache.GetInts(Reader, "sparse", true);

            Assert.AreEqual(6, cache.CacheEntries.Length);
            Bits docsWithField = cache.GetDocsWithField(Reader, "sparse");

            Assert.AreEqual(6, cache.CacheEntries.Length);
            for (int i = 0; i < docsWithField.Length(); i++)
            {
                if (i % 2 == 0)
                {
                    Assert.IsTrue(docsWithField.Get(i));
                    Assert.AreEqual(i, ints.Get(i));
                }
                else
                {
                    Assert.IsFalse(docsWithField.Get(i));
                }
            }

            Ints numInts = cache.GetInts(Reader, "numInt", Random().NextBoolean());

            docsWithField = cache.GetDocsWithField(Reader, "numInt");
            for (int i = 0; i < docsWithField.Length(); i++)
            {
                if (i % 2 == 0)
                {
                    Assert.IsTrue(docsWithField.Get(i));
                    Assert.AreEqual(i, numInts.Get(i));
                }
                else
                {
                    Assert.IsFalse(docsWithField.Get(i));
                }
            }
        }
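
        // The i % 2 == 0 pattern asserted above only holds if the index was built with the
        // "sparse" and "numInt" fields present on even-numbered documents and absent on odd
        // ones, with each value equal to the document number. A hedged sketch of that setup
        // (writer, helper name, and field handling are assumptions, not the original fixture):
        private void AddSparseAndNumIntDocs(RandomIndexWriter writer, int numDocs)
        {
            for (int i = 0; i < numDocs; i++)
            {
                Document doc = new Document();
                if (i % 2 == 0)
                {
                    // value == doc id, matching Assert.AreEqual(i, ints.Get(i)) above
                    doc.Add(NewStringField("sparse", i.ToString(), Field.Store.NO));
                    doc.Add(NewStringField("numInt", i.ToString(), Field.Store.NO));
                }
                writer.AddDocument(doc);
            }
        }
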
 public override void Run()
 {
     try
     {
         while (!Failed.Get())
         {
             int op = Random().Next(3);
             if (op == 0)
             {
                 // Purge all caches & resume, once all
                 // threads get here:
                 Restart.SignalAndWait();
                 if (Iters.Get() >= NUM_ITER)
                 {
                     break;
                 }
             }
             else if (op == 1)
             {
                 Bits docsWithField = Cache.GetDocsWithField(Reader, "sparse");
                 for (int i = 0; i < docsWithField.Length(); i++)
                 {
                     Assert.AreEqual(i % 2 == 0, docsWithField.Get(i));
                 }
             }
             else
             {
                 Ints ints          = Cache.GetInts(Reader, "sparse", true);
                 Bits docsWithField = Cache.GetDocsWithField(Reader, "sparse");
                 for (int i = 0; i < docsWithField.Length(); i++)
                 {
                     if (i % 2 == 0)
                     {
                         Assert.IsTrue(docsWithField.Get(i));
                         Assert.AreEqual(i, ints.Get(i));
                     }
                     else
                     {
                         Assert.IsFalse(docsWithField.Get(i));
                     }
                 }
             }
         }
     }
     catch (Exception t)
     {
         Failed.Set(true);
         throw new Exception(t.Message, t);
     }
 }
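
 // A hedged sketch, using plain System.Threading primitives rather than the original test
 // fixture, of the rendezvous that the op == 0 branch above relies on: "Restart" behaves
 // like a Barrier whose post-phase action purges all caches and advances a shared iteration
 // counter once every worker has arrived. Names and thread counts here are assumptions.
 public static void RunRendezvousSketch(int numThreads, int numIter)
 {
     int completedIters = 0;
     using (var restart = new Barrier(numThreads, _ =>
     {
         // Runs exactly once per phase, after all participants have signaled.
         FieldCache.DEFAULT.PurgeAllCaches();
         Interlocked.Increment(ref completedIters);
     }))
     {
         var threads = new Thread[numThreads];
         for (int t = 0; t < numThreads; t++)
         {
             threads[t] = new Thread(() =>
             {
                 while (Volatile.Read(ref completedIters) < numIter)
                 {
                     restart.SignalAndWait(); // same call as the worker above
                 }
             });
             threads[t].Start();
         }
         foreach (var thread in threads)
         {
             thread.Join();
         }
     }
 }
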
        public virtual void TestInsanity1()
        {
            IFieldCache cache = FieldCache.DEFAULT;

            cache.PurgeAllCaches();

            cache.GetInts(ReaderX, "theInt", FieldCache.DEFAULT_INT_PARSER, false);
            cache.GetTerms(ReaderX, "theInt", false);
            cache.GetBytes(ReaderX, "theByte", false);

            // // //

            Insanity[] insanity = FieldCacheSanityChecker.CheckSanity(cache.CacheEntries);

            Assert.AreEqual(1, insanity.Length, "wrong number of cache errors");
            Assert.AreEqual(InsanityType.VALUEMISMATCH, insanity[0].Type, "wrong type of cache error");
            Assert.AreEqual(2, insanity[0].CacheEntries.Length, "wrong number of entries in cache error");

            // we expect bad things, don't let tearDown complain about them
            cache.PurgeAllCaches();
        }
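
        // If CheckSanity does report problems, each Insanity object carries the InsanityType
        // plus the cache entries involved; a hedged sketch of dumping them to stderr by hand
        // (the DumpArray helper used in the snippets above serves much the same purpose):
        private static void DumpInsanity(IFieldCache cache)
        {
            foreach (Insanity item in FieldCacheSanityChecker.CheckSanity(cache.CacheEntries))
            {
                // ToString() is expected to include the type, message, and offending entries.
                Console.Error.WriteLine(item.ToString());
            }
        }
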
        public virtual void TestNonIndexedFields()
        {
            Directory         dir = NewDirectory();
            RandomIndexWriter iw  = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            Document          doc = new Document();

            doc.Add(new StoredField("bogusbytes", "bogus"));
            doc.Add(new StoredField("bogusshorts", "bogus"));
            doc.Add(new StoredField("bogusints", "bogus"));
            doc.Add(new StoredField("boguslongs", "bogus"));
            doc.Add(new StoredField("bogusfloats", "bogus"));
            doc.Add(new StoredField("bogusdoubles", "bogus"));
            doc.Add(new StoredField("bogusterms", "bogus"));
            doc.Add(new StoredField("bogustermsindex", "bogus"));
            doc.Add(new StoredField("bogusmultivalued", "bogus"));
            doc.Add(new StoredField("bogusbits", "bogus"));
            iw.AddDocument(doc);
            DirectoryReader ir = iw.Reader;

            iw.Dispose();

            AtomicReader ar = GetOnlySegmentReader(ir);

            IFieldCache cache = FieldCache.DEFAULT;

            cache.PurgeAllCaches();
            Assert.AreEqual(0, cache.CacheEntries.Length);

            Bytes bytes = cache.GetBytes(ar, "bogusbytes", true);

            Assert.AreEqual(0, bytes.Get(0));

            Shorts shorts = cache.GetShorts(ar, "bogusshorts", true);

            Assert.AreEqual(0, shorts.Get(0));

            Ints ints = cache.GetInts(ar, "bogusints", true);

            Assert.AreEqual(0, ints.Get(0));

            Longs longs = cache.GetLongs(ar, "boguslongs", true);

            Assert.AreEqual(0, longs.Get(0));

            Floats floats = cache.GetFloats(ar, "bogusfloats", true);

            Assert.AreEqual(0, floats.Get(0), 0.0f);

            Doubles doubles = cache.GetDoubles(ar, "bogusdoubles", true);

            Assert.AreEqual(0, doubles.Get(0), 0.0D);

            BytesRef        scratch  = new BytesRef();
            BinaryDocValues binaries = cache.GetTerms(ar, "bogusterms", true);

            binaries.Get(0, scratch);
            Assert.AreEqual(0, scratch.Length);

            SortedDocValues sorted = cache.GetTermsIndex(ar, "bogustermsindex");

            Assert.AreEqual(-1, sorted.GetOrd(0));
            sorted.Get(0, scratch);
            Assert.AreEqual(0, scratch.Length);

            SortedSetDocValues sortedSet = cache.GetDocTermOrds(ar, "bogusmultivalued");

            sortedSet.Document = 0;
            Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, sortedSet.NextOrd());

            Bits bits = cache.GetDocsWithField(ar, "bogusbits");

            Assert.IsFalse(bits.Get(0));

            // check that we cached nothing
            Assert.AreEqual(0, cache.CacheEntries.Length);
            ir.Dispose();
            dir.Dispose();
        }
        public virtual void Test()
        {
            IFieldCache cache = FieldCache.DEFAULT;

            FieldCache.Doubles doubles = cache.GetDoubles(Reader, "theDouble", Random().NextBoolean());
            Assert.AreSame(doubles, cache.GetDoubles(Reader, "theDouble", Random().NextBoolean()), "Second request to cache return same array");
            Assert.AreSame(doubles, cache.GetDoubles(Reader, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, Random().NextBoolean()), "Second request with explicit parser return same array");
            for (int i = 0; i < NUM_DOCS; i++)
            {
                Assert.IsTrue(doubles.Get(i) == (double.MaxValue - i), doubles.Get(i) + " does not equal: " + (double.MaxValue - i));
            }

            FieldCache.Longs longs = cache.GetLongs(Reader, "theLong", Random().NextBoolean());
            Assert.AreSame(longs, cache.GetLongs(Reader, "theLong", Random().NextBoolean()), "Second request to cache return same array");
            Assert.AreSame(longs, cache.GetLongs(Reader, "theLong", FieldCache.DEFAULT_LONG_PARSER, Random().NextBoolean()), "Second request with explicit parser return same array");
            for (int i = 0; i < NUM_DOCS; i++)
            {
                Assert.IsTrue(longs.Get(i) == (long.MaxValue - i), longs.Get(i) + " does not equal: " + (long.MaxValue - i) + " i=" + i);
            }

            FieldCache.Bytes bytes = cache.GetBytes(Reader, "theByte", Random().NextBoolean());
            Assert.AreSame(bytes, cache.GetBytes(Reader, "theByte", Random().NextBoolean()), "Second request to cache return same array");
            Assert.AreSame(bytes, cache.GetBytes(Reader, "theByte", FieldCache.DEFAULT_BYTE_PARSER, Random().NextBoolean()), "Second request with explicit parser return same array");
            for (int i = 0; i < NUM_DOCS; i++)
            {
                Assert.IsTrue(bytes.Get(i) == (sbyte)(sbyte.MaxValue - i), bytes.Get(i) + " does not equal: " + (sbyte.MaxValue - i));
            }

            FieldCache.Shorts shorts = cache.GetShorts(Reader, "theShort", Random().NextBoolean());
            Assert.AreSame(shorts, cache.GetShorts(Reader, "theShort", Random().NextBoolean()), "Second request to cache return same array");
            Assert.AreSame(shorts, cache.GetShorts(Reader, "theShort", FieldCache.DEFAULT_SHORT_PARSER, Random().NextBoolean()), "Second request with explicit parser return same array");
            for (int i = 0; i < NUM_DOCS; i++)
            {
                Assert.IsTrue(shorts.Get(i) == (short)(short.MaxValue - i), shorts.Get(i) + " does not equal: " + (short.MaxValue - i));
            }

            FieldCache.Ints ints = cache.GetInts(Reader, "theInt", Random().NextBoolean());
            Assert.AreSame(ints, cache.GetInts(Reader, "theInt", Random().NextBoolean()), "Second request to cache return same array");
            Assert.AreSame(ints, cache.GetInts(Reader, "theInt", FieldCache.DEFAULT_INT_PARSER, Random().NextBoolean()), "Second request with explicit parser return same array");
            for (int i = 0; i < NUM_DOCS; i++)
            {
                Assert.IsTrue(ints.Get(i) == (int.MaxValue - i), ints.Get(i) + " does not equal: " + (int.MaxValue - i));
            }

            FieldCache.Floats floats = cache.GetFloats(Reader, "theFloat", Random().NextBoolean());
            Assert.AreSame(floats, cache.GetFloats(Reader, "theFloat", Random().NextBoolean()), "Second request to cache return same array");
            Assert.AreSame(floats, cache.GetFloats(Reader, "theFloat", FieldCache.DEFAULT_FLOAT_PARSER, Random().NextBoolean()), "Second request with explicit parser return same array");
            for (int i = 0; i < NUM_DOCS; i++)
            {
                Assert.IsTrue(floats.Get(i) == (float.MaxValue - i), floats.Get(i) + " does not equal: " + (float.MaxValue - i));
            }

            Bits docsWithField = cache.GetDocsWithField(Reader, "theLong");

            Assert.AreSame(docsWithField, cache.GetDocsWithField(Reader, "theLong"), "Second request to cache return same array");
            Assert.IsTrue(docsWithField is Bits_MatchAllBits, "docsWithField(theLong) must be class Bits.MatchAllBits");
            Assert.IsTrue(docsWithField.Length() == NUM_DOCS, "docsWithField(theLong) Size: " + docsWithField.Length() + " is not: " + NUM_DOCS);
            for (int i = 0; i < docsWithField.Length(); i++)
            {
                Assert.IsTrue(docsWithField.Get(i));
            }

            docsWithField = cache.GetDocsWithField(Reader, "sparse");
            Assert.AreSame(docsWithField, cache.GetDocsWithField(Reader, "sparse"), "Second request to cache return same array");
            Assert.IsFalse(docsWithField is Bits_MatchAllBits, "docsWithField(sparse) must not be class Bits.MatchAllBits");
            Assert.IsTrue(docsWithField.Length() == NUM_DOCS, "docsWithField(sparse) Size: " + docsWithField.Length() + " is not: " + NUM_DOCS);
            for (int i = 0; i < docsWithField.Length(); i++)
            {
                Assert.AreEqual(i % 2 == 0, docsWithField.Get(i));
            }

            // getTermsIndex
            SortedDocValues termsIndex = cache.GetTermsIndex(Reader, "theRandomUnicodeString");

            Assert.AreSame(termsIndex, cache.GetTermsIndex(Reader, "theRandomUnicodeString"), "Second request to cache return same array");
            BytesRef br = new BytesRef();

            for (int i = 0; i < NUM_DOCS; i++)
            {
                BytesRef term;
                int      ord = termsIndex.GetOrd(i);
                if (ord == -1)
                {
                    term = null;
                }
                else
                {
                    termsIndex.LookupOrd(ord, br);
                    term = br;
                }
                string s = term == null ? null : term.Utf8ToString();
                Assert.IsTrue(UnicodeStrings[i] == null || UnicodeStrings[i].Equals(s), "for doc " + i + ": " + s + " does not equal: " + UnicodeStrings[i]);
            }

            int nTerms = termsIndex.ValueCount;

            TermsEnum tenum = termsIndex.TermsEnum();
            BytesRef  val   = new BytesRef();

            for (int i = 0; i < nTerms; i++)
            {
                BytesRef val1 = tenum.Next();
                termsIndex.LookupOrd(i, val);
                // System.out.println("i="+i);
                Assert.AreEqual(val, val1);
            }

            // seek the enum around (note this isn't a great test here)
            int num = AtLeast(100);

            for (int i = 0; i < num; i++)
            {
                int k = Random().Next(nTerms);
                termsIndex.LookupOrd(k, val);
                Assert.AreEqual(TermsEnum.SeekStatus.FOUND, tenum.SeekCeil(val));
                Assert.AreEqual(val, tenum.Term());
            }

            for (int i = 0; i < nTerms; i++)
            {
                termsIndex.LookupOrd(i, val);
                Assert.AreEqual(TermsEnum.SeekStatus.FOUND, tenum.SeekCeil(val));
                Assert.AreEqual(val, tenum.Term());
            }

            // test bad field
            termsIndex = cache.GetTermsIndex(Reader, "bogusfield");

            // getTerms
            BinaryDocValues terms = cache.GetTerms(Reader, "theRandomUnicodeString", true);

            Assert.AreSame(terms, cache.GetTerms(Reader, "theRandomUnicodeString", true), "Second request to cache return same array");
            Bits bits = cache.GetDocsWithField(Reader, "theRandomUnicodeString");

            for (int i = 0; i < NUM_DOCS; i++)
            {
                terms.Get(i, br);
                BytesRef term;
                if (!bits.Get(i))
                {
                    term = null;
                }
                else
                {
                    term = br;
                }
                string s = term == null ? null : term.Utf8ToString();
                Assert.IsTrue(UnicodeStrings[i] == null || UnicodeStrings[i].Equals(s), "for doc " + i + ": " + s + " does not equal: " + UnicodeStrings[i]);
            }

            // test bad field
            terms = cache.GetTerms(Reader, "bogusfield", false);

            // getDocTermOrds
            SortedSetDocValues termOrds = cache.GetDocTermOrds(Reader, "theRandomUnicodeMultiValuedField");
            int numEntries = cache.CacheEntries.Length;

            // ask for it again, and check that we didn't create any additional entries:
            termOrds = cache.GetDocTermOrds(Reader, "theRandomUnicodeMultiValuedField");
            Assert.AreEqual(numEntries, cache.CacheEntries.Length);

            for (int i = 0; i < NUM_DOCS; i++)
            {
                termOrds.Document = i;
                // this will remove identical terms. A DocTermOrds doesn't return duplicate ords for a docId
                IList<BytesRef> values = new List<BytesRef>(new /*Linked*/ HashSet<BytesRef>(Arrays.AsList(MultiValued[i])));
                foreach (BytesRef v in values)
                {
                    if (v == null)
                    {
                        // why does this test use null values... instead of an empty list: confusing
                        break;
                    }
                    long ord = termOrds.NextOrd();
                    Debug.Assert(ord != SortedSetDocValues.NO_MORE_ORDS);
                    BytesRef scratch = new BytesRef();
                    termOrds.LookupOrd(ord, scratch);
                    Assert.AreEqual(v, scratch);
                }
                Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, termOrds.NextOrd());
            }

            // test bad field
            termOrds = cache.GetDocTermOrds(Reader, "bogusfield");
            Assert.IsTrue(termOrds.ValueCount == 0);

            FieldCache.DEFAULT.PurgeByCacheKey(Reader.CoreCacheKey);
        }
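
        // The Document / NextOrd() / LookupOrd() loop used above for
        // "theRandomUnicodeMultiValuedField" is the standard way to walk a document's term
        // ords; a hedged helper (name assumed, not part of the original test) that collects
        // them into a list using only the calls shown in the snippets above:
        private static IList<BytesRef> TermsForDoc(SortedSetDocValues dv, int docId)
        {
            IList<BytesRef> result = new List<BytesRef>();
            dv.Document = docId; // position the iterator on this document
            long ord;
            while ((ord = dv.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
            {
                BytesRef term = new BytesRef();
                dv.LookupOrd(ord, term); // resolve the ord back to its term bytes
                result.Add(term);
            }
            return result;
        }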