        // Builds a PrefixCodedTerms from a large set of random terms and verifies that
        // iterating it yields exactly the sorted input terms, in order.
        public virtual void TestRandom()
        {
            JCG.SortedSet<Term> terms = new JCG.SortedSet<Term>();
            int nterms = AtLeast(10000);

            for (int i = 0; i < nterms; i++)
            {
                Term term = new Term(TestUtil.RandomUnicodeString(Random, 2), TestUtil.RandomUnicodeString(Random));
                terms.Add(term);
            }

            PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
            foreach (Term @ref in terms)
            {
                b.Add(@ref);
            }
            PrefixCodedTerms pb = b.Finish();

            IEnumerator<Term> expected = terms.GetEnumerator();

            foreach (Term t in pb)
            {
                Assert.IsTrue(expected.MoveNext());
                Assert.AreEqual(expected.Current, t);
            }
            Assert.IsFalse(expected.MoveNext());
        }
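
        // An empty builder should produce a PrefixCodedTerms whose enumerator has no elements.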
        public virtual void TestEmpty()
        {
            PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
            PrefixCodedTerms pb = b.Finish();

            Assert.IsFalse(pb.GetEnumerator().MoveNext());
        }
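
        // A single term added to the builder should come back as the only enumerated element.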
        public virtual void TestOne()
        {
            Term term = new Term("foo", "bogus");

            PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
            b.Add(term);
            PrefixCodedTerms pb = b.Finish();
            IEnumerator<Term> iterator = pb.GetEnumerator();

            Assert.IsTrue(iterator.MoveNext());
            Assert.AreEqual(term, iterator.Current);
        }
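
        // Builds several PrefixCodedTerms instances from random term sets and verifies that
        // merging their enumerators yields the union of all terms in sorted order.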
        public virtual void TestMergeRandom()
        {
            PrefixCodedTerms[] pb = new PrefixCodedTerms[TestUtil.NextInt32(Random, 2, 10)];
            JCG.SortedSet<Term> superSet = new JCG.SortedSet<Term>();

            for (int i = 0; i < pb.Length; i++)
            {
                JCG.SortedSet<Term> terms = new JCG.SortedSet<Term>();
                int nterms = TestUtil.NextInt32(Random, 0, 10000);
                for (int j = 0; j < nterms; j++)
                {
                    Term term = new Term(TestUtil.RandomUnicodeString(Random, 2), TestUtil.RandomUnicodeString(Random, 4));
                    terms.Add(term);
                }
                superSet.UnionWith(terms);

                PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
                foreach (Term @ref in terms)
                {
                    b.Add(@ref);
                }
                pb[i] = b.Finish();
            }

            List<IEnumerator<Term>> subs = new List<IEnumerator<Term>>();

            for (int i = 0; i < pb.Length; i++)
            {
                subs.Add(pb[i].GetEnumerator());
            }

            IEnumerator<Term> expected = superSet.GetEnumerator();
            IEnumerator<Term> actual = new MergedEnumerator<Term>(subs.ToArray());

            while (actual.MoveNext())
            {
                Assert.IsTrue(expected.MoveNext());
                Assert.AreEqual(expected.Current, actual.Current);
            }
            Assert.IsFalse(expected.MoveNext());
        }
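
        // Merging two single-term PrefixCodedTerms should yield both terms in sorted order.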
        public virtual void TestMergeOne()
        {
            Term t1 = new Term("foo", "a");

            PrefixCodedTerms.Builder b1 = new PrefixCodedTerms.Builder();
            b1.Add(t1);
            PrefixCodedTerms pb1 = b1.Finish();

            Term t2 = new Term("foo", "b");

            PrefixCodedTerms.Builder b2 = new PrefixCodedTerms.Builder();
            b2.Add(t2);
            PrefixCodedTerms pb2 = b2.Finish();

            IEnumerator<Term> merged = new MergedEnumerator<Term>(pb1.GetEnumerator(), pb2.GetEnumerator());

            Assert.IsTrue(merged.MoveNext());
            Assert.AreEqual(t1, merged.Current);
            Assert.IsTrue(merged.MoveNext());
            Assert.AreEqual(t2, merged.Current);
        }
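
        // The members below belong to FrozenBufferedUpdates, which freezes a BufferedUpdates
        // packet into a compact, immutable form: prefix-coded delete terms, delete queries
        // with their document limits, and numeric/binary doc-values updates.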
        internal readonly bool isSegmentPrivate; // set to true iff this frozen packet represents
        // segment-private deletes. In that case it should
        // only have queries.

        public FrozenBufferedUpdates(BufferedUpdates deletes, bool isSegmentPrivate)
        {
            this.isSegmentPrivate = isSegmentPrivate;
            Debug.Assert(!isSegmentPrivate || deletes.terms.Count == 0, "segment private package should only have del queries");
            Term[] termsArray = deletes.terms.Keys.ToArray(/*new Term[deletes.Terms.Count]*/);
            termCount = termsArray.Length;
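            // Sort the terms so the prefix coding below can share prefixes between adjacent terms.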
            ArrayUtil.TimSort(termsArray);
            PrefixCodedTerms.Builder builder = new PrefixCodedTerms.Builder();
            foreach (Term term in termsArray)
            {
                builder.Add(term);
            }
            terms = builder.Finish();

            queries = new Query[deletes.queries.Count];
            queryLimits = new int[deletes.queries.Count];
            int upto = 0;

            foreach (KeyValuePair<Query, int?> ent in deletes.queries)
            {
                queries[upto] = ent.Key;
                if (ent.Value.HasValue)
                {
                    queryLimits[upto] = ent.Value.Value;
                }
                else
                {
                    // LUCENENET NOTE: According to this: http://stackoverflow.com/a/13914344
                    // we are supposed to throw an exception in this case, rather than
                    // silently fail.
                    throw new NullReferenceException();
                }
                upto++;
            }

            // TODO if a Term affects multiple fields, we could keep the updates key'd by Term
            // so that it maps to all fields it affects, sorted by their docUpto, and traverse
            // that Term only once, applying the update to all fields that still need to be
            // updated.
            IList<NumericDocValuesUpdate> allNumericUpdates = new List<NumericDocValuesUpdate>();
            int numericUpdatesSize = 0;

            foreach (var numericUpdates in deletes.numericUpdates.Values)
            {
                foreach (NumericDocValuesUpdate update in numericUpdates.Values)
                {
                    allNumericUpdates.Add(update);
                    numericUpdatesSize += update.GetSizeInBytes();
                }
            }
            numericDVUpdates = allNumericUpdates.ToArray();

            // TODO if a Term affects multiple fields, we could keep the updates key'd by Term
            // so that it maps to all fields it affects, sorted by their docUpto, and traverse
            // that Term only once, applying the update to all fields that still need to be
            // updated.
            IList<BinaryDocValuesUpdate> allBinaryUpdates = new List<BinaryDocValuesUpdate>();
            int binaryUpdatesSize = 0;

            foreach (var binaryUpdates in deletes.binaryUpdates.Values)
            {
                foreach (BinaryDocValuesUpdate update in binaryUpdates.Values)
                {
                    allBinaryUpdates.Add(update);
                    binaryUpdatesSize += update.GetSizeInBytes();
                }
            }
            binaryDVUpdates = allBinaryUpdates.ToArray();

            bytesUsed = (int)terms.GetSizeInBytes()
                + queries.Length * BYTES_PER_DEL_QUERY
                + numericUpdatesSize + numericDVUpdates.Length * RamUsageEstimator.NUM_BYTES_OBJECT_REF
                + binaryUpdatesSize + binaryDVUpdates.Length * RamUsageEstimator.NUM_BYTES_OBJECT_REF;

            numTermDeletes = deletes.numTermDeletes.Get();
        }