/// <summary>
/// Verifies that MyFilteredDocSetIterator, wrapped around a set of the first
/// 100 even doc ids (0, 2, ..., 198), produces exactly the multiples of 10
/// below 200 — each one exactly once, and nothing else.
/// NOTE(review): the expectation assumes the filter accepts only multiples of
/// 10 from the even input — confirm against MyFilteredDocSetIterator.Match().
/// </summary>
public void TestFilteredDocSetIterator()
{
    // Source set: the first 100 even numbers (0..198).
    var set1 = new IntArrayDocIdSet();
    for (int i = 0; i < 100; i++)
    {
        set1.AddDoc(2 * i);
    }

    var filteredIter = new MyFilteredDocSetIterator(set1.Iterator());

    // Expected survivors: multiples of 10 below 200 (20 bits).
    // (Original looped i to 100 with an "n < 200" guard; only the first
    // 20 iterations ever did anything, so iterate just those.)
    var bs = new BitSet(200);
    for (int n = 0; n < 200; n += 10)
    {
        bs.Set(n, true);
    }

    // Intentionally no try/catch here: the original caught Exception and
    // called Assert.Fail(e.Message), which discarded the stack trace.
    // Letting an unexpected exception propagate gives the test framework
    // the full failure context.
    while (filteredIter.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
    {
        int doc = filteredIter.DocID();
        if (!bs.Get(doc))
        {
            Assert.Fail("failed: " + doc + " not in expected set");
            return;
        }

        // Clear the bit so a duplicate doc id would be caught above.
        bs.Set(doc, false);
    }

    // Every expected doc must have been produced; leftovers mean the
    // iterator skipped documents it should have returned.
    var cardinality = bs.Cardinality();
    if (cardinality > 0)
    {
        Assert.Fail("failed: leftover cardinality: " + cardinality);
    }
}
/// <summary>
/// Re-encodes <paramref name="floats"/> into this object's internal storage,
/// compressing when the array is sparse enough. If the fraction of non-zero
/// entries is below ON_RATIO_CUTOFF, the non-zero values are packed densely
/// into _floats with _bits recording their original positions, and
/// _referencePoints stores, for each REFERENCE_POINT_EVERY-sized segment of
/// the original index space, the offset into _floats where that segment's
/// values begin (an index-lookup accelerator). An all-zero array collapses to
/// all-null fields; a dense array is stored as-is with _bits = null.
/// </summary>
/// <param name="floats">Input values; length must equal _capacity.</param>
/// <exception cref="ArgumentException">If floats.Length != _capacity.</exception>
protected virtual void Condense(float[] floats)
{
    if (floats.Length != _capacity)
    {
        throw new ArgumentException("bad input float array of length " + floats.Length + " for capacity: " + _capacity);
    }

    // First pass: mark which slots are non-zero and count them.
    var bits = new BitSet(floats.Length);
    int on = 0;
    for (int i = 0; i < floats.Length; i++)
    {
        if (floats[i] != 0f)
        {
            bits.Set(i, true);
            on++;
        }
    }

    if (((float)on) / ((float)floats.Length) < ON_RATIO_CUTOFF)
    {
        // it's worth compressing
        if (0 == on)
        {
            // it's worth super-compressing: no non-zero values at all,
            // so represent the whole array with null fields.
            _floats = null;
            _bits = null;
            _referencePoints = null;
            // capacity is good.
        }
        else
        {
            _bits = bits;
            // Cardinality() == on here; sized to hold exactly the non-zero values.
            _floats = new float[_bits.Cardinality()];
            // NOTE(review): integer division — if floats.Length is not a
            // multiple of REFERENCE_POINT_EVERY, the final partial segment
            // gets no reference point; presumably lookups handle that tail
            // separately — confirm against the read path.
            _referencePoints = new int[floats.Length / REFERENCE_POINT_EVERY];
            int i = 0;
            int floatsIdx = 0;
            int refIdx = 0;
            // Walk the set bits in ascending order, packing each non-zero
            // value into _floats. Before storing the value at original index
            // i, back-fill every reference point for segments that start at
            // or before i with the current packed offset.
            while (i < floats.Length && (i = _bits.NextSetBit(i)) >= 0)
            {
                _floats[floatsIdx] = floats[i];
                while (refIdx < i / REFERENCE_POINT_EVERY)
                {
                    _referencePoints[refIdx++] = floatsIdx;
                }
                floatsIdx++;
                i++;
            }
            // Segments past the last set bit (a trailing all-zero region)
            // all point just past the end of the packed values.
            while (refIdx < _referencePoints.Length)
            {
                _referencePoints[refIdx++] = floatsIdx;
            }
        }
    }
    else
    {
        // it's not worth compressing: keep the caller's array directly.
        _floats = floats;
        _bits = null;
    }
}