public override int FindWithIndex(int val)
{
    val -= min;
    if (val >= 0 && bitSet.Get(val))
    {
        // Walk the set bits in ascending order, counting how many occur at or
        // before `val`; the result is the 0-based rank of `val` among the set bits.
        int index = -1;
        int counter = -1;
        while (true)
        {
            index = bitSet.NextSetBit(index + 1);
            if (index <= val && index != -1)
            {
                counter++;
            }
            else
            {
                break;
            }
        }
        return counter;
    }
    else
    {
        return -1;
    }
}
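The loop above is effectively a bit-set rank computation. A minimal standalone sketch of the same idea, assuming Lucene.Net's OpenBitSet from Lucene.Net.Util; the RankOf helper and its class are hypothetical names, not part of the original code:

using Lucene.Net.Util;

internal static class BitSetRankSketch
{
    // Hypothetical helper: returns the 0-based rank of `pos` among the set bits
    // (i.e. how many set bits occur strictly before it), or -1 if `pos` is not set.
    public static int RankOf(OpenBitSet bits, int pos)
    {
        if (pos < 0 || !bits.Get(pos))
        {
            return -1;
        }
        int rank = -1;
        // Walk the set bits in ascending order until we pass `pos`.
        for (int i = bits.NextSetBit(0); i >= 0 && i <= pos; i = bits.NextSetBit(i + 1))
        {
            rank++;
        }
        return rank;
    }
}

// Example: with bits {2, 5, 9} set, RankOf(bits, 5) == 1 and RankOf(bits, 4) == -1.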
protected virtual void Condense(float[] floats)
{
    if (floats.Length != m_capacity)
    {
        throw new ArgumentException("bad input float array of length " + floats.Length + " for capacity: " + m_capacity);
    }
    var bits = new OpenBitSet(floats.Length);
    int on = 0;
    for (int i = 0; i < floats.Length; i++)
    {
        if (floats[i] != 0f)
        {
            bits.Set(i);
            on++;
        }
    }
    if (((float)on) / ((float)floats.Length) < ON_RATIO_CUTOFF)
    {
        // it's worth compressing
        if (0 == on)
        {
            // it's worth super-compressing
            m_floats = null;
            m_bits = null;
            m_referencePoints = null;
            // capacity is good.
        }
        else
        {
            m_bits = bits;
            m_floats = new float[m_bits.Cardinality()];
            m_referencePoints = new int[floats.Length / REFERENCE_POINT_EVERY];
            int i = 0;
            int floatsIdx = 0;
            int refIdx = 0;
            while (i < floats.Length && (i = m_bits.NextSetBit(i)) >= 0)
            {
                m_floats[floatsIdx] = floats[i];
                while (refIdx < i / REFERENCE_POINT_EVERY)
                {
                    m_referencePoints[refIdx++] = floatsIdx;
                }
                floatsIdx++;
                i++;
            }
            while (refIdx < m_referencePoints.Length)
            {
                m_referencePoints[refIdx++] = floatsIdx;
            }
        }
    }
    else
    {
        // it's not worth compressing
        m_floats = floats;
        m_bits = null;
    }
}
/// <summary>
/// Warning: DO NOT modify the return value at all.
/// The assumption is that these arrays are QUITE LARGE and that we would not want
/// to unnecessarily copy them. This method in many cases returns an array from its
/// internal representation. Doing anything other than READING these values
/// results in UNDEFINED operations on this, from that point on.
/// </summary>
/// <returns>the expanded float array; treat it as read-only</returns>
public virtual float[] Expand()
{
    if (null == m_bits)
    {
        if (null == m_floats)
        {
            // super-compressed, all zeros
            return new float[m_capacity];
        }
        else
        {
            return m_floats;
        }
    }
    float[] all = new float[m_capacity];
    int floatsidx = 0;
    for (int idx = m_bits.NextSetBit(0); idx >= 0 && idx < m_capacity; idx = m_bits.NextSetBit(idx + 1))
    {
        all[idx] = m_floats[floatsidx++];
    }
    return all;
}
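To make the layout shared by Condense and Expand easier to follow: m_bits marks the non-zero slots, m_floats stores only those values in ascending position order, and m_referencePoints[r] records how many set bits occur before position r * REFERENCE_POINT_EVERY. A hedged sketch of a single-value lookup against that layout, written as a standalone helper; the ReadCompressed name and the assumption that the capacity is a multiple of the reference-point interval are mine, not the original class's:

using Lucene.Net.Util;

internal static class SparseFloatReadSketch
{
    // Hypothetical reader for the layout built by Condense (names assumed):
    // `bits` marks non-zero slots, `packed` holds the non-zero values in order,
    // and referencePoints[r] is the count of set bits before r * refEvery.
    // Assumes the capacity is a multiple of refEvery so every index has a reference point.
    public static float ReadCompressed(OpenBitSet bits, float[] packed,
                                       int[] referencePoints, int refEvery, int index)
    {
        if (!bits.Get(index))
        {
            return 0f; // zeros are not stored
        }
        int refIdx = index / refEvery;
        int floatsIdx = referencePoints[refIdx]; // rank at the reference point
        // Count the remaining set bits between the reference point and `index`.
        for (int i = bits.NextSetBit(refIdx * refEvery); i >= 0 && i < index; i = bits.NextSetBit(i + 1))
        {
            floatsIdx++;
        }
        return packed[floatsIdx];
    }
}

The reference points bound the scan to at most REFERENCE_POINT_EVERY bits instead of walking the whole bit set from position zero on every lookup.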
public override double GetFacetSelectivity(BoboSegmentReader reader)
{
    FacetDataCache dataCache = m_facetDataCacheBuilder.Build(reader);
    OpenBitSet openBitSet = GetBitSet(dataCache);
    int[] frequencies = dataCache.Freqs;
    double selectivity = 0;
    int accumFreq = 0;
    // Sum the document frequencies of every facet value selected in the bit set.
    int index = openBitSet.NextSetBit(0);
    while (index >= 0)
    {
        accumFreq += frequencies[index];
        index = openBitSet.NextSetBit(index + 1);
    }
    int total = reader.MaxDoc;
    selectivity = (double)accumFreq / (double)total;
    if (selectivity > 0.999)
    {
        selectivity = 1.0;
    }
    return selectivity;
}
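For intuition about the resulting numbers, a tiny worked example of the same arithmetic with made-up frequencies (the figures are illustrative only, not taken from the original code):

using System.Linq;

// Hypothetical figures: three matching facet values with frequencies 1500, 2500
// and 200 in a segment of 1,000,000 documents.
int[] matchedFreqs = { 1500, 2500, 200 };
int maxDoc = 1000000;
double selectivity = (double)matchedFreqs.Sum() / maxDoc; // 0.0042
// As in GetFacetSelectivity above, anything over 0.999 is rounded up to 1.0.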
/// <summary>
/// Minimizes the given automaton using Hopcroft's algorithm.
/// </summary>
public static void MinimizeHopcroft(Automaton a)
{
    a.Determinize();
    if (a.initial.numTransitions == 1)
    {
        Transition t = a.initial.TransitionsArray[0];
        if (t.to == a.initial && t.min == Character.MIN_CODE_POINT && t.max == Character.MAX_CODE_POINT)
        {
            return;
        }
    }
    a.Totalize();

    // initialize data structures
    int[] sigma = a.GetStartPoints();
    State[] states = a.GetNumberedStates();
    int sigmaLen = sigma.Length, statesLen = states.Length;
    List<State>[,] reverse = new List<State>[statesLen, sigmaLen];
    ISet<State>[] partition = new EquatableSet<State>[statesLen];
    List<State>[] splitblock = new List<State>[statesLen];
    int[] block = new int[statesLen];
    StateList[,] active = new StateList[statesLen, sigmaLen];
    StateListNode[,] active2 = new StateListNode[statesLen, sigmaLen];
    LinkedList<Int32Pair> pending = new LinkedList<Int32Pair>();
    OpenBitSet pending2 = new OpenBitSet(sigmaLen * statesLen);
    OpenBitSet split = new OpenBitSet(statesLen),
        refine = new OpenBitSet(statesLen),
        refine2 = new OpenBitSet(statesLen);
    for (int q = 0; q < statesLen; q++)
    {
        splitblock[q] = new List<State>();
        partition[q] = new EquatableSet<State>();
        for (int x = 0; x < sigmaLen; x++)
        {
            active[q, x] = new StateList();
        }
    }

    // find initial partition and reverse edges
    for (int q = 0; q < statesLen; q++)
    {
        State qq = states[q];
        int j = qq.accept ? 0 : 1;
        partition[j].Add(qq);
        block[q] = j;
        for (int x = 0; x < sigmaLen; x++)
        {
            //List<State>[] r = reverse[qq.Step(sigma[x]).number];
            var r = qq.Step(sigma[x]).number;
            if (reverse[r, x] == null)
            {
                reverse[r, x] = new List<State>();
            }
            reverse[r, x].Add(qq);
        }
    }

    // initialize active sets
    for (int j = 0; j <= 1; j++)
    {
        for (int x = 0; x < sigmaLen; x++)
        {
            foreach (State qq in partition[j])
            {
                if (reverse[qq.number, x] != null)
                {
                    active2[qq.number, x] = active[j, x].Add(qq);
                }
            }
        }
    }

    // initialize pending
    for (int x = 0; x < sigmaLen; x++)
    {
        int j = (active[0, x].Count <= active[1, x].Count) ? 0 : 1;
        pending.AddLast(new Int32Pair(j, x));
        pending2.Set(x * statesLen + j);
    }

    // process pending until fixed point
    int k = 2;
    while (pending.Count > 0)
    {
        Int32Pair ip = pending.First.Value;
        pending.Remove(ip);
        int p = ip.N1;
        int x = ip.N2;
        pending2.Clear(x * statesLen + p);
        // find states that need to be split off their blocks
        for (StateListNode m = active[p, x].First; m != null; m = m.Next)
        {
            List<State> r = reverse[m.Q.number, x];
            if (r != null)
            {
                foreach (State s in r)
                {
                    int i = s.number;
                    if (!split.Get(i))
                    {
                        split.Set(i);
                        int j = block[i];
                        splitblock[j].Add(s);
                        if (!refine2.Get(j))
                        {
                            refine2.Set(j);
                            refine.Set(j);
                        }
                    }
                }
            }
        }
        // refine blocks
        for (int j = refine.NextSetBit(0); j >= 0; j = refine.NextSetBit(j + 1))
        {
            List<State> sb = splitblock[j];
            if (sb.Count < partition[j].Count)
            {
                ISet<State> b1 = partition[j];
                ISet<State> b2 = partition[k];
                foreach (State s in sb)
                {
                    b1.Remove(s);
                    b2.Add(s);
                    block[s.number] = k;
                    for (int c = 0; c < sigmaLen; c++)
                    {
                        StateListNode sn = active2[s.number, c];
                        if (sn != null && sn.Sl == active[j, c])
                        {
                            sn.Remove();
                            active2[s.number, c] = active[k, c].Add(s);
                        }
                    }
                }
                // update pending
                for (int c = 0; c < sigmaLen; c++)
                {
                    int aj = active[j, c].Count,
                        ak = active[k, c].Count,
                        ofs = c * statesLen;
                    if (!pending2.Get(ofs + j) && 0 < aj && aj <= ak)
                    {
                        pending2.Set(ofs + j);
                        pending.AddLast(new Int32Pair(j, c));
                    }
                    else
                    {
                        pending2.Set(ofs + k);
                        pending.AddLast(new Int32Pair(k, c));
                    }
                }
                k++;
            }
            refine2.Clear(j);
            foreach (State s in sb)
            {
                split.Clear(s.number);
            }
            sb.Clear();
        }
        refine.Clear(0, refine.Length - 1);
    }

    // make a new state for each equivalence class, set initial state
    State[] newstates = new State[k];
    for (int n = 0; n < newstates.Length; n++)
    {
        State s = new State();
        newstates[n] = s;
        foreach (State q in partition[n])
        {
            if (q == a.initial)
            {
                a.initial = s;
            }
            s.accept = q.accept;
            s.number = q.number; // select representative
            q.number = n;
        }
    }

    // build transitions and set acceptance
    for (int n = 0; n < newstates.Length; n++)
    {
        State s = newstates[n];
        s.accept = states[s.number].accept;
        foreach (Transition t in states[s.number].GetTransitions())
        {
            s.AddTransition(new Transition(t.min, t.max, newstates[t.to.number]));
        }
    }
    a.ClearNumberedStates();
    a.RemoveDeadTransitions();
}
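A brief usage sketch, assuming the Lucene.Net.Util.Automaton namespace this method ships in; the surrounding class is assumed to be MinimizationOperations, and the regular expression is just an example:

using Lucene.Net.Util.Automaton;

// Build a small automaton from a regular expression and minimize it in place.
Automaton a = new RegExp("ab*(c|d)").ToAutomaton();

// Calling the Hopcroft variant directly; MinimizationOperations.Minimize is the
// usual entry point and, in the Lucene 4.x line, delegates here for
// non-singleton automata.
MinimizationOperations.MinimizeHopcroft(a);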