internal readonly bool isSegmentPrivate; // set to true iff this frozen packet represents
                                         // a segment private deletes. in that case is should
                                         // only have Queries

/// <summary>
/// Freezes the given <see cref="BufferedUpdates"/> into this immutable packet:
/// deleted terms are sorted and prefix-coded, deleted queries and their doc-id
/// limits are copied into parallel arrays, and all pending numeric/binary
/// doc-values updates are flattened into arrays. Also computes the total RAM
/// footprint of the frozen data.
/// </summary>
/// <param name="deletes">the mutable buffered updates to freeze</param>
/// <param name="isSegmentPrivate">
/// <c>true</c> iff this packet belongs to a single segment; such packets may
/// only carry delete queries, never delete terms (asserted below)
/// </param>
/// <exception cref="InvalidOperationException">
/// if a buffered delete query has no doc-id limit recorded
/// </exception>
public FrozenBufferedUpdates(BufferedUpdates deletes, bool isSegmentPrivate)
{
    this.isSegmentPrivate = isSegmentPrivate;
    Debug.Assert(!isSegmentPrivate || deletes.terms.Count == 0, "segment private package should only have del queries");

    // Sort the deleted terms and pack them into a compact prefix-coded form.
    Term[] termsArray = deletes.terms.Keys.ToArray(/*new Term[deletes.terms.Count]*/);
    termCount = termsArray.Length;
    ArrayUtil.TimSort(termsArray);
    PrefixCodedTerms.Builder builder = new PrefixCodedTerms.Builder();
    foreach (Term term in termsArray)
    {
        builder.Add(term);
    }
    terms = builder.Finish();

    // Copy delete queries and their doc-id-upto limits into parallel arrays.
    queries = new Query[deletes.queries.Count];
    queryLimits = new int[deletes.queries.Count];
    int upto = 0;
    foreach (KeyValuePair<Query, int?> ent in deletes.queries)
    {
        queries[upto] = ent.Key;
        if (ent.Value.HasValue)
        {
            queryLimits[upto] = ent.Value.Value;
        }
        else
        {
            // LUCENENET NOTE: According to this: http://stackoverflow.com/a/13914344
            // we are supposed to throw an exception in this case, rather than
            // silently fail. Reading Nullable<T>.Value on a null nullable throws
            // InvalidOperationException, so we throw the same type here (never
            // throw NullReferenceException explicitly) with a descriptive message.
            throw new InvalidOperationException("deletes.queries contains a null docIDUpto limit for query: " + ent.Key);
        }
        upto++;
    }

    // TODO if a Term affects multiple fields, we could keep the updates key'd by Term
    // so that it maps to all fields it affects, sorted by their docUpto, and traverse
    // that Term only once, applying the update to all fields that still need to be
    // updated.

    // Flatten all pending numeric doc-values updates, accumulating their byte size.
    IList<NumericDocValuesUpdate> allNumericUpdates = new List<NumericDocValuesUpdate>();
    int numericUpdatesSize = 0;
    foreach (var numericUpdates in deletes.numericUpdates.Values)
    {
        foreach (NumericDocValuesUpdate update in numericUpdates.Values)
        {
            allNumericUpdates.Add(update);
            numericUpdatesSize += update.GetSizeInBytes();
        }
    }
    numericDVUpdates = allNumericUpdates.ToArray();

    // TODO if a Term affects multiple fields, we could keep the updates key'd by Term
    // so that it maps to all fields it affects, sorted by their docUpto, and traverse
    // that Term only once, applying the update to all fields that still need to be
    // updated.

    // Flatten all pending binary doc-values updates, accumulating their byte size.
    IList<BinaryDocValuesUpdate> allBinaryUpdates = new List<BinaryDocValuesUpdate>();
    int binaryUpdatesSize = 0;
    foreach (var binaryUpdates in deletes.binaryUpdates.Values)
    {
        foreach (BinaryDocValuesUpdate update in binaryUpdates.Values)
        {
            allBinaryUpdates.Add(update);
            binaryUpdatesSize += update.GetSizeInBytes();
        }
    }
    binaryDVUpdates = allBinaryUpdates.ToArray();

    // Total frozen footprint: prefix-coded terms + per-query overhead + update
    // payloads plus one object reference per update array slot.
    bytesUsed = (int)terms.GetSizeInBytes() + queries.Length * BYTES_PER_DEL_QUERY
        + numericUpdatesSize + numericDVUpdates.Length * RamUsageEstimator.NUM_BYTES_OBJECT_REF
        + binaryUpdatesSize + binaryDVUpdates.Length * RamUsageEstimator.NUM_BYTES_OBJECT_REF;

    numTermDeletes = deletes.numTermDeletes.Get();
}
internal readonly bool isSegmentPrivate; // set to true iff this frozen packet represents
                                         // a segment private deletes. in that case is should
                                         // only have Queries

/// <summary>
/// Builds an immutable snapshot of the supplied <see cref="BufferedUpdates"/>:
/// delete terms are sorted and stored prefix-coded, delete queries together with
/// their doc-id limits land in parallel arrays, and every pending numeric/binary
/// doc-values update is collected into a flat array. The combined RAM usage of
/// the frozen data is tallied as we go.
/// </summary>
/// <param name="deletes">the mutable buffered updates being frozen</param>
/// <param name="isSegmentPrivate">
/// <c>true</c> iff the packet targets a single segment; such packets must carry
/// delete queries only (checked by the assertion below)
/// </param>
public FrozenBufferedUpdates(BufferedUpdates deletes, bool isSegmentPrivate)
{
    this.isSegmentPrivate = isSegmentPrivate;
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(!isSegmentPrivate || deletes.terms.Count == 0, "segment private package should only have del queries");
    }

    // Prefix-code the delete terms after sorting them.
    Term[] sortedTerms = deletes.terms.Keys.ToArray(/*new Term[deletes.terms.Count]*/);
    termCount = sortedTerms.Length;
    ArrayUtil.TimSort(sortedTerms);
    PrefixCodedTerms.Builder prefixBuilder = new PrefixCodedTerms.Builder();
    foreach (Term sortedTerm in sortedTerms)
    {
        prefixBuilder.Add(sortedTerm);
    }
    terms = prefixBuilder.Finish();

    // Transfer delete queries and their docIDUpto limits into parallel arrays.
    int queryTotal = deletes.queries.Count;
    queries = new Query[queryTotal];
    queryLimits = new int[queryTotal];
    int slot = 0;
    foreach (KeyValuePair<Query, int> pair in deletes.queries)
    {
        queries[slot] = pair.Key;
        queryLimits[slot] = pair.Value;
        slot++;
    }

    // TODO if a Term affects multiple fields, we could keep the updates key'd by Term
    // so that it maps to all fields it affects, sorted by their docUpto, and traverse
    // that Term only once, applying the update to all fields that still need to be
    // updated.

    // Collect every pending numeric doc-values update, summing byte sizes.
    IList<NumericDocValuesUpdate> numericList = new JCG.List<NumericDocValuesUpdate>();
    int numericBytes = 0;
    foreach (var perFieldNumeric in deletes.numericUpdates.Values)
    {
        foreach (NumericDocValuesUpdate numericUpdate in perFieldNumeric.Values)
        {
            numericList.Add(numericUpdate);
            numericBytes += numericUpdate.GetSizeInBytes();
        }
    }
    numericDVUpdates = numericList.ToArray();

    // TODO if a Term affects multiple fields, we could keep the updates key'd by Term
    // so that it maps to all fields it affects, sorted by their docUpto, and traverse
    // that Term only once, applying the update to all fields that still need to be
    // updated.

    // Collect every pending binary doc-values update, summing byte sizes.
    IList<BinaryDocValuesUpdate> binaryList = new JCG.List<BinaryDocValuesUpdate>();
    int binaryBytes = 0;
    foreach (var perFieldBinary in deletes.binaryUpdates.Values)
    {
        foreach (BinaryDocValuesUpdate binaryUpdate in perFieldBinary.Values)
        {
            binaryList.Add(binaryUpdate);
            binaryBytes += binaryUpdate.GetSizeInBytes();
        }
    }
    binaryDVUpdates = binaryList.ToArray();

    // Overall footprint: coded terms + fixed cost per delete query + update
    // payload bytes plus one object reference per flattened update slot.
    bytesUsed = (int)terms.GetSizeInBytes()
        + queries.Length * BYTES_PER_DEL_QUERY
        + numericBytes + numericDVUpdates.Length * RamUsageEstimator.NUM_BYTES_OBJECT_REF
        + binaryBytes + binaryDVUpdates.Length * RamUsageEstimator.NUM_BYTES_OBJECT_REF;

    numTermDeletes = deletes.numTermDeletes;
}