/// <summary>Prepends the same prefix to every key in an array, and returns the resulting keys as an array of slices.</summary>
/// <param name="prefix">Common prefix written before each key (skipped when not present).</param>
/// <param name="keys">Array of keys to prefix.</param>
/// <returns>Array containing one slice per input key, each equal to <paramref name="prefix"/> followed by the corresponding key.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="prefix"/> or <paramref name="keys"/> is null.</exception>
public static Slice[] Merge(Slice prefix, [NotNull] Slice[] keys)
{
	if (prefix == null) throw new ArgumentNullException("prefix");
	if (keys == null) throw new ArgumentNullException("keys");

	//REVIEW: merge this code with Slice.ConcatRange!

	// All the concatenated keys go into a single buffer whose exact size can be
	// computed up front: total size of the keys plus one prefix per key.
	int capacity = keys.Sum(k => k.Count) + keys.Length * prefix.Count;
	var buffer = new SliceWriter(capacity);

	// Remember the end offset of each concatenated key, so the buffer can be
	// carved back into individual slices at the end.
	var boundaries = new List<int>(keys.Length);

	//TODO: use multiple buffers if item count is huge ?

	foreach (var key in keys)
	{
		if (prefix.IsPresent) buffer.WriteBytes(prefix);
		buffer.WriteBytes(key);
		boundaries.Add(buffer.Position);
	}

	return FdbKey.SplitIntoSegments(buffer.Buffer, 0, boundaries);
}
/// <summary>Prepends the same prefix to every key in a sequence, and returns the resulting keys as an array of slices.</summary>
/// <param name="prefix">Common prefix written before each key (skipped when not present).</param>
/// <param name="keys">Sequence of keys to prefix.</param>
/// <returns>Array containing one slice per input key, each equal to <paramref name="prefix"/> followed by the corresponding key.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="prefix"/> or <paramref name="keys"/> is null.</exception>
public static Slice[] Merge(Slice prefix, [NotNull] IEnumerable<Slice> keys)
{
	if (prefix == null) throw new ArgumentNullException("prefix");
	if (keys == null) throw new ArgumentNullException("keys");

	//REVIEW: merge this code with Slice.ConcatRange!

	// Fast path: arrays get routed to the overload that pre-sizes its buffer exactly.
	var asArray = keys as Slice[];
	if (asArray != null) return Merge(prefix, asArray);

	// If the sequence exposes a count, use it to pre-size the offsets list.
	var asCollection = keys as ICollection<Slice>;
	var boundaries = asCollection != null ? new List<int>(asCollection.Count) : new List<int>();

	// The total byte size cannot be known without enumerating, so start from an empty writer.
	var buffer = SliceWriter.Empty;

	//TODO: use multiple buffers if item count is huge ?

	foreach (var key in keys)
	{
		if (prefix.IsPresent) buffer.WriteBytes(prefix);
		buffer.WriteBytes(key);
		boundaries.Add(buffer.Position);
	}

	return FdbKey.SplitIntoSegments(buffer.Buffer, 0, boundaries);
}