/// <summary>
/// Restore this Bloom filter from previously captured filter data.
/// </summary>
/// <param name="data">The data to restore. A <c>null</c> value is treated as a no-op.</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="data"/> fails its own validity check.</exception>
public virtual void Rehydrate(IInvertibleBloomFilterData<TId, int, TCount> data)
{
    // Nothing to restore: leave the current state untouched.
    if (data is null)
    {
        return;
    }
    if (data.IsValid())
    {
        Data = data.ConvertToBloomFilterData(Configuration);
        ValidateData();
        return;
    }
    throw new ArgumentException(
        "Invertible Bloom filter data is invalid.",
        nameof(data));
}
/// <summary>
/// Intersect two invertible Bloom filter data instances, producing data that
/// (approximately) represents the set intersection of the two filters.
/// </summary>
/// <typeparam name="TEntity">The entity type the configuration operates on.</typeparam>
/// <typeparam name="TId">The entity identifier type.</typeparam>
/// <typeparam name="THash">The hash value type.</typeparam>
/// <typeparam name="TCount">The occurrence count type.</typeparam>
/// <param name="filterData">The first filter data (may be <c>null</c>).</param>
/// <param name="configuration">The Bloom filter configuration.</param>
/// <param name="otherFilterData">The second filter data (may be <c>null</c>).</param>
/// <param name="inPlace">When <c>true</c>, reuse <paramref name="filterData"/> as the result where possible instead of allocating.</param>
/// <returns>The intersected filter data, or <c>null</c> when both inputs are <c>null</c> or the inputs are incompatible.</returns>
internal static InvertibleBloomFilterData<TId, THash, TCount> Intersect<TEntity, TId, THash, TCount>(
    this IInvertibleBloomFilterData<TId, THash, TCount> filterData,
    IInvertibleBloomFilterConfiguration<TEntity, TId, THash, TCount> configuration,
    IInvertibleBloomFilterData<TId, THash, TCount> otherFilterData,
    bool inPlace = false)
    where TId : struct
    where THash : struct
    where TCount : struct
{
    // Both sides absent: there is no intersection to represent.
    if (filterData == null && otherFilterData == null)
    {
        return (null);
    }
    // Only the first side absent: the intersection with an empty/unknown filter is
    // empty, so return a fresh (empty) filter sized like the other side.
    if (filterData == null)
    {
        return (configuration
            .DataFactory
            .Create(configuration, otherFilterData.Capacity, otherFilterData.BlockSize, otherFilterData.HashFunctionCount));
    }
    else
    {
        filterData.SyncCompressionProviders(configuration);
    }
    // Only the second side absent: result is empty. In-place mode clears and reuses
    // the first filter's storage; otherwise allocate a fresh empty filter of the same size.
    if (otherFilterData == null)
    {
        if (inPlace)
        {
            filterData.Clear(configuration);
            return (filterData.ConvertToBloomFilterData(configuration));
        }
        return (configuration
            .DataFactory
            .Create(configuration, filterData.Capacity, filterData.BlockSize, filterData.HashFunctionCount));
    }
    else
    {
        otherFilterData.SyncCompressionProviders(configuration);
    }
    // Incompatible filters (e.g. mismatched parameters) cannot be intersected.
    if (!filterData.IsCompatibleWith(otherFilterData, configuration))
    {
        return (null);
    }
    // Fold factors bring the two filters to a common block size; Item1 applies to
    // filterData, Item2 to otherFilterData. May be null when no folding strategy is set.
    var foldFactors = configuration.FoldingStrategy?.GetFoldFactors(filterData.BlockSize, otherFilterData.BlockSize);
    // Choose the result storage:
    // - in-place and no folding needed on the first filter (note: the lifted comparison
    //   `foldFactors?.Item1 <= 1` is false when foldFactors is null, so the null case
    //   falls through to the non-in-place branch) -> alias filterData as the result;
    // - no folding needed -> a dummy copy of filterData's dimensions;
    // - otherwise -> a fresh filter scaled down by the first fold factor.
    var res = inPlace && foldFactors?.Item1 <= 1
        ? filterData.ConvertToBloomFilterData(configuration)
        : (foldFactors == null || foldFactors.Item1 <= 1
            ? filterData.CreateDummy(configuration)
            : configuration.DataFactory.Create(
                configuration,
                filterData.Capacity / foldFactors.Item1,
                filterData.BlockSize / foldFactors.Item1,
                filterData.HashFunctionCount));
    // Normalize to factor 1 (no folding) so the loop below can apply factors unconditionally.
    foldFactors = foldFactors ?? new Tuple<long, long>(1, 1);
    res.IsReverse = filterData.IsReverse;
    // Combine the two filters cell by cell over the (possibly folded) block range.
    // Partitioned ranges are disjoint, so each cell of res is written by exactly one thread.
    // NOTE(review): when inPlace aliased res to filterData, reads of filterData.Counts /
    // providers and writes to res target the same storage — presumably GetFolded reads
    // cell i before res[i] is overwritten in the same iteration; verify for fold factor 1.
    Parallel.ForEach(
        Partitioner.Create(0L, res.BlockSize),
        (range, state) =>
        {
            for (var i = range.Item1; i < range.Item2; i++)
            {
                // Intersection keeps the smaller of the two (folded) counts per cell.
                var filterDataCount = filterData.Counts.GetFolded(i, foldFactors.Item1, configuration.CountConfiguration.Add);
                var otherFilterDataCount = otherFilterData.Counts.GetFolded(i, foldFactors.Item2, configuration.CountConfiguration.Add);
                res.Counts[i] = configuration.CountConfiguration.Comparer.Compare(filterDataCount, otherFilterDataCount) < 0
                    ? filterDataCount
                    : otherFilterDataCount;
                // Id and hash sums are intersected via the configuration's operators
                // after folding each side down to the result's block size.
                res.IdSumProvider[i] = configuration.IdIntersect(
                    filterData.IdSumProvider.GetFolded(i, filterData.BlockSize, foldFactors.Item1, configuration.IdAdd),
                    otherFilterData.IdSumProvider.GetFolded(i, otherFilterData.BlockSize, foldFactors.Item2, configuration.IdAdd));
                res.HashSumProvider[i] = configuration.HashIntersect(
                    filterData.HashSumProvider.GetFolded(i, filterData.BlockSize, foldFactors.Item1, configuration.HashAdd),
                    otherFilterData.HashSumProvider.GetFolded(i, otherFilterData.BlockSize, foldFactors.Item2, configuration.HashAdd));
            }
        });
    // Recursively intersect the sub-filters under the sub-filter configuration.
    // NOTE(review): Intersect and ConvertToBloomFilterData are extension methods, so a
    // null SubFilter (or a null intersection result) is presumably tolerated — confirm
    // that ConvertToBloomFilterData handles a null receiver.
    res.SubFilter = filterData
        .SubFilter
        .Intersect(configuration.SubFilterConfiguration, otherFilterData.SubFilter, inPlace)
        .ConvertToBloomFilterData(configuration);
    // Re-estimate the item count from the combined per-cell counts.
    res.ItemCount = configuration.CountConfiguration.GetEstimatedCount(res.Counts, res.HashFunctionCount);
    return (res);
}