/// <summary>
/// Parses a full filter line of the form "height:blockHash[:filterHex]".
/// </summary>
/// <param name="line">The serialized filter line. Must contain at least two colon-separated parts.</param>
/// <returns>The parsed <c>FilterModel</c>; <c>Filter</c> is null when no filter part is present.</returns>
/// <exception cref="ArgumentException">Thrown when the line has fewer than two parts.</exception>
public static FilterModel FromFullLine(string line)
{
	Guard.NotNullOrEmptyOrWhitespace(nameof(line), line);
	string[] parts = line.Split(':');

	if (parts.Length <= 1)
	{
		// BUGFIX: ArgumentException(message, paramName) arguments were swapped — the exception
		// used to carry "line" as its message and the whole input line as the parameter name.
		throw new ArgumentException($"Invalid filter line: {line}.", nameof(line));
	}
	else if (parts.Length == 2) // no bech here
	{
		return new FilterModel
		{
			BlockHeight = new Height(parts[0]),
			BlockHash = new uint256(parts[1]),
			Filter = null
		};
	}

	var data = Encoders.Hex.DecodeData(parts[2]);
	// P = 20 and M = 2^20 are the parameters used throughout this codebase (BIP158 values).
	var filter = new GolombRiceFilter(data, 20, 1 << 20);

	return new FilterModel
	{
		BlockHeight = new Height(parts[0]),
		BlockHash = new uint256(parts[1]),
		Filter = filter
	};
}
public async Task InconsistentImmatureIndexAsync()
{
	// An immature index containing a gap (height jumps from start+2 to start+98) must make
	// initialization fail, keep the tip at the last consistent filter, and delete the file.
	var (dir, _, immatureFilters) = await GetIndexStorePathsAsync();
	var network = Network.Main;
	var headersChain = new SmartHeaderChain();
	await using var indexStore = new IndexStore(dir, network, headersChain);

	var emptyFilter = GolombRiceFilter.Parse("00");
	var firstFilter = StartingFilters.GetStartingFilter(network);
	var baseHeight = firstFilter.Header.Height;

	var brokenImmatureIndex = new[]
	{
		new FilterModel(new SmartHeader(new uint256(2), firstFilter.Header.BlockHash, baseHeight + 1, MinutesAgo(30)), emptyFilter),
		new FilterModel(new SmartHeader(new uint256(3), new uint256(2), baseHeight + 2, MinutesAgo(20)), emptyFilter),
		new FilterModel(new SmartHeader(new uint256(99), new uint256(98), baseHeight + 98, MinutesAgo(10)), emptyFilter)
	};
	await File.WriteAllLinesAsync(immatureFilters, brokenImmatureIndex.Select(filter => filter.ToLine()));

	await Assert.ThrowsAsync<InvalidOperationException>(async () => await indexStore.InitializeAsync());

	Assert.Equal(new uint256(3), headersChain.TipHash);
	Assert.Equal(baseHeight + 2u, headersChain.TipHeight);

	// Check if the immatureIndex is deleted
	Assert.False(File.Exists(immatureFilters));
}
/// <summary>
/// Parses a full filter line of the form "height:blockHash[:filterHex]" with validated height parsing.
/// </summary>
/// <param name="line">The serialized filter line. Must contain at least two colon-separated parts.</param>
/// <returns>The parsed <c>FilterModel</c>; <c>Filter</c> is null when no filter part is present.</returns>
/// <exception cref="ArgumentException">Thrown when the line has fewer than two parts.</exception>
/// <exception cref="FormatException">Thrown when the height part cannot be parsed.</exception>
public static FilterModel FromFullLine(string line)
{
	Guard.NotNullOrEmptyOrWhitespace(nameof(line), line);
	string[] parts = line.Split(':');
	GolombRiceFilter filter;

	if (parts.Length <= 1)
	{
		// BUGFIX: ArgumentException(message, paramName) arguments were swapped — the exception
		// used to carry "line" as its message and the whole input line as the parameter name.
		throw new ArgumentException($"Invalid filter line: {line}.", nameof(line));
	}
	else if (parts.Length == 2) // no bech here
	{
		filter = null;
	}
	else
	{
		var data = Encoders.Hex.DecodeData(parts[2]);
		// P = 20 and M = 2^20 are the parameters used throughout this codebase (BIP158 values).
		filter = new GolombRiceFilter(data, 20, 1 << 20);
	}

	if (Height.TryParse(parts[0], out Height blockHeight))
	{
		return new FilterModel
		{
			BlockHeight = blockHeight,
			BlockHash = new uint256(parts[1]),
			Filter = filter
		};
	}
	else
	{
		throw new FormatException($"Could not parse {nameof(Height)}.");
	}
}
public void GenerateTestVectorsTest()
{
	// Every CSV row after the header is one BIP158-style test vector. Column layout:
	// height, block hash, raw block, prev basic header, prev ext header,
	// basic filter, ext filter, basic header, ext header.
	var testLines = File.ReadAllLines("data/bip158_vectors.csv");
	foreach (var testLine in testLines.Skip(1))
	{
		var columns = testLine.Split(',');
		var testBlockHeight = int.Parse(columns[0]);
		var testBlockHash = uint256.Parse(columns[1]);
		var testBlock = Block.Parse(columns[2]);
		var testPreviousBasicHeader = uint256.Parse(columns[3]);
		var testPreviousExtHeader = uint256.Parse(columns[4]);
		var testBasicFilter = columns[5];
		var testExtFilter = columns[6];
		var testBasicHeader = columns[7];
		var testExtHeader = columns[8];

		var basicFilter = GolombRiceFilterBuilder.BuildBasicFilter(testBlock);
		Assert.Equal(testBasicFilter, basicFilter.ToString());
		Assert.Equal(testBasicHeader, basicFilter.GetHeader(testPreviousBasicHeader).ToString());

		// An empty extended filter column means the serialized empty filter "00".
		testExtFilter = string.IsNullOrEmpty(testExtFilter) ? "00" : testExtFilter;
		var extFilter = GolombRiceFilterBuilder.BuildExtendedFilter(testBlock);
		Assert.Equal(testExtFilter, extFilter.ToString());
		Assert.Equal(testExtHeader, extFilter.GetHeader(testPreviousExtHeader).ToString());

		// Round-trip: parsing the serialized representation must reproduce it exactly.
		var deserializedBasicFilter = GolombRiceFilter.Parse(testBasicFilter);
		Assert.Equal(testBasicFilter, deserializedBasicFilter.ToString());

		var deserializedExtFilter = GolombRiceFilter.Parse(testExtFilter);
		Assert.Equal(testExtFilter, deserializedExtFilter.ToString());
	}
}
/// <summary>
/// Parses a heightless filter line of the form "blockHash[:filterHex]", attaching the given height.
/// </summary>
public static FilterModel FromHeightlessLine(string line, Height height)
{
	Guard.NotNullOrEmptyOrWhitespace(nameof(line), line);
	var parts = line.Split(':');

	GolombRiceFilter filter;
	if (parts.Length == 1) // no bech here
	{
		filter = null;
	}
	else
	{
		var data = Encoders.Hex.DecodeData(parts[1]);
		filter = new GolombRiceFilter(data, 20, 1 << 20);
	}

	return new FilterModel
	{
		BlockHeight = Guard.NotNull(nameof(height), height),
		BlockHash = new uint256(parts[0]),
		Filter = filter
	};
}
public void BuildFilterAndMatchValues()
{
	var cities = new[] { "New York", "Amsterdam", "Paris", "Buenos Aires", "La Habana" };
	var names = cities.Select(city => Encoding.ASCII.GetBytes(city));
	var key = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };

	var filter = GolombRiceFilter.Build(key, names, 0x10);

	// The filter should match all the values that were added.
	foreach (var name in names)
	{
		Assert.True(filter.Match(name, key));
	}

	// The filter should NOT match any extra value.
	Assert.False(filter.Match(Encoding.ASCII.GetBytes("Porto Alegre"), key));
	Assert.False(filter.Match(Encoding.ASCII.GetBytes("Madrid"), key));

	// The filter should match because it has one element indexed: Buenos Aires.
	var otherCities = new[] { "La Paz", "Barcelona", "El Cairo", "Buenos Aires", "Asunción" };
	var otherNames = otherCities.Select(city => Encoding.ASCII.GetBytes(city));
	Assert.True(filter.MatchAny(otherNames, key));

	// The filter should NOT match because it doesn't have any element indexed.
	var otherCities2 = new[] { "La Paz", "Barcelona", "El Cairo", "Córdoba", "Asunción" };
	var otherNames2 = otherCities2.Select(city => Encoding.ASCII.GetBytes(city));
	Assert.False(filter.MatchAny(otherNames2, key));
}
/// <summary>
/// Parses a line of the form "blockHash[:n:bitLength:filterHex]", attaching the given height.
/// </summary>
public static FilterModel FromLine(string line, Height height)
{
	Guard.NotNullOrEmptyOrWhitespace(nameof(line), line);
	var parts = line.Split(':');

	GolombRiceFilter filter;
	if (parts.Length == 1) // no bech here
	{
		filter = null;
	}
	else
	{
		var n = int.Parse(parts[1]);
		var bits = new FastBitArray(ByteHelpers.FromHex(parts[3]));
		// The serialized byte array is padded; restore the exact bit length.
		bits.Length = int.Parse(parts[2]);
		filter = new GolombRiceFilter(bits, n);
	}

	return new FilterModel
	{
		BlockHeight = Guard.NotNull(nameof(height), height),
		BlockHash = new uint256(parts[0]),
		Filter = filter
	};
}
/// <summary>
/// Builds a Golomb-Rice filter over the P2WPKH outputs of the given block.
/// The block hash bytes are used both as the filter key and as the first entry.
/// </summary>
public GolombRiceFilter Build(Block block)
{
	var key = block.GetHash().ToBytes();
	var entries = new List<byte[]> { key };

	foreach (var tx in block.Transactions)
	{
		foreach (var txOutput in tx.Outputs)
		{
			if (P2wpkh.CheckScriptPubKey(txOutput.ScriptPubKey))
			{
				var witKeyId = P2wpkh.ExtractScriptPubKeyParameters(txOutput.ScriptPubKey);
				entries.Add(witKeyId.ToBytes());
			}
		}
	}

	return GolombRiceFilter.Build(key, entries, P);
}
public void Setup()
{
	// The test key is the first 16 bytes of a random 32-byte value.
	var keyBuffer = new byte[32];
	_random.NextBytes(keyBuffer);
	var key = new uint256(keyBuffer);
	_testKey = key.ToBytes().Take(16).ToArray();

	var builder = new GolombRiceFilterBuilder()
		.SetKey(key)
		.SetP(20);

	// Fill the filter with N random entries of 20-29 bytes each.
	var itemsInFilter = new List<byte[]>();
	for (var i = 0; i < N; i++)
	{
		var item = new byte[_random.Next(20, 30)];
		_random.NextBytes(item);
		itemsInFilter.Add(item);
	}
	builder.AddEntries(itemsInFilter);

	// Pick 0.5% of the indexed items, in random order, as the lookup sample.
	_sample = itemsInFilter.OrderBy(x => _random.Next()).Take(N / 200).ToArray();
	_filter = builder.Build();
}
public async Task GapInIndexAsync()
{
	// A hole between the mature chain (heights 1-2) and the immature chain (heights 4-5)
	// must fail initialization, keep the mature index and delete the immature one.
	var (dir, matureFilters, immatureFilters) = await GetIndexStorePathsAsync();
	var network = Network.Main;
	var headersChain = new SmartHeaderChain();
	await using var indexStore = new IndexStore(dir, network, headersChain);

	var emptyFilter = GolombRiceFilter.Parse("00");

	var matureIndex = new[]
	{
		new FilterModel(new SmartHeader(new uint256(2), new uint256(1), 1, MinutesAgo(30)), emptyFilter),
		new FilterModel(new SmartHeader(new uint256(3), new uint256(2), 2, MinutesAgo(20)), emptyFilter),
	};
	await File.WriteAllLinesAsync(matureFilters, matureIndex.Select(filter => filter.ToLine()));

	var immatureIndex = new[]
	{
		new FilterModel(new SmartHeader(new uint256(5), new uint256(4), 4, MinutesAgo(30)), emptyFilter),
		new FilterModel(new SmartHeader(new uint256(6), new uint256(5), 5, MinutesAgo(20)), emptyFilter),
	};
	await File.WriteAllLinesAsync(immatureFilters, immatureIndex.Select(filter => filter.ToLine()));

	await Assert.ThrowsAsync<InvalidOperationException>(async () => await indexStore.InitializeAsync());

	Assert.Equal(new uint256(3), headersChain.TipHash);
	Assert.Equal(2u, headersChain.TipHeight);

	Assert.True(File.Exists(matureFilters)); // mature filters are ok
	Assert.False(File.Exists(immatureFilters)); // immature filters are NOT ok
}
public void GenerateTestVectorsTest()
{
	// Every JSON row after the header is one BIP158-style test vector. The height,
	// block hash and message columns are parsed but not needed by the assertions.
	var tests = TestCase.read_json("data/bip158_vectors.json");
	foreach (var test in tests.Skip(1))
	{
		var testBlockHeight = test[0];
		var testBlockHash = uint256.Parse((string)test[1]);
		var testBlock = Block.Parse((string)test[2]);
		var testPreviousBasicHeader = uint256.Parse((string)test[3]);
		var testPreviousExtHeader = uint256.Parse((string)test[4]);
		var testBasicFilter = (string)test[5];
		var testExtFilter = (string)test[6];
		var testBasicHeader = (string)test[7];
		var testExtHeader = (string)test[8];
		var message = (string)test[9];

		var basicFilter = GolombRiceFilterBuilder.BuildBasicFilter(testBlock);
		Assert.Equal(testBasicFilter, basicFilter.ToString());
		Assert.Equal(testBasicHeader, basicFilter.GetHeader(testPreviousBasicHeader).ToString());

		var extFilter = GolombRiceFilterBuilder.BuildExtendedFilter(testBlock);
		Assert.Equal(testExtFilter, extFilter.ToString());
		Assert.Equal(testExtHeader, extFilter.GetHeader(testPreviousExtHeader).ToString());

		// Round-trip: parsing the serialized representation must reproduce it exactly.
		var deserializedBasicFilter = GolombRiceFilter.Parse(testBasicFilter);
		Assert.Equal(testBasicFilter, deserializedBasicFilter.ToString());

		var deserializedExtFilter = GolombRiceFilter.Parse(testExtFilter);
		Assert.Equal(testExtFilter, deserializedExtFilter.ToString());
	}
}
public void CreateStoreTest()
{
	const byte P = 20;
	const int blockCount = 100;
	const int maxBlockSize = 4 * 1000 * 1000;
	const int avgTxSize = 250; // Currently the average is around 1kb.
	const int txoutCountPerBlock = maxBlockSize / avgTxSize;
	const int avgTxoutPushDataSize = 20; // P2PKH scripts has 20 bytes.

	var key = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };

	// Generation of data to be added into the filter
	var random = new Random();
	var repositoryPath = Path.Combine(SharedFixture.DataDir, nameof(CreateStoreTest));

	// Start from an empty data directory.
	var dataDirectory = new DirectoryInfo(repositoryPath);
	if (dataDirectory.Exists)
	{
		foreach (var fileInfo in dataDirectory.GetFiles())
		{
			fileInfo.Delete();
		}
	}

	// Build blockCount random filters and persist each one keyed by the hash of its data.
	var blocks = new List<GolombRiceFilter>(blockCount);
	using (var repo = GcsFilterRepository.Open(repositoryPath))
	{
		for (var i = 0; i < blockCount; i++)
		{
			var txouts = new List<byte[]>(txoutCountPerBlock);
			for (var j = 0; j < txoutCountPerBlock; j++)
			{
				var pushDataBuffer = new byte[avgTxoutPushDataSize];
				random.NextBytes(pushDataBuffer);
				txouts.Add(pushDataBuffer);
			}

			var filter = GolombRiceFilter.Build(key, txouts, P);
			blocks.Add(filter);
			repo.Put(Hashes.Hash256(filter.Data.ToByteArray()), filter);
		}
	}

	// Re-open the store and verify that every filter round-trips, reading in random order.
	using (var repo = GcsFilterRepository.Open(repositoryPath))
	{
		var blockIndexes = Enumerable.Range(0, blockCount).ToList();
		blockIndexes.Shuffle();
		foreach (var blockIndex in blockIndexes)
		{
			var blockFilter = blocks[blockIndex];
			var blockFilterId = Hashes.Hash256(blockFilter.Data.ToByteArray());
			var savedFilter = repo.Get(blockFilterId);
			var savedFilterId = Hashes.Hash256(savedFilter.Data.ToByteArray());
			Assert.Equal(blockFilterId, savedFilterId);
		}
	}
}
public async Task InconsistentMatureIndexAsync() { var(dir, matureFilters, _) = await GetIndexStorePathsAsync(); var network = Network.Main; var headersChain = new SmartHeaderChain(); var indexStore = new IndexStore(dir, network, headersChain); var dummyFilter = GolombRiceFilter.Parse("00");
/// <summary>
/// Reads a filter from a binary stream. Layout: 32-byte block hash,
/// 4-byte filter size, then that many filter bytes.
/// </summary>
public static FilterModel FromStream(Stream stream, Height height)
{
	var blockHash = new uint256(stream.ReadBytes(32));
	int filterSize = BitConverter.ToInt32(stream.ReadBytes(4));
	byte[] data = stream.ReadBytes(filterSize);

	// A zero-length filter is serialized as size 0 and deserialized as null.
	GolombRiceFilter filter = filterSize > 0
		? new GolombRiceFilter(data, 20, 1 << 20)
		: null;

	return new FilterModel
	{
		BlockHeight = Guard.NotNull(nameof(height), height),
		BlockHash = blockHash,
		Filter = filter
	};
}
/// <summary>
/// Parses a filter line of the form "height:blockHash:filterHex:prevBlockHash:blockTimeUnixSeconds".
/// </summary>
/// <param name="line">The serialized filter line. Must contain at least five colon-separated parts.</param>
/// <exception cref="ArgumentException">Thrown when the line has fewer than five parts.</exception>
public static FilterModel FromLine(string line)
{
	Guard.NotNullOrEmptyOrWhitespace(nameof(line), line);
	string[] parts = line.Split(':');
	if (parts.Length < 5)
	{
		// BUGFIX: ArgumentException(message, paramName) arguments were swapped — the exception
		// used to carry "line" as its message and the whole input line as the parameter name.
		throw new ArgumentException($"Invalid filter line: {line}.", nameof(line));
	}

	var blockHeight = uint.Parse(parts[0]);
	var blockHash = uint256.Parse(parts[1]);
	var filterData = Encoders.Hex.DecodeData(parts[2]);
	// P = 20 and M = 2^20 are the parameters used throughout this codebase (BIP158 values).
	GolombRiceFilter filter = new GolombRiceFilter(filterData, 20, 1 << 20);
	var prevBlockHash = uint256.Parse(parts[3]);
	var blockTime = DateTimeOffset.FromUnixTimeSeconds(long.Parse(parts[4]));

	return new FilterModel(new SmartHeader(blockHash, prevBlockHash, blockHeight, blockTime), filter);
}
public async Task ReceiveNonMatchingFilterAsync()
{
	var (dir, matureFilters, immatureFilters) = await GetIndexStorePathsAsync();
	var network = Network.Main;
	var headersChain = new SmartHeaderChain();
	await using var indexStore = new IndexStore(dir, network, headersChain);

	var emptyFilter = GolombRiceFilter.Parse("00");

	var matureIndex = new[]
	{
		new FilterModel(new SmartHeader(new uint256(2), new uint256(1), 1, MinutesAgo(30)), emptyFilter),
		new FilterModel(new SmartHeader(new uint256(3), new uint256(2), 2, MinutesAgo(20)), emptyFilter),
	};
	await File.WriteAllLinesAsync(matureFilters, matureIndex.Select(filter => filter.ToLine()));

	await indexStore.InitializeAsync();
	Assert.Equal(new uint256(3), headersChain.TipHash);
	Assert.Equal(2u, headersChain.TipHeight);
	Assert.True(File.Exists(matureFilters)); // mature filters are ok

	// A filter that does not connect to the current tip must be rejected.
	var nonMatchingBlockHashFilter = new FilterModel(new SmartHeader(new uint256(2), new uint256(1), 1, MinutesAgo(30)), emptyFilter);
	await indexStore.AddNewFiltersAsync(new[] { nonMatchingBlockHashFilter }, CancellationToken.None);
	Assert.Equal(new uint256(3), headersChain.TipHash); // the filter is not added!
	Assert.Equal(2u, headersChain.TipHeight);

	// A filter whose height is not the tip height + 1 must be rejected, too.
	var nonMatchingHeightFilter = new FilterModel(new SmartHeader(new uint256(4), new uint256(3), 37, MinutesAgo(1)), emptyFilter);
	await indexStore.AddNewFiltersAsync(new[] { nonMatchingHeightFilter }, CancellationToken.None);
	Assert.Equal(new uint256(3), headersChain.TipHash); // the filter is not added!
	Assert.Equal(2u, headersChain.TipHeight);

	// A consistent filter (connects to the tip and has height tip + 1) is accepted.
	// (Fixed a copy-pasted comment here: this filter IS added.)
	var correctFilter = new FilterModel(new SmartHeader(new uint256(4), new uint256(3), 3, MinutesAgo(1)), emptyFilter);
	await indexStore.AddNewFiltersAsync(new[] { correctFilter }, CancellationToken.None);
	Assert.Equal(new uint256(4), headersChain.TipHash); // the filter IS added
	Assert.Equal(3u, headersChain.TipHeight);
}
/// <summary>
/// Parses a line of the form "blockHash[:serializedFilter]", attaching the given height.
/// </summary>
public static FilterModel FromLine(string line, Height height)
{
	Guard.NotNullOrEmptyOrWhitespace(nameof(line), line);
	var parts = line.Split(':');

	// A single part means there is no serialized filter (no bech here).
	GolombRiceFilter filter = parts.Length == 1
		? null
		: GolombRiceFilter.Parse(parts[1]);

	return new FilterModel
	{
		BlockHeight = Guard.NotNull(nameof(height), height),
		BlockHash = new uint256(parts[0]),
		Filter = filter
	};
}
// Returns the hard-coded starting filter for the given network.
// Line format (consumed by FilterModel.FromLine): height:blockHash:filter:prevHash:blockTimeUnixSeconds.
// - Main: the filter slot carries the full pre-computed hex-serialized filter of the starting block.
// - TestNet: the filter slot carries a fixed constant — NOTE(review): it looks like a block hash
//   rather than a real filter; presumably a placeholder, confirm against FilterModel.FromLine usage.
// - RegTest: a dummy empty filter is built on the fly for the starting block hash.
// Throws NotSupportedNetworkException for any other network.
public static FilterModel GetStartingFilter(Network network) { var startingHeader = SmartHeader.GetStartingHeader(network); if (network == Network.Main) { return(FilterModel.FromLine($"{startingHeader.Height}:{startingHeader.BlockHash}:fd37025542ac13565ae0612c2d2c178df07d05402c527fd91942a68300e20853315e278ebc861f7ec7e20ee5c7ea39197ab66ec38c6cacfb7a960e87eafe739fbb58e703811b2ab0cc9ac55c34f3a6d70536776a34679680d5539e3ff089a08d6fed732e3c26e4b5527790cd14a7bd83c8142b5cf4f10fbd09baed653cb4ebf9100bcc5d30878b1aaa01a00230d505004e5d686fd752ec8624f88d640867cab753ca1d3b7829b05a2c20606279242688ba319c2173a5a885399b58c45127bdf976c4d45e79425ff809da944d30405f929c10186e1666bb444cdeece71133049e5e44a265d44c4122e83919d6573220f3b9bdf38ee3037e91b0ae0485707e9a8755dd530fff74338652b55aa547f3c505db5f0a1b92f95fd95852d48167ef857d55500dfb8c7209f05c1669a73f15e7bb26537462a05da4f0d803315dd43630b23019be48a556ebe50752bdd37080460254e4d6d92be80ab0ed8efd882fd0c360fbf00b387b6e5ef4d9a27267b2f51151684fa7cf3064f8e2fc918843215512c838fdd35270da33df3d203e02539f1b030863619a4fdd93b7771de31d35436a50c904225942fda79e2606a21d64dbb110c79f202d10acd94a2ac7cf24cff2dc816ac6828941abec3d9ae721c09f5a2070d3a99aba0fb1e33d7de26c3c2b7f40b002719138826a670c38d56fe5a67e827bf48865f2c34e4eafa68486ecb419118144d212d5a10ee41f26c07b2456c5a0b74fb86cb4eaec27ac2cf80847e57d711754589716c70eea0d1c595b6dd413aa588542acc5ea9d5c102f3b4aac876bb91f57c1eea063d520517bd0c48e2d79ec2164525e1c24a34e8f50e4256790395fcdd5c71b50949738ab92d44476c2073e16bedb4b31517d2264ae4d824b0990fe316919c432edc9f71c1a6a21937786b68e9e11f788fba04ad1253cfcd3148698ec8a4a1054a33aa51adcdb61c922d44ebefb6dc1aab67d07b48bb702606047230e4439bec8d1b371683be41617e4f163509b09f5cd6577956b1857a338764486017949f594c90be92783776ff0ba0d6d517d18d5842c36eee044290df2ca0ad3818f3bcd874ad23e41a77c441da273f20e3472801a4892b635d3d2a0b76ca1c0025b5283850897993410f5c654527ebfc446445ca10d0264c964b86719a108179f303165b3b5f53441d9fe9f6e1a1ae2d2d61c6c34752ca9b02b6c8fb2166dba95370ad1cace915e90cab14797a33be4454fe4
4dfecb015c59e76b60336192b262169db6461b26da3b03cdcca4c185e7d76bef944ea02a350449e9119aeb82ac094b87cd3fec4bd289c55bf008e3eb614017240c73fb3440de1cbd0f1e634bd997f1c10ccf97598b8d0144bd8608999a6420ff65cfb17f275b33e16d08bd5f01ff2a98b02f3cc84b1931b68b4d47ebab850824d55029e2deaab509bbe09b698675add8a5119630207c41c54a9503ea2ab15ce4446504ce19c52313733a95fed0dea0f9e56c5658d52d43f09b7a246140dac1e49aeb00d7d0b50aac0fab135249f523ee1fc59d8d2c18af69db23fc26a006451e132448bc7b671fa864897a8e23b1bf4b97397d091f8106e12174efd82e7f4479aab210f488cd4cb45a093392ecab407ad3a8836c771ec87d36d41aeeac20c36b3d39c87ea1eaba86411a78ca33f658cceadb903ec7482a395f11b7aa54eecd9da1a9a542695723fff6056ac736b788abd2498488a6737236d70e7aceecdc71e4878c2f1e390866020cf20681230f3858dd7ae44b9bdb0a3726b12db3b42b2863cf03d3e34b733d9c63e92c5b07bf942dc740a2c902621c1f74c4e00e3a36002fb8a24febc89aa5350544566de06ba81aa915e115368aabe7fdedb676082abac78a1b216d7e56c8ed0da8ada71bc2f46ade811233f9568308136700e21b5941286d0d48fe43009b48161837e5255e639d8a4225928a2637d745cbc580c9b14c50860dba2f1be321463e47cb4914c81c7b81206090aa23a73103f139dfca303447cc7140eef9a5f5235675073cdcd5635abad96de169975f2cf06689c9dc9c9d7557d504c7ec1541b493769a2106537d56d489f8800d5d76870c716678ff3672c76b8a179b0a601858b8973ed59a9ecc13f115b44d2a3251e658b95bd5f3f1aa21c57a6eb960dcbf968890:{startingHeader.PrevHash}:{startingHeader.BlockTime.ToUnixTimeSeconds()}")); } else if (network == Network.TestNet) { return(FilterModel.FromLine($"{startingHeader.Height}:{startingHeader.BlockHash}:00000000000f0d5edcaeba823db17f366be49a80d91d15b77747c2e017b8c20a:{startingHeader.PrevHash}:{startingHeader.BlockTime.ToUnixTimeSeconds()}")); } else if (network == Network.RegTest) { GolombRiceFilter filter = IndexBuilderService.CreateDummyEmptyFilter(startingHeader.BlockHash); return(FilterModel.FromLine($"{startingHeader.Height}:{startingHeader.BlockHash}:{filter}:{startingHeader.PrevHash}:{startingHeader.BlockTime.ToUnixTimeSeconds()}")); } else { throw new 
NotSupportedNetworkException(network); } }
/// <summary>
/// Returns the hard-coded starting filter for the given network.
/// Line format: height:blockHash:filter:prevHash:blockTimeUnixSeconds.
/// </summary>
/// <exception cref="NotSupportedNetworkException">Thrown for networks other than Main, TestNet and RegTest.</exception>
public static FilterModel GetStartingFilter(Network network)
{
	var startingHeader = SmartHeader.GetStartingHeader(network);

	if (network == Network.Main)
	{
		return FilterModel.FromLine($"{startingHeader.Height}:{startingHeader.BlockHash}:02832810ec08a0:{startingHeader.PrevHash}:{startingHeader.BlockTime.ToUnixTimeSeconds()}");
	}

	if (network == Network.TestNet)
	{
		return FilterModel.FromLine($"{startingHeader.Height}:{startingHeader.BlockHash}:00000000000f0d5edcaeba823db17f366be49a80d91d15b77747c2e017b8c20a:{startingHeader.PrevHash}:{startingHeader.BlockTime.ToUnixTimeSeconds()}");
	}

	if (network == Network.RegTest)
	{
		GolombRiceFilter filter = IndexBuilderService.CreateDummyEmptyFilter(startingHeader.BlockHash);
		var serializedFilter = filter.ToString();
		return FilterModel.FromLine($"{startingHeader.Height}:{startingHeader.BlockHash}:{serializedFilter}:{startingHeader.PrevHash}:{startingHeader.BlockTime.ToUnixTimeSeconds()}");
	}

	throw new NotSupportedNetworkException(network);
}
/// <summary>
/// Returns the hard-coded starting filter for the given Litecoin network.
/// Line format: height:blockHash:filter:prevHash:blockTimeUnixSeconds.
/// </summary>
/// <exception cref="NotSupportedNetworkException">Thrown for non-Litecoin networks.</exception>
public static FilterModel GetStartingFilter(Network network)
{
	var startingHeader = SmartHeader.GetStartingHeader(network);
	var litecoin = NBitcoin.Altcoins.Litecoin.Instance;

	// NOTE(review): on Mainnet/Testnet the filter slot carries the block hash itself rather
	// than a serialized filter — presumably a placeholder; confirm against FilterModel.FromLine.
	if (network == litecoin.Mainnet || network == litecoin.Testnet)
	{
		return FilterModel.FromLine($"{startingHeader.Height}:{startingHeader.BlockHash}:{startingHeader.BlockHash}:{startingHeader.PrevHash}:{startingHeader.BlockTime.ToUnixTimeSeconds()}");
	}

	if (network == litecoin.Regtest)
	{
		GolombRiceFilter filter = IndexBuilderService.CreateDummyEmptyFilter(startingHeader.BlockHash);
		return FilterModel.FromLine($"{startingHeader.Height}:{startingHeader.BlockHash}:{filter}:{startingHeader.PrevHash}:{startingHeader.BlockTime.ToUnixTimeSeconds()}");
	}

	throw new NotSupportedNetworkException(network);
}
/// <summary>
/// Pairs a Golomb-Rice filter with the raw entries it was built from.
/// </summary>
public BlockFilter(GolombRiceFilter filter, List<byte[]> data)
{
	(Filter, Data) = (filter, data);
}
/// <summary>
/// Creates a filter model from an already-materialized filter.
/// The filter is stored in a pre-initialized Lazy wrapper.
/// </summary>
public FilterModel(SmartHeader header, GolombRiceFilter filter)
{
	Header = header;
	_filter = new Lazy<GolombRiceFilter>(filter);
}
public void FalsePositivesTest()
{
	// Given this library can be used for building and query filters for each block of
	// the bitcoin's blockchain, we must be sure it performs well, specially in the queries.
	// Considering a 4MB block (overestimated) with an average transaction size of 250 bytes (underestimated)
	// gives us 16000 transactions (this is about 27 tx/sec). Assuming 2.5 txouts per tx we have 83885 txouts
	// per block.
	const byte P = 20;
	const int blockCount = 100;
	const int maxBlockSize = 4 * 1000 * 1000;
	const int avgTxSize = 250; // Currently the average is around 1kb.
	const int txoutCountPerBlock = maxBlockSize / avgTxSize;
	const int avgTxoutPushDataSize = 20; // P2PKH scripts has 20 bytes.
	const int walletAddressCount = 1000; // We estimate that our user will have 1000 addresses.

	var key = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };

	// Generation of data to be added into the filter
	var random = new Random();
	var sw = new Stopwatch();

	// Build one filter per simulated block, each over random txout push data,
	// timing only the filter construction itself.
	var blocks = new List<BlockFilter>(blockCount);
	for (var i = 0; i < blockCount; i++)
	{
		var txouts = new List<byte[]>(txoutCountPerBlock);
		for (var j = 0; j < txoutCountPerBlock; j++)
		{
			var pushDataBuffer = new byte[avgTxoutPushDataSize];
			random.NextBytes(pushDataBuffer);
			txouts.Add(pushDataBuffer);
		}
		sw.Start();
		var filter = GolombRiceFilter.Build(key, txouts, P);
		sw.Stop();
		blocks.Add(new BlockFilter(filter, txouts));
	}
	sw.Reset();

	// Simulated wallet: random addresses that were NOT indexed in any filter.
	var walletAddresses = new List<byte[]>(walletAddressCount);
	var falsePositiveCount = 0;
	for (var i = 0; i < walletAddressCount; i++)
	{
		var walletAddress = new byte[avgTxoutPushDataSize];
		random.NextBytes(walletAddress);
		walletAddresses.Add(walletAddress);
	}
	sw.Start();
	// Count the blocks wrongly reporting a match for the non-indexed wallet addresses (false positives).
	foreach (var block in blocks)
	{
		if (block.Filter.MatchAny(walletAddresses, key))
		{
			falsePositiveCount++;
		}
	}
	sw.Stop();
	Assert.True(falsePositiveCount < 5);

	// Filter has to match existing values
	sw.Start();
	var falseNegativeCount = 0;
	// Check that the filter can match every single txout in every block.
	foreach (var block in blocks)
	{
		if (!block.Filter.MatchAny(block.Data, key))
		{
			falseNegativeCount++;
		}
	}
	sw.Stop();
	Assert.Equal(0, falseNegativeCount);
}
public void Setup()
{
	// Download the sample block once, then build the benchmark filter from it.
	var sample = new BlockSample();
	sample.Download();
	Sample = sample;
	BlockFilter = GolombRiceFilterBuilder.BuildBasicFilter(sample.BigBlock);
}
/// <summary>
/// Starts the index-building loop on a background task: fetches blocks one by one over RPC,
/// maintains the bech32 (native segwit) UTXO set, builds one Golomb-Rice filter per block and
/// appends it to the on-disk index. Handles single-block reorgs outside the Initial Index
/// Building (IIB) phase via a bounded undo history.
/// </summary>
public void Synchronize()
{
	Interlocked.Exchange(ref _running, 1);
	Task.Run(async () =>
	{
		try
		{
			var blockCount = await RpcClient.GetBlockCountAsync();
			var isIIB = true; // Initial Index Building phase
			while (IsRunning)
			{
				try
				{
					// If stop was requested return.
					if (IsRunning == false)
					{
						return;
					}

					// Continue from the block after the last indexed one (or from the starting height).
					var height = StartingHeight;
					uint256 prevHash = null;
					using (await IndexLock.LockAsync())
					{
						if (Index.Count != 0)
						{
							var lastIndex = Index.Last();
							height = lastIndex.BlockHeight + 1;
							prevHash = lastIndex.BlockHash;
						}
					}

					// Within 100 blocks of the tip, leave the Initial Index Building phase.
					if (blockCount - (int)height <= 100)
					{
						isIIB = false;
					}

					Block block = null;
					try
					{
						block = await RpcClient.GetBlockAsync(height);
					}
					catch (RPCException) // if the block didn't come yet
					{
						await Task.Delay(1000);
						continue;
					}

					if (prevHash != null)
					{
						// In case of reorg:
						if (prevHash != block.Header.HashPrevBlock && !isIIB) // There is no reorg in IIB
						{
							Logger.LogInfo<IndexBuilderService>($"REORG Invalid Block: {prevHash}");
							// 1. Rollback index
							using (await IndexLock.LockAsync())
							{
								Index.RemoveLast();
							}

							// 2. Serialize Index. (Remove last line.)
							var lines = File.ReadAllLines(IndexFilePath);
							File.WriteAllLines(IndexFilePath, lines.Take(lines.Length - 1).ToArray());

							// 3. Rollback Bech32UtxoSet
							if (Bech32UtxoSetHistory.Count != 0)
							{
								Bech32UtxoSetHistory.Last().Rollback(Bech32UtxoSet); // The Bech32UtxoSet MUST be recovered to its previous state.
								Bech32UtxoSetHistory.RemoveLast();

								// 4. Serialize Bech32UtxoSet.
								await File.WriteAllLinesAsync(Bech32UtxoSetFilePath, Bech32UtxoSet
									.Select(entry => entry.Key.Hash + ":" + entry.Key.N + ":" + ByteHelpers.ToHex(entry.Value.ToCompressedBytes())));
							}

							// 5. Skip the current block.
							continue;
						}
					}

					// Outside IIB, keep a bounded (100-deep) undo history so a reorg can be rolled back.
					if (!isIIB)
					{
						if (Bech32UtxoSetHistory.Count >= 100)
						{
							Bech32UtxoSetHistory.RemoveFirst();
						}
						Bech32UtxoSetHistory.Add(new ActionHistoryHelper());
					}

					var scripts = new HashSet<Script>();
					foreach (var tx in block.Transactions)
					{
						for (int i = 0; i < tx.Outputs.Count; i++)
						{
							var output = tx.Outputs[i];
							// Native segwit outputs only: witness program but not P2SH.
							if (!output.ScriptPubKey.IsPayToScriptHash && output.ScriptPubKey.IsWitness)
							{
								var outpoint = new OutPoint(tx.GetHash(), i);
								Bech32UtxoSet.Add(outpoint, output.ScriptPubKey);
								if (!isIIB)
								{
									Bech32UtxoSetHistory.Last().StoreAction(ActionHistoryHelper.Operation.Add, outpoint, output.ScriptPubKey);
								}
								scripts.Add(output.ScriptPubKey);
							}
						}

						foreach (var input in tx.Inputs)
						{
							// NOTE(review): SingleOrDefault scan + indexer lookup does multiple dictionary
							// traversals; a TryGetValue-based lookup would be cheaper — confirm and unify.
							var found = Bech32UtxoSet.SingleOrDefault(x => x.Key == input.PrevOut);
							if (found.Key != default)
							{
								Script val = Bech32UtxoSet[input.PrevOut];
								Bech32UtxoSet.Remove(input.PrevOut);
								if (!isIIB)
								{
									Bech32UtxoSetHistory.Last().StoreAction(ActionHistoryHelper.Operation.Remove, input.PrevOut, val);
								}
								scripts.Add(found.Value);
							}
						}
					}

					// https://github.com/bitcoin/bips/blob/master/bip-0158.mediawiki
					// The parameter k MUST be set to the first 16 bytes of the hash of the block for which the filter
					// is constructed. This ensures the key is deterministic while still varying from block to block.
					var key = block.GetHash().ToBytes().Take(16).ToArray();
					GolombRiceFilter filter = null;
					if (scripts.Count != 0)
					{
						filter = GolombRiceFilter.Build(key, scripts.Select(x => x.ToCompressedBytes()));
					}

					var filterModel = new FilterModel
					{
						BlockHash = block.GetHash(),
						BlockHeight = height,
						Filter = filter
					};

					await File.AppendAllLinesAsync(IndexFilePath, new[] { filterModel.ToLine() });
					using (await IndexLock.LockAsync())
					{
						Index.Add(filterModel);
					}

					// Rewrite the UTXO-set snapshot from scratch after every block.
					if (File.Exists(Bech32UtxoSetFilePath))
					{
						File.Delete(Bech32UtxoSetFilePath);
					}
					await File.WriteAllLinesAsync(Bech32UtxoSetFilePath, Bech32UtxoSet
						.Select(entry => entry.Key.Hash + ":" + entry.Key.N + ":" + ByteHelpers.ToHex(entry.Value.ToCompressedBytes())));

					if (blockCount - height <= 3 || height % 100 == 0) // If not close to the tip, just log debug.
					{
						Logger.LogInfo<IndexBuilderService>($"Created filter for block: {height}.");
					}
					else
					{
						Logger.LogDebug<IndexBuilderService>($"Created filter for block: {height}.");
					}
				}
				catch (Exception ex)
				{
					// Best effort: log and retry on the next loop iteration.
					Logger.LogDebug<IndexBuilderService>(ex);
				}
			}
		}
		finally
		{
			if (IsStopping)
			{
				Interlocked.Exchange(ref _running, 3);
			}
		}
	});
}
/// <summary>
/// Starts the index-building loop on a background task: fetches blocks one by one over RPC,
/// maintains the bech32 (P2WPKH) UTXO set, builds one Golomb-Rice filter per block and appends
/// it to the on-disk index. Single-block reorgs are handled only while "immature" (within the
/// last ~100 blocks). The _runner/_running counters ensure only one loop runs at a time.
/// </summary>
public void Synchronize()
{
	Task.Run(async () =>
	{
		try
		{
			// At most one waiting starter besides the running one.
			if (Interlocked.Read(ref _runner) >= 2)
			{
				return;
			}

			Interlocked.Increment(ref _runner);
			while (Interlocked.Read(ref _runner) != 1)
			{
				await Task.Delay(100);
			}

			if (Interlocked.Read(ref _running) >= 2)
			{
				return;
			}

			try
			{
				Interlocked.Exchange(ref _running, 1);
				var isImmature = false; // The last 100 blocks are reorgable. (Assume it is mature at first.)
				SyncInfo syncInfo = null;
				while (IsRunning)
				{
					try
					{
						// If we didn't yet initialized syncInfo, do so.
						if (syncInfo is null)
						{
							syncInfo = await GetSyncInfoAsync();
						}

						Height heightToRequest = StartingHeight;
						uint256 currentHash = null;
						using (await IndexLock.LockAsync())
						{
							if (Index.Count != 0)
							{
								var lastIndex = Index.Last();
								heightToRequest = lastIndex.BlockHeight + 1;
								currentHash = lastIndex.BlockHash;
							}
						}

						// If not synchronized or already 5 min passed since last update, get the latest blockchain info.
						// BUGFIX: the elapsed time was computed as (BlockchainInfoUpdated - UtcNow), which is
						// negative for a timestamp in the past, so the 5-minute refresh never triggered.
						if (!syncInfo.IsCoreSynchornized || DateTimeOffset.UtcNow - syncInfo.BlockchainInfoUpdated > TimeSpan.FromMinutes(5))
						{
							syncInfo = await GetSyncInfoAsync();
						}

						if (syncInfo.BlockCount - heightToRequest <= 100)
						{
							// Both Wasabi and our Core node is in sync. Start doing stuff through P2P from now on.
							if (syncInfo.IsCoreSynchornized && syncInfo.BlockCount == heightToRequest - 1)
							{
								syncInfo = await GetSyncInfoAsync(); // Double it to make sure not to accidentally miss any notification.
								if (syncInfo.IsCoreSynchornized && syncInfo.BlockCount == heightToRequest - 1)
								{
									// Mark the process notstarted, so it can be started again and finally block can mark it is stopped.
									Interlocked.Exchange(ref _running, 0);
									return;
								}
							}

							// Mark the synchronizing process is working with immature blocks from now on.
							isImmature = true;
						}

						Block block = await RpcClient.GetBlockAsync(heightToRequest);

						// Reorg check, except if we're requesting the starting height, because then the "currentHash" wouldn't exist.
						if (heightToRequest != StartingHeight && currentHash != block.Header.HashPrevBlock)
						{
							// Reorg can happen only when immature. (If it'd not be immature, that'd be a huge issue.)
							if (isImmature)
							{
								await ReorgOneAsync();
							}
							else
							{
								Logger.LogCritical<IndexBuilderService>("This is something serious! Over 100 block reorg is noticed! We cannot handle that!");
							}

							// Skip the current block.
							continue;
						}

						if (isImmature)
						{
							PrepareBech32UtxoSetHistory();
						}

						var scripts = new HashSet<Script>();
						foreach (var tx in block.Transactions)
						{
							// If stop was requested return.
							// Because this tx iteration can take even minutes
							// It doesn't need to be accessed with a thread safe fasion with Interlocked through IsRunning, this may have some performance benefit
							if (_running != 1)
							{
								return;
							}

							for (int i = 0; i < tx.Outputs.Count; i++)
							{
								var output = tx.Outputs[i];
								if (output.ScriptPubKey.IsScriptType(ScriptType.P2WPKH))
								{
									var outpoint = new OutPoint(tx.GetHash(), i);
									Bech32UtxoSet.Add(outpoint, output.ScriptPubKey);
									if (isImmature)
									{
										Bech32UtxoSetHistory.Last().StoreAction(ActionHistoryHelper.Operation.Add, outpoint, output.ScriptPubKey);
									}
									scripts.Add(output.ScriptPubKey);
								}
							}

							foreach (var input in tx.Inputs)
							{
								OutPoint prevOut = input.PrevOut;
								if (Bech32UtxoSet.TryGetValue(prevOut, out Script foundScript))
								{
									Bech32UtxoSet.Remove(prevOut);
									if (isImmature)
									{
										Bech32UtxoSetHistory.Last().StoreAction(ActionHistoryHelper.Operation.Remove, prevOut, foundScript);
									}
									scripts.Add(foundScript);
								}
							}
						}

						GolombRiceFilter filter = null;
						if (scripts.Count != 0)
						{
							filter = new GolombRiceFilterBuilder()
								.SetKey(block.GetHash())
								.SetP(20)
								.SetM(1 << 20)
								.AddEntries(scripts.Select(x => x.ToCompressedBytes()))
								.Build();
						}

						var filterModel = new FilterModel
						{
							BlockHash = block.GetHash(),
							BlockHeight = heightToRequest,
							Filter = filter
						};

						await File.AppendAllLinesAsync(IndexFilePath, new[] { filterModel.ToHeightlessLine() });
						using (await IndexLock.LockAsync())
						{
							Index.Add(filterModel);
						}

						// Rewrite the UTXO-set snapshot from scratch after every block.
						if (File.Exists(Bech32UtxoSetFilePath))
						{
							File.Delete(Bech32UtxoSetFilePath);
						}
						await File.WriteAllLinesAsync(Bech32UtxoSetFilePath, Bech32UtxoSet
							.Select(entry => entry.Key.Hash + ":" + entry.Key.N + ":" + ByteHelpers.ToHex(entry.Value.ToCompressedBytes())));

						// If not close to the tip, just log debug.
						// Use height.Value instead of simply height, because it cannot be negative height.
						if (syncInfo.BlockCount - heightToRequest.Value <= 3 || heightToRequest % 100 == 0)
						{
							Logger.LogInfo<IndexBuilderService>($"Created filter for block: {heightToRequest}.");
						}
						else
						{
							Logger.LogDebug<IndexBuilderService>($"Created filter for block: {heightToRequest}.");
						}
					}
					catch (Exception ex)
					{
						// Best effort: log and retry on the next loop iteration.
						Logger.LogDebug<IndexBuilderService>(ex);
					}
				}
			}
			finally
			{
				Interlocked.CompareExchange(ref _running, 3, 2); // If IsStopping, make it stopped.
				Interlocked.Decrement(ref _runner);
			}
		}
		catch (Exception ex)
		{
			Logger.LogError<IndexBuilderService>($"Synchronization attempt failed to start: {ex}");
		}
	});
}
/// <summary>
/// Starts the background synchronization loop that walks the chain block by block,
/// maintains the in-memory bech32 UTXO set, builds a Golomb-Rice filter per block,
/// and appends each resulting <c>FilterModel</c> line to the index file on disk.
/// Fire-and-forget: the loop runs on a thread-pool task until stop is requested.
/// </summary>
/// <remarks>
/// NOTE(review): _running appears to act as a run-state flag (1 = running, 3 = stopped;
/// IsRunning / IsStopping presumably read it) — confirm against the field's declaration.
/// NOTE(review): exceptions thrown before the inner while loop (e.g. from
/// GetBlockCountAsync) are not caught here; the outer try has only a finally block,
/// so they would surface as an unobserved task exception — verify this is intended.
/// </remarks>
public void Synchronize()
{
	// Mark the service as running before the worker task starts.
	Interlocked.Exchange(ref _running, 1);

	Task.Run(async () =>
	{
		try
		{
			var blockCount = await RpcClient.GetBlockCountAsync();
			var isIIB = true; // Initial Index Building phase

			while (IsRunning)
			{
				try
				{
					// If stop was requested return.
					if (IsRunning == false)
					{
						return;
					}

					// Resume from the last indexed block (tip of the in-memory index),
					// or from the configured starting height on a fresh index.
					Height height = StartingHeight;
					uint256 prevHash = null;
					using (await IndexLock.LockAsync())
					{
						if (Index.Count != 0)
						{
							var lastIndex = Index.Last();
							height = lastIndex.BlockHeight + 1;
							prevHash = lastIndex.BlockHash;
						}
					}

					// Within 100 blocks of the node's tip we leave the initial index
					// building phase and start tracking rollback history for reorgs.
					if (blockCount - height <= 100)
					{
						isIIB = false;
					}

					Block block = null;
					try
					{
						block = await RpcClient.GetBlockAsync(height);
					}
					catch (RPCException) // if the block didn't come yet
					{
						// Best-effort poll: wait a second and retry the same height.
						await Task.Delay(1000);
						continue;
					}

					// Near the tip, notify subscribers about freshly processed blocks.
					if (blockCount - height <= 2)
					{
						NewBlock?.Invoke(this, block);
					}

					if (!(prevHash is null))
					{
						// In case of reorg:
						// Detected when the fetched block does not link to our last
						// indexed hash. Roll back exactly one block, then retry.
						if (prevHash != block.Header.HashPrevBlock && !isIIB) // There is no reorg in IIB
						{
							Logger.LogInfo<IndexBuilderService>($"REORG Invalid Block: {prevHash}");
							// 1. Rollback index
							using (await IndexLock.LockAsync())
							{
								Index.RemoveLast();
							}

							// 2. Serialize Index. (Remove last line.)
							// NOTE(review): synchronous File.ReadAllLines/WriteAllLines inside an
							// async loop — presumably acceptable because reorgs are rare; confirm.
							var lines = File.ReadAllLines(IndexFilePath);
							File.WriteAllLines(IndexFilePath, lines.Take(lines.Length - 1).ToArray());

							// 3. Rollback Bech32UtxoSet
							if (Bech32UtxoSetHistory.Count != 0)
							{
								Bech32UtxoSetHistory.Last().Rollback(Bech32UtxoSet); // The Bech32UtxoSet MUST be recovered to its previous state.
								Bech32UtxoSetHistory.RemoveLast();

								// 4. Serialize Bech32UtxoSet.
								await File.WriteAllLinesAsync(Bech32UtxoSetFilePath, Bech32UtxoSet
									.Select(entry => entry.Key.Hash + ":" + entry.Key.N + ":" + ByteHelpers.ToHex(entry.Value.ToCompressedBytes())));
							}

							// 5. Skip the current block.
							continue;
						}
					}

					// Outside IIB, keep a bounded (100-entry) rollback history so a
					// reorg can restore the UTXO set to its pre-block state.
					if (!isIIB)
					{
						if (Bech32UtxoSetHistory.Count >= 100)
						{
							Bech32UtxoSetHistory.RemoveFirst();
						}
						Bech32UtxoSetHistory.Add(new ActionHistoryHelper());
					}

					// Scripts touched by this block; these become the filter entries.
					var scripts = new HashSet<Script>();

					foreach (var tx in block.Transactions)
					{
						// If stop was requested return.
						// Because this tx iteration can take even minutes
						// It doesn't need to be accessed with a thread safe fasion with Interlocked through IsRunning, this may have some performance benefit
						if (_running != 1)
						{
							return;
						}

						// Track newly created native segwit (bech32) outputs.
						for (int i = 0; i < tx.Outputs.Count; i++)
						{
							var output = tx.Outputs[i];
							if (!output.ScriptPubKey.IsPayToScriptHash && output.ScriptPubKey.IsWitness)
							{
								var outpoint = new OutPoint(tx.GetHash(), i);
								Bech32UtxoSet.Add(outpoint, output.ScriptPubKey);
								if (!isIIB)
								{
									Bech32UtxoSetHistory.Last().StoreAction(ActionHistoryHelper.Operation.Add, outpoint, output.ScriptPubKey);
								}
								scripts.Add(output.ScriptPubKey);
							}
						}

						// Remove spent outputs from the tracked set; spends are also
						// relevant to wallets, so their scripts go into the filter too.
						foreach (var input in tx.Inputs)
						{
							OutPoint prevOut = input.PrevOut;
							if (Bech32UtxoSet.TryGetValue(prevOut, out Script foundScript))
							{
								Bech32UtxoSet.Remove(prevOut);
								if (!isIIB)
								{
									Bech32UtxoSetHistory.Last().StoreAction(ActionHistoryHelper.Operation.Remove, prevOut, foundScript);
								}
								scripts.Add(foundScript);
							}
						}
					}

					// Build the BIP158-style filter (P=20, M=2^20) keyed to the block
					// hash; a block touching no tracked scripts yields a null filter.
					GolombRiceFilter filter = null;
					if (scripts.Count != 0)
					{
						filter = new GolombRiceFilterBuilder()
							.SetKey(block.GetHash())
							.SetP(20)
							.SetM(1 << 20)
							.AddEntries(scripts.Select(x => x.ToCompressedBytes()))
							.Build();
					}

					var filterModel = new FilterModel
					{
						BlockHash = block.GetHash(),
						BlockHeight = height,
						Filter = filter
					};

					// Persist the new filter line before publishing it in memory.
					await File.AppendAllLinesAsync(IndexFilePath, new[] { filterModel.ToLine() });

					using (await IndexLock.LockAsync())
					{
						Index.Add(filterModel);
					}

					// Rewrite the full UTXO-set snapshot ("hash:index:scriptHex" lines).
					if (File.Exists(Bech32UtxoSetFilePath))
					{
						File.Delete(Bech32UtxoSetFilePath);
					}
					await File.WriteAllLinesAsync(Bech32UtxoSetFilePath, Bech32UtxoSet
						.Select(entry => entry.Key.Hash + ":" + entry.Key.N + ":" + ByteHelpers.ToHex(entry.Value.ToCompressedBytes())));

					// If not close to the tip, just log debug.
					// Use height.Value instead of simply height, because it cannot be negative height.
					if (blockCount - height.Value <= 3 || height % 100 == 0)
					{
						Logger.LogInfo<IndexBuilderService>($"Created filter for block: {height}.");
					}
					else
					{
						Logger.LogDebug<IndexBuilderService>($"Created filter for block: {height}.");
					}
				}
				catch (Exception ex)
				{
					// Per-iteration failures are swallowed (debug-logged) so one bad
					// block/IO hiccup does not kill the whole synchronization loop.
					Logger.LogDebug<IndexBuilderService>(ex);
				}
			}
		}
		finally
		{
			// Transition to the stopped state only if a stop was actually requested.
			if (IsStopping)
			{
				Interlocked.Exchange(ref _running, 3);
			}
		}
	});
}
/// <summary>
/// Creates a filter model from a block header and its compact block filter.
/// </summary>
/// <param name="header">The smart header of the block the filter belongs to. Must not be null.</param>
/// <param name="filter">The Golomb-Rice filter built for the block. Must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="header"/> or <paramref name="filter"/> is null.</exception>
public FilterModel(SmartHeader header, GolombRiceFilter filter)
{
	// Validate both arguments up front, then assign.
	Guard.NotNull(nameof(header), header);
	Guard.NotNull(nameof(filter), filter);

	Header = header;
	Filter = filter;
}