public void RealScriptPubKeyFilterTest()
{
	// Generate 10k fresh segwit scriptPubKeys to index.
	var scripts = new List<Script>(capacity: 10_000);
	while (scripts.Count < 10_000)
	{
		scripts.Add(new Key().PubKey.GetSegwitAddress(Network.Main).ScriptPubKey);
	}

	var key = Hashes.Hash256(Encoding.ASCII.GetBytes("A key for testing"));

	var builder = new GolombRiceFilterBuilder()
		.SetKey(key)
		.SetP(32);
	foreach (var script in scripts)
	{
		builder = builder.AddScriptPubkey(script);
	}
	var filter = builder.Build();

	// Every indexed script must be found by MatchAny (no false negatives).
	var keyMatch = key.ToBytes().SafeSubarray(0, 16);
	foreach (var script in scripts)
	{
		Assert.True(filter.MatchAny(new[] { script.ToBytes() }, keyMatch));
	}
}
public void Setup()
{
	// Random 32-byte filter key; matching uses only its first 16 bytes.
	var keyBytes = new byte[32];
	_random.NextBytes(keyBytes);
	var key = new uint256(keyBytes);
	_testKey = key.ToBytes().Take(16).ToArray();

	var builder = new GolombRiceFilterBuilder()
		.SetKey(key)
		.SetP(20);

	// Populate the filter with N random byte strings of 20..29 bytes.
	var itemsInFilter = new List<byte[]>();
	for (var i = 0; i < N; i++)
	{
		var item = new byte[_random.Next(20, 30)];
		_random.NextBytes(item);
		itemsInFilter.Add(item);
	}
	builder.AddEntries(itemsInFilter);

	// Query sample: a randomly shuffled 0.5% subset of the indexed items.
	_sample = itemsInFilter.OrderBy(x => _random.Next()).Take(N / 200).ToArray();
	_filter = builder.Build();
}
public void GenerateTestVectorsTest()
{
	// BIP158 test vectors: each entry describes one block with the expected
	// basic/extended filters and filter headers for it.
	var tests = TestCase.read_json("data/bip158_vectors.json");

	// Skip(1): the first entry is the column-name header row.
	foreach (var test in tests.Skip(1))
	{
		var testBlockHeight = test[0];
		var testBlockHash = uint256.Parse((string)test[1]);
		var testBlock = Block.Parse((string)test[2]);
		var testPreviousBasicHeader = uint256.Parse((string)test[3]);
		var testPreviousExtHeader = uint256.Parse((string)test[4]);
		var testBasicFilter = (string)test[5];
		var testExtFilter = (string)test[6];
		var testBasicHeader = (string)test[7];
		var testExtHeader = (string)test[8];
		var message = (string)test[9];

		// Basic filter: build, serialize, and chain its header.
		var basicFilter = GolombRiceFilterBuilder.BuildBasicFilter(testBlock);
		Assert.Equal(testBasicFilter, basicFilter.ToString());
		Assert.Equal(testBasicHeader, basicFilter.GetHeader(testPreviousBasicHeader).ToString());

		// Same checks for the extended filter.
		var extFilter = GolombRiceFilterBuilder.BuildExtendedFilter(testBlock);
		Assert.Equal(testExtFilter, extFilter.ToString());
		Assert.Equal(testExtHeader, extFilter.GetHeader(testPreviousExtHeader).ToString());

		// Parsing the textual representation must round-trip exactly.
		var deserializedBasicFilter = GolombRiceFilter.Parse(testBasicFilter);
		Assert.Equal(testBasicFilter, deserializedBasicFilter.ToString());
		var deserializedExtFilter = GolombRiceFilter.Parse(testExtFilter);
		Assert.Equal(testExtFilter, deserializedExtFilter.ToString());
	}
}
public void GenerateTestVectorsTest()
{
	// BIP158 test vectors in CSV form; each row describes one block with its
	// expected basic/extended filters and filter headers.
	var testLines = File.ReadAllLines("data/bip158_vectors.csv");

	// Skip(1): the first line is the CSV header.
	foreach (var testLine in testLines.Skip(1))
	{
		var test = testLine.Split(',');
		var testBlockHeight = int.Parse(test[0]);
		var testBlockHash = uint256.Parse(test[1]);
		var testBlock = Block.Parse(test[2]);
		var testPreviousBasicHeader = uint256.Parse(test[3]);
		var testPreviousExtHeader = uint256.Parse(test[4]);
		var testBasicFilter = test[5];
		var testExtFilter = test[6];
		var testBasicHeader = test[7];
		var testExtHeader = test[8];

		// Basic filter: build, serialize, and chain its header.
		var basicFilter = GolombRiceFilterBuilder.BuildBasicFilter(testBlock);
		Assert.Equal(testBasicFilter, basicFilter.ToString());
		Assert.Equal(testBasicHeader, basicFilter.GetHeader(testPreviousBasicHeader).ToString());

		// An empty expected extended filter stands for the minimal "00" encoding.
		if (string.IsNullOrEmpty(testExtFilter))
		{
			testExtFilter = "00";
		}
		var extFilter = GolombRiceFilterBuilder.BuildExtendedFilter(testBlock);
		Assert.Equal(testExtFilter, extFilter.ToString());
		Assert.Equal(testExtHeader, extFilter.GetHeader(testPreviousExtHeader).ToString());

		// Parsing the textual representation must round-trip exactly.
		var deserializedBasicFilter = GolombRiceFilter.Parse(testBasicFilter);
		Assert.Equal(testBasicFilter, deserializedBasicFilter.ToString());
		var deserializedExtFilter = GolombRiceFilter.Parse(testExtFilter);
		Assert.Equal(testExtFilter, deserializedExtFilter.ToString());
	}
}
public void CanHandleDuplicatedValuesTest()
{
	// Three distinct arrays holding the exact same bytes.
	var first = new byte[] { 1, 2, 3, 4 };
	var second = new byte[] { 1, 2, 3, 4 };
	var third = new byte[] { 1, 2, 3, 4 };

	var filter = new GolombRiceFilterBuilder()
		.SetKey(Hashes.Hash256(new byte[] { 99, 99, 99, 99 }))
		.AddEntries(new[] { first, second, third })
		.AddScriptPubkey(Script.FromBytesUnsafe(first))
		.AddScriptPubkey(Script.FromBytesUnsafe(second))
		.AddScriptPubkey(Script.FromBytesUnsafe(third))
		.Build();

	// All duplicates must collapse into a single indexed element.
	Assert.Equal(1, filter.N);
}
public void EdgeCaseSipHashEqualZero()
{
	// Regression test for the edge case where the queried item's SipHash
	// comes out as zero (see the test name); it must not be a false match.
	var dummyScriptPubKey = Encoders.Hex.DecodeData("0009BBE4C2D17185643765C265819BF5261755247D");
	var blockHash = Encoders.Hex.DecodeData("CB4D1D1ED725B888173BEF553BBE2BF4237B42364BC90638F0CB040F87B57CD4");

	var filter = new GolombRiceFilterBuilder()
		.SetKey(new uint256(blockHash))
		.SetP(20)
		.SetM(1 << 20)
		.AddEntries(new[] { dummyScriptPubKey })
		.Build();

	var scriptPubKey = Encoders.Hex.DecodeData("D432CB07482718ECE932DA6914D1FDC1A8EACE3F127D");
	var key = blockHash.SafeSubarray(0, 16);

	Assert.False(filter.Match(scriptPubKey, key));
}
private FilterModel CreateFiltersWith(IEnumerable<byte[]> scripts)
{
	// A random block hash doubles as the Golomb-Rice filter key.
	var buffer = new byte[32];
	Random.NextBytes(buffer);
	var blockHash = new uint256(buffer);

	var builder = new GolombRiceFilterBuilder()
		.SetKey(blockHash)
		.SetP(20);
	builder.AddEntries(scripts);

	var header = new SmartHeader(new uint256(blockHash), uint256.One, 0, DateTimeOffset.UtcNow);
	return new FilterModel(header, builder.Build());
}
private FilterModel CreateFiltersWith(IEnumerable<byte[]> scripts)
{
	// A random block hash doubles as the Golomb-Rice filter key.
	var buffer = new byte[32];
	Random.NextBytes(buffer);
	var blockHash = new uint256(buffer);

	var builder = new GolombRiceFilterBuilder()
		.SetKey(blockHash)
		.SetP(20);
	builder.AddEntries(scripts);

	return new FilterModel
	{
		BlockHeight = 0,
		BlockHash = new uint256(blockHash),
		Filter = builder.Build(),
	};
}
public void CanSupportCustomeFiltersTest()
{
	// Builds a custom filter over the non-P2SH witness outputs of a known
	// testnet block and verifies a stable serialization plus full matching.
	var blockHex = File.ReadAllText("./data/block-testnet-828575.txt");
	var block = Block.Parse(blockHex, Network.TestNet);

	// Collect every native (non-P2SH) witness scriptPubKey in the block.
	// Note: the original index loop only existed to build an unused OutPoint;
	// it was removed, so a plain foreach suffices.
	var scripts = new HashSet<Script>();
	foreach (var tx in block.Transactions)
	{
		foreach (var output in tx.Outputs)
		{
			if (!output.ScriptPubKey.IsScriptType(ScriptType.P2SH) && output.ScriptPubKey.IsScriptType(ScriptType.Witness))
			{
				scripts.Add(output.ScriptPubKey);
			}
		}
	}

	var key = block.GetHash();
	var testKey = key.ToBytes().SafeSubarray(0, 16);

	var filter = new GolombRiceFilterBuilder()
		.SetP(20)
		.SetM(1U << 20)
		.SetKey(key)
		.AddEntries(scripts.Select(x => x.ToCompressedBytes()))
		.Build();

	// Known-good serialization for this block's witness scripts.
	Assert.Equal("017821b8", filter.ToString());

	// Every indexed script must match (no false negatives).
	foreach (var tx in block.Transactions)
	{
		foreach (var output in tx.Outputs)
		{
			if (!output.ScriptPubKey.IsScriptType(ScriptType.P2SH) && output.ScriptPubKey.IsScriptType(ScriptType.Witness))
			{
				Assert.True(filter.Match(output.ScriptPubKey.ToCompressedBytes(), testKey));
			}
		}
	}
}
public void Setup()
{
	// Deterministic benchmark setup: fixed seed so runs are comparable.
	var random = new Random(Seed: 145);
	var keyBytes = new byte[32];
	random.NextBytes(keyBytes);

	_builder = new GolombRiceFilterBuilder()
		.SetKey(new uint256(keyBytes))
		.SetP(20);

	// N random items of 20..29 bytes each.
	_itemsInFilter = new List<byte[]>();
	for (var i = 0; i < N; i++)
	{
		var item = new byte[random.Next(20, 30)];
		random.NextBytes(item);
		_itemsInFilter.Add(item);
	}
	_builder.AddEntries(_itemsInFilter);
}
public void BuildFilterAndMatchValues()
{
	// Materialize the queries up front: the original deferred IEnumerable was
	// re-evaluated (and GetBytes re-run) on every enumeration below.
	var names = new[] { "New York", "Amsterdam", "Paris", "Buenos Aires", "La Habana" }
		.Select(name => Encoding.ASCII.GetBytes(name))
		.ToArray();

	var key = Hashes.Hash256(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 });
	var filter = new GolombRiceFilterBuilder()
		.SetKey(key)
		.AddEntries(names)
		.SetP(0x10)
		.Build();

	var testKey = key.ToBytes().SafeSubarray(0, 16);

	// The filter should match all the values that were added.
	foreach (var name in names)
	{
		Assert.True(filter.Match(name, testKey));
	}

	// The filter should NOT match any extra value.
	Assert.False(filter.Match(Encoding.ASCII.GetBytes("Porto Alegre"), testKey));
	Assert.False(filter.Match(Encoding.ASCII.GetBytes("Madrid"), testKey));

	// The filter should match because it has one element indexed: Buenos Aires.
	var otherCities = new[] { "La Paz", "Barcelona", "El Cairo", "Buenos Aires", "Asunción" };
	var otherNames = otherCities.Select(name => Encoding.ASCII.GetBytes(name)).ToArray();
	Assert.True(filter.MatchAny(otherNames, testKey));

	// The filter should NOT match because it doesn't have any element indexed.
	var otherCities2 = new[] { "La Paz", "Barcelona", "El Cairo", "Córdoba", "Asunción" };
	var otherNames2 = otherCities2.Select(name => Encoding.ASCII.GetBytes(name)).ToArray();
	Assert.False(filter.MatchAny(otherNames2, testKey));
}
public void CanHandleCustomPandMValuesTest()
{
	// Verifies custom P/M parameters: matching works for all of them, the
	// serialized size depends only on P, and a smaller P yields a smaller filter.
	var byteArray0 = new byte[] { 1, 2, 3, 4 };
	var byteArray1 = new byte[] { 2, 3, 4 };
	var byteArray2 = new byte[] { 3, 4 };

	var key = Hashes.Hash256(new byte[] { 99, 99, 99, 99 });
	var testKey = key.ToBytes().SafeSubarray(0, 16);

	// Builds the same three-entry filter with the given P and M (the original
	// repeated this chain three times verbatim).
	GolombRiceFilter BuildFilter(int p, uint m) =>
		new GolombRiceFilterBuilder()
			.SetKey(key)
			.SetP(p)
			.SetM(m)
			.AddEntries(new[] { byteArray0, byteArray1, byteArray2 })
			.AddScriptPubkey(Script.FromBytesUnsafe(byteArray0))
			.AddScriptPubkey(Script.FromBytesUnsafe(byteArray1))
			.AddScriptPubkey(Script.FromBytesUnsafe(byteArray2))
			.Build();

	// Common match assertions: all three entries match, an absent value does not.
	void AssertMatches(GolombRiceFilter f)
	{
		Assert.True(f.Match(byteArray0, testKey));
		Assert.True(f.Match(byteArray1, testKey));
		Assert.True(f.Match(byteArray2, testKey));
		Assert.False(f.Match(new byte[] { 6, 7, 8 }, testKey));
	}

	var filter = BuildFilter(10, 1U << 10);
	var filterSize10_10 = filter.ToBytes().Length;
	Assert.Equal(3, filter.N);
	Assert.Equal(10, filter.P);
	Assert.Equal(1U << 10, filter.M);
	AssertMatches(filter);

	filter = BuildFilter(10, 1U << 4);
	var filterSize10_4 = filter.ToBytes().Length;
	Assert.Equal(3, filter.N);
	Assert.Equal(10, filter.P);
	Assert.Equal(1U << 4, filter.M);
	AssertMatches(filter);
	// Same P, different M: the serialized size must not change.
	Assert.Equal(filterSize10_4, filterSize10_10);

	filter = BuildFilter(8, 1U << 4);
	var filterSize8_4 = filter.ToBytes().Length;
	Assert.Equal(3, filter.N);
	Assert.Equal(8, filter.P);
	Assert.Equal(1U << 4, filter.M);
	AssertMatches(filter);
	Assert.True(filterSize8_4 < filterSize10_10); // Filter size depends only on the P parameter.
}
public void FalsePositivesTest()
{
	// Given this library can be used for building and querying filters for each block of
	// the bitcoin blockchain, we must be sure it performs well, especially in the queries.
	// Considering a 4MB block (overestimated) with an average transaction size of 250 bytes (underestimated)
	// gives us 16000 transactions (about 27 tx/sec). Assuming 2.5 txouts per tx we have 83885 txouts per block.
	const byte P = 20;
	const int blockCount = 100;
	const int maxBlockSize = 4_000_000;
	const int avgTxSize = 250; // Currently the average is around 1kb.
	const int txoutCountPerBlock = maxBlockSize / avgTxSize;
	const int avgTxoutPushDataSize = 20; // P2PKH scripts have 20 bytes.
	const int walletAddressCount = 1_000; // We estimate that our user will have 1000 addresses.

	var key = Hashes.Hash256(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 });
	var testKey = key.ToBytes().SafeSubarray(0, 16);

	// Generate blockCount simulated blocks full of random txout push data.
	var random = new Random();
	var blocks = new List<BlockFilter>(blockCount);
	for (var i = 0; i < blockCount; i++)
	{
		var builder = new GolombRiceFilterBuilder()
			.SetKey(key)
			.SetP(P);

		var txouts = new List<byte[]>(txoutCountPerBlock);
		for (var j = 0; j < txoutCountPerBlock; j++)
		{
			var pushDataBuffer = new byte[avgTxoutPushDataSize];
			random.NextBytes(pushDataBuffer);
			txouts.Add(pushDataBuffer);
		}

		builder.AddEntries(txouts);
		blocks.Add(new BlockFilter(builder.Build(), txouts));
	}

	// Simulate a wallet with random addresses, none of which exist in any block.
	var walletAddresses = new List<byte[]>(walletAddressCount);
	for (var i = 0; i < walletAddressCount; i++)
	{
		var walletAddress = new byte[avgTxoutPushDataSize];
		random.NextBytes(walletAddress);
		walletAddresses.Add(walletAddress);
	}

	// Count false positives: blocks claiming to contain one of our absent addresses.
	var falsePositiveCount = 0;
	foreach (var block in blocks)
	{
		if (block.Filter.MatchAny(walletAddresses, testKey))
		{
			falsePositiveCount++;
		}
	}
	Assert.True(falsePositiveCount < 5);

	// The filter has to match existing values: every single txout in every block.
	var falseNegativeCount = 0;
	foreach (var block in blocks)
	{
		if (!block.Filter.MatchAny(block.Data, testKey))
		{
			falseNegativeCount++;
		}
	}
	Assert.Equal(0, falseNegativeCount);
}
public void Build()
{
	// Benchmark body: builds a BIP158 basic filter for the sample big block.
	GolombRiceFilterBuilder.BuildBasicFilter(Sample.BigBlock);
}
public void Setup()
{
	// Download the sample block once and pre-build its basic filter for the benchmarks.
	Sample = new BlockSample();
	Sample.Download();
	BlockFilter = GolombRiceFilterBuilder.BuildBasicFilter(Sample.BigBlock);
}
public void Synchronize()
{
	// Background synchronization loop: builds one filter per block coming from
	// the RPC node and appends it to the on-disk index.
	// NOTE(review): this snippet appears truncated — the inner try has no visible
	// catch and the lambda is not closed here; tokens are kept exactly as found.
	Task.Run(async () =>
	{
		try
		{
			// Allow at most one extra queued worker besides the active one.
			if (Interlocked.Read(ref _workerCount) >= 2)
			{
				return;
			}
			Interlocked.Increment(ref _workerCount);
			// Wait until we are the only worker left before taking over.
			while (Interlocked.Read(ref _workerCount) != 1)
			{
				await Task.Delay(100);
			}
			if (IsStopping)
			{
				return;
			}
			try
			{
				Interlocked.Exchange(ref _serviceStatus, Running);
				SyncInfo syncInfo = null;
				while (IsRunning)
				{
					try
					{
						// If we did not yet initialize syncInfo, do so.
						if (syncInfo is null)
						{
							syncInfo = await GetSyncInfoAsync();
						}

						// Determine the current tip of our own index (height + hash).
						uint currentHeight = 0;
						uint256 currentHash = null;
						using (await IndexLock.LockAsync())
						{
							if (Index.Count != 0)
							{
								var lastIndex = Index[^1];
								currentHeight = lastIndex.Header.Height;
								currentHash = lastIndex.Header.BlockHash;
							}
							else
							{
								// Empty index: start from the configured starting height.
								currentHash = StartingHeight == 0
									? uint256.Zero
									: await RpcClient.GetBlockHashAsync((int)StartingHeight - 1);
								currentHeight = StartingHeight - 1;
							}
						}

						// Refresh the node's blockchain info when it is not synced,
						// we reached its tip, or the cached info is older than 5 minutes.
						var coreNotSynced = !syncInfo.IsCoreSynchornized;
						var tipReached = syncInfo.BlockCount == currentHeight;
						var isTimeToRefresh = DateTimeOffset.UtcNow - syncInfo.BlockchainInfoUpdated > TimeSpan.FromMinutes(5);
						if (coreNotSynced || tipReached || isTimeToRefresh)
						{
							syncInfo = await GetSyncInfoAsync();
						}

						// If wasabi filter height is the same as core we may be done.
						if (syncInfo.BlockCount == currentHeight)
						{
							// Check that core is fully synced.
							if (syncInfo.IsCoreSynchornized && !syncInfo.InitialBlockDownload)
							{
								// Mark the process notstarted, so it can be started again
								// and finally block can mark it as stopped.
								Interlocked.Exchange(ref _serviceStatus, NotStarted);
								return;
							}
							else
							{
								// Knots is catching up; give it 10 seconds.
								await Task.Delay(10000);
								continue;
							}
						}

						uint nextHeight = currentHeight + 1;
						uint256 blockHash = await RpcClient.GetBlockHashAsync((int)nextHeight);
						VerboseBlockInfo block = await RpcClient.GetVerboseBlockAsync(blockHash);

						// Check if we are still on the best chain,
						// if not rewind filters till we find the fork.
						if (currentHash != block.PrevBlockHash)
						{
							Logger.LogWarning("Reorg observed on the network.");
							await ReorgOneAsync();
							// Skip the current block.
							continue;
						}

						var scripts = FetchScripts(block);
						GolombRiceFilter filter;
						if (scripts.Any())
						{
							filter = new GolombRiceFilterBuilder()
								.SetKey(block.Hash)
								.SetP(20)
								.SetM(1 << 20)
								.AddEntries(scripts.Select(x => x.ToCompressedBytes()))
								.Build();
						}
						else
						{
							// We cannot have empty filters, because there was a bug in GolombRiceFilterBuilder that evaluates empty filters to true.
							// And this must be fixed in a backwards compatible way, so we create a fake filter with a random scp instead.
							filter = CreateDummyEmptyFilter(block.Hash);
						}

						var smartHeader = new SmartHeader(block.Hash, block.PrevBlockHash, nextHeight, block.BlockTime);
						var filterModel = new FilterModel(smartHeader, filter);

						// Persist first, then publish to the in-memory index under the lock.
						await File.AppendAllLinesAsync(IndexFilePath, new[] { filterModel.ToLine() });
						using (await IndexLock.LockAsync())
						{
							Index.Add(filterModel);
						}

						// If not close to the tip, just log debug.
						if (syncInfo.BlockCount - nextHeight <= 3 || nextHeight % 100 == 0)
						{
							Logger.LogInfo($"Created filter for block: {nextHeight}.");
						}
						else
						{
							Logger.LogDebug($"Created filter for block: {nextHeight}.");
						}
					}
public void Synchronize()
{
	// Index builder loop: scans blocks via RPC, maintains the bech32 UTXO set,
	// and writes one Golomb-Rice filter line per block to the index file.
	Interlocked.Exchange(ref _running, 1);
	Task.Run(async () =>
	{
		try
		{
			var blockCount = await RpcClient.GetBlockCountAsync();
			var isIIB = true; // Initial Index Building phase
			while (IsRunning)
			{
				try
				{
					// If stop was requested return.
					if (IsRunning == false)
					{
						return;
					}

					// Determine the next height to process and the hash of the last indexed block.
					Height height = StartingHeight;
					uint256 prevHash = null;
					using (await IndexLock.LockAsync())
					{
						if (Index.Count != 0)
						{
							var lastIndex = Index.Last();
							height = lastIndex.BlockHeight + 1;
							prevHash = lastIndex.BlockHash;
						}
					}

					// Within 100 blocks of the tip we leave the initial-index-building phase.
					if (blockCount - height <= 100)
					{
						isIIB = false;
					}

					Block block = null;
					try
					{
						block = await RpcClient.GetBlockAsync(height);
					}
					catch (RPCException) // if the block didn't come yet
					{
						await Task.Delay(1000);
						continue;
					}

					// Notify subscribers when we are at (or right next to) the tip.
					if (blockCount - height <= 2)
					{
						NewBlock?.Invoke(this, block);
					}

					if (!(prevHash is null))
					{
						// In case of reorg:
						if (prevHash != block.Header.HashPrevBlock && !isIIB) // There is no reorg in IIB
						{
							Logger.LogInfo<IndexBuilderService>($"REORG Invalid Block: {prevHash}");
							// 1. Rollback index
							using (await IndexLock.LockAsync())
							{
								Index.RemoveLast();
							}

							// 2. Serialize Index. (Remove last line.)
							var lines = File.ReadAllLines(IndexFilePath);
							File.WriteAllLines(IndexFilePath, lines.Take(lines.Length - 1).ToArray());

							// 3. Rollback Bech32UtxoSet
							if (Bech32UtxoSetHistory.Count != 0)
							{
								Bech32UtxoSetHistory.Last().Rollback(Bech32UtxoSet); // The Bech32UtxoSet MUST be recovered to its previous state.
								Bech32UtxoSetHistory.RemoveLast();

								// 4. Serialize Bech32UtxoSet.
								await File.WriteAllLinesAsync(Bech32UtxoSetFilePath, Bech32UtxoSet
									.Select(entry => entry.Key.Hash + ":" + entry.Key.N + ":" + ByteHelpers.ToHex(entry.Value.ToCompressedBytes())));
							}

							// 5. Skip the current block.
							continue;
						}
					}

					// Keep a bounded rollback history (~100 blocks) once out of IIB.
					if (!isIIB)
					{
						if (Bech32UtxoSetHistory.Count >= 100)
						{
							Bech32UtxoSetHistory.RemoveFirst();
						}
						Bech32UtxoSetHistory.Add(new ActionHistoryHelper());
					}

					var scripts = new HashSet<Script>();
					foreach (var tx in block.Transactions)
					{
						// If stop was requested return.
						// Because this tx iteration can take even minutes,
						// it doesn't need to be accessed in a thread-safe fashion with Interlocked through IsRunning; this may have some performance benefit.
						if (_running != 1)
						{
							return;
						}

						// Track new non-P2SH witness outputs in the UTXO set and index their scripts.
						for (int i = 0; i < tx.Outputs.Count; i++)
						{
							var output = tx.Outputs[i];
							if (!output.ScriptPubKey.IsPayToScriptHash && output.ScriptPubKey.IsWitness)
							{
								var outpoint = new OutPoint(tx.GetHash(), i);
								Bech32UtxoSet.Add(outpoint, output.ScriptPubKey);
								if (!isIIB)
								{
									Bech32UtxoSetHistory.Last().StoreAction(ActionHistoryHelper.Operation.Add, outpoint, output.ScriptPubKey);
								}
								scripts.Add(output.ScriptPubKey);
							}
						}

						// Remove spent outputs from the UTXO set and index their scripts too.
						foreach (var input in tx.Inputs)
						{
							OutPoint prevOut = input.PrevOut;
							if (Bech32UtxoSet.TryGetValue(prevOut, out Script foundScript))
							{
								Bech32UtxoSet.Remove(prevOut);
								if (!isIIB)
								{
									Bech32UtxoSetHistory.Last().StoreAction(ActionHistoryHelper.Operation.Remove, prevOut, foundScript);
								}
								scripts.Add(foundScript);
							}
						}
					}

					// A block with no relevant scripts produces a null filter here.
					GolombRiceFilter filter = null;
					if (scripts.Count != 0)
					{
						filter = new GolombRiceFilterBuilder()
							.SetKey(block.GetHash())
							.SetP(20)
							.SetM(1 << 20)
							.AddEntries(scripts.Select(x => x.ToCompressedBytes()))
							.Build();
					}

					var filterModel = new FilterModel
					{
						BlockHash = block.GetHash(),
						BlockHeight = height,
						Filter = filter
					};

					// Persist first, then publish to the in-memory index under the lock.
					await File.AppendAllLinesAsync(IndexFilePath, new[] { filterModel.ToLine() });
					using (await IndexLock.LockAsync())
					{
						Index.Add(filterModel);
					}

					// Rewrite the whole UTXO set file after each block.
					if (File.Exists(Bech32UtxoSetFilePath))
					{
						File.Delete(Bech32UtxoSetFilePath);
					}
					await File.WriteAllLinesAsync(Bech32UtxoSetFilePath, Bech32UtxoSet
						.Select(entry => entry.Key.Hash + ":" + entry.Key.N + ":" + ByteHelpers.ToHex(entry.Value.ToCompressedBytes())));

					// If not close to the tip, just log debug.
					// Use height.Value instead of simply height, because it cannot be negative height.
					if (blockCount - height.Value <= 3 || height % 100 == 0)
					{
						Logger.LogInfo<IndexBuilderService>($"Created filter for block: {height}.");
					}
					else
					{
						Logger.LogDebug<IndexBuilderService>($"Created filter for block: {height}.");
					}
				}
				catch (Exception ex)
				{
					Logger.LogDebug<IndexBuilderService>(ex);
				}
			}
		}
		finally
		{
			if (IsStopping)
			{
				Interlocked.Exchange(ref _running, 3);
			}
		}
	});
}
public void Synchronize()
{
	// Index builder loop: a single worker at a time scans blocks from the RPC
	// node, maintains the bech32 UTXO set, and appends one filter per block to
	// the index file.
	Task.Run(async () =>
	{
		try
		{
			// Allow at most one waiting runner besides the active one.
			if (Interlocked.Read(ref _runner) >= 2)
			{
				return;
			}
			Interlocked.Increment(ref _runner);
			// Wait until we are the only runner left before starting.
			while (Interlocked.Read(ref _runner) != 1)
			{
				await Task.Delay(100);
			}
			if (Interlocked.Read(ref _running) >= 2)
			{
				return;
			}
			try
			{
				Interlocked.Exchange(ref _running, 1);
				var isImmature = false; // The last 100 blocks are reorgable. (Assume it is mature at first.)
				SyncInfo syncInfo = null;
				while (IsRunning)
				{
					try
					{
						// If we did not yet initialize syncInfo, do so.
						if (syncInfo is null)
						{
							syncInfo = await GetSyncInfoAsync();
						}

						// Determine the next height to request and the hash of the last indexed block.
						uint heightToRequest = StartingHeight;
						uint256 currentHash = null;
						using (await IndexLock.LockAsync())
						{
							if (Index.Count != 0)
							{
								var lastIndex = Index.Last();
								heightToRequest = lastIndex.Header.Height + 1;
								currentHash = lastIndex.Header.BlockHash;
							}
						}

						// If not synchronized or already 5 min passed since last update, get the latest blockchain info.
						// NOTE(review): `BlockchainInfoUpdated - DateTimeOffset.UtcNow` is negative for past
						// timestamps, so this staleness condition looks inverted — verify the intended operands.
						if (!syncInfo.IsCoreSynchornized || syncInfo.BlockchainInfoUpdated - DateTimeOffset.UtcNow > TimeSpan.FromMinutes(5))
						{
							syncInfo = await GetSyncInfoAsync();
						}

						if (syncInfo.BlockCount - heightToRequest <= 100)
						{
							// Both Wasabi and our Core node is in sync. Start doing stuff through P2P from now on.
							if (syncInfo.IsCoreSynchornized && syncInfo.BlockCount == heightToRequest - 1)
							{
								syncInfo = await GetSyncInfoAsync();
								// Double it to make sure not to accidentally miss any notification.
								if (syncInfo.IsCoreSynchornized && syncInfo.BlockCount == heightToRequest - 1)
								{
									// Mark the process notstarted, so it can be started again and finally block can mark it is stopped.
									Interlocked.Exchange(ref _running, 0);
									return;
								}
							}

							// Mark the synchronizing process is working with immature blocks from now on.
							isImmature = true;
						}

						Block block = await RpcClient.GetBlockAsync(heightToRequest);

						// Reorg check, except if we're requesting the starting height, because then the "currentHash" wouldn't exist.
						if (heightToRequest != StartingHeight && currentHash != block.Header.HashPrevBlock)
						{
							// Reorg can happen only when immature. (If it'd not be immature, that'd be a huge issue.)
							if (isImmature)
							{
								await ReorgOneAsync();
							}
							else
							{
								Logger.LogCritical("This is something serious! Over 100 block reorg is noticed! We cannot handle that!");
							}

							// Skip the current block.
							continue;
						}

						if (isImmature)
						{
							PrepareBech32UtxoSetHistory();
						}

						var scripts = new HashSet<Script>();
						foreach (var tx in block.Transactions)
						{
							// If stop was requested return.
							// Because this tx iteration can take even minutes,
							// it does not need to be accessed in a thread-safe fashion with Interlocked through IsRunning; this may have some performance benefit.
							if (_running != 1)
							{
								return;
							}

							// Track new P2WPKH outputs in the UTXO set and index their scripts.
							for (int i = 0; i < tx.Outputs.Count; i++)
							{
								var output = tx.Outputs[i];
								if (output.ScriptPubKey.IsScriptType(ScriptType.P2WPKH))
								{
									var outpoint = new OutPoint(tx.GetHash(), i);
									var utxoEntry = new UtxoEntry(outpoint, output.ScriptPubKey);
									Bech32UtxoSet.Add(outpoint, utxoEntry);
									if (isImmature)
									{
										Bech32UtxoSetHistory.Last().StoreAction(Operation.Add, outpoint, output.ScriptPubKey);
									}
									scripts.Add(output.ScriptPubKey);
								}
							}

							// Remove spent outputs from the UTXO set and index their scripts too.
							foreach (var input in tx.Inputs)
							{
								OutPoint prevOut = input.PrevOut;
								if (Bech32UtxoSet.TryGetValue(prevOut, out UtxoEntry foundUtxoEntry))
								{
									var foundScript = foundUtxoEntry.Script;
									Bech32UtxoSet.Remove(prevOut);
									if (isImmature)
									{
										Bech32UtxoSetHistory.Last().StoreAction(Operation.Remove, prevOut, foundScript);
									}
									scripts.Add(foundScript);
								}
							}
						}

						GolombRiceFilter filter;
						if (scripts.Any())
						{
							filter = new GolombRiceFilterBuilder()
								.SetKey(block.GetHash())
								.SetP(20)
								.SetM(1 << 20)
								.AddEntries(scripts.Select(x => x.ToCompressedBytes()))
								.Build();
						}
						else
						{
							// We cannot have empty filters, because there was a bug in GolombRiceFilterBuilder that evaluates empty filters to true.
							// And this must be fixed in a backwards compatible way, so we create a fake filter with a random scp instead.
							filter = CreateDummyEmptyFilter(block.GetHash());
						}

						var smartHeader = new SmartHeader(block.GetHash(), block.Header.HashPrevBlock, heightToRequest, block.Header.BlockTime);
						var filterModel = new FilterModel(smartHeader, filter);

						// Persist first, then publish to the in-memory index under the lock.
						await File.AppendAllLinesAsync(IndexFilePath, new[] { filterModel.ToLine() });
						using (await IndexLock.LockAsync())
						{
							Index.Add(filterModel);
						}

						// Rewrite the full UTXO set file after every block.
						if (File.Exists(Bech32UtxoSetFilePath))
						{
							File.Delete(Bech32UtxoSetFilePath);
						}
						var bech32UtxoSetLines = Bech32UtxoSet.Select(entry => entry.Value.Line);

						// Keep it sync unless you fix the performance issue with async.
						File.WriteAllLines(Bech32UtxoSetFilePath, bech32UtxoSetLines);

						// If not close to the tip, just log debug.
						// Use height.Value instead of simply height, because it cannot be negative height.
						if (syncInfo.BlockCount - heightToRequest <= 3 || heightToRequest % 100 == 0)
						{
							Logger.LogInfo($"Created filter for block: {heightToRequest}.");
						}
						else
						{
							Logger.LogDebug($"Created filter for block: {heightToRequest}.");
						}
					}
					catch (Exception ex)
					{
						Logger.LogDebug(ex);
					}
				}
			}
			finally
			{
				Interlocked.CompareExchange(ref _running, 3, 2); // If IsStopping, make it stopped.
				Interlocked.Decrement(ref _runner);
			}
		}
		catch (Exception ex)
		{
			Logger.LogError($"Synchronization attempt failed to start: {ex}");
		}
	});
}