private static CachedTx AddSingleTransaction(string hash)
        {
            var tx_hash = new Dictionary <string, object>();

            tx_hash.Add("transactionHashes", new string[] { hash });
            //now try to add the individual hash
            try
            {
                var txs = RpcHelper.Request <TxDetailResp>("get_transaction_details_by_hashes", tx_hash);
                //map and return the first (and only) transaction returned for this hash
                foreach (var tx in txs.transactions)
                {
                    var cachedTx = TransactionHelpers.MapTx(tx);
                    return cachedTx;
                }
            }
            catch (Exception innerex)
            {
                //FAILED
                logger.LogError($"Failed to add hash: {hash}");
                LogException(innerex);
            }
            return null;
        }
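        // NOTE: not part of the original example - a minimal sketch of the two LiteDB document types
        // this class relies on. The property names are inferred from how the surrounding code uses
        // them; the real CachedTx and FailedHash classes will almost certainly carry more fields.
        public class CachedTx
        {
            public int Id { get; set; }            // LiteDB document id, also used to find the last cached tx
            public int height { get; set; }        // block height the transaction was mined in
            public string hash { get; set; }       // transaction hash, used to de-duplicate inserts
        }

        public class FailedHash
        {
            public int Id { get; set; }            // LiteDB document id
            public string hash { get; set; }       // the tx hash that could not be fetched
            public int height { get; set; }        // height the hash was seen at
            public string DbFile { get; set; }     // cache file the tx should eventually be written to
            public int FetchAttempts { get; set; } // incremented each time a retry fails
        }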
        public static void BuildCache(PerformContext context)
        {
            try
            {
                //first, we need to try again to fetch any failed or missing Tx's...
                using (var db = new LiteDatabase(string.Concat(AppContext.BaseDirectory, @"App_Data/", "failedHashes.db")))
                {
                    var failedTransactions = db.GetCollection <FailedHash>("failed_txs");
                    failedTransactions.EnsureIndex(x => x.height);
                    failedTransactions.EnsureIndex(x => x.hash);
                    var allFails = failedTransactions.FindAll().ToList();
                    foreach (var failedHash in allFails)
                    {
                        using (var txDb = new LiteDatabase(failedHash.DbFile))
                        {
                            var transactions = txDb.GetCollection <CachedTx>("cached_txs");
                            var date         = DateTime.Now;
                            // Index documents by height, hash and Id
                            transactions.EnsureIndex(x => x.height);
                            transactions.EnsureIndex(x => x.Id);
                            transactions.EnsureIndex(x => x.hash);
                            //try to fetch the transaction again...
                            var tx_args = new Dictionary <string, object>();
                            tx_args.Add("transactionHashes", new string[] { failedHash.hash });
                            try
                            {
                                var txs = RpcHelper.Request <TxDetailResp>("get_transaction_details_by_hashes", tx_args);
                                var transactionsToInsert = new List <CachedTx>();
                                foreach (var tx in txs.transactions)
                                {
                                    var cachedTx = TransactionHelpers.MapTx(tx);
                                    //persist tx's to cache
                                    if (cachedTx != null && !transactions.Find(x => x.hash == cachedTx.hash).Any())
                                    {
                                        transactionsToInsert.Add(cachedTx);
                                    }
                                }
                                if (transactionsToInsert.Any())
                                {
                                    transactions.InsertBulk(transactionsToInsert);
                                    logger.Log(LogLevel.Information, $"Added {transactionsToInsert.Count} transactions to cache.");
                                    //remove from failedTx DB
                                    foreach (var tx in transactionsToInsert)
                                    {
                                        var failedTx = failedTransactions.FindOne(x => x.hash == tx.hash);
                                        if (failedTx != null)
                                        {
                                            failedTransactions.Delete(failedTx.Id);
                                        }
                                    }
                                }
                            }
                            catch (Exception ex)
                            {
                                failedHash.FetchAttempts++;
                                logger.Log(LogLevel.Information, $"Failed to add single transaction {failedHash.hash} on attempt number {failedHash.FetchAttempts}.");
                                LogException(ex);
                                //update the failed Tx and restore it in DB to try again
                                failedTransactions.Update(failedHash);
                            }
                        }
                    }
                }

                //now we can process the pending transactions; first, we need to know the current BC Height
                //get bc height from RPC
                var currentHeight = 0;
                try
                {
                    currentHeight = RpcHelper.Request <GetHeightResp>("getheight").Height - 1;
                }
                catch
                {
                    currentHeight = 0;
                }
                var startHeight = 1;
                var endHeight   = ((currentHeight / 100) + 1) * 100; //round the current height up to the next multiple of 100
                logger.Log(LogLevel.Information, $"Processing transactions from blocks {startHeight} to {endHeight}");
                //now, split the current height into blocks of 100
                for (int i = startHeight; i <= endHeight; i += 100)
                {
                    var start = i;
                    var end   = i + 100 - 1;
                    //retrieve, transform and cache the blockchain in LiteDB
                    using (var db = new LiteDatabase(string.Concat(AppContext.BaseDirectory, @"App_Data/", "transactions_", start, "-", end, ".db")))
                    {
                        var transactions = db.GetCollection <CachedTx>("cached_txs");
                        var date         = DateTime.Now;
                        // Index documents by height, hash and Id
                        transactions.EnsureIndex(x => x.height);
                        transactions.EnsureIndex(x => x.Id);
                        transactions.EnsureIndex(x => x.hash);

                        try
                        {
                            //get the maxTxId
                            var lastTxId = transactions.Max(x => x.Id).AsInt32;
                            //get the last Tx we cached
                            var lastTx = transactions.FindOne(x => x.Id == lastTxId);
                            if (lastTx != null)
                            {
                                start = lastTx.height;
                                if (start == end)
                                {
                                    logger.Log(LogLevel.Information, $"Already cached transactions from blocks {startHeight} to {endHeight}");
                                    continue; //move to the next file...
                                }
                            }
                            else
                            {
                                start = i;
                            }
                        }
                        catch (Exception ex)
                        {
                            LogException(ex);
                        }
                        var counter  = 1;
                        var gCounter = start;

                        var txHashes = new List <string>();

                        if (currentHeight > start)
                        {
                            if (end > currentHeight)
                            {
                                end = currentHeight;
                            }
                            for (var j = start; j <= end; j++)
                            {
                                var height_args = new int[] { j };
                                var blockHash   = "";
                                try
                                {
                                    blockHash = RpcHelper.RequestJson <HashResp>("on_getblockhash", height_args).result;
                                }
                                catch { }
                                if (!string.IsNullOrEmpty(blockHash))
                                {
                                    //we already have the block hash for the height we're currently processing...
                                    var hash_args = new Dictionary <string, object>();
                                    hash_args.Add("hash", blockHash);
                                    //next, get the block itself and extract all the tx hashes....
                                    //if this fails we want it to exit, wait 30 seconds and start up again
                                    txHashes.AddRange(RpcHelper.RequestJson <BlockJsonResp>("f_block_json", hash_args).result.block.transactions.Select(x => x.hash).ToList <string>());
                                    if (counter == 50 || gCounter == currentHeight)
                                    {
                                        logger.Log(LogLevel.Information, $"Caching transactions at height {gCounter}");
                                        var tx_args = new Dictionary <string, object>();
                                        tx_args.Add("transactionHashes", txHashes.ToArray());

                                        //the query to fetch multiple transactions by hash appears to be timing out.
                                        //Two things have been changed:
                                        //first, the RPC request timeout has been increased from 60 to 320ms
                                        //second, if we get an error here, we revert to fetching this batch of Tx's individually
                                        try
                                        {
                                            var txs = RpcHelper.Request <TxDetailResp>("get_transaction_details_by_hashes", tx_args);
                                            var transactionsToInsert = new List <CachedTx>();
                                            foreach (var tx in txs.transactions)
                                            {
                                                var cachedTx = TransactionHelpers.MapTx(tx);
                                                //persist tx's to cache
                                                if (cachedTx != null && !transactions.Find(x => x.hash == cachedTx.hash).Any())
                                                {
                                                    transactionsToInsert.Add(cachedTx);
                                                }
                                            }
                                            if (transactionsToInsert.Any())
                                            {
                                                transactions.InsertBulk(transactionsToInsert);
                                                logger.Log(LogLevel.Information, $"Added {transactionsToInsert.Count} transactions to cache.");
                                                var distinctTxCount = transactionsToInsert.Select(x => x.height).Distinct().Count();
                                                if (distinctTxCount != 50 && gCounter != currentHeight)
                                                {
                                                    logger.Log(LogLevel.Warning, $"Potentially missing transactions at height {gCounter}, expected min 50, found {distinctTxCount}");
                                                    // so what do we do here, go back and try re-add the individual Tx's ?
                                                    //fetch them all individually and try insert one by one?
                                                }
                                                else
                                                {
                                                    //we've added the 50 hashes, so now clear the Tx Cache
                                                    txHashes.Clear(); // clear the current set of hashes
                                                }
                                            }
                                        }
                                        catch (Exception ex)
                                        {
                                            //so the fetching of TX's has failed, we need to loop the individual hashes and get them one by one to insert
                                            logger.Log(LogLevel.Warning, $"Failed to fetch transactions at {gCounter}, reverting to individual hashes");
                                            foreach (var hash in txHashes)
                                            {
                                                var cachedTx = AddSingleTransaction(hash);
                                                if (cachedTx != null)
                                                {
                                                    if (!transactions.Find(x => x.hash == cachedTx.hash).Any())
                                                    {
                                                        transactions.Insert(cachedTx);
                                                    }
                                                }
                                                else
                                                {
                                                    logger.LogError($"failed to fetch single transaction: {hash}");
                                                    //log the failed hash and come back to try to add it later on.
                                                    var failedHash = new FailedHash()
                                                    {
                                                        DbFile        = string.Concat(AppContext.BaseDirectory, @"App_Data/", "transactions_", start, "-", end, ".db"),
                                                        hash          = hash,
                                                        height        = gCounter,
                                                        FetchAttempts = 0
                                                    };
                                                    using (var failedHashDb = new LiteDatabase(string.Concat(AppContext.BaseDirectory, @"App_Data/", "failedHashes.db")))
                                                    {
                                                        var failedTransactions = failedHashDb.GetCollection <FailedHash>("failed_txs");
                                                        failedTransactions.EnsureIndex(x => x.height);
                                                        failedTransactions.EnsureIndex(x => x.hash);
                                                        failedTransactions.Insert(failedHash);
                                                    }
                                                }
                                            }
                                        }
                                        counter = 0;
                                        txHashes.Clear();
                                    }
                                    gCounter++;
                                    counter++;
                                }
                            }
                        }
                        //else there's nothing to do
                    }
                }
            }
            catch (Exception ex)
            {
                LogException(ex);
            }
            finally
            {
                //finally, schedule the next check in 30 seconds time
                BackgroundJob.Schedule(() => BlockchainCache.BuildCache(null), TimeSpan.FromSeconds(30));
            }
        }
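The examples above and below all lean on the same small set of RPC response wrappers (GetHeightResp, HashResp, BlockJsonResp, TxDetailResp, BlockResp). The shapes below are a hedged sketch inferred purely from how the code dereferences them (.Height, .result, .block.transactions and so on); the nested type names are placeholders, and the real classes will have more members. TxDetailResp and BlockResp follow the same pattern, wrapping a transactions / blocks collection respectively.

public class GetHeightResp
{
    public int Height { get; set; }                       // RpcHelper.Request<GetHeightResp>("getheight").Height
}

public class HashResp
{
    public string result { get; set; }                    // block hash returned by "on_getblockhash"
}

public class BlockJsonResp
{
    public BlockJsonResult result { get; set; }           // wrapper for the "f_block_json" response
}

public class BlockJsonResult
{
    public BlockJsonBlock block { get; set; }
}

public class BlockJsonBlock
{
    public List<TxHashEntry> transactions { get; set; }   // only the hash field is read by the caching code
}

public class TxHashEntry
{
    public string hash { get; set; }
}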
        public static void BuildCache(PerformContext context)
        {
            int batchSize = 10;

            try
            {
                //get bc height from RPC
                var currentHeight = 0;
                try
                {
                    currentHeight = RpcHelper.Request <GetHeightResp>("getheight").Height;
                }
                catch
                {
                    currentHeight = 0;
                }
                var startHeight = 1;
                var endHeight   = ((currentHeight / 10000) + 1) * 10000; //round the current height up to the next multiple of 10000
                logger.Log(LogLevel.Information, $"Processing transactions from blocks {startHeight} to {endHeight}");
                var clearCache = false;
                var firstPass  = true;
                //now, split the current height into blocks of 10000
                for (int i = startHeight; i <= endHeight; i += 10000)
                {
                    var start = i;
                    var end   = i + 10000 - 1;
                    if (clearCache)
                    {
                        if (firstPass) //delete the previous file if we've caught an error
                        {
                            //this file should always exist, as it's the one we caught the re-organisation in
                            System.IO.File.Delete(string.Concat(AppContext.BaseDirectory, @"App_Data/", "transactions_", (start - 10000), "-", (end - 10000), ".db"));
                            firstPass = false;
                        }
                        if (System.IO.File.Exists(string.Concat(AppContext.BaseDirectory, @"App_Data/", "transactions_", start, "-", end, ".db")))
                        {
                            System.IO.File.Delete(string.Concat(AppContext.BaseDirectory, @"App_Data/", "transactions_", start, "-", end, ".db"));
                        }
                        else
                        {
                            //we haven't created this file yet, so we can start to re-cache now...
                        }
                    }
                    else
                    {
                        //retrieve, transform and cache the blockchain in LiteDB
                        using (var db = new LiteDatabase(string.Concat(AppContext.BaseDirectory, @"App_Data/", "transactions_", start, "-", end, ".db")))
                        {
                            var transactions = db.GetCollection <CachedTx>("cached_txs");
                            var date         = DateTime.Now;
                            // Index documents by height, hash and Id
                            transactions.EnsureIndex(x => x.height);
                            transactions.EnsureIndex(x => x.Id);
                            transactions.EnsureIndex(x => x.hash);

                            try
                            {
                                //get the maxTxId
                                var lastTxId = transactions.Max(x => x.Id).AsInt32;
                                //get the last Tx we cached
                                var lastTx = transactions.FindOne(x => x.Id == lastTxId);
                                if (lastTx != null)
                                {
                                    start = lastTx.height;
                                    var lastHash = lastTx.hash;
                                    if (start == end - 1)
                                    {
                                        logger.Log(LogLevel.Information, $"Already cached transactions from blocks {startHeight} to {endHeight}");
                                        logger.Log(LogLevel.Information, $"Checking for changes to hash '{lastHash}' at height {start}");

                                        //query the node to make sure the hash at this height still matches the one in the cache...
                                        List <int> blockHeights = new List <int>();
                                        blockHeights.Add(start);
                                        var args = new Dictionary <string, object>();
                                        args.Add("blockHeights", blockHeights);
                                        var block     = RpcHelper.Request <BlockResp>("get_blocks_details_by_heights", args).blocks.FirstOrDefault();
                                        var foundHash = false;
                                        if (block != null)
                                        {
                                            foreach (var transaction in block.transactions)
                                            {
                                                if (transaction.hash == lastTx.hash)
                                                {
                                                    foundHash = true;
                                                }
                                            }
                                        }

                                        if (!foundHash)
                                        {
                                            //we have not found the hash in the block, so something has changed...
                                            //set the clearCache var so it deletes previous cache files on the next pass and then re-caches...
                                            clearCache = true;
                                            logger.Log(LogLevel.Information, $"hash '{lastHash}' not found at height {start}. Recaching...");
                                        }
                                        continue; //continue with the loop to clear the subsequent files...
                                    }
                                }
                                else
                                {
                                    start = i;
                                }
                            }
                            catch (Exception ex)
                            {
                                LogException(ex);
                            }
                            //TODO: this is currently re-importing the last batch of blocks on every run... need to fix that and add a checking mechanism
                            //to get it re-checking cached files...
                            if (currentHeight > start)
                            {
                                var counter = start;
                                if (end > currentHeight)
                                {
                                    end = currentHeight;
                                }
                                try
                                {
                                    var startBlock = (start / batchSize) * batchSize; //align the start to the first full batch boundary
                                    for (var y = startBlock; y <= end; y += batchSize)
                                    {
                                        var        endBlock     = end < y + batchSize ? end : y + batchSize;
                                        List <int> blockHeights = new List <int>();
                                        for (var x = y; x < endBlock; x++)
                                        {
                                            blockHeights.Add(x);
                                        }
                                        if (blockHeights.Any())
                                        {
                                            //fetch the transactions
                                            var args = new Dictionary <string, object>();
                                            args.Add("blockHeights", blockHeights);
                                            var blocksResponse = RpcHelper.Request <BlockResp>("get_blocks_details_by_heights", args);
                                            if (blocksResponse.blocks != null)
                                            {
                                                List <CachedTx> transactionsToInsert = new List <CachedTx>();
                                                foreach (var block in blocksResponse.blocks)
                                                {
                                                    foreach (var transaction in block.transactions)
                                                    {
                                                        var cachedTx = TransactionHelpers.MapTx(transaction);
                                                        //persist tx's to cache
                                                        if (cachedTx != null && !transactions.Find(x => x.hash == cachedTx.hash).Any())
                                                        {
                                                            transactionsToInsert.Add(cachedTx);
                                                        }
                                                    }
                                                }
                                                if (transactionsToInsert.Any())
                                                {
                                                    transactions.InsertBulk(transactionsToInsert);
                                                }
                                            }
                                            else
                                            {
                                                //try again with a smaller batch size, maybe do these in 10-block batches? (see the chunked-retry sketch after this example)
                                            }
                                        }
                                    }
                                }
                                catch (Exception ex)
                                {
                                    LogException(ex);
                                }
                            }
                            //else there's nothing to do
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                LogException(ex);
            }
            finally
            {
                logger.Log(LogLevel.Information, $"Job Completed, rescheduling...");
                //finally, schedule the next check in 30 seconds time
                BackgroundJob.Schedule(() => BlockchainCache.BuildCache(null), TimeSpan.FromSeconds(30));
            }
        }
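The "try again with a smaller batch size" note above is left unresolved in this example. One possible shape for it, sketched below under the assumption that the same RpcHelper / TransactionHelpers calls are available: when get_blocks_details_by_heights returns nothing for a full batch, re-request the same heights in small chunks and keep whatever comes back. FetchTxsInChunks and the 10-block chunk size are illustrative, not part of the original code.

        private static List<CachedTx> FetchTxsInChunks(List<int> blockHeights, int chunkSize = 10)
        {
            var recovered = new List<CachedTx>();
            for (var i = 0; i < blockHeights.Count; i += chunkSize)
            {
                var chunk = blockHeights.Skip(i).Take(chunkSize).ToList();
                try
                {
                    var args = new Dictionary<string, object>();
                    args.Add("blockHeights", chunk);
                    var resp = RpcHelper.Request<BlockResp>("get_blocks_details_by_heights", args);
                    if (resp.blocks == null)
                    {
                        continue; //this chunk is still failing; skip it and let a later run retry
                    }
                    foreach (var block in resp.blocks)
                    {
                        foreach (var transaction in block.transactions)
                        {
                            var cachedTx = TransactionHelpers.MapTx(transaction);
                            if (cachedTx != null)
                            {
                                recovered.Add(cachedTx);
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    logger.Log(LogLevel.Warning, $"Chunk starting at height {chunk.First()} still failed");
                    LogException(ex);
                }
            }
            return recovered;
        }

The else branch that currently only carries the comment could then call this with the failed blockHeights list and push the results through the existing de-duplication and InsertBulk path.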
Example #4
        public static void BuildCache(PerformContext context)
        {
            //TODO: Update this to work in a loop and store separate db files per 10,000 blocks...
            //should improve performance and make re-sync'ing / re-caching much easier...

            /*
             *  Some things to consider though:
             *  1. Need a mechanism to check that no files in the sequence are missing.
             *  2. The way we check "last cached tx" needs to be changed so it gets the latest file...
             *  3. Will also need to update the Blockchain Controller to get Tx's from the right cache file (see the path-lookup sketch after this example)
             */
            // So, first we need to

            //first, we need to know the current BC Height
            //get bc height from RPC
            var currentHeight = 0;

            try
            {
                currentHeight = RpcHelper.Request <GetHeightResp>("getheight").Height - 1;
            }
            catch
            {
                currentHeight = 0;
            }
            var startHeight = 1;
            var endHeight   = ((currentHeight / 10000) + 1) * 10000; //round the current height up to the next multiple of 10000

            //now, split the current height into blocks of 10000
            for (int i = startHeight; i <= endHeight; i += 10000)
            {
                var start = i;
                var end   = i + 10000 - 1;
                //retrieve, transform and cache the blockchain in LiteDB
                using (var db = new LiteDatabase(string.Concat(AppContext.BaseDirectory, @"App_Data\", "transactions_", start, "-", end, ".db")))
                {
                    var transactions = db.GetCollection <CachedTx>("cached_txs");
                    var date         = DateTime.Now;
                    // Index documents by height, hash and Id
                    transactions.EnsureIndex(x => x.height);
                    transactions.EnsureIndex(x => x.Id);
                    transactions.EnsureIndex(x => x.hash);

                    try
                    {
                        //get the maxTxId
                        var lastTxId = transactions.Max(x => x.Id).AsInt32;
                        //get the last Tx we cached
                        var lastTx = transactions.FindOne(x => x.Id == lastTxId);
                        if (lastTx != null)
                        {
                            start = lastTx.height;
                            if (start == end)
                            {
                                continue; //move to the next file...
                            }
                        }
                        else
                        {
                            start = i;
                        }
                    }
                    catch (Exception ex)
                    {
                        //todo: add logging
                    }
                    var counter  = 1;
                    var gCounter = start;

                    var txHashes = new List <string>();

                    if (currentHeight > start)
                    {
                        if (end > currentHeight)
                        {
                            end = currentHeight;
                        }
                        for (var j = start; j <= end; j++)
                        {
                            var height_args = new int[] { j };
                            var blockHash   = "";
                            try
                            {
                                blockHash = RpcHelper.RequestJson <HashResp>("on_getblockhash", height_args).result;
                            }
                            catch { }
                            if (!string.IsNullOrEmpty(blockHash))
                            {
                                //we already have the block hash for the height we're currently processing...
                                var hash_args = new Dictionary <string, object>();
                                hash_args.Add("hash", blockHash);
                                //next, get the block itself and extract all the tx hashes....
                                txHashes.AddRange(RpcHelper.RequestJson <BlockJsonResp>("f_block_json", hash_args).result.block.transactions.Select(x => x.hash).ToList <string>());

                                if (counter == 50 || gCounter == currentHeight)
                                {
                                    var tx_args = new Dictionary <string, object>();
                                    tx_args.Add("transactionHashes", txHashes.ToArray());
                                    var txs = RpcHelper.Request <TxDetailResp>("get_transaction_details_by_hashes", tx_args);
                                    var transactionsToInsert = new List <CachedTx>();
                                    foreach (var tx in txs.transactions)
                                    {
                                        var cachedTx = TransactionHelpers.MapTx(tx);
                                        //persist tx's to cache
                                        if (cachedTx != null && !transactions.Find(x => x.hash == cachedTx.hash).Any())
                                        {
                                            transactionsToInsert.Add(cachedTx);
                                        }
                                    }
                                    if (transactionsToInsert.Any())
                                    {
                                        transactions.InsertBulk(transactionsToInsert);
                                        //temp monitoring
                                        var heights = transactionsToInsert.Select(x => x.height).Distinct().OrderBy(x => x).ToList();
                                        System.IO.File.AppendAllText(string.Concat(AppContext.BaseDirectory, @"App_Data\", "moniter.txt"), counter + ", " + gCounter + ", " + string.Join(", ", heights) + Environment.NewLine); //output csv of heights captured to check against
                                    }
                                    counter = 0;
                                    txHashes.Clear();
                                }
                                gCounter++;
                                counter++;
                            }
                        }
                    }
                    //else there's nothing to do
                }
            }

            //finally, schedule the next check in 30 seconds time
            BackgroundJob.Schedule(() => BlockchainCache.BuildCache(null), TimeSpan.FromSeconds(30));
        }
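Point 3 of the TODO at the top of this example (pointing the Blockchain Controller at the right cache file) largely reduces to reversing the transactions_{start}-{end}.db naming used when writing. A hedged sketch, assuming the 10,000-block ranges this example produces (1-10000, 10001-20000, ...); GetCacheFileForHeight is illustrative and not an existing helper, and the forward-slash path separator follows the other variants shown here.

        private static string GetCacheFileForHeight(int height, int rangeSize = 10000)
        {
            var start = ((height - 1) / rangeSize) * rangeSize + 1; //1, 10001, 20001, ...
            var end   = start + rangeSize - 1;                      //10000, 20000, 30000, ...
            return string.Concat(AppContext.BaseDirectory, @"App_Data/", "transactions_", start, "-", end, ".db");
        }

A controller could then open that single file (ideally read-only) and query its cached_txs collection by height, rather than scanning every cache file.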
Example #5
        public static void BuildCache(PerformContext context)
        {
            int batchSize = 100;

            try
            {
                //get bc height from RPC
                var currentHeight = 0;
                try
                {
                    currentHeight = RpcHelper.Request <GetHeightResp>("getheight").Height;
                }
                catch
                {
                    currentHeight = 0;
                }
                var startHeight = 1;
                var endHeight   = ((currentHeight / 10000) + 1) * 10000; //round the current height up to the next multiple of 10000
                logger.Log(LogLevel.Information, $"Processing transactions from blocks {startHeight} to {endHeight}");
                //now, split the current height into blocks of 10000
                for (int i = startHeight; i <= endHeight; i += 10000)
                {
                    var start = i;
                    var end   = i + 10000 - 1;
                    //retrieve, transform and cache the blockchain in LiteDB
                    using (var db = new LiteDatabase(string.Concat(AppContext.BaseDirectory, @"App_Data/", "transactions_", start, "-", end, ".db")))
                    {
                        var transactions = db.GetCollection <CachedTx>("cached_txs");
                        var date         = DateTime.Now;
                        // Index documents by height, hash and Id
                        transactions.EnsureIndex(x => x.height);
                        transactions.EnsureIndex(x => x.Id);
                        transactions.EnsureIndex(x => x.hash);

                        try
                        {
                            //get the maxTxId
                            var lastTxId = transactions.Max(x => x.Id).AsInt32;
                            //get the last Tx we cached
                            var lastTx = transactions.FindOne(x => x.Id == lastTxId);
                            if (lastTx != null)
                            {
                                start = lastTx.height;
                                if (start == end - 1)
                                {
                                    logger.Log(LogLevel.Information, $"Already cached transactions from blocks {startHeight} to {endHeight}");
                                    continue; //move to the next file...
                                }
                            }
                            else
                            {
                                start = i;
                            }
                        }
                        catch (Exception ex)
                        {
                            LogException(ex);
                        }
                        //TODO: this is currently re-importing the last batch of blocks on every run... need to fix that and add a checking mechanism
                        //to get it re-checking cached files...
                        if (currentHeight > start)
                        {
                            var counter = start;
                            if (end > currentHeight)
                            {
                                end = currentHeight;
                            }
                            try
                            {
                                var startBlock = (start / batchSize) * batchSize; //align the start to the first full batch boundary
                                for (var y = startBlock; y <= end; y += batchSize)
                                {
                                    var        endBlock     = end < y + batchSize ? end : y + batchSize;
                                    List <int> blockHeights = new List <int>();
                                    for (var x = y; x < endBlock; x++)
                                    {
                                        blockHeights.Add(x);
                                    }
                                    if (blockHeights.Any())
                                    {
                                        //fetch the transactions
                                        var args = new Dictionary <string, object>();
                                        args.Add("blockHeights", blockHeights);
                                        var             blocks = RpcHelper.Request <BlockResp>("get_blocks_details_by_heights", args).blocks;
                                        List <CachedTx> transactionsToInsert = new List <CachedTx>();
                                        foreach (var block in blocks)
                                        {
                                            foreach (var transaction in block.transactions)
                                            {
                                                var cachedTx = TransactionHelpers.MapTx(transaction);
                                                //persist tx's to cache
                                                if (cachedTx != null && !transactions.Find(x => x.hash == cachedTx.hash).Any())
                                                {
                                                    transactionsToInsert.Add(cachedTx);
                                                }
                                            }
                                        }
                                        if (transactionsToInsert.Any())
                                        {
                                            transactions.InsertBulk(transactionsToInsert);
                                        }
                                    }
                                }
                            }
                            catch (Exception ex)
                            {
                                LogException(ex);
                            }
                        }
                        //else there's nothing to do
                    }
                }
            }
            catch (Exception ex)
            {
                LogException(ex);
            }
            finally
            {
                logger.Log(LogLevel.Information, $"Job Completed, rescheduling...");
                //finally, schedule the next check in 30 seconds time
                BackgroundJob.Schedule(() => BlockchainCache.BuildCache(null), TimeSpan.FromSeconds(30));
            }
        }
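Every variant of BuildCache reschedules itself from its finally block with BackgroundJob.Schedule, so something still has to enqueue the very first run. A minimal sketch of that initial kick-off, assuming Hangfire is already configured at application start-up; where exactly this line lives is an assumption, not something shown in the original code.

        //enqueue the first run once at start-up; every subsequent run is scheduled by BuildCache itself
        BackgroundJob.Enqueue(() => BlockchainCache.BuildCache(null));

        //alternatively, a recurring job could drive it, in which case the self-rescheduling in the
        //finally block should be removed to avoid doubling up:
        //RecurringJob.AddOrUpdate("build-cache", () => BlockchainCache.BuildCache(null), Cron.Minutely());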
Example #6
        public static void BuildCache(PerformContext context)
        {
            try
            {
                //first, we need to know the current BC Height
                //get bc height from RPC
                var currentHeight = 0;
                try
                {
                    currentHeight = RpcHelper.Request <GetHeightResp>("getheight").Height - 1;
                }
                catch
                {
                    currentHeight = 0;
                }
                var startHeight = 1;
                var endHeight   = ((currentHeight / 10000) + 1) * 10000; //round the current height up to the next multiple of 10000
                //now, split the current height into blocks of 10000
                for (int i = startHeight; i <= endHeight; i += 10000)
                {
                    var start = i;
                    var end   = i + 10000 - 1;
                    //retrieve, transform and cache the blockchain in LiteDB
                    using (var db = new LiteDatabase(string.Concat(AppContext.BaseDirectory, @"App_Data\", "transactions_", start, "-", end, ".db")))
                    {
                        var transactions = db.GetCollection <CachedTx>("cached_txs");
                        var date         = DateTime.Now;
                        // Index documents by height, hash and Id
                        transactions.EnsureIndex(x => x.height);
                        transactions.EnsureIndex(x => x.Id);
                        transactions.EnsureIndex(x => x.hash);

                        try
                        {
                            //get the maxTxId
                            var lastTxId = transactions.Max(x => x.Id).AsInt32;
                            //get the last Tx we cached
                            var lastTx = transactions.FindOne(x => x.Id == lastTxId);
                            if (lastTx != null)
                            {
                                start = lastTx.height;
                                if (start == end)
                                {
                                    continue; //move to the next file...
                                }
                            }
                            else
                            {
                                start = i;
                            }
                        }
                        catch (Exception ex)
                        {
                            //todo: add logging
                        }
                        var counter  = 1;
                        var gCounter = start;

                        var txHashes = new List <string>();

                        if (currentHeight > start)
                        {
                            if (end > currentHeight)
                            {
                                end = currentHeight;
                            }
                            for (var j = start; j <= end; j++)
                            {
                                var height_args = new int[] { j };
                                var blockHash   = "";
                                try
                                {
                                    blockHash = RpcHelper.RequestJson <HashResp>("on_getblockhash", height_args).result;
                                }
                                catch { }
                                if (!string.IsNullOrEmpty(blockHash))
                                {
                                    //we already have the block hash for the height we're currently processing...
                                    var hash_args = new Dictionary <string, object>();
                                    hash_args.Add("hash", blockHash);
                                    //next, get the block itself and extract all the tx hashes....
                                    //if it fails here we need to exit the loop
                                    txHashes.AddRange(RpcHelper.RequestJson <BlockJsonResp>("f_block_json", hash_args).result.block.transactions.Select(x => x.hash).ToList <string>());
                                    if (counter == 50 || gCounter == currentHeight)
                                    {
                                        var tx_args = new Dictionary <string, object>();
                                        tx_args.Add("transactionHashes", txHashes.ToArray());
                                        var txs = RpcHelper.Request <TxDetailResp>("get_transaction_details_by_hashes", tx_args);
                                        var transactionsToInsert = new List <CachedTx>();
                                        foreach (var tx in txs.transactions)
                                        {
                                            var cachedTx = TransactionHelpers.MapTx(tx);
                                            //persist tx's to cache
                                            if (cachedTx != null && !transactions.Find(x => x.hash == cachedTx.hash).Any())
                                            {
                                                transactionsToInsert.Add(cachedTx);
                                            }
                                        }
                                        if (transactionsToInsert.Any())
                                        {
                                            transactions.InsertBulk(transactionsToInsert);
                                            var heights = transactionsToInsert.Select(x => x.height).Distinct().OrderBy(x => x).ToList();
                                        }
                                        counter = 0;
                                        txHashes.Clear();
                                    }
                                    gCounter++;
                                    counter++;
                                }
                            }
                        }
                        //else there's nothing to do
                    }
                }
            }
            catch (Exception ex)
            {
                //todo: add logging
            }
            finally
            {
                //finally, schedule the next check in 30 seconds time
                BackgroundJob.Schedule(() => BlockchainCache.BuildCache(null), TimeSpan.FromSeconds(30));
            }
        }
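The logger field and the LogException helper are referenced throughout these snippets but never shown. A purely illustrative sketch of what they might look like, assuming Microsoft.Extensions.Logging; the real implementation may differ (for example, it may walk inner exceptions or write to a file sink).

        //illustrative only - the real field and helper are defined elsewhere in the class
        private static readonly ILogger logger = new LoggerFactory().CreateLogger("BlockchainCache");

        private static void LogException(Exception ex)
        {
            //log the message together with the full exception (stack trace, inner exceptions)
            logger.LogError(ex, ex.Message);
        }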