/// <summary>
/// Scans every blockchain data file under <paramref name="datapath"/> in parallel
/// (one task per file) and invokes <paramref name="callback"/> once for each
/// transaction found. For every transaction it also enqueues two pairs onto the
/// shared queue: (transaction hash -> block hash) and (block hash -> file path).
/// </summary>
/// <param name="datapath">Path to the directory containing the blockchain files.</param>
/// <param name="callback">Invoked for each (stored block, transaction) pair discovered.</param>
public void Runner(string datapath, Action<StoredBlock, Transaction> callback)
{
    var store = new BlockStore(datapath, Network.Main);
    var tasks = new ConcurrentBag<Task>();

    // Matching files are processed newest-path-first; each file gets its own task.
    var orderedFiles = new Helper()
        .GetFiles(datapath, store.FileRegex)
        .OrderByDescending(f => f.FullName);

    foreach (var blockFile in orderedFiles)
    {
        tasks.Add(Task.Run(() =>
        {
            foreach (var storedBlock in store.EnumerateFile(blockFile))
            {
                foreach (var tx in storedBlock.Item.Transactions)
                {
                    callback(storedBlock, tx);

                    byte[] txHash = tx.GetHash().ToBytes();
                    byte[] blockHash = storedBlock.Item.GetHash().ToBytes();

                    // Transaction hash -> hash of the block that contains it.
                    queue.Enqueue(new KeyValuePair<byte[], byte[]>(txHash, blockHash));
                    // Block hash -> full path of the file the block was read from.
                    queue.Enqueue(new KeyValuePair<byte[], byte[]>(
                        blockHash,
                        System.Text.Encoding.UTF8.GetBytes(blockFile.FullName)));
                }
            }
        }));
    }

    Task.WaitAll(tasks.ToArray());
}
/// <summary>
/// Reads blk*.dat files from <paramref name="directory"/> on a pool of processor
/// tasks, converts each block and its transactions into document models, and
/// bulk-inserts them into <paramref name="store"/> on a pool of writer tasks.
/// Runs until the user presses 'Q', then drains the in-memory queue so no
/// already-processed block is lost, and waits for all tasks to finish.
/// </summary>
/// <param name="directory">Directory containing the blockchain blk*.dat files.</param>
/// <param name="store">Document store that receives the bulk inserts.</param>
private static void InsertMultiThreaded(DirectoryInfo directory, DocumentStore store)
{
    // Bounded queue applies back-pressure so processors cannot outrun the writers.
    var queue = new BlockingCollection<Tuple<Block, List<Transaction>>>(500);
    var filesQueue = new BlockingCollection<FileInfo>();

    var files = directory.EnumerateFiles("blk*.dat")
        .OrderByDescending(x => x.Name);
    foreach (var file in files)
    {
        filesQueue.Add(file);
    }

    var source = new CancellationTokenSource();
    int processedRecords = 0;
    int insertedRecords = 0;
    var workers = new List<Task>();

    // Writer pool: pulls block/transaction batches off the queue and bulk-inserts them.
    for (int i = 0; i < 3; i++)
    {
        // The token is passed as *state* only, not as the task's CancellationToken:
        // a task cancelled before it starts would otherwise make Task.WaitAll throw
        // an AggregateException after source.Cancel() below. LongRunning keeps these
        // permanent loops off the shared thread pool.
        var worker = Task.Factory.StartNew(t =>
        {
            var token = (CancellationToken)t;
            // Dispose the bulk-insert session so any buffered documents are flushed.
            using (var bulkInsert = store.BulkInsert())
            {
                // Keep draining after cancellation so blocks that were already
                // processed and queued are not silently dropped on shutdown.
                while (!token.IsCancellationRequested || queue.Count > 0)
                {
                    // Block for up to 250 ms instead of spinning with Thread.Sleep.
                    if (!queue.TryTake(out var tuple, 250))
                    {
                        continue;
                    }

                    bulkInsert.Store(tuple.Item1);
                    foreach (Transaction tx in tuple.Item2)
                    {
                        bulkInsert.Store(tx);
                    }
                    Interlocked.Increment(ref insertedRecords);
                }
            }
        }, (object)source.Token, CancellationToken.None,
           TaskCreationOptions.LongRunning, TaskScheduler.Default);
        workers.Add(worker);
    }

    // Processor pool: parses block files into document models and enqueues them.
    for (int i = 0; i < 10; i++)
    {
        var processor = Task.Factory.StartNew(t =>
        {
            var token = (CancellationToken)t;
            while (!token.IsCancellationRequested)
            {
                // Block for up to 250 ms waiting for the next file.
                if (!filesQueue.TryTake(out var file, 250))
                {
                    continue;
                }

                Console.WriteLine();
                Console.WriteLine($"Processing {file.Name}");
                var blockStore = new BlockStore(file.DirectoryName, Network.Main);
                foreach (var blk in blockStore.EnumerateFile(file))
                {
                    if (blk.Item == null)
                    {
                        continue;
                    }
                    try
                    {
                        var blockModel = new Block(blk.Item);
                        var transactions = new List<Transaction>();
                        foreach (var tx in blk.Item.Transactions)
                        {
                            if (tx == null)
                            {
                                continue;
                            }
                            transactions.Add(new Transaction(blk.Item, tx));
                        }
                        queue.Add(new Tuple<Block, List<Transaction>>(blockModel, transactions), token);
                    }
                    catch (OperationCanceledException)
                    {
                        // Shutdown was requested while enqueueing; stop this processor
                        // instead of logging the cancellation as an error.
                        return;
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine(e);
                    }
                    Interlocked.Increment(ref processedRecords);
                }
            }
        }, (object)source.Token, CancellationToken.None,
           TaskCreationOptions.LongRunning, TaskScheduler.Default);
        workers.Add(processor);
    }

    // Progress loop: report counts every 2 s until the user presses 'Q'.
    while (true)
    {
        while (Console.KeyAvailable == false)
        {
            Console.Write($"\rProcessed: {processedRecords}, Inserted: {insertedRecords}, In-Queue: {processedRecords - insertedRecords}");
            Thread.Sleep(2000); // Loop until input is entered.
        }
        var cki = Console.ReadKey();
        if (cki.Key == ConsoleKey.Q)
        {
            break;
        }
    }

    source.Cancel();
    Task.WaitAll(workers.ToArray());
    source.Dispose();
}