public Task SubscribeAsync(NewFilterInput filterInput, object id = null)
{
    return base.SubscribeAsync(BuildRequest(filterInput, id));
}

public RpcRequest BuildRequest(NewFilterInput filterInput, object id = null)
{
    return _ethLogsSubscriptionRequestBuilder.BuildRequest(filterInput, id);
}
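A minimal usage sketch for the pair above, assuming they live on one of Nethereum's observable log subscription types attached to an open streaming client (the subscription instance and contract address below are placeholders, not part of the example):

// Sketch only - logsSubscription and the address are assumptions.
var filter = new NewFilterInput
{
    Address = new[] { "0x0000000000000000000000000000000000000000" } // hypothetical contract address
};

await logsSubscription.SubscribeAsync(filter);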
Example #3
        public async Task WritingCustomMessagesToTheQueue()
        {
            // Load config
            //  - this will contain the secrets and connection strings we don't want to hard code
            var config = TestConfiguration.LoadConfig();
            string azureStorageConnectionString = config["AzureStorageConnectionString"];

            // Create a proxy for the blockchain
            var web3 = new Web3.Web3(TestConfiguration.BlockchainUrls.Infura.Mainnet);

            // Create Queue Factory
            //  - In this sample we're targeting Azure
            //  - The factory communicates with Azure to create and get different queues
            var queueFactory = new AzureSubscriberQueueFactory(azureStorageConnectionString);
            // Create a Queue
            //  - This is where we're going to put the matching event logs
            var queue = await queueFactory.GetOrCreateQueueAsync("makerdaotransferscustom");


            // Create an event subscription specifically for ERC20 Transfers
            // - Passing in the maker dao address to ensure only logs with a matching address are processed
            // - There is an option to pass an implementation of IEventHandlerHistoryRepository into the constructor
            // - This would record history for each event handler and is used to prevent duplication
            var eventSubscription = new EventSubscription<TransferEventDto>(
                contractAddressesToMatch: new[] { MAKER_CONTRACT_ADDRESS });

            // Create a mapper that will convert the DecodedEvent into a custom message we want on the queue
            // In this sample we're using a subscription that is specific to an EventDTO (EventSubscription<TransferEventDto>)
            // This ensures that the decodedEvent.DecodedEventDto property is populated during processing
            // ( If the subscription is not tied to an EventDTO the decodedEvent.DecodedEventDto property would be null
            //   BUT we can still read the event arguments (aka parameters or topics) from the decodedEvent.Event property)
            var queueMessageMapper = new QueueMessageMapper((decodedEvent) =>
            {
                return new CustomQueueMessageForTransfers
                {
                    BlockNumber = decodedEvent.Log.BlockNumber.Value.ToString(),
                    TransactionHash = decodedEvent.Log.TransactionHash,
                    LogIndex = decodedEvent.Log.LogIndex.Value.ToString(),
                    Transfer = decodedEvent.DecodedEventDto as TransferEventDto
                };
            });


            // Assign the queue to the event subscription
            // - Matching events will be written to this queue
            // - Pass a custom mapper to create a suitable queue message
            // - Ultimately the message is converted to json
            eventSubscription.AddQueueHandler(queue, queueMessageMapper);

            // Azure storage setup
            // - this example reads and writes block progress to an Azure storage table
            // - to avoid collision with other samples we provide a prefix
            var storageCloudSetup = new CloudTableSetup(azureStorageConnectionString, prefix: "makerdaotransferscustom");

            // Create a progress repository
            //  - It stores and retrieves the most recent block processed
            var blockProgressRepo = storageCloudSetup.CreateBlockProgressRepository();

            // Create a progress service
            // - This uses the progress repo to dictate what blocks to process next
            // - The MIN_BLOCK_NUMBER dictates the starting point if the progress repo is empty or has fallen too far behind
            var progressService = new BlockProgressService(web3, MIN_BLOCK_NUMBER, blockProgressRepo);

            // Create a filter
            //  - This is essentially the query that is sent to the chain when retrieving logs
            //  - It is OPTIONAL - without it, all logs in the block range are requested
            //  - The filter is invoked before any event subscriptions evaluate the logs
            //  - The subscriptions are free to implement their own matching logic
            //  - In this sample we're only interested in MakerDAO logs
            //  - Therefore it makes sense to restrict the number of logs to retrieve from the chain
            var makerAddressFilter = new NewFilterInput()
            {
                Address = new[] { MAKER_CONTRACT_ADDRESS }
            };

            // Create a log processor
            // - This uses the blockchainProxy to get the logs
            // - It sends each log to the event subscriptions to determine whether the log matches the subscription criteria
            // - It then allocates matching logs to separate batches per event subscription
            var logProcessor = new BlockRangeLogsProcessor(web3, new[] { eventSubscription }, makerAddressFilter);

            // Create a batch log processor service
            // - It uses the progress service to calculate the next block range to process
            // - It then invokes the log processor - passing in the range to process
            // - It updates progress via the progress service
            var batchProcessorService = new LogsProcessor(logProcessor, progressService, MAX_BLOCKS_PER_BATCH);

            // execute
            try
            {
                // Optional cancellation token
                // - Useful for cancelling long running processing operations
                var ctx = new System.Threading.CancellationTokenSource();

                // instruct the service to get and process the next range of blocks
                // when rangeProcessed is null it means there was nothing to process
                var rangeProcessed = await batchProcessorService.ProcessOnceAsync(ctx.Token);

                // ensure we have processed the expected number of events
                // the event subscription has state which can record running totals across many processing batches
                Assert.Equal(11, eventSubscription.State.GetInt("EventsHandled"));
                // get the message count from the queue
                Assert.Equal(11, await queue.GetApproxMessageCountAsync());

                //A sample message body from the queue

                /*
                 * {"BlockNumber":"7540010","TransactionHash":"0x8d58abc578f5e321f2e6b7c0637ccc60fbf62b39b120691cbf19ff201f5069b0","LogIndex":"132","Transfer":{"From":"0x296c61eaf5bea208bbabc65ae01c3bc5270fe386","To":"0x2a8f1a6af55b705b7daee0776d6f97302de2a839","Value":119928660890733235}}
                 */
            }
            finally
            {
                // delete any data from Azure
                await ClearDown(queue, storageCloudSetup, queueFactory);
            }
        }
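The custom queue message type and the ERC-20 transfer DTO referenced above are not shown in this listing. A sketch of what they might look like, assuming System.Numerics and Nethereum's event attributes; the property names follow the mapper code and the sample JSON above:

// Sketch only - hypothetical shapes, not the original definitions.
public class CustomQueueMessageForTransfers
{
    public string BlockNumber { get; set; }
    public string TransactionHash { get; set; }
    public string LogIndex { get; set; }
    public TransferEventDto Transfer { get; set; }
}

[Event("Transfer")]
public class TransferEventDto : IEventDTO
{
    [Parameter("address", "_from", 1, true)]
    public string From { get; set; }

    [Parameter("address", "_to", 2, true)]
    public string To { get; set; }

    [Parameter("uint256", "_value", 3, false)]
    public BigInteger Value { get; set; }
}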
Example #4
        public async Task QueueAllEventsForMakerDAOContract()
        {
            // Load config
            //  - this will contain the secrets and connection strings we don't want to hard code
            var config = TestConfiguration.LoadConfig();
            string azureStorageConnectionString = config["AzureStorageConnectionString"];

            // Create a proxy for the blockchain
            var web3 = new Web3.Web3(TestConfiguration.BlockchainUrls.Infura.Mainnet);

            // Create Queue Factory
            //  - In this sample we're targeting Azure
            //  - The factory communicates with Azure to create and get different queues
            var queueFactory = new AzureSubscriberQueueFactory(azureStorageConnectionString);
            // Create a Queue
            //  - This is where we're going to put the matching event logs
            var queue = await queueFactory.GetOrCreateQueueAsync("makerdaoevents");

            //  Get the maker DAO contract abi
            //  - from this we're able to match and decode the events in the contract
            var contractAbi = new ABIDeserialiser().DeserialiseContract(MAKER_DAO_ABI);

            // Create an event subscription for these events
            // - Passing in the maker dao address to ensure only logs with a matching address are processed
            // - There is an option to pass an implementation of IEventHandlerHistoryRepository into the constructor
            // - This would record history for each event handler and is used to prevent duplication
            var eventSubscription = new EventSubscription(contractAbi.Events, new[] { MAKER_CONTRACT_ADDRESS });

            // Assign the queue to the event subscription
            // - Matching events will be written to this queue
            // - By default a generic message is written to the queue
            // - The message contains the raw log (aka FilterLog), decoded event parameter values and event metadata
            // - Therefore the message schema is consistent across all messages sent to any queues
            // - However - should you require your own queue message schema the method below accepts a custom message mapper
            // - Ultimately the message is converted to json
            eventSubscription.AddQueueHandler(queue);

            // Azure storage setup
            // - this example reads and writes block progress to an Azure storage table
            // - to avoid collision with other samples we provide a prefix
            var storageCloudSetup = new CloudTableSetup(azureStorageConnectionString, prefix: "makerdao");

            // Create a progress repository
            //  - It stores and retrieves the most recent block processed
            var blockProgressRepo = storageCloudSetup.CreateBlockProgressRepository();

            // Create a progress service
            // - This uses the progress repo to dictate what blocks to process next
            // - The MIN_BLOCK_NUMBER dictates the starting point if the progress repo is empty or has fallen too far behind
            var progressService = new BlockProgressService(web3, MIN_BLOCK_NUMBER, blockProgressRepo);

            // Create a filter
            //  - This is essentially the query that is sent to the chain when retrieving logs
            //  - It is OPTIONAL - without it, all logs in the block range are requested
            //  - The filter is invoked before any event subscriptions evaluate the logs
            //  - The subscriptions are free to implement their own matching logic
            //  - In this sample we're only interested in MakerDAO logs
            //  - Therefore it makes sense to restrict the number of logs to retrieve from the chain
            var makerAddressFilter = new NewFilterInput()
            {
                Address = new[] { MAKER_CONTRACT_ADDRESS }
            };

            // Create a log processor
            // - This uses the blockchainProxy to get the logs
            // - It sends each log to the event subscriptions to determine whether the log matches the subscription criteria
            // - It then allocates matching logs to separate batches per event subscription
            var logProcessor = new BlockRangeLogsProcessor(web3, new[] { eventSubscription }, makerAddressFilter);

            // Create a batch log processor service
            // - It uses the progress service to calculate the next block range to process
            // - It then invokes the log processor - passing in the range to process
            // - It updates progress via the progress service
            var batchProcessorService = new LogsProcessor(logProcessor, progressService, MAX_BLOCKS_PER_BATCH);

            // execute
            try
            {
                // Optional cancellation token
                // - Useful for cancelling long running processing operations
                var ctx = new System.Threading.CancellationTokenSource();

                // instruct the service to get and process the next range of blocks
                // when rangeProcessed is null it means there was nothing to process
                var rangeProcessed = await batchProcessorService.ProcessOnceAsync(ctx.Token);

                // ensure we have processed the expected number of events
                // the event subscription has state which can record running totals across many processing batches
                Assert.Equal(16, eventSubscription.State.GetInt("EventsHandled"));
                // get the message count from the queue
                Assert.Equal(16, await queue.GetApproxMessageCountAsync());
            }
            finally
            {
                // delete any data from Azure
                await ClearDown(queue, storageCloudSetup, queueFactory);
            }
        }
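The constants shared by the two queue samples above (MAKER_CONTRACT_ADDRESS, MIN_BLOCK_NUMBER, MAX_BLOCKS_PER_BATCH) are defined elsewhere in the test class. Plausible values, consistent with the sample message above (block 7540010), are shown below as assumptions only:

// Assumed values - not taken from this listing.
// The address is the mainnet MKR token contract; the block numbers are illustrative.
private const string MAKER_CONTRACT_ADDRESS = "0x9f8f72aa9304c8b593d555f12ef6589cc3a579a2";
private const ulong MIN_BLOCK_NUMBER = 7540000;
private const uint MAX_BLOCKS_PER_BATCH = 100;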
Example #5
        public async Task<List<EventLog<T>>> GetAllChanges<T>(NewFilterInput filterInput) where T : new()
        {
            var logs = await ethGetLogs.SendRequestAsync(filterInput).ConfigureAwait(false);

            return DecodeAllEvents<T>(logs);
        }
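A usage sketch for the method above, assuming it is exposed by an event helper instance (named eventHandler here purely for illustration) and reusing the TransferEventDto sketched earlier; call from within an async method:

// Sketch only - eventHandler, the block numbers and the address are assumptions.
var filter = new NewFilterInput
{
    FromBlock = new BlockParameter(new HexBigInteger(7540000)),
    ToBlock = new BlockParameter(new HexBigInteger(7540100)),
    Address = new[] { "0x9f8f72aa9304c8b593d555f12ef6589cc3a579a2" }
};

List<EventLog<TransferEventDto>> transfers = await eventHandler.GetAllChanges<TransferEventDto>(filter);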
Example #6
        public async Task ExecuteAsync(ILogger logger)
        {
            if (!_config.Enabled)
            {
                logger.LogInformation($"{nameof(ProcessPurchaseOrderEventLogs)} is not enabled - see app settings");
                return;
            }

            const int RequestRetryWeight = 0; // see below for retry algorithm

            var web3 = new Web3.Web3(_eshopConfiguration.EthereumRpcUrl);
            var walletBuyerService        = new WalletBuyerService(web3, _eshopConfiguration.BuyerWalletAddress);
            var purchasingContractAddress = await walletBuyerService.PurchasingQueryAsync();

            var filter = new NewFilterInput {
                Address = new[] { purchasingContractAddress }
            };

            ILog log = logger.ToILog();

            EventLogProcessorHandler<PurchaseOrderCreatedLogEventDTO> poCreatedHandler =
                CreatePurchaseOrderCreatedHandler(logger);

            var logProcessorHandlers = new ProcessorHandler<FilterLog>[] { poCreatedHandler };

            IBlockchainProcessingOrchestrator orchestrator = new LogOrchestrator(
                ethApi: web3.Eth,
                logProcessors: logProcessorHandlers,
                filterInput: filter,
                defaultNumberOfBlocksPerRequest: (int)_config.NumberOfBlocksPerBatch,
                retryWeight: RequestRetryWeight);

            IWaitStrategy waitForBlockConfirmationsStrategy = new WaitStrategy();

            ILastConfirmedBlockNumberService lastConfirmedBlockNumberService =
                new LastConfirmedBlockNumberService(
                    web3.Eth.Blocks.GetBlockNumber,
                    waitForBlockConfirmationsStrategy,
                    _config.MinimumBlockConfirmations,
                    log);

            var processor = new BlockchainProcessor(
                orchestrator, BlockProgressRepository, lastConfirmedBlockNumberService);

            var cancellationToken = new CancellationTokenSource(_config.TimeoutMs);

            var currentBlockOnChain = await web3.Eth.Blocks.GetBlockNumber.SendRequestAsync();

            var blockToProcessTo   = currentBlockOnChain.Value - _config.MinimumBlockConfirmations;
            var lastBlockProcessed = await BlockProgressRepository.GetLastBlockNumberProcessedAsync();

            var minStartingBlock = _config.GetMinimumStartingBlock();

            logger.LogInformation(
                $"Processing logs. To Block: {blockToProcessTo},  Last Block Processed: {lastBlockProcessed ?? 0}, Min Block: {minStartingBlock}");

            await processor.ExecuteAsync(
                toBlockNumber: blockToProcessTo,
                cancellationToken: cancellationToken.Token,
                startAtBlockNumberIfNotProcessed: minStartingBlock);
        }
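CreatePurchaseOrderCreatedHandler is referenced above but not shown. A minimal sketch of what it might look like, assuming Nethereum's EventLogProcessorHandler<TEventDTO> accepts a callback invoked once per decoded log (the log message is illustrative):

// Sketch only - the handler body is an assumption, not the original implementation.
private static EventLogProcessorHandler<PurchaseOrderCreatedLogEventDTO> CreatePurchaseOrderCreatedHandler(ILogger logger)
{
    return new EventLogProcessorHandler<PurchaseOrderCreatedLogEventDTO>(eventLog =>
    {
        logger.LogInformation(
            $"PurchaseOrderCreated - tx: {eventLog.Log.TransactionHash}, block: {eventLog.Log.BlockNumber.Value}");
    });
}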
Example #7
        public async Task<List<EventLog<List<ParameterOutput>>>> GetAllChangesDefault(NewFilterInput filterInput)
        {
            if (!EventABI.IsFilterInputForEvent(ContractAddress, filterInput))
            {
                throw new Exception("Invalid filter input for current event, the filter input does not belong to this contract");
            }
            var logs = await EthGetLogs.SendRequestAsync(filterInput).ConfigureAwait(false);

            return EventABI.DecodeAllEventsDefaultTopics(logs);
        }
public static void SetBlockRange(this NewFilterInput filter, BlockRange range)
{
    filter.FromBlock = new BlockParameter(range.From);
    filter.ToBlock = new BlockParameter(range.To);
}
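A one-line usage sketch for the extension above, assuming BlockRange exposes a constructor taking the from/to block numbers:

// Sketch only - the BlockRange constructor signature is an assumption.
var filter = new NewFilterInput();
filter.SetBlockRange(new BlockRange(7540000, 7540100));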
Example #9
public async Task<List<EventLog<T>>> GetAllChanges<T>(NewFilterInput filterInput) where T : new()
{
    return DecodeAllEvents<T>(await GetAllChanges(filterInput));
}
public BlockchainLogProcessor(
    IEventLogProxy eventLogProxy,
    IEnumerable<ILogProcessor> logProcessors,
    NewFilterInput filter) : this(eventLogProxy, logProcessors, filter == null ? null : new NewFilterInput[] { filter })
{
}
        public static object GetFirstTopicValue(this NewFilterInput filter, uint topicNumber)
        {
            var topicValues = filter.GetTopicValues(topicNumber);

            return topicValues.FirstOrDefault();
        }

        public static string GetFirstTopicValueAsString(this NewFilterInput filter, uint topicNumber)
        {
            var filterValue = filter.GetFirstTopicValue(topicNumber);

            return filterValue?.ToString();
        }

        public static bool IsTopicFiltered(this NewFilterInput filter, uint topicNumber)
        {
            var filterValue = filter.GetFirstTopicValue(topicNumber);

            return filterValue != null;
        }
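A usage sketch for the helpers above, assuming topicNumber indexes the filter's Topics array (so topic 0 is the event signature and topics 1-3 are the indexed parameters); the hash shown is the well-known ERC-20 Transfer signature:

// Sketch only - the topic numbering described above is an assumption.
var filter = new NewFilterInput
{
    Topics = new object[]
    {
        "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", // keccak of Transfer(address,address,uint256)
        null // no filter on the first indexed parameter
    }
};

bool signatureFiltered = filter.IsTopicFiltered(0);      // true under the assumption above
string signature = filter.GetFirstTopicValueAsString(0); // the signature hash
bool fromFiltered = filter.IsTopicFiltered(1);           // false - topic 1 is null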
Example #14
        public override async Task <string> ExecuteAsync(IClient client)
        {
            /* This is the example contract containing an event raised every time we call multiply
             * contract test {
             *
             *  event Multiplied(uint indexed a, address sender);
             *
             *  function multiply(uint a) returns(uint d)
             *  {
             *      Multiplied(a, msg.sender);
             *      return a * 7;
             *
             *  }
             *
             * }*/

            //The contract byte code already compiled
            var contractByteCode =
                "606060405260c08060106000396000f360606040526000357c010000000000000000000000000000000000000000000000000000000090048063c6888fa1146037576035565b005b604b60048080359060200190919050506061565b6040518082815260200191505060405180910390f35b6000817f10f82b5dc139f3677a16d7bfb70c65252e78143313768d2c52e07db775e1c7ab33604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a260078202905060bb565b91905056";
            var minerStart       = new MinerStart(client);
            var minerStartResult = await minerStart.SendRequestAsync();

            //Create a new Eth Send Transaction RPC Handler

            //Create the transaction input for the new contract

            //On transaction input the compiled contract is the Data, together with our sender address
            var transactionInput = new TransactionInput();

            transactionInput.Data = contractByteCode;
            transactionInput.From = "0x12890D2cce102216644c59daE5baed380d84830c";
            // retrieve the transaction hash, as we need to get the transaction receipt with the contract address
            var transactionHash =
                await new PersonalSignAndSendTransaction(client).SendRequestAsync(transactionInput, "password");

            //the contract deployment transaction should be mining now

            //Get the transaction receipt using the transactionHash
            var ethGetTransactionReceipt = new EthGetTransactionReceipt(client);
            TransactionReceipt receipt   = null;

            //wait for the contract to be mined to the address
            while (receipt == null)
            {
                receipt = await ethGetTransactionReceipt.SendRequestAsync(transactionHash);
            }

            //sha3 the event call, we can use this to validate our topics

            var eventCallSh3 =
                await new Web3Sha3(client).SendRequestAsync(new HexUTF8String("Multiplied(uint256,address)"));
            //create a filter
            //listen to everything from this address - no additional topic filters (i.e. the indexed uint)
            var ethFilterInput = new NewFilterInput();

            ethFilterInput.FromBlock.SetValue(receipt.BlockNumber);
            ethFilterInput.ToBlock = BlockParameter.CreateLatest();
            ethFilterInput.Address = new[] { receipt.ContractAddress };
            //no topics
            //ethFilterInput.Topics = new object[]{};

            var newEthFilter = new EthNewFilter(client);
            var filterId     = await newEthFilter.SendRequestAsync(ethFilterInput);


            //create a transaction which will raise the event
            await SendTransaction(client, transactionInput.From, receipt.ContractAddress, "password");

            //get filter changes
            var ethGetFilterChangesForEthNewFilter = new EthGetFilterChangesForEthNewFilter(client);

            FilterLog[] logs = null;

            while (logs == null || logs.Length < 1)
            {
                //Get the filter changes logs
                logs = await ethGetFilterChangesForEthNewFilter.SendRequestAsync(filterId);

                if (logs.Length > 0)
                {
                    var sb = new StringBuilder();
                    sb.AppendLine("Topic 0: " + logs[0].Topics[0] +
                                  " should be the same as the SHA3-encoded event signature " + eventCallSh3);
                    Assert.Equal(logs[0].Topics[0], eventCallSh3);
                    sb.AppendLine("Topic 1: " + logs[0].Topics[1] + " should be 69 (0x45 in hex), padded to 32 bytes");

                    sb.AppendLine("Data " + logs[0].Data + " should be the sender address " +
                                  transactionInput.From + ", padded to 32 bytes");

                    return sb.ToString();
                }
            }

            var minerStop       = new MinerStop(client);
            var minerStopResult = await minerStop.SendRequestAsync();

            throw new Exception("Execution failed");
        }
public Task<FilterLog[]> GetLogs(NewFilterInput newFilter, object id = null)
{
    return Wrap(async () => await Web3.Eth.Filters.GetLogs
                .SendRequestAsync(newFilter, id)
                .ConfigureAwait(false));
}
Example #16
        public async Task<List<EventLog<List<ParameterOutput>>>> GetAllChangesDefaultAsync(NewFilterInput filterInput)
        {
            if (!EventABI.IsFilterInputForEvent(ContractAddress, filterInput))
            {
                throw new FilterInputNotForEventException();
            }
            var logs = await EthGetLogs.SendRequestAsync(filterInput).ConfigureAwait(false);

            return EventABI.DecodeAllEventsDefaultTopics(logs);
        }
Example #17
public Task<HexBigInteger> CreateFilterAsync(NewFilterInput newfilterInput)
{
    return EthNewFilter.SendRequestAsync(newfilterInput);
}
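A polling sketch that pairs a new filter with EthGetFilterChangesForEthNewFilter from Example #14 (the same RPC call CreateFilterAsync wraps above); the client instance and address are assumptions:

// Sketch only - illustrates one poll of a newly created filter.
var filter = new NewFilterInput { Address = new[] { "0x0000000000000000000000000000000000000000" } };
HexBigInteger filterId = await new EthNewFilter(client).SendRequestAsync(filter);

var getChanges = new EthGetFilterChangesForEthNewFilter(client);
FilterLog[] changes = await getChanges.SendRequestAsync(filterId);
// changes contains any matching logs emitted since the last poll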