Example #1
        public ProcessingResult Process(string code, ProcessingOptions options)
        {
            options = options ?? new ProcessingOptions();
            var kind = options.ScriptMode ? SourceCodeKind.Script : SourceCodeKind.Regular;
            var sourceLanguage = _languages.Single(l => l.Identifier == options.SourceLanguage);

            var syntaxTree = sourceLanguage.ParseText(code, kind);

            var stream = new MemoryStream();
            var compilation = sourceLanguage
                .CreateLibraryCompilation("Test", options.OptimizationsEnabled)
                .AddReferences(_references)
                .AddSyntaxTrees(syntaxTree);

            var emitResult = _roslynAbstraction.Emit(compilation, stream);

            if (!emitResult.Success)
                return new ProcessingResult(null, emitResult.Diagnostics.Select(d => new ProcessingResultDiagnostic(d)));

            stream.Seek(0, SeekOrigin.Begin);

            var resultWriter = new StringWriter();
            var decompiler = _decompilers.Single(d => d.Language == options.TargetLanguage);
            decompiler.Decompile(stream, resultWriter);
            return new ProcessingResult(
                resultWriter.ToString(),
                emitResult.Diagnostics.Select(d => new ProcessingResultDiagnostic(d))
            );
        }
Example #2
 public static bool IsProducingBlock(this ProcessingOptions processingOptions) => (processingOptions & ProcessingOptions.ProducingBlock) == ProcessingOptions.ProducingBlock;
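The extension above is the standard [Flags]-enum membership test written as (value & flag) == flag. Below is a minimal, self-contained sketch of the same pattern; the SampleOptions enum and its values are illustrative assumptions, not Nethermind's actual ProcessingOptions definition.

    using System;

    [Flags]
    public enum SampleOptions
    {
        None           = 0,
        ReadOnlyChain  = 1,
        ProducingBlock = 2,
        NoValidation   = 4
    }

    public static class SampleOptionsExtensions
    {
        // Same shape as IsProducingBlock above: true only when the ProducingBlock bit is set.
        public static bool IsProducingBlock(this SampleOptions options) =>
            (options & SampleOptions.ProducingBlock) == SampleOptions.ProducingBlock;
    }

    public static class FlagDemo
    {
        public static void Main()
        {
            SampleOptions options = SampleOptions.ProducingBlock | SampleOptions.NoValidation;
            Console.WriteLine(options.IsProducingBlock());                     // True
            Console.WriteLine(SampleOptions.ReadOnlyChain.IsProducingBlock()); // False
        }
    }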
Example #3
        // This function will try to parse TLS hello frame and fill details in provided info structure.
        // If frame was fully processed without any error, function returns true.
        // Otherwise it returns false and info may have partial data.
        // It is OK to call it again if more data becomes available.
        // It is also possible to limit what information is processed.
        // If callback delegate is provided, it will be called on ALL extensions.
        public static bool TryGetFrameInfo(ReadOnlySpan<byte> frame, ref TlsFrameInfo info, ProcessingOptions options = ProcessingOptions.All, HelloExtensionCallback? callback = null)
        {
            const int HandshakeTypeOffset = 5;

            if (frame.Length < HeaderSize)
            {
                return(false);
            }

            // This will not fail since we have enough data.
            bool gotHeader = TryGetFrameHeader(frame, ref info.Header);

            Debug.Assert(gotHeader);

            info.SupportedVersions = info.Header.Version;

            if (info.Header.Type == TlsContentType.Alert)
            {
                TlsAlertLevel       level       = default;
                TlsAlertDescription description = default;
                if (TryGetAlertInfo(frame, ref level, ref description))
                {
                    info.AlertDescription = description;
                    return(true);
                }

                return(false);
            }

            if (info.Header.Type != TlsContentType.Handshake || frame.Length <= HandshakeTypeOffset)
            {
                return(false);
            }

            info.HandshakeType = (TlsHandshakeType)frame[HandshakeTypeOffset];

            // Check if we have full frame.
            bool isComplete = frame.Length >= HeaderSize + info.Header.Length;

            if (((int)info.Header.Version >= (int)SslProtocols.Tls) &&
                (info.HandshakeType == TlsHandshakeType.ClientHello || info.HandshakeType == TlsHandshakeType.ServerHello))
            {
                if (!TryParseHelloFrame(frame.Slice(HeaderSize), ref info, options, callback))
                {
                    isComplete = false;
                }
            }

            return(isComplete);
        }
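The comments above describe an incremental contract: a false return with partial data is not fatal, the caller can buffer more bytes and call again. A rough usage sketch of that loop is shown below; it is written as if it sat next to TryGetFrameInfo in the same (internal) helper class, which is an assumption, and it expects System and System.IO to be imported.

    private static bool TryReadHello(Stream stream, ref TlsFrameInfo info)
    {
        byte[] buffer = new byte[16 * 1024];
        int buffered = 0;

        while (true)
        {
            // Returns true only once a complete hello frame was parsed without error;
            // until then info may hold partial data and we just read more bytes.
            if (TryGetFrameInfo(buffer.AsSpan(0, buffered), ref info))
            {
                return true;
            }

            if (buffered == buffer.Length)
            {
                return false; // frame larger than this sketch's buffer
            }

            int read = stream.Read(buffer, buffered, buffer.Length - buffered);
            if (read == 0)
            {
                return false; // connection closed before a complete frame arrived
            }

            buffered += read;
        }
    }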
Example #4
        public Block[] Process(Keccak branchStateRoot, List <Block> suggestedBlocks, ProcessingOptions options, IBlockTracer blockTracer)
        {
            if (suggestedBlocks.Count == 0)
            {
                return(Array.Empty <Block>());
            }

            int stateSnapshot = _stateDb.TakeSnapshot();
            int codeSnapshot  = _codeDb.TakeSnapshot();

            if (stateSnapshot != -1 || codeSnapshot != -1)
            {
                if (_logger.IsError)
                {
                    _logger.Error($"Uncommitted state ({stateSnapshot}, {codeSnapshot}) when processing from a branch root {branchStateRoot} starting with block {suggestedBlocks[0].ToString(Block.Format.Short)}");
                }
            }

            Keccak snapshotStateRoot = _stateProvider.StateRoot;

            if (branchStateRoot != null && _stateProvider.StateRoot != branchStateRoot)
            {
                /* discarding the other branch data - chain reorganization */
                Metrics.Reorganizations++;
                _storageProvider.Reset();
                _stateProvider.Reset();
                _stateProvider.StateRoot = branchStateRoot;
            }

            var readOnly        = (options & ProcessingOptions.ReadOnlyChain) != 0;
            var processedBlocks = new Block[suggestedBlocks.Count];

            try
            {
                for (int i = 0; i < suggestedBlocks.Count; i++)
                {
                    processedBlocks[i] = ProcessOne(suggestedBlocks[i], options, blockTracer);
                    if (_logger.IsTrace)
                    {
                        _logger.Trace($"Committing trees - state root {_stateProvider.StateRoot}");
                    }
                    _stateProvider.CommitTree();
                    _storageProvider.CommitTrees();

                    if (!readOnly)
                    {
                        BlockProcessed?.Invoke(this, new BlockProcessedEventArgs(processedBlocks[i]));
                    }
                }

                if (readOnly)
                {
                    _receiptsTracer.BeforeRestore(_stateProvider);
                    Restore(stateSnapshot, codeSnapshot, snapshotStateRoot);
                }
                else
                {
                    _stateDb.Commit();
                    _codeDb.Commit();
                }

                return(processedBlocks);
            }
            catch (InvalidBlockException)
            {
                Restore(stateSnapshot, codeSnapshot, snapshotStateRoot);
                throw;
            }
        }
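The method above follows a take-snapshot / process / commit-or-restore shape: snapshots are captured before any block is touched, the branch is committed only when the run is not read-only, and an InvalidBlockException rolls everything back. A minimal, self-contained sketch of that control flow (stand-in types, not Nethermind's API) looks like this:

    using System;
    using System.Collections.Generic;

    public sealed class InvalidBlockException : Exception { }

    public sealed class InMemoryDb
    {
        private readonly List<string> _committed = new List<string>();
        private readonly List<string> _pending = new List<string>();

        public int TakeSnapshot() => _pending.Count;                   // remember how much work was uncommitted
        public void Write(string value) => _pending.Add(value);
        public void Commit() { _committed.AddRange(_pending); _pending.Clear(); }
        public void Restore(int snapshot) =>
            _pending.RemoveRange(snapshot, _pending.Count - snapshot); // drop everything done after the snapshot
    }

    public static class BranchSketch
    {
        public static IReadOnlyList<string> Process(InMemoryDb stateDb, IReadOnlyList<string> blocks, bool readOnly)
        {
            int stateSnapshot = stateDb.TakeSnapshot();
            var processed = new List<string>();

            try
            {
                foreach (string block in blocks)
                {
                    stateDb.Write(block);              // stand-in for ProcessOne + CommitTree
                    processed.Add(block);
                }

                if (readOnly)
                {
                    stateDb.Restore(stateSnapshot);    // read-only run: discard all side effects
                }
                else
                {
                    stateDb.Commit();                  // persist the whole branch
                }

                return processed;
            }
            catch (InvalidBlockException)
            {
                stateDb.Restore(stateSnapshot);        // invalid block: roll back to the snapshot and rethrow
                throw;
            }
        }
    }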
Example #5
        public override void OnBlockProcessingStart(Block block, ProcessingOptions options = ProcessingOptions.None)
        {
            if (block.IsGenesis)
            {
                return;
            }

            var isProducingBlock     = options.IsProducingBlock();
            var isProcessingBlock    = !isProducingBlock;
            var isInitBlock          = InitBlockNumber == block.Number;
            var notConsecutiveBlock  = block.Number - 1 > _lastProcessedBlockNumber || _lastProcessedBlockNumber == 0;
            var shouldLoadValidators = Validators == null || notConsecutiveBlock || isProducingBlock;
            var mainChainProcessing  = !ForSealing && isProcessingBlock;

            if (shouldLoadValidators)
            {
                Validators = isInitBlock || notConsecutiveBlock
                    ? LoadValidatorsFromContract(BlockTree.FindParentHeader(block.Header, BlockTreeLookupOptions.None))
                    : ValidatorStore.GetValidators();

                if (mainChainProcessing)
                {
                    if (_logger.IsInfo)
                    {
                        _logger.Info($"{(isInitBlock ? "Initial" : "Current")} contract validators ({Validators.Length}): [{string.Join<Address>(", ", Validators)}].");
                    }
                }
            }

            if (isInitBlock)
            {
                if (mainChainProcessing)
                {
                    ValidatorStore.SetValidators(InitBlockNumber, Validators);
                }

                InitiateChange(block, Validators.ToArray(), isProcessingBlock, true);
            }
            else
            {
                if (mainChainProcessing && notConsecutiveBlock)
                {
                    bool loadedValidatorsAreSameInStore = (ValidatorStore.GetValidators()?.SequenceEqual(Validators) == true);
                    if (!loadedValidatorsAreSameInStore)
                    {
                        ValidatorStore.SetValidators(_blockFinalizationManager.GetLastLevelFinalizedBy(block.ParentHash), Validators);
                    }
                }

                if (isProcessingBlock)
                {
                    bool reorganisationHappened = block.Number <= _lastProcessedBlockNumber;
                    if (reorganisationHappened)
                    {
                        var reorganisationToBlockBeforePendingValidatorsInitChange = block.Number <= CurrentPendingValidators?.BlockNumber;
                        SetPendingValidators(reorganisationToBlockBeforePendingValidatorsInitChange ? null : LoadPendingValidators(), reorganisationToBlockBeforePendingValidatorsInitChange);
                    }
                    else if (block.Number > _lastProcessedBlockNumber + 1) // blocks skipped, like fast sync
                    {
                        SetPendingValidators(TryGetInitChangeFromPastBlocks(block.ParentHash), true);
                    }
                }
                else
                {
                    // if we are not processing blocks we are not on consecutive blocks.
                    // We need to initialize pending validators from db on each block being produced.
                    SetPendingValidators(LoadPendingValidators());
                }
            }

            base.OnBlockProcessingStart(block, options);

            FinalizePendingValidatorsIfNeeded(block.Header, isProcessingBlock);

            _lastProcessedBlockNumber = block.Number;
        }
Example #6
        public Block Process(Block suggestedBlock, ProcessingOptions options, IBlockTracer tracer)
        {
            if (!RunSimpleChecksAheadOfProcessing(suggestedBlock, options))
            {
                return(null);
            }

            UInt256 totalDifficulty = suggestedBlock.TotalDifficulty ?? 0;

            if (_logger.IsTrace)
            {
                _logger.Trace($"Total difficulty of block {suggestedBlock.ToString(Block.Format.Short)} is {totalDifficulty}");
            }


            Block[] processedBlocks = null;
            bool    shouldProcess   = suggestedBlock.IsGenesis ||
                                      totalDifficulty > (_blockTree.Head?.TotalDifficulty ?? 0)
                                      // so above is better and more correct but creates an impression of the node staying behind on stats page
                                      // so we are okay to process slightly more
                                      // and below is less correct but potentially reporting well
                                      // || totalDifficulty >= (_blockTree.Head?.TotalDifficulty ?? 0)
                                      || (options & ProcessingOptions.ForceProcessing) == ProcessingOptions.ForceProcessing;

            if (!shouldProcess)
            {
                if (_logger.IsDebug)
                {
                    _logger.Debug($"Skipped processing of {suggestedBlock.ToString(Block.Format.FullHashAndNumber)}, Head = {_blockTree.Head?.Header?.ToString(BlockHeader.Format.Short)}, total diff = {totalDifficulty}, head total diff = {_blockTree.Head?.TotalDifficulty}");
                }
                return(null);
            }

            ProcessingBranch processingBranch = PrepareProcessingBranch(suggestedBlock, options);

            PrepareBlocksToProcess(suggestedBlock, options, processingBranch);

            try
            {
                processedBlocks = _blockProcessor.Process(processingBranch.Root, processingBranch.BlocksToProcess, options, tracer);
            }
            catch (InvalidBlockException ex)
            {
                for (int i = 0; i < processingBranch.BlocksToProcess.Count; i++)
                {
                    if (processingBranch.BlocksToProcess[i].Hash == ex.InvalidBlockHash)
                    {
                        _blockTree.DeleteInvalidBlock(processingBranch.BlocksToProcess[i]);
                        if (_logger.IsDebug)
                        {
                            _logger.Debug($"Skipped processing of {suggestedBlock.ToString(Block.Format.FullHashAndNumber)} because of {processingBranch.BlocksToProcess[i].ToString(Block.Format.FullHashAndNumber)} is invalid");
                        }
                        return(null);
                    }
                }
            }

            if ((options & (ProcessingOptions.ReadOnlyChain | ProcessingOptions.DoNotUpdateHead)) == 0)
            {
                _blockTree.UpdateMainChain(processingBranch.Blocks.ToArray(), true);
            }

            Block lastProcessed = null;

            if (processedBlocks != null && processedBlocks.Length > 0)
            {
                lastProcessed = processedBlocks[^1];
Example #7
        public Block Process(Block suggestedBlock, ProcessingOptions options, IBlockTracer blockTracer)
        {
            if (!RunSimpleChecksAheadOfProcessing(suggestedBlock, options))
            {
                return(null);
            }

            UInt256 totalDifficulty = suggestedBlock.TotalDifficulty ?? 0;

            if (_logger.IsTrace)
            {
                _logger.Trace($"Total difficulty of block {suggestedBlock.ToString(Block.Format.Short)} is {totalDifficulty}");
            }

            BlockHeader branchingPoint = null;

            Block[] processedBlocks = null;
            if (_blockTree.Head == null || totalDifficulty > _blockTree.Head.TotalDifficulty || (options & ProcessingOptions.ForceProcessing) != 0)
            {
                List <Block> blocksToBeAddedToMain = new List <Block>();
                Block        toBeProcessed         = suggestedBlock;
                do
                {
                    blocksToBeAddedToMain.Add(toBeProcessed);
                    if (toBeProcessed.IsGenesis)
                    {
                        break;
                    }

                    branchingPoint = _blockTree.FindParentHeader(toBeProcessed.Header);
                    if (branchingPoint == null)
                    {
                        break; //failure here
                    }

                    toBeProcessed = _blockTree.FindParent(toBeProcessed.Header);
                    if (toBeProcessed == null)
                    {
                        // fast synced from here
                        break;
                    }
                } while (!_blockTree.IsMainChain(branchingPoint.Hash));

                if (branchingPoint != null && branchingPoint.Hash != _blockTree.Head?.Hash)
                {
                    if (_logger.IsTrace)
                    {
                        _logger.Trace($"Head block was: {_blockTree.Head?.ToString(BlockHeader.Format.Short)}");
                    }
                    if (_logger.IsTrace)
                    {
                        _logger.Trace($"Branching from: {branchingPoint.ToString(BlockHeader.Format.Short)}");
                    }
                }
                else
                {
                    if (_logger.IsTrace)
                    {
                        _logger.Trace(branchingPoint == null ? "Setting as genesis block" : $"Adding on top of {branchingPoint.ToString(BlockHeader.Format.Short)}");
                    }
                }

                Keccak stateRoot = branchingPoint?.StateRoot;
                if (_logger.IsTrace)
                {
                    _logger.Trace($"State root lookup: {stateRoot}");
                }

                List <Block> blocksToProcess = new List <Block>();
                Block[]      blocks;
                if ((options & ProcessingOptions.ForceProcessing) != 0)
                {
                    blocksToBeAddedToMain.Clear();
                    blocks    = new Block[1];
                    blocks[0] = suggestedBlock;
                }
                else
                {
                    foreach (Block block in blocksToBeAddedToMain)
                    {
                        if (block.Hash != null && _blockTree.WasProcessed(block.Number, block.Hash))
                        {
                            stateRoot = block.Header.StateRoot;
                            if (_logger.IsTrace)
                            {
                                _logger.Trace($"State root lookup: {stateRoot}");
                            }
                            break;
                        }

                        blocksToProcess.Add(block);
                    }

                    blocks = new Block[blocksToProcess.Count];
                    for (int i = 0; i < blocksToProcess.Count; i++)
                    {
                        blocks[blocks.Length - i - 1] = blocksToProcess[i];
                    }
                }

                if (_logger.IsTrace)
                {
                    _logger.Trace($"Processing {blocks.Length} blocks from state root {stateRoot}");
                }

                for (int i = 0; i < blocks.Length; i++)
                {
                    /* this can happen if the block was loaded as an ancestor and did not go through the recovery queue */
                    _recoveryStep.RecoverData(blocks[i]);
                }

                try
                {
                    processedBlocks = _blockProcessor.Process(stateRoot, blocks, options, blockTracer);
                }
                catch (InvalidBlockException ex)
                {
                    for (int i = 0; i < blocks.Length; i++)
                    {
                        if (blocks[i].Hash == ex.InvalidBlockHash)
                        {
                            _blockTree.DeleteInvalidBlock(blocks[i]);
                            if (_logger.IsDebug)
                            {
                                _logger.Debug($"Skipped processing of {suggestedBlock.ToString(Block.Format.FullHashAndNumber)} because of {blocks[i].ToString(Block.Format.FullHashAndNumber)} is invalid");
                            }
                            return(null);
                        }
                    }
                }

                if ((options & ProcessingOptions.ReadOnlyChain) == 0)
                {
                    _blockTree.UpdateMainChain(blocksToBeAddedToMain.ToArray());
                }
            }
            else
            {
                if (_logger.IsDebug)
                {
                    _logger.Debug($"Skipped processing of {suggestedBlock.ToString(Block.Format.FullHashAndNumber)}, Head = {_blockTree.Head?.ToString(BlockHeader.Format.Short)}, total diff = {totalDifficulty}, head total diff = {_blockTree.Head?.TotalDifficulty}");
                }
            }

            Block lastProcessed = null;

            if (processedBlocks != null && processedBlocks.Length > 0)
            {
                lastProcessed = processedBlocks[processedBlocks.Length - 1];
                if (_logger.IsTrace)
                {
                    _logger.Trace($"Setting total on last processed to {lastProcessed.ToString(Block.Format.Short)}");
                }
                lastProcessed.TotalDifficulty = suggestedBlock.TotalDifficulty;
            }
            else
            {
                if (_logger.IsDebug)
                {
                    _logger.Debug($"Skipped processing of {suggestedBlock.ToString(Block.Format.FullHashAndNumber)}, last processed is null: {lastProcessed == null}, processedBlocks.Length: {processedBlocks?.Length}");
                }
            }

            return(lastProcessed);
        }
Example #8
        public async Task Process_Event_Consumes_All_Messages(int numThreads)
        {
            await using (var scope = await ServiceBusScope.CreateWithQueue(
                             enablePartitioning: false,
                             enableSession: true))
            {
                await using var sender = new ServiceBusSenderClient(
                                TestEnvironment.ServiceBusConnectionString,
                                scope.QueueName);

                // send 1 message for each thread and use a different session for each message
                ConcurrentDictionary <string, bool> sessions = new ConcurrentDictionary <string, bool>();
                for (int i = 0; i < numThreads; i++)
                {
                    var sessionId = Guid.NewGuid().ToString();
                    await sender.SendAsync(GetMessage(sessionId));

                    sessions.TryAdd(sessionId, true);
                }

                var clientOptions = new ServiceBusProcessorClientOptions()
                {
                    IsSessionEntity = true,
                    ReceiveMode     = ReceiveMode.ReceiveAndDelete,
                    RetryOptions    = new ServiceBusRetryOptions()
                    {
                        // to prevent the receive batch from taking a long time when we
                        // expect it to fail
                        MaximumRetries = 0,
                        TryTimeout     = TimeSpan.FromSeconds(5)
                    }
                };
                await using var processor = new ServiceBusProcessorClient(
                                TestEnvironment.ServiceBusConnectionString,
                                scope.QueueName,
                                clientOptions);
                int messageCt = 0;

                var options = new ProcessingOptions()
                {
                    MaxConcurrentCalls = numThreads
                };

                TaskCompletionSource <bool> taskCompletionSource = new TaskCompletionSource <bool>(TaskCreationOptions.RunContinuationsAsynchronously);

                processor.ProcessMessageAsync += ProcessMessage;
                processor.ProcessErrorAsync   += ExceptionHandler;
                await processor.StartProcessingAsync(options);

                async Task ProcessMessage(ServiceBusMessage message, ServiceBusSession session)
                {
                    await processor.CompleteAsync(message.SystemProperties.LockToken);

                    sessions.TryRemove(message.SessionId, out bool _);
                    Assert.AreEqual(message.SessionId, await session.GetSessionIdAsync());
                    Assert.IsNotNull(await session.GetLockedUntilUtcAsync());
                    var ct = Interlocked.Increment(ref messageCt);

                    if (ct == numThreads)
                    {
                        taskCompletionSource.SetResult(true);
                    }
                }

                await taskCompletionSource.Task;


                // we only give each thread enough time to process one message, so the total number of messages
                // processed should equal the number of threads
                Assert.AreEqual(numThreads, messageCt);

                // we should have received messages from each of the sessions
                Assert.AreEqual(0, sessions.Count);

                // try receiving to verify empty
                // since all the messages are gone and we are using sessions, we won't actually
                // be able to open the Receive link
                await using var receiver = new ServiceBusReceiverClient(
                                TestEnvironment.ServiceBusConnectionString,
                                scope.QueueName);
                Assert.That(async() => await receiver.ReceiveBatchAsync(numThreads), Throws.Exception);
            }
        }
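The test above coordinates its concurrent handlers with a shared counter and a TaskCompletionSource: each handler increments the counter, and whichever increment reaches the expected total completes the task the test is awaiting. Stripped of the Service Bus types, the pattern reduces to the self-contained sketch below (the Task.Run workers stand in for the processor callbacks).

    using System;
    using System.Threading;
    using System.Threading.Tasks;

    public static class CompletionCounterDemo
    {
        public static async Task Main()
        {
            const int expected = 5;
            int count = 0;
            var done = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);

            for (int i = 0; i < expected; i++)
            {
                _ = Task.Run(() =>
                {
                    // Interlocked.Increment returns the new value, so exactly one
                    // caller observes the final count and completes the source.
                    if (Interlocked.Increment(ref count) == expected)
                    {
                        done.TrySetResult(true);
                    }
                });
            }

            await done.Task;
            Console.WriteLine($"All {count} handlers ran.");
        }
    }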
Example #9
 public ProcessingContextBuilder AddOptions(ProcessingOptions options)
 {
     _options = options;
     return(this);
 }
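AddOptions stores the options and returns the builder itself, which is what makes fluent call chaining possible. A minimal self-contained version of the idiom is sketched below; the Build method and the context type are illustrative assumptions, not the original API.

    public sealed class ProcessingContext
    {
        public ProcessingContext(int options) => Options = options;
        public int Options { get; }
    }

    public sealed class FluentContextBuilder
    {
        private int _options;

        public FluentContextBuilder AddOptions(int options)
        {
            _options = options;
            return this;          // returning 'this' is what enables chaining
        }

        public ProcessingContext Build() => new ProcessingContext(_options);
    }

    // Usage: var context = new FluentContextBuilder().AddOptions(3).Build();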
Example #10
    // This function will try to parse TLS hello frame and fill details in provided info structure.
    // If frame was fully processed without any error, function returns true.
    // Otherwise it returns false and info may have partial data.
    // It is OK to call it again if more data becomes available.
    // It is also possible to limit what information is processed.
    // If callback delegate is provided, it will be called on ALL extensions.
    public static bool TryGetFrameInfo(ReadOnlySpan<byte> frame, ref TlsFrameInfo info, ProcessingOptions options = ProcessingOptions.All, HelloExtensionCallback? callback = null)
    {
        const int HandshakeTypeOffset = 5;

        if (frame.Length < HeaderSize)
        {
            return(false);
        }

        // This will not fail since we have enough data.
        bool gotHeader = TryGetFrameHeader(frame, ref info.Header);

        Debug.Assert(gotHeader);

        info.SupportedVersions = info.Header.Version;
#pragma warning disable CS0618 // Ssl2 and Ssl3 are obsolete
        if (info.Header.Version == SslProtocols.Ssl2)
        {
            // This is safe. We would not get here if the length is too small.
            info.SupportedVersions |= TlsMinorVersionToProtocol(frame[4]);
            // We only recognize Unified ClientHello at the moment.
            // This is needed to trigger certificate selection callback in SslStream.
            info.HandshakeType = TlsHandshakeType.ClientHello;
            // There is no more parsing for old protocols.
            return(true);
        }
#pragma warning restore CS0618

        if (info.Header.Type == TlsContentType.Alert)
        {
            TlsAlertLevel       level       = default;
            TlsAlertDescription description = default;
            if (TryGetAlertInfo(frame, ref level, ref description))
            {
                info.AlertDescription = description;
                return(true);
            }

            return(false);
        }

        if (info.Header.Type != TlsContentType.Handshake || frame.Length <= HandshakeTypeOffset)
        {
            return(false);
        }

        info.HandshakeType = (TlsHandshakeType)frame[HandshakeTypeOffset];

        // Check if we have full frame.
        bool isComplete = frame.Length >= HeaderSize + info.Header.Length;

        if (((int)info.Header.Version >= (int)SslProtocols.Tls) &&
            (info.HandshakeType == TlsHandshakeType.ClientHello || info.HandshakeType == TlsHandshakeType.ServerHello))
        {
            if (!TryParseHelloFrame(frame.Slice(HeaderSize), ref info, options, callback))
            {
                isComplete = false;
            }
        }

        return(isComplete);
    }
Example #11
        public void MappingAutoprefixerWarnings()
        {
            // Arrange
            var options = new ProcessingOptions {
                Browsers = new List <string> {
                    "last 4 version"
                },
                Grid = GridMode.Autoplace
            };

            const string content   = @".some-class {
    /* autoprefixer: off */
    -webkit-box-shadow: 0 0 20px #555;
       -moz-box-shadow: 0 0 20px #555;
            box-shadow: 0 0 20px #555;
    /* autoprefixer: on */
    mask: none;
}

.grid-conflict {
    display: grid;
    grid-gap: 10px;
    grid-template:
        ""g   g"" 100px
        ""g   g"" 100px
        ""h   h"" 100px /
        1fr  1fr;
}";
            const string inputPath = "/build/app.css";
            const string targetProcessedContent = @".some-class {
    /* autoprefixer: off */
    -webkit-box-shadow: 0 0 20px #555;
       -moz-box-shadow: 0 0 20px #555;
            box-shadow: 0 0 20px #555;
    /* autoprefixer: on */
    mask: none;
}

.grid-conflict {
    display: -ms-grid;
    display: grid;
    grid-gap: 10px;
    -ms-grid-rows: 100px 10px 100px 10px 100px;
    -ms-grid-columns: 1fr 10px 1fr;
        grid-template:
        ""g   g"" 100px
        ""g   g"" 100px
        ""h   h"" 100px /
        1fr  1fr;
}";

            // Act
            string processedContent;
            IList <ProblemInfo> warnings;

            using (var autoprefixer = new Autoprefixer(options))
            {
                ProcessingResult result = autoprefixer.Process(content, inputPath);
                processedContent = result.ProcessedContent;
                warnings         = result.Warnings;
            }

            // Assert
            Assert.AreEqual(targetProcessedContent, processedContent);

            Assert.AreEqual(2, warnings.Count);

            Assert.AreEqual(
                "autoprefixer: /build/app.css:6:5: " +
                "Second Autoprefixer control comment was ignored. " +
                "Autoprefixer applies control comment to whole block, not to next rules.",
                warnings[0].Message
                );
            Assert.AreEqual(
                "Second Autoprefixer control comment was ignored. " +
                "Autoprefixer applies control comment to whole block, not to next rules.",
                warnings[0].Description
                );
            Assert.AreEqual("/build/app.css", warnings[0].File);
            Assert.AreEqual(6, warnings[0].LineNumber);
            Assert.AreEqual(5, warnings[0].ColumnNumber);
            Assert.AreEqual(
                "Line 5:             box-shadow: 0 0 20px #555;" + Environment.NewLine +
                "Line 6:     /* autoprefixer: on */" + Environment.NewLine +
                "------------^" + Environment.NewLine +
                "Line 7:     mask: none;",
                warnings[0].SourceFragment
                );

            Assert.AreEqual(
                "autoprefixer: /build/app.css:13:5: " +
                "Can not find grid areas: g, h",
                warnings[1].Message
                );
            Assert.AreEqual("Can not find grid areas: g, h", warnings[1].Description);
            Assert.AreEqual("/build/app.css", warnings[1].File);
            Assert.AreEqual(13, warnings[1].LineNumber);
            Assert.AreEqual(5, warnings[1].ColumnNumber);
            Assert.AreEqual(
                "Line 12:     grid-gap: 10px;" + Environment.NewLine +
                "Line 13:     grid-template:" + Environment.NewLine +
                "-------------^" + Environment.NewLine +
                "Line 14:         \"g   g\" 100px",
                warnings[1].SourceFragment
                );
        }
Example #12
        // TODO: move to branch processor
        public Block[] Process(Keccak newBranchStateRoot, List <Block> suggestedBlocks, ProcessingOptions options, IBlockTracer blockTracer)
        {
            if (suggestedBlocks.Count == 0)
            {
                return(Array.Empty <Block>());
            }

            BlocksProcessing?.Invoke(this, new BlocksProcessingEventArgs(suggestedBlocks));

            /* We need to save the snapshot state root before reorganization in case the new branch has invalid blocks.
             * In case of invalid blocks on the new branch we will discard the entire branch and come back to
             * the previous head state.*/
            Keccak previousBranchStateRoot = CreateCheckpoint();

            InitBranch(newBranchStateRoot);

            bool readOnly    = (options & ProcessingOptions.ReadOnlyChain) != 0;
            var  blocksCount = suggestedBlocks.Count;

            Block[] processedBlocks = new Block[blocksCount];
            try
            {
                for (int i = 0; i < blocksCount; i++)
                {
                    if (blocksCount > 64 && i % 8 == 0)
                    {
                        if (_logger.IsInfo)
                        {
                            _logger.Info($"Processing part of a long blocks branch {i}/{blocksCount}");
                        }
                    }

                    _witnessCollector.Reset();

                    var (processedBlock, receipts) = ProcessOne(suggestedBlocks[i], options, blockTracer);
                    processedBlocks[i]            = processedBlock;

                    // be cautious here as AuRa depends on processing
                    PreCommitBlock(newBranchStateRoot, suggestedBlocks[i].Number);
                    if (!readOnly)
                    {
                        _witnessCollector.Persist(processedBlock.Hash!);
                        BlockProcessed?.Invoke(this, new BlockProcessedEventArgs(processedBlock, receipts));
                    }

                    // CommitBranch in parts if we have long running branch
                    bool isFirstInBatch = i == 0;
                    bool isLastInBatch  = i == blocksCount - 1;
                    bool isNotAtTheEdge = !isFirstInBatch && !isLastInBatch;
                    bool isCommitPoint  = i % MaxUncommittedBlocks == 0 && isNotAtTheEdge;
                    if (isCommitPoint && readOnly == false)
                    {
                        if (_logger.IsInfo)
                        {
                            _logger.Info($"Commit part of a long blocks branch {i}/{blocksCount}");
                        }
                        CommitBranch();
                        previousBranchStateRoot = CreateCheckpoint();
                        var newStateRoot = suggestedBlocks[i].StateRoot;
                        InitBranch(newStateRoot, false);
                    }
                }

                if (readOnly)
                {
                    RestoreBranch(previousBranchStateRoot);
                }
                else
                {
                    // TODO: move to branch processor
                    CommitBranch();
                }

                return(processedBlocks);
            }
            catch (Exception ex) // try to restore for all cost
            {
                _logger.Trace($"Encountered exception {ex} while processing blocks.");
                RestoreBranch(previousBranchStateRoot);
                throw;
            }
        }
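The commit-in-parts condition above fires on every MaxUncommittedBlocks-th index except the very first and the very last block of the branch, so long branches are flushed periodically instead of in one huge commit. A small stand-alone check of that logic is below; MaxUncommittedBlocks = 64 is an assumed value for the demo, not necessarily the real constant.

    using System;

    public static class CommitPointDemo
    {
        private const int MaxUncommittedBlocks = 64; // assumed for the demo

        public static void Main()
        {
            int blocksCount = 200;
            for (int i = 0; i < blocksCount; i++)
            {
                bool isFirstInBatch = i == 0;
                bool isLastInBatch  = i == blocksCount - 1;
                bool isNotAtTheEdge = !isFirstInBatch && !isLastInBatch;
                bool isCommitPoint  = i % MaxUncommittedBlocks == 0 && isNotAtTheEdge;

                if (isCommitPoint)
                {
                    Console.WriteLine($"Commit part of a long blocks branch {i}/{blocksCount}");
                }
            }
            // Prints commit points at i = 64, 128 and 192; i = 0 and i = 199 are skipped.
        }
    }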
Example #13
        static void Main(string[] args)
        {
            DocumentConverterServiceClient client = null;

            try
            {
                // ** Determine the source file and read it into a byte array.
                string sourceFileName = null;
                if (args.Length == 0)
                {
                    //** Delete any split files from a previous test run.
                    foreach (string file in Directory.GetFiles(Directory.GetCurrentDirectory(), "spf-*.pdf"))
                    {
                        File.Delete(file);
                    }

                    // ** If nothing is specified then read the first PDF file from the current folder.
                    string[] sourceFiles = Directory.GetFiles(Directory.GetCurrentDirectory(), "*.pdf");
                    if (sourceFiles.Length > 0)
                    {
                        sourceFileName = sourceFiles[0];
                    }
                    else
                    {
                        Console.WriteLine("Please specify a document to split.");
                        Console.ReadKey();
                        return;
                    }
                }
                else
                {
                    sourceFileName = args[0];
                }

                byte[] sourceFile = File.ReadAllBytes(sourceFileName);

                // ** Open the service and configure the bindings
                client = OpenService(SERVICE_URL);

                //** Set the absolute minimum open options
                OpenOptions openOptions = new OpenOptions();
                openOptions.OriginalFileName = Path.GetFileName(sourceFileName);
                openOptions.FileExtension    = "pdf";

                // ** Set the absolute minimum conversion settings.
                ConversionSettings conversionSettings = new ConversionSettings();

                // ** Create the ProcessingOptions for the splitting task.
                ProcessingOptions processingOptions = new ProcessingOptions()
                {
                    MergeSettings = null,
                    SplitOptions  = new FileSplitOptions()
                    {
                        FileNameTemplate = "spf-{0:D3}",
                        FileSplitType    = FileSplitType.ByNumberOfPages,
                        BatchSize        = 5,
                        BookmarkLevel    = 0
                    },
                    SourceFiles = new SourceFile[1]
                    {
                        new SourceFile()
                        {
                            MergeSettings      = null,
                            OpenOptions        = openOptions,
                            ConversionSettings = conversionSettings,
                            File = sourceFile
                        }
                    }
                };

                // ** Carry out the splitting.
                Console.WriteLine("Splitting file " + sourceFileName);
                BatchResults batchResults = client.ProcessBatchAsync(processingOptions).GetAwaiter().GetResult();

                // ** Process the returned files
                foreach (BatchResult result in batchResults.Results)
                {
                    Console.WriteLine("Writing split file " + result.FileName);
                    File.WriteAllBytes(result.FileName, result.File);
                }

                Console.WriteLine("Finished.");
            }
            catch (FaultException <WebServiceFaultException> ex)
            {
                Console.WriteLine("FaultException occurred: ExceptionType: " +
                                  ex.Detail.ExceptionType.ToString());
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.ToString());
            }
            finally
            {
                CloseService(client);
            }
        }
Example #14
 public HlfComparer(Hlf engHlf, Hlf lngHlf, ProcessingOptions options)
 {
     EngHlf  = engHlf;
     LngHlf  = lngHlf;
     Options = options;
 }
Example #15
        public static IEnumerable<IFileHandler> CreateFileHandlers(ProcessingOptions processingOptions,
            SolutionProperties properties)
        {
            bool optimizePng = (ProcessingOptions.OptimizePng & processingOptions) != 0;
            bool packageModified = false;

            var fileHandlers = new List<IFileHandler>();
            fileHandlers.Add(new BackupFilesHandler());

            if ((ProcessingOptions.DeOdex & processingOptions) != 0)
            {
                fileHandlers.Add(new DeOdexHandler(properties));
                packageModified = true;
                if (optimizePng)
                {
                    fileHandlers.Add(new UnPackHandler());
                    fileHandlers.Add(new OptiPngHandler(properties));
                    fileHandlers.Add(new RePackPngHandler());
                }
            }
            else
            {
                if ((ProcessingOptions.Decompile & processingOptions) != 0)
                {
                    fileHandlers.Add(new BaksmaliHandler(properties));
                }

                if ((ProcessingOptions.Decode & processingOptions) != 0)
                {
                    fileHandlers.Add(new DecodeHandler(properties));
                }
                else if (optimizePng)
                {
                    fileHandlers.Add(new UnPackHandler());
                }

                if ((ProcessingOptions.ProcessModifications & processingOptions) != 0)
                {
                    fileHandlers.Add(new ModPlugInHandler());
                }

                if (optimizePng)
                {
                    fileHandlers.Add(new OptiPngHandler(properties));
                }

                if ((ProcessingOptions.Encode & processingOptions) != 0)
                {
                    fileHandlers.Add(new EncodeHandler(properties));
                    packageModified = true;
                }
                else if (optimizePng)
                {
                    fileHandlers.Add(new RePackPngHandler());
                    packageModified = true;
                }

                if ((ProcessingOptions.Recompile & processingOptions) != 0)
                {
                    fileHandlers.Add(new SmaliHandler(properties));
                    packageModified = true;
                }

                if ((ProcessingOptions.ReSignApkFiles & processingOptions) != 0 && packageModified)
                {
                    fileHandlers.Add(new SignApkHandler(properties));
                }
            }
            if (packageModified)
            {
                fileHandlers.Add(new ZipAlignHandler(properties));
            }
            return fileHandlers;
        }
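The factory above assembles the handler pipeline from whichever flag bits are set, so callers typically OR several flags together. A hedged usage fragment follows; the containing class name FileHandlerFactory and the properties variable of type SolutionProperties are assumptions made for illustration.

    // Decompile, recompile and re-sign in one pass; OptimizePng additionally
    // inserts the UnPack / OptiPng / RePackPng handlers around the others.
    ProcessingOptions requested = ProcessingOptions.Decompile
                                  | ProcessingOptions.Recompile
                                  | ProcessingOptions.ReSignApkFiles
                                  | ProcessingOptions.OptimizePng;

    IEnumerable<IFileHandler> pipeline = FileHandlerFactory.CreateFileHandlers(requested, properties);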
Example #16
        private Block ProcessOne(Block suggestedBlock, ProcessingOptions options, IBlockTracer blockTracer)
        {
            if (_syncConfig.ValidateTree)
            {
                if (_logger.IsWarn)
                {
                    _logger.Warn("Collecting trie stats:");
                }
                TrieStats stats = _stateProvider.CollectStats();
                if (stats.MissingNodes > 0)
                {
                    if (_logger.IsError)
                    {
                        _logger.Error(stats.ToString());
                    }
                }
                else
                {
                    if (_logger.IsWarn)
                    {
                        _logger.Warn(stats.ToString());
                    }
                }
            }

            if (suggestedBlock.IsGenesis)
            {
                return(suggestedBlock);
            }

            if (_specProvider.DaoBlockNumber.HasValue && _specProvider.DaoBlockNumber.Value == suggestedBlock.Header.Number)
            {
                if (_logger.IsInfo)
                {
                    _logger.Info("Applying DAO transition");
                }
                ApplyDaoTransition();
            }

            Block block    = PrepareBlockForProcessing(suggestedBlock);
            var   receipts = ProcessTransactions(block, options, blockTracer);

            SetReceiptsRootAndBloom(block, receipts);
            ApplyMinerRewards(block, blockTracer);

            _stateProvider.Commit(_specProvider.GetSpec(block.Number));

            block.Header.StateRoot = _stateProvider.StateRoot;
            block.Header.Hash      = BlockHeader.CalculateHash(block.Header);
            if ((options & ProcessingOptions.NoValidation) == 0 && !_blockValidator.ValidateProcessedBlock(block, receipts, suggestedBlock))
            {
                if (_logger.IsError)
                {
                    _logger.Error($"Processed block is not valid {suggestedBlock.ToString(Block.Format.FullHashAndNumber)}");
                }
                throw new InvalidBlockException(suggestedBlock.Hash);
            }

            if ((options & ProcessingOptions.StoreReceipts) != 0)
            {
                StoreTxReceipts(block, receipts);
            }

            if ((options & ProcessingOptions.StoreTraces) != 0)
            {
                StoreTraces(blockTracer as ParityLikeBlockTracer);
            }

            BlockProcessed?.Invoke(this, new BlockProcessedEventArgs(block));
            return(block);
        }
Example #17
        protected virtual TxReceipt[] ProcessBlock(Block block, IBlockTracer blockTracer, ProcessingOptions options)
        {
            TxReceipt[] receipts = ProcessTransactions(block, options, blockTracer);
            SetReceiptsRoot(block, receipts);
            ApplyMinerRewards(block, blockTracer);

            _stateProvider.Commit(_specProvider.GetSpec(block.Number));
            _stateProvider.RecalculateStateRoot();
            block.Header.StateRoot = _stateProvider.StateRoot;
            block.Header.Hash      = block.Header.CalculateHash();

            return(receipts);
        }
Example #18
        public async Task Process_Event_SessionId(int numThreads)
        {
            await using (var scope = await ServiceBusScope.CreateWithQueue(
                             enablePartitioning: false,
                             enableSession: true))
            {
                await using var sender = new ServiceBusSenderClient(
                                TestEnvironment.ServiceBusConnectionString,
                                scope.QueueName);

                // send 1 message for each thread and use a different session for each message
                ConcurrentDictionary <string, bool> sessions = new ConcurrentDictionary <string, bool>();
                string sessionId = null;
                for (int i = 0; i < numThreads; i++)
                {
                    sessionId = Guid.NewGuid().ToString();
                    await sender.SendAsync(GetMessage(sessionId));

                    sessions.TryAdd(sessionId, true);
                }

                var clientOptions = new ServiceBusProcessorClientOptions()
                {
                    // just use the last sessionId from the loop above
                    SessionId       = sessionId,
                    IsSessionEntity = true,
                };

                await using var processor = new ServiceBusProcessorClient(
                                TestEnvironment.ServiceBusConnectionString,
                                scope.QueueName,
                                clientOptions);
                int messageCt = 0;

                var options = new ProcessingOptions()
                {
                    MaxConcurrentCalls = numThreads
                };

                TaskCompletionSource <bool>[] completionSources = Enumerable
                                                                  .Range(0, numThreads)
                                                                  .Select(index => new TaskCompletionSource <bool>(TaskCreationOptions.RunContinuationsAsynchronously))
                                                                  .ToArray();
                var completionSourceIndex = -1;

                processor.ProcessMessageAsync += ProcessMessage;
                processor.ProcessErrorAsync   += ExceptionHandler;
                await processor.StartProcessingAsync(options);

                async Task ProcessMessage(ServiceBusMessage message, ServiceBusSession session)
                {
                    await processor.CompleteAsync(message.SystemProperties.LockToken);

                    Interlocked.Increment(ref messageCt);
                    sessions.TryRemove(message.SessionId, out bool _);
                    Assert.AreEqual(sessionId, message.SessionId);
                    Assert.AreEqual(sessionId, await session.GetSessionIdAsync());
                    Assert.IsNotNull(await session.GetLockedUntilUtcAsync());
                    var setIndex = Interlocked.Increment(ref completionSourceIndex);

                    completionSources[setIndex].TrySetResult(true);
                }

                await Task.WhenAny(completionSources.Select(source => source.Task));

                // although we are allowing concurrent calls,
                // since we are specifying a specific session, the
                // concurrency won't really work as only one receiver can be linked to the session; TODO may want to add validation for this
                Assert.AreEqual(1, messageCt);

                // we should have received messages from only the specified session
                Assert.AreEqual(numThreads - 1, sessions.Count);
            }
        }
Example #19
        public Block[] Process(Keccak newBranchStateRoot, List <Block> suggestedBlocks, ProcessingOptions options, IBlockTracer blockTracer)
        {
            if (suggestedBlocks.Count == 0)
            {
                return(Array.Empty <Block>());
            }

            /* We need to save the snapshot state root before reorganization in case the new branch has invalid blocks.
             * In case of invalid blocks on the new branch we will discard the entire branch and come back to
             * the previous head state.*/
            Keccak previousBranchStateRoot = CreateCheckpoint();

            InitBranch(newBranchStateRoot);

            bool readOnly = (options & ProcessingOptions.ReadOnlyChain) != 0;

            Block[] processedBlocks = new Block[suggestedBlocks.Count];
            try
            {
                for (int i = 0; i < suggestedBlocks.Count; i++)
                {
                    processedBlocks[i] = ProcessOne(suggestedBlocks[i], options, blockTracer);

                    // be cautious here as AuRa depends on processing
                    PreCommitBlock(newBranchStateRoot); // only needed if we plan to read state root?
                    if (!readOnly)
                    {
                        BlockProcessed?.Invoke(this, new BlockProcessedEventArgs(processedBlocks[i]));
                    }
                }

                if (readOnly)
                {
                    RestoreBranch(previousBranchStateRoot);
                }
                else
                {
                    CommitBranch();
                }

                return(processedBlocks);
            }
            catch (Exception) // try to restore for all cost
            {
                RestoreBranch(previousBranchStateRoot);
                throw;
            }
        }
Example #20
        protected override TxReceipt[] ProcessBlock(Block block, IBlockTracer blockTracer, ProcessingOptions options)
        {
            _auRaBlockProcessorExtension.PreProcess(block, options);
            var receipts = base.ProcessBlock(block, blockTracer, options);

            _auRaBlockProcessorExtension.PostProcess(block, receipts, options);
            return(receipts);
        }
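The override wraps the base implementation with a pre-hook and a post-hook, a classic template-method extension point. A minimal, self-contained illustration of that shape (the names are stand-ins, not the AuRa types) could be:

    public class BaseProcessor
    {
        protected virtual int[] ProcessBlock(string block)
        {
            // ... base processing, returning receipts ...
            return new int[0];
        }
    }

    public interface IProcessorExtension
    {
        void PreProcess(string block);
        void PostProcess(string block, int[] receipts);
    }

    public class ExtendedProcessor : BaseProcessor
    {
        private readonly IProcessorExtension _extension;

        public ExtendedProcessor(IProcessorExtension extension) => _extension = extension;

        protected override int[] ProcessBlock(string block)
        {
            _extension.PreProcess(block);                 // hook before the base logic
            int[] receipts = base.ProcessBlock(block);
            _extension.PostProcess(block, receipts);      // hook after the base logic
            return receipts;
        }
    }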
Example #21
        public void SuggestBlock(Keccak blockHash, ProcessingOptions processingOptions)
        {
            Block block = _blockTree.FindBlock(blockHash, false);

            SuggestBlock(block, processingOptions);
        }
Example #22
 public static bool ContainsFlag(this ProcessingOptions processingOptions, ProcessingOptions flag) => (processingOptions & flag) == flag;
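ContainsFlag generalizes the single-purpose helpers elsewhere on this page (IsProducingBlock, IsNotReadOnly) to any flag, and is equivalent in meaning to the built-in Enum.HasFlag without the boxing cost HasFlag had on older runtimes. A brief usage fragment, assuming the ProcessingOptions flag names shown in the other examples are in scope:

    ProcessingOptions options = ProcessingOptions.ReadOnlyChain | ProcessingOptions.NoValidation;

    bool readOnly  = options.ContainsFlag(ProcessingOptions.ReadOnlyChain);  // true
    bool storeTx   = options.ContainsFlag(ProcessingOptions.StoreReceipts);  // false
    bool sameThing = options.HasFlag(ProcessingOptions.ReadOnlyChain);       // true, built-in equivalent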
Example #23
 public virtual void OnBlockProcessingEnd(Block block, TxReceipt[] receipts, ProcessingOptions options = ProcessingOptions.None)
 {
 }
Example #24
        protected virtual TxReceipt[] ProcessBlock(Block block, IBlockTracer blockTracer, ProcessingOptions options)
        {
            if (!block.IsGenesis)
            {
                var receipts = ProcessTransactions(block, options, blockTracer);
                SetReceiptsRoot(block, receipts);
                ApplyMinerRewards(block, blockTracer);

                _stateProvider.Commit(_specProvider.GetSpec(block.Number));
                _stateProvider.RecalculateStateRoot();
                block.Header.StateRoot = _stateProvider.StateRoot;
                block.Header.Hash      = block.Header.CalculateHash();

                return(receipts);
            }

            if (_logger.IsTrace)
            {
                _logger.Trace($"Processed block {block.ToString(Block.Format.Short)}");
            }
            return(Array.Empty <TxReceipt>());
        }
Example #25
                public Block[] Process(Keccak newBranchStateRoot, List <Block> suggestedBlocks, ProcessingOptions processingOptions, IBlockTracer blockTracer)
                {
                    if (blockTracer != NullBlockTracer.Instance)
                    {
                        // this is for block reruns on failure for diag tracing
                        throw new InvalidBlockException(Keccak.Zero);
                    }

                    _logger.Info($"Processing {suggestedBlocks.Last().ToString(Block.Format.Short)}");
                    while (true)
                    {
                        bool notYet = false;
                        for (int i = 0; i < suggestedBlocks.Count; i++)
                        {
                            Keccak hash = suggestedBlocks[i].Hash;
                            if (!_allowed.Contains(hash))
                            {
                                if (_allowedToFail.Contains(hash))
                                {
                                    _allowedToFail.Remove(hash);
                                    BlockProcessed?.Invoke(this, new BlockProcessedEventArgs(suggestedBlocks.Last()));
                                    throw new InvalidBlockException(hash);
                                }

                                notYet = true;
                                break;
                            }
                        }

                        if (notYet)
                        {
                            Thread.Sleep(20);
                        }
                        else
                        {
                            BlockProcessed?.Invoke(this, new BlockProcessedEventArgs(suggestedBlocks.Last()));
                            return(suggestedBlocks.ToArray());
                        }
                    }
                }
Example #26
                public Block[] Process(Keccak branchStateRoot, Block[] suggestedBlocks, ProcessingOptions processingOptions, IBlockTracer blockTracer)
                {
                    _logger.Info($"Processing {suggestedBlocks.Last().ToString(Block.Format.Short)}");
                    while (true)
                    {
                        bool notYet = false;
                        for (int i = 0; i < suggestedBlocks.Length; i++)
                        {
                            Keccak hash = suggestedBlocks[i].Hash;
                            if (!_allowed.Contains(hash))
                            {
                                if (_allowedToFail.Contains(hash))
                                {
                                    _allowedToFail.Remove(hash);
                                    BlockProcessed?.Invoke(this, new BlockProcessedEventArgs(suggestedBlocks.Last()));
                                    throw new InvalidBlockException(hash);
                                }

                                notYet = true;
                                break;
                            }
                        }

                        if (notYet)
                        {
                            Thread.Sleep(20);
                        }
                        else
                        {
                            BlockProcessed?.Invoke(this, new BlockProcessedEventArgs(suggestedBlocks.Last()));
                            return(suggestedBlocks);
                        }
                    }
                }
Example #27
 public void OnBlockProcessingStart(Block block, ProcessingOptions options = ProcessingOptions.None)
 {
     _contractValidator.OnBlockProcessingStart(block, options);
 }
Example #28
 public static bool IsNotReadOnly(this ProcessingOptions processingOptions) => (processingOptions & ProcessingOptions.ReadOnlyChain) != ProcessingOptions.ReadOnlyChain;
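Single-flag helpers like this one read well, but several examples above also test more than one bit at a time (for instance ReadOnlyChain together with DoNotUpdateHead before updating the main chain). Masking against the OR of the flags covers that in one expression; a short illustrative fragment, again assuming the flag names used in the other examples:

    ProcessingOptions options = ProcessingOptions.DoNotUpdateHead;

    // True only when neither bit is set - false here because DoNotUpdateHead is present.
    bool mayUpdateHead = (options & (ProcessingOptions.ReadOnlyChain | ProcessingOptions.DoNotUpdateHead)) == 0;

    // Single-bit convenience helper from this example - true, ReadOnlyChain is not set.
    bool notReadOnly = options.IsNotReadOnly();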
Example #29
        private TxAction ProcessBundle(Block block,
                                       List <BundleTransaction> bundleTransactions,
                                       LinkedHashSet <Transaction> transactionsInBlock,
                                       BlockReceiptsTracer receiptsTracer,
                                       ProcessingOptions processingOptions)
        {
            Snapshot snapshot        = _worldState.TakeSnapshot();
            int      receiptSnapshot = receiptsTracer.TakeSnapshot();
            UInt256  initialBalance  = _stateProvider.GetBalance(block.Header.GasBeneficiary!);

            bool CheckFeeNotManipulated()
            {
                UInt256 finalBalance = _stateProvider.GetBalance(block.Header.GasBeneficiary!);
                UInt256 feeReceived  = finalBalance - initialBalance;
                UInt256 originalSimulatedGasPrice = bundleTransactions[0].SimulatedBundleFee / bundleTransactions[0].SimulatedBundleGasUsed;
                UInt256 actualGasPrice            = feeReceived / (UInt256)receiptsTracer.LastReceipt.GasUsed!;

                return(actualGasPrice >= originalSimulatedGasPrice);
            }

            bool     bundleSucceeded = bundleTransactions.Count > 0;
            TxAction txAction        = TxAction.Skip;

            for (int index = 0; index < bundleTransactions.Count && bundleSucceeded; index++)
            {
                txAction         = ProcessBundleTransaction(block, bundleTransactions[index], index, receiptsTracer, processingOptions, transactionsInBlock);
                bundleSucceeded &= txAction == TxAction.Add;

                // if we need to stop on a tx that is not the first in the bundle, we actually want to skip the whole bundle
                txAction = txAction == TxAction.Stop && index != 0 ? TxAction.Skip : txAction;
            }

            if (bundleSucceeded)
            {
                bundleSucceeded &= CheckFeeNotManipulated();
            }

            if (bundleSucceeded)
            {
                for (int index = 0; index < bundleTransactions.Count; index++)
                {
                    BundleTransaction bundleTransaction = bundleTransactions[index];
                    transactionsInBlock.Add(bundleTransaction);
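                    // bundle receipts were appended starting at the receipt snapshot position,
                    // so the receipt index is the snapshot offset plus the position within the bundle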
                    int txIndex = receiptSnapshot + index;
                    _transactionProcessed?.Invoke(this, new TxProcessedEventArgs(txIndex, bundleTransaction, receiptsTracer.TxReceipts[txIndex]));
                }
            }
            else
            {
                _worldState.Restore(snapshot);
                receiptsTracer.Restore(receiptSnapshot);
                for (int index = 0; index < bundleTransactions.Count; index++)
                {
                    transactionsInBlock.Remove(bundleTransactions[index]);
                }
            }

            bundleTransactions.Clear();

            return(txAction);
        }
Exemple #30
0
        public override TxReceipt[] ProcessTransactions(Block block, ProcessingOptions processingOptions, BlockReceiptsTracer receiptsTracer, IReleaseSpec spec)
        {
            IEnumerable <Transaction>   transactions        = GetTransactions(block);
            LinkedHashSet <Transaction> transactionsInBlock = new(ByHashTxComparer.Instance);
            List <BundleTransaction>    bundleTransactions  = new();
            Keccak?bundleHash = null;
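            // bundleHash identifies the bundle currently being accumulated; null means no bundle is in progress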

            foreach (Transaction currentTx in transactions)
            {
                // if we are not accumulating a bundle yet
                if (bundleHash is null)
                {
                    // and we see a bundle transaction
                    if (currentTx is BundleTransaction bundleTransaction)
                    {
                        // start accumulating the bundle
                        bundleTransactions.Add(bundleTransaction);
                        bundleHash = bundleTransaction.BundleHash;
                    }
                    else
                    {
                        // otherwise process transaction as usual
                        TxAction action = ProcessTransaction(block, currentTx, transactionsInBlock.Count, receiptsTracer, processingOptions, transactionsInBlock);
                        if (action == TxAction.Stop)
                        {
                            break;
                        }
                    }
                }
                // if we are accumulating a bundle
                else
                {
                    // if we see a bundle transaction
                    if (currentTx is BundleTransaction bundleTransaction)
                    {
                        // if it's from the same bundle
                        if (bundleTransaction.BundleHash == bundleHash)
                        {
                            // keep accumulating the bundle
                            bundleTransactions.Add(bundleTransaction);
                        }
                        // if it's from a different bundle
                        else
                        {
                            // process accumulated bundle
                            TxAction action = ProcessBundle(block, bundleTransactions, transactionsInBlock, receiptsTracer, processingOptions);
                            if (action == TxAction.Stop)
                            {
                                break;
                            }

                            // start accumulating a new bundle
                            bundleTransactions.Add(bundleTransaction);
                            bundleHash = bundleTransaction.BundleHash;
                        }
                    }
                    // if we see a normal transaction
                    else
                    {
                        // process the bundle and stop accumulating it
                        bundleHash = null;
                        TxAction action = ProcessBundle(block, bundleTransactions, transactionsInBlock, receiptsTracer, processingOptions);
                        if (action == TxAction.Stop)
                        {
                            break;
                        }

                        // process normal transaction
                        action = ProcessTransaction(block, currentTx, transactionsInBlock.Count, receiptsTracer, processingOptions, transactionsInBlock);
                        if (action == TxAction.Stop)
                        {
                            break;
                        }
                    }
                }
            }
            // if we ended with an accumulated bundle, let's process it
            if (bundleTransactions.Count > 0)
            {
                ProcessBundle(block, bundleTransactions, transactionsInBlock, receiptsTracer, processingOptions);
            }

            _stateProvider.Commit(spec, receiptsTracer);
            _storageProvider.Commit(receiptsTracer);

            SetTransactions(block, transactionsInBlock);
            return(receiptsTracer.TxReceipts.ToArray());
        }
Exemple #31
0
        public async Task Process_Event(int numThreads)
        {
            await using (var scope = await ServiceBusScope.CreateWithQueue(
                             enablePartitioning: false,
                             enableSession: true))
            {
                await using var sender = new ServiceBusSenderClient(
                                TestEnvironment.ServiceBusConnectionString,
                                scope.QueueName);

                // send 1 message for each thread and use a different session for each message
                ConcurrentDictionary <string, bool> sessions = new ConcurrentDictionary <string, bool>();
                for (int i = 0; i < numThreads; i++)
                {
                    var sessionId = Guid.NewGuid().ToString();
                    await sender.SendAsync(GetMessage(sessionId));

                    sessions.TryAdd(sessionId, true);
                }

                var clientOptions = new ServiceBusProcessorClientOptions()
                {
                    IsSessionEntity = true,
                    ReceiveMode     = ReceiveMode.ReceiveAndDelete
                };
                await using var processor = new ServiceBusProcessorClient(
                                TestEnvironment.ServiceBusConnectionString,
                                scope.QueueName,
                                clientOptions);
                int messageCt = 0;

                var options = new ProcessingOptions()
                {
                    MaxConcurrentCalls = numThreads
                };

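                // one completion source per expected message so the test can await until
                // every concurrent handler has signalled that it processed a message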
                TaskCompletionSource <bool>[] completionSources = Enumerable
                                                                  .Range(0, numThreads)
                                                                  .Select(index => new TaskCompletionSource <bool>(TaskCreationOptions.RunContinuationsAsynchronously))
                                                                  .ToArray();

                var completionSourceIndex = -1;

                processor.ProcessMessageAsync += ProcessMessage;
                processor.ProcessErrorAsync   += ExceptionHandler;
                await processor.StartProcessingAsync(options);

                async Task ProcessMessage(ServiceBusMessage message, ServiceBusSession session)
                {
                    await processor.CompleteAsync(message.SystemProperties.LockToken);

                    Interlocked.Increment(ref messageCt);
                    sessions.TryRemove(message.SessionId, out bool _);
                    Assert.AreEqual(message.SessionId, await session.GetSessionIdAsync());
                    Assert.IsNotNull(await session.GetLockedUntilUtcAsync());
                    var setIndex = Interlocked.Increment(ref completionSourceIndex);

                    completionSources[setIndex].TrySetResult(true);
                }

                await Task.WhenAll(completionSources.Select(source => source.Task));

                // we only give each thread enough time to process one message, so the total number of messages
                // processed should equal the number of threads
                Assert.AreEqual(numThreads, messageCt);

                // we should have received messages from each of the sessions
                Assert.AreEqual(0, sessions.Count);
            }
        }
Exemple #32
0
 public void AddProcessingOption(ProcessingOptions inProcessingOption)
 {
     InProcessingOptionses.Add(inProcessingOption);
 }
Exemple #33
0
        static void Main(string[] args)
        {
            DocumentConverterServiceClient client = null;

            try
            {
                // ** Delete any processed files from a previous run
                foreach (FileInfo f in new DirectoryInfo(".").GetFiles("*_ocr.pdf"))
                {
                    f.Delete();
                }

                // ** Determine the source file and read it into a byte array.
                string sourceFileName = null;
                if (args.Length == 0)
                {
                    // ** If nothing is specified then read the first PDF file from the current folder.
                    string[] sourceFiles = Directory.GetFiles(Directory.GetCurrentDirectory(), "*.pdf");
                    if (sourceFiles.Length > 0)
                    {
                        sourceFileName = sourceFiles[0];
                    }
                    else
                    {
                        Console.WriteLine("Please specify a document to OCR.");
                        Console.ReadKey();
                        return;
                    }
                }
                else
                {
                    sourceFileName = args[0];
                }

                // ** Open the service and configure the bindings
                client = OpenService(SERVICE_URL);

                // ** Specify the various OCR related settings.
                ProcessingOptions processingOptions = new ProcessingOptions()
                {
                    // ** Set up array of source files. For OCR we can pass in only a single file at a time.
                    SourceFiles = new SourceFile[]
                    {
                        new SourceFile()
                        {
                            // ** Set the binary content
                            File = File.ReadAllBytes(sourceFileName),
                            // ** Create absolute minimum OpenOptions
                            OpenOptions = new OpenOptions()
                            {
                                OriginalFileName = Path.GetFileName(sourceFileName),
                                FileExtension    = Path.GetExtension(sourceFileName),
                            }
                        }
                    },
                    // ** Define OCR settings
                    OCRSettings = new OCRSettings()
                    {
                        // ** Select OCR engine, change this to 'Muhimbi' to use the default Muhimbi OCR engine.
                        OCREngine = "PrimeOCR",
                        // ** Set the language; keep in mind that different OCR engines support different languages (e.g. in the Muhimbi OCR engine, use 'English', not 'English_UK' / 'English_US').
                        Language = OCRLanguage.English_UK.ToString(),
                        // ** Set the desired output, in this case we want the OCRed PDF and OCRed text to be returned separately.
                        OutputType = OCROutputType.Text | OCROutputType.PDF,
                        // ** Include PrimeOCR specific properties. These settings cannot be passed into the Muhimbi default OCR engine.
                        OCREngineSpecificSettings = new OCREngineSpecificSettings_PrimeOCR()
                        {
                            // ** Predefined accuracy levels can be used as well as manually defined integer values.
                            AccuracyLevel = (int)PrimeOCR_AccuracyLevel.Level6,
                            // ** Set various other parameters
                            AutoZone = PrimeOCR_AutoZone.NoAutoZone,
                            Deskew   = PrimeOCR_Deskew.On,
                            ImageProcessingOptions = PrimeOCR_ImageProcessingOptions.Autorotate |
                                                     PrimeOCR_ImageProcessingOptions.Deshade |
                                                     PrimeOCR_ImageProcessingOptions.Despeck |
                                                     PrimeOCR_ImageProcessingOptions.Smooth,
                            LexicalChecking = PrimeOCR_LexicalChecking.Lexical,
                            PageQuality     = PrimeOCR_PageQuality.NormalQuality,
                            PrintType       = PrimeOCR_PrintType.Machine,
                            ZoneContent     = PrimeOCR_ZoneContent.NoRestrictions
                        },
                    }
                };

                // ** Carry out the operation.
                Console.WriteLine("Processing file " + sourceFileName + ".");
                BatchResults results = client.ProcessBatch(processingOptions);

                // ** Get results. Both textual and PDF
                OCRResult ocredText    = results.Results[0].OCRResult;
                byte[]    ocredPdfFile = results.Results[0].File;

                // ** Process textual output
                if (ocredText != null)
                {
                    // ** Write the text into a txt file
                    string destFileName = Path.GetFileNameWithoutExtension(sourceFileName) + "_ocr.txt";
                    File.WriteAllText(destFileName, ocredText.Text);
                    Console.WriteLine("Text file written to " + destFileName);

                    // ** Show the file
                    Console.WriteLine("Launching text file in reader");
                    Process.Start(destFileName);
                }
                else
                {
                    Console.WriteLine("No text file was produced.");
                }

                // ** Process the resulting PDF
                if (ocredPdfFile != null)
                {
                    // ** Write the processed file back to the file system with a PDF extension.
                    string destFileName = Path.GetFileNameWithoutExtension(sourceFileName) + "_ocr.pdf";
                    File.WriteAllBytes(destFileName, ocredPdfFile);
                    Console.WriteLine("PDF file written to " + destFileName);

                    // ** Open the generated PDF file in a PDF Reader
                    Console.WriteLine("Launching file in PDF Reader");
                    Process.Start(destFileName);
                }
                else
                {
                    Console.WriteLine("No PDF file was generated.");
                }
            }
            catch (FaultException <WebServiceFaultException> ex)
            {
                Console.WriteLine("FaultException occurred: ExceptionType: " +
                                  ex.Detail.ExceptionType.ToString());
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.ToString());
            }
            finally
            {
                CloseService(client);
            }
            Console.ReadKey();
        }