/// <summary>
/// Returns validation results for the given message type at the specified version,
/// serving from the cache when possible and populating the cache on a miss.
/// </summary>
private IReadOnlyCollection<Version.ValidationResult> QueryTarget(MessageTypeCode messageType, long version)
{
    using (Probe.Create("Query Cache"))
    {
        if (_cache.TryGet(messageType, out var cached))
        {
            return cached;
        }
    }

    using (Probe.Create("Query Target"))
    {
        var fetched = _query
            .For<Version.ValidationResult>()
            .Where(x => x.MessageType == (int)messageType)
            .ForVersion(version)
            .ApplyVersionId(0)
            .ToList();

        // Seed the cache so subsequent calls for this message type hit the fast path.
        _cache.Initialize(messageType, fetched);
        return fetched;
    }
}
/// <summary>Forwards completion of processed ERM operation messages to the underlying receiver.</summary>
public void Complete(IEnumerable<IMessage> successfullyProcessedMessages, IEnumerable<IMessage> failedProcessedMessages)
{
    using (Probe.Create("Complete Erm Operations"))
    {
        _receiver.Complete(successfullyProcessedMessages, failedProcessedMessages);
    }
}
/// <summary>
/// Computes the delta between the current validation results for <paramref name="ruleCode"/>
/// and the results freshly produced by the rule's accessor, both restricted by <paramref name="filter"/>.
/// </summary>
/// <returns>Merge result where Difference = new results and Complement = resolved results.</returns>
/// <exception cref="Exception">Wraps any failure, adding the rule code for diagnostics.</exception>
private MergeResult<Version.ValidationResult> CalculateValidationRuleChanges(IReadOnlyCollection<Version.ValidationResult> currentVersionResults, MessageTypeCode ruleCode, Expression<Func<Version.ValidationResult, bool>> filter)
{
    try
    {
        List<Version.ValidationResult> sourceObjects;
        using (Probe.Create("Query Source"))
        using (new TransactionScope(TransactionScopeOption.RequiresNew, _transactionOptions))
        {
            // The data query is sent outside the ambient transaction, otherwise it escalates to DTC
            var accessor = _accessors[ruleCode];
            var query = accessor.GetSource().Where(filter);
            sourceObjects = query.ToList();
        }

        using (Probe.Create("Merge"))
        {
            // Compare only against this rule's slice of the current results, applying
            // the same filter in memory that was applied to the source query.
            var destObjects = currentVersionResults.Where(x => x.MessageType == (int)ruleCode).Where(filter.Compile());
            var mergeResult = MergeTool.Merge(sourceObjects, destObjects, _equalityComparer);
            return (mergeResult);
        }
    }
    catch (Exception ex)
    {
        // Runtime message intentionally kept as-is (localized text read by operators).
        throw new Exception($"Ошибка при вычислении правила {ruleCode}", ex);
    }
}
/// <summary>
/// Recalculates statistics for the given operations: groups them into per-project batches
/// and runs every metadata-declared processor against each batch.
/// </summary>
/// <exception cref="NotSupportedException">No statistics-recalculation metadata is registered.</exception>
public void Recalculate(IEnumerable<RecalculateStatisticsOperation> operations)
{
    // Inline out-variable declaration — consistent with the out-var idiom used elsewhere in this file.
    if (!_metadataProvider.TryGetMetadata<StatisticsRecalculationMetadataIdentity>(out MetadataSet metadataSet))
    {
        throw new NotSupportedException($"Metadata for identity '{typeof(StatisticsRecalculationMetadataIdentity).Name}' cannot be found.");
    }

    // Materialize up front: the lazy sequence is iterated once per metadata element below.
    var batches = operations.GroupBy(x => x.EntityId.ProjectId).ToArray();

    using (Probe.Create("Recalculate Statistics Operations"))
    {
        var metadata = metadataSet.Metadata.Values.SelectMany(x => x.Elements).ToArray();
        foreach (var element in metadata)
        {
            var processor = _statisticsProcessorFactory.Create(element);
            foreach (var batch in batches)
            {
                processor.Execute(batch.ToArray());
            }
        }
    }
}
/// <summary>
/// Runs every fact actor over the sync commands, collecting the produced events,
/// then synchronizes entity names for the same commands.
/// </summary>
private IEnumerable<IEvent> Handle(IReadOnlyCollection<ISyncDataObjectCommand> commands)
{
    if (commands.Count == 0)
    {
        return Enumerable.Empty<IEvent>();
    }

    var typesToSync = commands.Select(x => x.DataObjectType).ToHashSet();
    var collector = new FactsEventCollector();

    foreach (var actor in _dataObjectsActorFactory.Create(typesToSync))
    {
        using (Probe.Create($"ETL1 {actor.GetType().GetFriendlyName()}"))
        {
            collector.Add(actor.ExecuteCommands(commands));
        }
    }

    _syncEntityNameActor.ExecuteCommands(commands);
    return collector.Events();
}
/// <summary>Peeks pending aggregate operation messages from the underlying receiver.</summary>
public IReadOnlyList<IMessage> Peek()
{
    using (Probe.Create("Peek Aggregate Operations"))
    {
        return _receiver.Peek();
    }
}
/// <summary>
/// Dispatches a homogeneous batch of aggregate operations to the processor for the
/// given aggregate type, inside a single read-committed transaction with no timeout.
/// </summary>
/// <exception cref="InvalidOperationException">The command type is not a known aggregate command.</exception>
private void Execute(Type command, Type aggregate, IEnumerable<AggregateOperation> commands)
{
    var processor = CreateProcessor(aggregate);
    var options = new TransactionOptions { IsolationLevel = IsolationLevel.ReadCommitted, Timeout = TimeSpan.Zero };

    using (var transaction = new TransactionScope(TransactionScopeOption.Required, options))
    {
        using (Probe.Create($"ETL2 {command.Name} {aggregate.Name}"))
        {
            if (command == typeof(InitializeAggregate))
            {
                processor.Initialize(commands.Cast<InitializeAggregate>().ToArray());
            }
            else if (command == typeof(RecalculateAggregate))
            {
                processor.Recalculate(commands.Cast<RecalculateAggregate>().ToArray());
            }
            else if (command == typeof(DestroyAggregate))
            {
                processor.Destroy(commands.Cast<DestroyAggregate>().ToArray());
            }
            else
            {
                throw new InvalidOperationException($"The command of type {command.Name} is not supported");
            }
        }

        transaction.Complete();
    }
}
/// <summary>
/// Handles aggregatable messages: aggregate commands run inside a transaction with their
/// events logged, then state-increment/delay commands are handled and logged outside it.
/// Returns a per-bucket success result, or a per-bucket failure result on any exception.
/// </summary>
public IEnumerable<StageResult> Handle(IReadOnlyDictionary<Guid, List<IAggregatableMessage>> processingResultsMap)
{
    try
    {
        var commands = processingResultsMap.SelectMany(x => x.Value).Cast<AggregatableMessage<ICommand>>().SelectMany(x => x.Commands).ToList();

        using (Probe.Create("ETL2 Transforming"))
        using (var transaction = new TransactionScope(TransactionScopeOption.Required, _transactionOptions))
        {
            var syncEvents = Handle(commands.OfType<IAggregateCommand>().ToList())
                .Select(x => new FlowEvent(AggregatesFlow.Instance, x)).ToList();

            // Logging runs in a suppressed scope so it is not enlisted in the surrounding
            // (not-yet-committed) transaction.
            using (new TransactionScope(TransactionScopeOption.Suppress))
                _eventLogger.Log<IEvent>(syncEvents);

            transaction.Complete();
        }

        // State/delay bookkeeping happens after the aggregate transaction has committed.
        var stateEvents = Handle(commands.OfType<IncrementErmStateCommand>().ToList()).Concat(
            Handle(commands.OfType<IncrementAmsStateCommand>().ToList())).Concat(
            Handle(commands.OfType<LogDelayCommand>().ToList()))
            .Select(x => new FlowEvent(AggregatesFlow.Instance, x)).ToList();
        _eventLogger.Log<IEvent>(stateEvents);

        return processingResultsMap.Keys.Select(bucketId => MessageProcessingStage.Handling.ResultFor(bucketId).AsSucceeded());
    }
    catch (Exception ex)
    {
        // Any failure fails the whole batch: every bucket gets the same exception attached.
        _tracer.Error(ex, "Error when calculating aggregates");
        return processingResultsMap.Keys.Select(bucketId => MessageProcessingStage.Handling.ResultFor(bucketId).AsFailed().WithExceptions(ex));
    }
}
/// <summary>
/// Executes aggregate commands that target this actor's aggregate root: each Recalculate
/// is expanded into a data-object sync plus a value-object replacement for the same roots.
/// </summary>
public IReadOnlyCollection<IEvent> ExecuteCommands(IReadOnlyCollection<ICommand> commands)
{
    var aggregateCommands = commands.OfType<IAggregateCommand>()
        .Where(x => x.AggregateRootType == _aggregateRootActor.EntityType)
        .ToList();
    if (aggregateCommands.Count == 0)
    {
        return (Array<IEvent>.Empty);
    }

    // NOTE(review): indexing into the reversed name assumes FullName has at least three
    // dot-separated segments ([0] = type name, [2] = a namespace segment) — confirm this
    // holds for every aggregate root type.
    var aggregateNameParts = _aggregateRootActor.EntityType.FullName.Split('.').Reverse().ToList();
    using (Probe.Create("Aggregate", aggregateNameParts[2], aggregateNameParts[0]))
    {
        var events = new List<IEvent>();
        var recalculateCommands = aggregateCommands.OfType<AggregateCommand.Recalculate>()
            .SelectMany(next => new ICommand[]
            {
                new SyncDataObjectCommand(next.AggregateRootType, next.AggregateRootIds),
                new ReplaceValueObjectCommand(next.AggregateRootIds)
            })
            .ToList();

        events.AddRange(_rootToLeafActor.ExecuteCommands(recalculateCommands));

        // TODO: question — should events be de-duplicated (Distinct) here or not? verify!
        return (events);
    }
}
/// <summary>
/// Partitions two sequences into difference (only in <paramref name="data1"/>),
/// intersection (in both), and complement (only in <paramref name="data2"/>),
/// using the supplied equality comparer.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="comparer"/> is null.</exception>
public static MergeResult<T> Merge<T>(IEnumerable<T> data1, IEnumerable<T> data2, IEqualityComparer<T> comparer)
{
    if (comparer == null)
    {
        throw new ArgumentNullException(nameof(comparer));
    }

    HashSet<T> sourceSet;
    using (Probe.Create("Query source"))
    {
        sourceSet = new HashSet<T>(data1, comparer);
    }

    HashSet<T> targetSet;
    using (Probe.Create("Query target"))
    {
        targetSet = new HashSet<T>(data2, comparer);
    }

    using (Probe.Create("Merge"))
    {
        // NOTE: plain Where over the hash sets keeps membership checks O(1)
        // and avoids allocating intermediate collections.
        var onlyInSource = sourceSet.Where(x => !targetSet.Contains(x));
        var inBoth = sourceSet.Where(x => targetSet.Contains(x));
        var onlyInTarget = targetSet.Where(x => !sourceSet.Contains(x));
        return new MergeResult<T>(onlyInSource, inBoth, onlyInTarget);
    }
}
/// <summary>
/// Runs the configured message flow under a probe, maintaining a consecutive-failure
/// counter and always flushing collected probe reports to telemetry.
/// </summary>
/// <exception cref="InvalidOperationException">The required Flow job argument is missing.</exception>
protected override void ExecuteInternal(IJobExecutionContext context)
{
    // Flow is a required job argument; fail fast with a descriptive message when absent.
    if (string.IsNullOrEmpty(Flow))
    {
        var msg = $"Required job arg {nameof(Flow)} is not specified, check job config";
        throw new InvalidOperationException(msg);
    }

    try
    {
        using (Probe.Create(Flow))
        {
            ProcessFlow();
        }

        DecrementFailCount(context);
    }
    catch
    {
        // NOTE(review): the exception is deliberately swallowed — failure is tracked only via
        // the fail counter; confirm the scheduler does not need the exception to be rethrown.
        IncrementFailCount(context);
    }
    finally
    {
        // Flush accumulated probe reports to telemetry regardless of success or failure.
        var reports = DefaultReportSink.Instance.ConsumeReports();
        foreach (var report in reports)
        {
            _telemetry.Trace("ProbeReport", report);
        }
    }
}
/// <summary>
/// Replaces value objects affected by the given commands: deletes results no longer produced,
/// creates newly produced ones, and emits the corresponding data-change events.
/// </summary>
public IReadOnlyCollection<IEvent> ExecuteCommands(IReadOnlyCollection<ICommand> commands)
{
    var commandsToExecute = commands.OfType<IReplaceValueObjectCommand>().Distinct().ToArray();
    // Direct Length check instead of LINQ Any() on an already-materialized array (CA1860),
    // matching the Length-based checks used later in this method.
    if (commandsToExecute.Length == 0)
    {
        return Array.Empty<IEvent>();
    }

    using (Probe.Create("ValueObject", typeof(TDataObject).Name))
    {
        var events = new List<IEvent>();
        var changes = _changesProvider.GetChanges(commandsToExecute);

        // Deletions first: relates/deletes events are raised before the rows disappear.
        var toDelete = changes.Complement.ToArray();
        if (toDelete.Length != 0)
        {
            events.AddRange(_dataChangesHandler.HandleRelates(toDelete));
            events.AddRange(_dataChangesHandler.HandleDeletes(toDelete));
            _bulkRepository.Delete(toDelete);
        }

        // Then creations: creates/relates events are raised after the rows exist.
        var toCreate = changes.Difference.ToArray();
        if (toCreate.Length != 0)
        {
            _bulkRepository.Create(toCreate);
            events.AddRange(_dataChangesHandler.HandleCreates(toCreate));
            events.AddRange(_dataChangesHandler.HandleRelates(toCreate));
        }

        return events;
    }
}
/// <summary>
/// Runs the single-order validation pipeline in memory: loads an ERM slice for the order,
/// replicates it through facts -> aggregates -> messages stores, and returns the validation
/// results relevant to the requested check mode and validation period.
/// </summary>
public IReadOnlyCollection<Version.ValidationResult> Execute(long orderId, ICheckModeDescriptor checkModeDescriptor)
{
    // todo: checkModeDescriptor could be used for further optimization
    var optimization = new Optimizer();
    IStore Wrap(IStore store) => new OptimizerStore(optimization, store);

    using (Probe.Create("Execute"))
    using (var erm = new HashSetStoreFactory(_equalityComparerFactory))
    using (var store = new PersistentTableStoreFactory(_equalityComparerFactory, _webAppMappingSchema))
    using (var messages = new HashSetStoreFactory(_equalityComparerFactory))
    {
        IReadOnlyCollection<Replicator> factReplicators;
        IReadOnlyCollection<Replicator> aggregateReplicators;
        IReadOnlyCollection<Replicator> messageReplicators;
        using (Probe.Create("Initialization"))
        {
            // Build the three replication stages; message replicators are restricted to
            // ValidationResult outputs for rules enabled by the current check mode.
            factReplicators = CreateReplicators(_factAccessorTypes, erm.CreateQuery(), Wrap(store.CreateStore()));
            aggregateReplicators = CreateReplicators(_aggregateAccessorTypes, store.CreateQuery(), Wrap(store.CreateStore()));
            messageReplicators = CreateReplicators(_messageAccessorTypes, store.CreateQuery(), Wrap(messages.CreateStore()))
                .Where(x => x.DataObjectType == typeof(Version.ValidationResult) && checkModeDescriptor.Rules.ContainsKey(x.Rule)).ToList();

            // Register every dependency predicate with the optimizer before any stage runs.
            var predicates = factReplicators.Concat(aggregateReplicators).Concat(messageReplicators).SelectMany(x => x.DependencyPredicates);
            optimization.PrepareToUse(predicates.ToHashSet());
        }

        ErmDataLoader.ResolvedOrderSummary orderSummary;
        using (Probe.Create("Erm -> Erm slice"))
        {
            ReadErmSlice(orderId, Wrap(erm.CreateStore()), out orderSummary);
        }

        using (Probe.Create("Erm slice -> WebApp Facts"))
        {
            _strategy.ProcessFacts(factReplicators, aggregateReplicators, messageReplicators, optimization);
        }

        using (Probe.Create("WebApp Facts -> WebApp Aggregates"))
        {
            _strategy.ProcessAggregates(aggregateReplicators, messageReplicators, optimization);
        }

        using (Probe.Create("WebApp Aggregates -> WebApp Messages"))
        {
            _strategy.ProcessMessages(messageReplicators, optimization);
        }

        var validationPeriodStart = GetValidationPeriodStart(erm.CreateQuery(), orderId, checkModeDescriptor);
        using (Probe.Create("Query result"))
        {
            // Only results for this order, for the enabled rules, whose period has not
            // ended before the validation period start.
            return (messages.CreateQuery()
                .For<Version.ValidationResult>()
                .Where(x => x.OrderId == orderId &&
                       checkModeDescriptor.Rules.Keys.Contains((MessageTypeCode)x.MessageType) &&
                       x.PeriodEnd >= validationPeriodStart)
                .ToList());
        }
    }
}
/// <summary>Deletes the given data objects and persists the change.</summary>
public void Delete(IEnumerable<TDataObject> objects)
{
    using (Probe.Create("Delete"))
    {
        _repository.DeleteRange(objects);
        _repository.Save();
    }
}
/// <summary>Builds operations for the aggregates that depend on the given facts.</summary>
private IEnumerable<IOperation> ProcessDependencies(IReadOnlyCollection<long> factIds, MapToObjectsSpecProvider<TFact, IOperation> operationFactory)
{
    using (Probe.Create("Querying dependent aggregates"))
    {
        var specification = _metadata.FindSpecificationProvider.Invoke(factIds);
        return operationFactory.Invoke(specification).Map(_query).ToArray();
    }
}
/// <summary>Peeks pending messages from the service bus receiver.</summary>
public IReadOnlyList<IMessage> Peek()
{
    using (Probe.Create("Peek messages from ServiceBus"))
    {
        return _receiver.Peek();
    }
}
/// <summary>Builds commands for the entities that depend on the given facts.</summary>
private IEnumerable<IOperation> Process(IReadOnlyCollection<long> factIds)
{
    using (Probe.Create("Querying dependent entities"))
    {
        var specification = _findSpecificationProvider.Create(factIds);
        return _metadata.DependentEntitySpecProvider
            .Invoke(specification)
            .Map(_query)
            .Select(key => _commandFactory.Create(_metadata.EntityType, key))
            .ToArray();
    }
}
/// <summary>Serializes each operation for the target flow and stores the resulting messages.</summary>
public void Push(IEnumerable<TOperation> operations)
{
    using (Probe.Create($"Send {typeof(TOperation).Name}"))
    {
        var transportMessages = operations
            .Select(operation => _serializer.Serialize(operation, _targetFlow))
            .ToArray();
        Save(transportMessages);
    }
}
/// <summary>Deletes the given target objects and saves the repository.</summary>
public void Delete(IEnumerable<TTarget> objects)
{
    using (Probe.Create("Deleting", typeof(TTarget).Name))
    {
        _repository.DeleteRange(objects);
        _repository.Save();
    }
}
/// <summary>Inserts the given data objects and persists the change.</summary>
public void Create(IEnumerable<TDataObject> objects)
{
    using (Probe.Create("Insert"))
    {
        _repository.AddRange(objects);
        _repository.Save();
    }
}
/// <summary>Peeks a batch of Kafka messages and reports the peeked count to the publisher.</summary>
public IReadOnlyList<IMessage> Peek()
{
    using (Probe.Create("Peek messages from Kafka"))
    {
        var batch = _receiver.Peek();
        _publisher.Peeked(batch.Count);
        return batch;
    }
}
/// <summary>
/// Applies validation-rule recalculation and state-storing commands: computes per-rule result
/// deltas against the current version, then either creates a new version (when results changed)
/// or updates the existing one with the collected ERM/AMS states.
/// </summary>
public IReadOnlyCollection<IEvent> ExecuteCommands(IReadOnlyCollection<ICommand> commands)
{
    // General idea: "validation results correspond to the specified ERM state, or a later one".
    // Specifically:
    // 1. The set of ValidationResult rows for a version never changes; the ValidationResult table
    //    is append-only (with the exception of the archiving operation).
    // 2. The set of ErmStates for a version is mutable and may be empty (when changes have been
    //    processed but the corresponding version is not yet known).
    // 3. A version with no changes is never created.
    var currentVersion = _query.For<Version>().OrderByDescending(x => x.Id).Take(1).AsEnumerable().First().Id;

    var newValidationResults = new List<Version.ValidationResult>();
    var resolvedValidationResults = new List<Version.ValidationResult>();

    var ruleGroups = commands.OfType<IRecalculateValidationRuleCommand>().GroupBy(x => x.Rule).ToList();
    if (ruleGroups.Count != 0)
    {
        foreach (var ruleCommands in ruleGroups)
        {
            using (Probe.Create($"Rule {ruleCommands.Key}"))
            {
                var targetValidationResults = QueryTarget(ruleCommands.Key, currentVersion);
                var filter = CreateFilter(ruleCommands);
                var validationRuleResult = CalculateValidationRuleChanges(targetValidationResults, ruleCommands.Key, filter);

                // Difference = results that newly appeared, Complement = results now resolved.
                var newResults = validationRuleResult.Difference.ToList();
                var resolvedResults = validationRuleResult.Complement.ToList();
                newValidationResults.AddRange(newResults);
                resolvedValidationResults.AddRange(resolvedResults);

                // validationRuleResult.Intersection is not used, since it contains only the
                // records that passed through filter
                UpdateCache(ruleCommands.Key, targetValidationResults, newResults, resolvedResults);
            }
        }
    }

    var ermStates = commands.OfType<StoreErmStateCommand>().SelectMany(x => x.States).ToList();
    var amsStates = commands.OfType<StoreAmsStateCommand>().Select(x => x.State).ToList();
    if (newValidationResults.Count > 0 || resolvedValidationResults.Count > 0)
    {
        using (Probe.Create("Create New Version"))
        {
            // Resolved results are stored in the new version with the "resolved" marker applied.
            CreateVersion(currentVersion + 1, newValidationResults.Concat(resolvedValidationResults.ApplyResolved()).ToList(), ermStates, amsStates);
        }
    }
    else
    {
        using (Probe.Create("Update Existing Version"))
        {
            UpdateVersion(currentVersion, ermStates, amsStates);
        }
    }

    return (Array.Empty<IEvent>());
}
/// <summary>
/// Creates the base game object for the reference data, attaches a probe to it,
/// and returns the created object.
/// </summary>
public override GameObject CreateGameObject(ReferenceData referenceData, Transform parent = null)
{
    var gameObject = base.CreateGameObject(referenceData, parent);
    // NOTE(review): ownerData and uses are computed but never used below — confirm whether
    // they were meant to be passed on (e.g. into Probe.Create) or can be removed. Left in
    // place since the OwnerData constructor may have side effects.
    var ownerData = new OwnerData(referenceData.Owner, referenceData.Global, referenceData.Faction, referenceData.Rank);
    // Health of -1 appears to be a sentinel meaning "use MaxUses" — TODO confirm.
    var uses = referenceData.Health == -1 ? data.MaxUses : referenceData.Health;
    Probe.Create(gameObject, this, referenceData);
    return (gameObject);
}
/// <summary>Completes processed Kafka batches, splitting them into succeeded and failed sets.</summary>
public void Complete(IEnumerable<IMessage> successfullyProcessedMessages, IEnumerable<IMessage> failedProcessedMessages)
{
    using (Probe.Create("Complete Kafka messages"))
    {
        var succeededBatches = successfullyProcessedMessages.Cast<KafkaMessageBatch>().ToList();
        var failedBatches = failedProcessedMessages.Cast<KafkaMessageBatch>().ToList();
        _receiver.Complete(succeededBatches, failedBatches);
    }
}
/// <summary>
/// Acquires the store lock, then opens a connection bound to the schema derived from that
/// lock and begins a snapshot-isolation transaction for the factory's lifetime.
/// </summary>
public PersistentTableStoreFactory(LockManager lockManager, SchemaManager schemaManager)
{
    using (Probe.Create("Get lock"))
    {
        _lockManager = lockManager;
        _lock = _lockManager.GetLock();
        // The mapping schema depends on the acquired lock, so the connection is created
        // only after GetLock() succeeds.
        _connection = new DataConnection("Messages").AddMappingSchema(schemaManager.GetSchema(_lock));
        _connection.BeginTransaction(System.Data.IsolationLevel.Snapshot);
    }
}
/// <summary>Serializes the events and stores them under the target flow's identifier.</summary>
public void Push<TEvent, TFlow>(TFlow targetFlow, IReadOnlyCollection<TEvent> events)
    where TFlow : IMessageFlow
    where TEvent : IEvent
{
    using (Probe.Create($"Send {typeof(TEvent).Name}"))
    {
        var serialized = events.Select(x => Serialize(x)).ToArray();
        Save(serialized, targetFlow.Id);
    }
}
/// <summary>Archives versions older than the configured interval inside a single transaction.</summary>
protected override void ExecuteInternal(IJobExecutionContext context)
{
    var archiveDate = DateTime.UtcNow - _settings.ArchiveVersionsInterval;

    using (Probe.Create("Archive"))
    using (var transaction = new TransactionScope(TransactionScopeOption.Required, _transactionOptions))
    {
        _archiveVersionsService.Execute(archiveDate);
        transaction.Complete();
    }
}
/// <summary>Peeks ERM operation messages and publishes the total contained use-case count.</summary>
public IReadOnlyList<IMessage> Peek()
{
    using (Probe.Create("Peek Erm Operations"))
    {
        var messages = _receiver.Peek();
        var useCaseCount = messages
            .Cast<ServiceBusPerformedOperationsMessage>()
            .Sum(x => x.Operations.Count());
        _telemetryPublisher.Publish<ErmReceivedUseCaseCountIdentity>(useCaseCount);
        return messages;
    }
}
/// <summary>Runs the validation-rule actor over the commands, skipping empty batches.</summary>
private void Handle(IReadOnlyCollection<IValidationRuleCommand> commands)
{
    if (commands.Count == 0)
    {
        return;
    }

    using (Probe.Create("ValidationRuleActor"))
    {
        _validationRuleActor.ExecuteCommands(commands);
    }
}
/// <summary>Updates each of the given objects individually, then saves once at the end.</summary>
public void Update(IEnumerable<TTarget> objects)
{
    using (Probe.Create("Updating", typeof(TTarget).Name))
    {
        foreach (var target in objects)
        {
            _repository.Update(target);
        }

        _repository.Save();
    }
}