// Lazily produces parsed messages by pulling pieces of work from a
// sequential reader/processor (real, or mocked when useMockThreading is set)
// until the reader is exhausted or cancellation is requested.
// Yields each message of a piece's output buffer, then recycles the
// piece's buffers back to their pools.
IEnumerable <PostprocessedMessage> MessagesEnumerator()
{
    tracer.Info("Enumerator entered");
    var readerAndProcessorCallback = new Callback(this);
    ISequentialMediaReaderAndProcessor <PieceOfWork> readerAndProcessor;
    if (!useMockThreading)
    {
        readerAndProcessor = new SequentialMediaReaderAndProcessor <PieceOfWork, PieceOfWork, ThreadLocalData>(readerAndProcessorCallback, currentParams.Cancellation);
    }
    else
    {
        // The mock is also stored in a field so tests can drive/inspect it.
        var mockedReaderAndProcessorImpl = new SequentialMediaReaderAndProcessorMock <PieceOfWork, PieceOfWork, ThreadLocalData>(readerAndProcessorCallback);
        mockedReaderAndProcessor = mockedReaderAndProcessorImpl;
        readerAndProcessor = mockedReaderAndProcessorImpl;
    }
    // Dispose the reader even if the consumer abandons the enumerator early.
    using (readerAndProcessor)
    {
        for (; ;)
        {
            currentParams.Cancellation.ThrowIfCancellationRequested();
            PieceOfWork currentPieceOfWork = readerAndProcessor.ReadAndProcessNextPieceOfData();
            // null signals end of data.
            if (currentPieceOfWork == null)
            {
                break;
            }
            currentPieceOfWork.perfop.Milestone("Starting consuming");
            tracer.Info("Messages in output buffer: {0}", currentPieceOfWork.outputBuffer.Count);
            // Here is tricky: returning bytes buffer of the piece of work that was handled previously.
            // Bytes buffer of current piece (currentPieceOfWork.streamData) can still be used
            // by a thread processing the piece following the current one.
            if (currentParams.Direction == MessagesParserDirection.Forward)
            {
                SafeReturnStreamDataToThePool(currentPieceOfWork.prevStreamData);
            }
            else
            {
                SafeReturnStreamDataToThePool(currentPieceOfWork.nextStreamData);
            }
            foreach (var m in currentPieceOfWork.outputBuffer)
            {
                yield return(m);
            }
            // NOTE(review): field name contains a typo ("peices") — it is declared
            // elsewhere in the class, so it cannot be renamed from this block alone.
            var tmp = Interlocked.Decrement(ref peicesOfWorkBeingProgressed);
            tracer.Info("Finished consuming piece of work #{0} ({1} are still being processed)", currentPieceOfWork.id, tmp);
            // Output buffer is only recycled after all its messages were yielded.
            ReturnOutputBufferToThePool(currentPieceOfWork.outputBuffer);
        }
    }
    tracer.Info("Enumerator exited");
}
/// <summary>
/// Persists the given entity (insert or update) and flushes pending changes.
/// </summary>
/// <param name="pieceOfWork">Entity to persist.</param>
/// <returns>The same entity instance after it has been saved.</returns>
public virtual async Task <PieceOfWork> Save(PieceOfWork pieceOfWork)
{
    await _pieceOfWorkRepository.CreateOrUpdateAsync(pieceOfWork);
    await _pieceOfWorkRepository.SaveChangesAsync();
    return pieceOfWork;
}
/// <summary>
/// Shared fixture setup: one active and one inactivated employee,
/// plus one piece of work with an assignee and one without.
/// </summary>
public PieceOfWorkTests()
{
    employee = new Employee("Test");

    inactiveEmployee = new Employee("Inactive employee");
    inactiveEmployee.InactivateEmployee();

    powWithEmployee = new PieceOfWork("Do something", DateTime.Now, new Employee("Test name"));
    powWithNoEmployee = new PieceOfWork("Do something", DateTime.Now);
}
/// <summary>
/// Starts the dispatcher: enqueues an immediate full catch-up, then
/// starts the automatic catch-up timer and the worker thread.
/// </summary>
/// <param name="purgeExistingViews">Whether existing views are purged before catching up.</param>
public void Initialize(bool purgeExistingViews = false)
{
    _logger.Info("Initializing event dispatcher with view managers: {0}", string.Join(", ", _viewManagers));

    _logger.Debug("Initiating immediate full catchup");
    var initialCatchUp = PieceOfWork.FullCatchUp(purgeExistingViews);
    _work.Enqueue(initialCatchUp);

    _logger.Debug("Starting automatic catchup timer with {0} ms interval", _automaticCatchUpTimer.Interval);
    _automaticCatchUpTimer.Start();
    _worker.Start();
}
/// <summary>
/// REST endpoint: creates a new PieceOfWork.
/// </summary>
/// <param name="pieceOfWork">Entity to create; must not already have an Id.</param>
/// <returns>201 Created with the persisted entity and entity-creation headers.</returns>
/// <exception cref="BadRequestAlertException">Thrown when the payload already carries an Id.</exception>
public async Task <ActionResult <PieceOfWork> > CreatePieceOfWork([FromBody] PieceOfWork pieceOfWork)
{
    // Structured logging template (CA2254): avoids eager interpolation and
    // lets the provider capture the entity as a named property. Rendered
    // message text is unchanged.
    _log.LogDebug("REST request to save PieceOfWork : {PieceOfWork}", pieceOfWork);
    if (pieceOfWork.Id != 0)
    {
        throw new BadRequestAlertException("A new pieceOfWork cannot already have an ID", EntityName, "idexists");
    }
    _applicationDatabaseContext.AddGraph(pieceOfWork);
    await _applicationDatabaseContext.SaveChangesAsync();
    return CreatedAtAction(nameof(GetPieceOfWork), new { id = pieceOfWork.Id }, pieceOfWork)
        .WithHeaders(HeaderUtil.CreateEntityCreationAlert(EntityName, pieceOfWork.Id.ToString()));
}
/// <summary>
/// REST endpoint: updates an existing PieceOfWork from its DTO.
/// </summary>
/// <param name="pieceOfWorkDto">DTO carrying the updated state; must have a non-zero Id.</param>
/// <returns>200 OK with the mapped entity and entity-update headers.</returns>
/// <exception cref="BadRequestAlertException">Thrown when the DTO has no Id.</exception>
public async Task <IActionResult> UpdatePieceOfWork([FromBody] PieceOfWorkDto pieceOfWorkDto)
{
    // Structured logging template (CA2254) instead of string interpolation.
    _log.LogDebug("REST request to update PieceOfWork : {PieceOfWorkDto}", pieceOfWorkDto);
    if (pieceOfWorkDto.Id == 0)
    {
        throw new BadRequestAlertException("Invalid Id", EntityName, "idnull");
    }
    PieceOfWork pieceOfWork = _mapper.Map <PieceOfWork>(pieceOfWorkDto);
    await _pieceOfWorkService.Save(pieceOfWork);
    return Ok(pieceOfWork)
        .WithHeaders(HeaderUtil.CreateEntityUpdateAlert(EntityName, pieceOfWork.Id.ToString()));
}
/// <summary>
/// REST endpoint: updates an existing PieceOfWork entity directly.
/// </summary>
/// <param name="pieceOfWork">Entity with the updated state; must have a non-zero Id.</param>
/// <returns>200 OK with the entity and entity-update headers.</returns>
/// <exception cref="BadRequestAlertException">Thrown when the entity has no Id.</exception>
public async Task <IActionResult> UpdatePieceOfWork([FromBody] PieceOfWork pieceOfWork)
{
    // Structured logging template (CA2254) instead of string interpolation.
    _log.LogDebug("REST request to update PieceOfWork : {PieceOfWork}", pieceOfWork);
    if (pieceOfWork.Id == 0)
    {
        throw new BadRequestAlertException("Invalid Id", EntityName, "idnull");
    }
    //TODO catch //DbUpdateConcurrencyException into problem
    _applicationDatabaseContext.Update(pieceOfWork);
    await _applicationDatabaseContext.SaveChangesAsync();
    return Ok(pieceOfWork)
        .WithHeaders(HeaderUtil.CreateEntityUpdateAlert(EntityName, pieceOfWork.Id.ToString()));
}
/// <summary>
/// REST endpoint: creates a new PieceOfWork from its DTO.
/// </summary>
/// <param name="pieceOfWorkDto">DTO to create; must not already have an Id.</param>
/// <returns>201 Created with the mapped entity and entity-creation headers.</returns>
/// <exception cref="BadRequestAlertException">Thrown when the DTO already carries an Id.</exception>
public async Task <ActionResult <PieceOfWorkDto> > CreatePieceOfWork([FromBody] PieceOfWorkDto pieceOfWorkDto)
{
    // Structured logging template (CA2254) instead of string interpolation.
    _log.LogDebug("REST request to save PieceOfWork : {PieceOfWorkDto}", pieceOfWorkDto);
    if (pieceOfWorkDto.Id != 0)
    {
        throw new BadRequestAlertException("A new pieceOfWork cannot already have an ID", EntityName, "idexists");
    }
    PieceOfWork pieceOfWork = _mapper.Map <PieceOfWork>(pieceOfWorkDto);
    await _pieceOfWorkService.Save(pieceOfWork);
    return CreatedAtAction(nameof(GetPieceOfWork), new { id = pieceOfWork.Id }, pieceOfWork)
        .WithHeaders(HeaderUtil.CreateEntityCreationAlert(EntityName, pieceOfWork.Id.ToString()));
}
/// <summary>
/// Starts the dispatcher against the given event store: enqueues an
/// immediate full catch-up, then starts the timer and worker thread.
/// </summary>
/// <param name="eventStore">Event store to dispatch from; must not be null.</param>
/// <param name="purgeExistingViews">Whether existing views are purged before catching up.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="eventStore"/> is null.</exception>
public void Initialize(IEventStore eventStore, bool purgeExistingViews = false)
{
    // nameof keeps the parameter name refactor-safe; runtime string is identical.
    if (eventStore == null)
    {
        throw new ArgumentNullException(nameof(eventStore));
    }
    _logger.Info("Initializing event dispatcher with view managers: {0}", string.Join(", ", _viewManagers));
    _logger.Debug("Initiating immediate full catchup");
    _work.Enqueue(PieceOfWork.FullCatchUp(purgeExistingViews: purgeExistingViews));
    _logger.Debug("Starting automatic catchup timer with {0} ms interval", _automaticCatchUpTimer.Interval);
    _automaticCatchUpTimer.Start();
    _worker.Start();
}
/// <summary>
/// Handles the add-piece-of-work command: validates it, persists a new
/// entity, and commits the unit of work.
/// </summary>
/// <param name="request">Command carrying name, creation time and employee. (Type name's "Peace" spelling is external and unchanged here.)</param>
/// <param name="cancellationToken">Not observed by this handler.</param>
/// <returns>false when the command is invalid; true after a successful commit.</returns>
public async Task <bool> Handle(AddPeaceOfWorkCommand request, CancellationToken cancellationToken)
{
    // Guard clause: reject invalid commands up-front.
    if (!request.IsValid())
    {
        return false;
    }

    var pieceOfWork = new PieceOfWork(request.Name, request.CreatedAt, request.Employee);
    await _repository.Add(pieceOfWork);
    await _repository.UnitOfWork.Commit();
    return true;
}
/// <summary>
/// Verifies PieceOfWork's equality contract: entities with the same Id are
/// equal, diverging Ids are not, and an unset (0) Id never matches.
/// </summary>
public void EqualsVerifier()
{
    TestUtil.EqualsVerifier(typeof(PieceOfWork));

    // Same Id => equal.
    var first = new PieceOfWork { Id = 1L };
    var second = new PieceOfWork { Id = first.Id };
    first.Should().Be(second);

    // Diverging Ids => not equal.
    second.Id = 2L;
    first.Should().NotBe(second);

    // Unset (0) Id => not equal either.
    first.Id = 0;
    first.Should().NotBe(second);
}
/// <summary>
/// Wires up the dispatcher with its collaborators, registers the given view
/// managers, and configures the background worker plus the automatic
/// catch-up timer (defaulting to a 1 s interval).
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when any collaborator is null.</exception>
public ViewManagerEventDispatcher(IAggregateRootRepository aggregateRootRepository, IEventStore eventStore, IDomainEventSerializer domainEventSerializer, IDomainTypeNameMapper domainTypeNameMapper, params IViewManager[] viewManagers)
{
    // nameof keeps the parameter names refactor-safe; the runtime strings
    // are identical to the previous literals.
    if (aggregateRootRepository == null)
    {
        throw new ArgumentNullException(nameof(aggregateRootRepository));
    }
    if (eventStore == null)
    {
        throw new ArgumentNullException(nameof(eventStore));
    }
    if (domainEventSerializer == null)
    {
        throw new ArgumentNullException(nameof(domainEventSerializer));
    }
    if (domainTypeNameMapper == null)
    {
        throw new ArgumentNullException(nameof(domainTypeNameMapper));
    }
    if (viewManagers == null)
    {
        throw new ArgumentNullException(nameof(viewManagers));
    }
    _aggregateRootRepository = aggregateRootRepository;
    _eventStore = eventStore;
    _domainEventSerializer = domainEventSerializer;
    _domainTypeNameMapper = domainTypeNameMapper;
    viewManagers.ToList().ForEach(view => _viewManagers.Enqueue(view));
    // Background thread so the dispatcher never keeps the process alive.
    _worker = new Thread(DoWork)
    {
        IsBackground = true
    };
    // Each timer tick enqueues a (non-purging) full catch-up.
    _automaticCatchUpTimer.Elapsed += delegate { _work.Enqueue(PieceOfWork.FullCatchUp(false)); };
    AutomaticCatchUpInterval = TimeSpan.FromSeconds(1);
}
/// <summary>
/// Dispatches a batch of domain events: records the highest global sequence
/// number to catch up to, then enqueues a catch-up piece of work. An empty
/// batch is a no-op.
/// </summary>
/// <param name="events">Events to dispatch; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="events"/> is null.</exception>
public void Dispatch(IEnumerable <DomainEvent> events)
{
    // nameof keeps the parameter name refactor-safe; runtime string is identical.
    if (events == null)
    {
        throw new ArgumentNullException(nameof(events));
    }
    // Materialize once: the sequence is inspected and then handed to the queue.
    var list = events.ToList();
    if (!list.Any())
    {
        return;
    }
    var maxSequenceNumberInBatch = list.Max(e => e.GetGlobalSequenceNumber());
    // Interlocked write so the worker thread sees the new target atomically.
    Interlocked.Exchange(ref _sequenceNumberToCatchUpTo, maxSequenceNumberInBatch);
    _work.Enqueue(PieceOfWork.JustCatchUp(list));
}
// Per-test setup: creates a fresh entity instance for the test to use.
private void InitTest()
{
    _pieceOfWork = CreateEntity();
}