public async Task<IEnumerable<Batch>> GetCurrentState(int? groupId = null)
{
    // Fall back to the instance's group when no explicit group is requested
    groupId = groupId ?? GroupId;

    using var batchContext = new BatchContext();
    return await batchContext.Batches
        .Where(batch => batch.GroupId.Equals(groupId))
        .OrderBy(ord => ord.BatchId)
        .ToListAsync();
}
public ValueTask OnBatchRunCompleteAsync(BatchContext context, string errorMessageIfFailed, Exception exceptionIfExists)
{
    // The run completed successfully only when neither an error message nor an exception was supplied
    this.CompleteSuccessfully = errorMessageIfFailed == null && exceptionIfExists == null;
    this.ErrorMessage = errorMessageIfFailed;
    this.Exception = exceptionIfExists;

    // Delegate to the wrapped interceptor
    return innerInterceptor.OnBatchRunCompleteAsync(context, errorMessageIfFailed, exceptionIfExists);
}
public override BatchQueueItem[] Process(BatchQueueItem[] items)
{
    var ret = new List<BatchQueueItem>();

    foreach (var item in items)
    {
        try
        {
            using (var context = new BatchContext())
            {
                var batch = context.Batches.Find(item.BatchId);
                batch.GenerateOutput();
                batch.Status = BatchStatus.Completed;
                batch.CompletedTS = DateTime.Now;
                context.SaveChanges();

                // Signal the notification
                batch.NotificationSemaphore().Signal();
            }
            ret.Add(item);
        }
        catch (Exception ex)
        {
            Logger.LogException(ex);
            if (item.Impl.AttemptCount >= this.Impl.MaxRetry - 1)
            {
                // Max retry reached, clean up queueItem
                ret.Add(item);
            }
        }
    }
    return ret.ToArray();
}
public static void ProcessFiles([ActivityTrigger] BatchContext batchContext, TraceWriter log)
{
    log.Info($"*** PROCESSFILES {batchContext.FolderName} - starting...");

    // TODO - insert real processing here
    System.Threading.Thread.Sleep(2500);

    log.Info($"*** PROCESSFILES {batchContext.FolderName} - done, cleaning up....");

    foreach (var filename in batchContext.RequiredFiles)
    {
        // TODO - replace local file access with blob access
        var path = Path.Combine(batchContext.FolderName, $"{batchContext.BatchId}_{filename}");
        if (File.Exists(path))
        {
            log.Verbose($"*** Deleting {path}");
            File.Delete(path);
        }
        else
        {
            log.Error($"*** Missing file {path}"); // shouldn't hit this!
        }
    }

    log.Info($"*** PROCESSFILES {batchContext.FolderName} - done");
}
public ActionResult Retry(int id)
{
    using (var context = new BatchContext())
    {
        var batch = LoadBatch(id, context);
        if (batch != null && (batch.Status == BatchStatus.Completed || batch.Status == BatchStatus.Notified))
        {
            // Reset the batch so it can run through extraction again
            batch.Status = BatchStatus.Created;
            batch.ResetCounters();
            if (batch.Lines != null)
            {
                context.BatchLines.RemoveRange(batch.Lines);
            }
            context.SaveChanges();

            // Re-enqueue the batch for extraction
            var extractionQueue = CprBroker.Engine.Queues.Queue.GetQueues<Queues.ExtractionQueue>().Single();
            extractionQueue.Enqueue(new Queues.BatchQueueItem() { BatchId = batch.BatchId });

            return Json("Success.", JsonRequestBehavior.AllowGet);
        }
    }
    return Json("Unable to retry", JsonRequestBehavior.AllowGet);
}
User GetUser(BatchContext context, string userName)
{
    var user = context.Users.Where(u => u.Name.Equals(userName)).FirstOrDefault();
    if (user == null)
    {
        lock ("User-adding")
        {
            // Double-checked: another thread may have created the user while we waited for the lock
            user = context.Users.Where(u => u.Name.Equals(userName)).FirstOrDefault();
            if (user == null)
            {
                user = new Models.User() { Name = userName };
                context.Users.Add(user);
                context.SaveChanges();

                // Re-read the persisted user and eagerly load its batches
                user = context.Users.Where(u => u.Name.Equals(userName)).FirstOrDefault();
                context.Entry<User>(user).Collection(u => u.Batches).Load();
            }
        }
    }
    return user;
}
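The method above locks on the string literal "User-adding". Because the .NET runtime interns string literals, unrelated code that locks on the same literal would contend for the same monitor; a common alternative is a private static lock object. A minimal sketch under that assumption (the s_userLock field and the GetOrCreateUser name are illustrative, not part of the original code):

// Illustrative alternative: synchronise on a private object instead of an interned string literal
private static readonly object s_userLock = new object();

User GetOrCreateUser(BatchContext context, string userName)
{
    var user = context.Users.FirstOrDefault(u => u.Name == userName);
    if (user != null)
    {
        return user;
    }

    lock (s_userLock)
    {
        // Re-check inside the lock in case another thread created the user meanwhile
        user = context.Users.FirstOrDefault(u => u.Name == userName);
        if (user == null)
        {
            user = new Models.User() { Name = userName };
            context.Users.Add(user);
            context.SaveChanges();
        }
        return user;
    }
}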
public void EnqueueFile(System.IO.Stream stream, string name, int length, string userName)
{
    using (var context = new BatchContext())
    {
        var user = GetUser(context, userName); // Now we have a user object

        var batch = new Batch()
        {
            Size = length,
            SourceContents = new byte[length],
            Status = BatchStatus.Created,
            FileName = name,
            SubmittedTS = DateTime.Now,
            User = user
        };

        // Read the full file contents; Stream.Read may return fewer bytes than requested, so loop until done
        int offset = 0;
        while (offset < length)
        {
            int read = stream.Read(batch.SourceContents, offset, length - offset);
            if (read == 0)
            {
                break;
            }
            offset += read;
        }

        context.Batches.Add(batch);
        context.SaveChanges();

        // Queue the new batch for extraction
        var extractionQueue = CprBroker.Engine.Queues.Queue.GetQueues<Queues.ExtractionQueue>().Single();
        extractionQueue.Enqueue(new Queues.BatchQueueItem() { BatchId = batch.BatchId });
    }
}
protected override void PerformTimerAction()
{
    int daysToKeepFiles = Properties.Settings.Default.DaysToKeepBatchFiles;
    DateTime deleteOlderThan = DateTime.Now.AddDays(-daysToKeepFiles);

    using (BatchContext context = new BatchContext())
    {
        // Remove batches that completed before the retention cut-off
        IQueryable<Batch> batchesToDelete = context.Batches.Where(b => b.CompletedTS < deleteOlderThan);
        foreach (Batch batch in batchesToDelete)
        {
            try
            {
                context.Batches.Remove(batch);
                batch.SignalAllSemaphores();
                Admin.LogSuccess("Mass PNR Lookup: Removed batch " + batch.BatchId);
            }
            catch (Exception e)
            {
                Admin.LogException(e);
            }
        }
        context.SaveChanges();
    }
}
public async Task TestCircuit(Func<BatchContext, Task> next, BatchContext context)
{
    OnCircuitTest(EventArgs.Empty);
    _log.LogInformation($"Subscriber circuit breaker: testing circuit for subscriber '{_config.SubscriberName}'");

    // Move to the half-open state and let the next handler attempt processing
    State = CircuitState.HalfOpen;
    await next(context).ConfigureAwait(false);
}
public void Process_LargeBatch_OK(int batchSize)
{
    // Build a CSV with one header line and batchSize copies of the first data line
    var lines = Properties.Resources.Test_Opslag.Split(Environment.NewLine.ToCharArray(), StringSplitOptions.RemoveEmptyEntries).Take(2).ToList();
    StringBuilder b = new StringBuilder((lines[1].Length + 2) * batchSize);
    b.AppendLine(lines[0]);
    for (int i = 0; i < batchSize; i++)
    {
        b.AppendLine(lines[1]);
    }
    var bytes = Commons.CsvEncoding.GetBytes(b.ToString());

    var batch = new Batch()
    {
        FileName = "",
        Size = bytes.Length,
        SourceContents = bytes,
        SubmittedTS = DateTime.Now
    };

    using (var context = new BatchContext())
    {
        context.Batches.Add(batch);
        context.SaveChanges();
    }

    // Run the extraction queue over the newly created batch
    var queueItem = new Queues.BatchQueueItem() { BatchId = batch.BatchId };
    var queue = new ExtractionQueue();
    queue.Process(new BatchQueueItem[] { queueItem });
}
public void SetFileTest()
{
    string script = "scriptname";
    BatchContext context = new BatchContext(1, script, 2);

    context.File = "test";

    Assert.AreEqual("test", context.File);
}
public void SetLineNumberTest()
{
    string script = "scriptname";
    BatchContext context = new BatchContext(1, script, 2);

    context.LineNumber = 10;

    Assert.AreEqual(10, context.LineNumber);
}
public void ConstructorTest()
{
    string script = "scriptname";
    BatchContext context = new BatchContext(1, script, 2);

    Assert.AreEqual(script, context.File);
    Assert.AreEqual(1, context.BatchOffset);
    Assert.AreEqual(2, context.LineNumber);
}
public override BatchQueueItem[] Process(BatchQueueItem[] items)
{
    var ret = new List<BatchQueueItem>();

    foreach (var item in items)
    {
        using (var context = new BatchContext())
        {
            var batch = context.Batches.Find(item.BatchId);
            if (batch == null)
            {
                // This happens if the batch is deleted before extraction
                ret.Add(item);
            }
            else
            {
                try
                {
                    var parser = batch.CreateParser();

                    // Delete existing lines if needed
                    var oldLines = context.BatchLines.Where(bl => bl.Batch_BatchId == batch.BatchId);
                    context.BatchLines.RemoveRange(oldLines);
                    context.SaveChanges();
                    context.Entry<Batch>(batch).Reload();

                    var lines = parser.ToArray();
                    if (batch.Lines == null)
                    {
                        batch.Lines = new List<BatchLine>();
                    }
                    Array.ForEach<BatchLine>(lines, bl => batch.Lines.Add(bl));
                    batch.EnqueueAllAfterExtraction(context);
                    ret.Add(item);
                }
                catch (Exception ex)
                {
                    Admin.LogException(ex);
                    batch.Status = BatchStatus.Error;
                    context.SaveChanges();
                }

                if (item.Impl.AttemptCount >= this.Impl.MaxRetry - 1)
                {
                    // Max retry reached, clean up queueItem
                    ret.Add(item);
                }
            }
        }
    }
    return ret.ToArray();
}
private void AddIfNotExists(Customer item)
{
    // Skip customers that already exist with the same first and last name
    if (Table.Any(x => x.FirstName == item.FirstName && x.LastName == item.LastName))
    {
        return;
    }
    Insert(item);
    BatchContext.SaveChanges();
}
public async Task Invoke(Func<BatchContext, Task> next, BatchContext context)
{
    if (State == CircuitState.Closed)
    {
        // Circuit closed: process the message normally
        await next(context).ConfigureAwait(false);
    }
    else
    {
        // Circuit open: wait out the test interval, then probe the circuit with a single message
        await Task.Delay(TimeSpan.FromSeconds(_circuitTestIntervalInSeconds), context.TokenSource.Token).ConfigureAwait(false);
        await TestCircuit(next, context).ConfigureAwait(false);
    }
}
public async Task Invoke(Func<BatchContext, Task> next, BatchContext context)
{
    try
    {
        await next(context).ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        // Record the exception type against this subscriber before rethrowing
        _statisticsReporter.Increment("ProcessBatchInfrastructureException",
            $"Client=PatLite.{_config.SubscriberName}," +
            $"ExceptionType={ex.GetType()}");
        throw;
    }
}
public ActionResult Resume(int id)
{
    using (var context = new BatchContext())
    {
        var batch = LoadBatch(id, context);
        if (batch != null && batch.Status == BatchStatus.Paused)
        {
            batch.Status = BatchStatus.Processing;
            context.SaveChanges();

            // Wake up any workers waiting on the search semaphore
            batch.SearchSemaphore().SignalAll();
            return Json("Success.", JsonRequestBehavior.AllowGet);
        }
    }
    return new HttpNotFoundResult();
}
public ActionResult Result(int id)
{
    using (var context = new BatchContext())
    {
        var batch = LoadBatch(id, context);
        if (batch != null && batch.GeneratedContents != null)
        {
            // Reuse the original file's extension (if any) for the generated result
            var ext = batch.FileName.Split('.').Skip(1).LastOrDefault();
            ext = string.IsNullOrEmpty(ext) ? "" : "." + ext;

            return new FileContentResult(batch.GeneratedContents, "application/unspecified")
            {
                FileDownloadName = string.Format("{0}-result{1}", batch.FileName, ext)
            };
        }
    }
    return new HttpNotFoundResult();
}
public override void Build()
{
    // init fields
    m_InputDepth = Net.InputDepth;
    m_InputHeight = Net.InputHeight;
    m_InputWidth = Net.InputWidth;
    m_OutputDepth = Net[Net.LayerCount - 1].OutputDepth;

    m_Gradient = new double[Net.LayerCount][];
    m_Values = new double[Net.LayerCount][][,];
    m_Errors = new double[Net.LayerCount][][,];

    for (int l = 0; l < Net.LayerCount; l++)
    {
        var layer = Net[l];
        m_Gradient[l] = new double[layer.ParamCount];
        m_Values[l] = new double[layer.OutputDepth][,];
        m_Errors[l] = new double[layer.OutputDepth][,];

        for (int p = 0; p < layer.OutputDepth; p++)
        {
            m_Values[l][p] = new double[layer.OutputHeight, layer.OutputWidth];
            m_Errors[l][p] = new double[layer.OutputHeight, layer.OutputWidth];
        }
    }

    // init optimizer
    if (m_Optimizer == null)
    {
        m_Optimizer = Registry.Optimizer.SGD;
    }

    // init scheduler
    if (m_LearningRateScheduler == null)
    {
        m_LearningRateScheduler = Registry.LearningRateScheduler.Constant(m_LearningRate);
    }

    // init batch context
    if (m_UseBatchParallelization)
    {
        m_BatchContext = new BatchContext(this, m_MaxBatchThreadCount);
    }
}
public ActionResult Remove(int id)
{
    try
    {
        using (var context = new BatchContext())
        {
            var batch = LoadBatch(id, context);
            if (batch != null)
            {
                context.Batches.Remove(batch);
                context.SaveChanges();
                batch.SignalAllSemaphores();
                return Json("Success.", JsonRequestBehavior.AllowGet);
            }
        }
    }
    catch (Exception)
    {
        return Json("An error occurred.", JsonRequestBehavior.AllowGet);
        // DEBUGGING: return Json(ex.Message + "\r\n" + ex.StackTrace + "\r\n\r\n" + ex.InnerException, JsonRequestBehavior.AllowGet);
    }
    return new HttpStatusCodeResult(500);
}
public async Task WriteBatchAndNumberToDatabase(BatchAndNumberFullInfo batchAndNumberFullInfo)
{
    // I am given to understand that this is an antiquated way to do this. I agree. I am, however, struggling to
    // utilise the dependency injection native to EF Core, as at the top of this page.
    // Right now, I just want to focus on functionality, but please be aware that I know this is not ideal.
    var optionsBuilder = new DbContextOptionsBuilder<BatchContext>();
    optionsBuilder.UseSqlServer("Server=(local)\\sqlexpress;Database=RuanGatesBDAssessmentDB;Trusted_Connection=True;MultipleActiveResultSets=True;");

    var batch = new Batch();
    batch.BatchElements = new List<BatchElement>();
    batch.BatchAndNumberInput = new BatchAndNumberInput();
    batch.BatchAndNumberInput.Batches = batchAndNumberFullInfo.BatchAndNumberInputDetails.Batches;
    batch.BatchAndNumberInput.Numbers = batchAndNumberFullInfo.BatchAndNumberInputDetails.Numbers;
    batch.CollectionId = batchAndNumberFullInfo.BatchAndNumberInputDetails.RequestId;

    BatchElement batchElement = new BatchElement();
    batchElement.BatchNumber = batchAndNumberFullInfo.BatchAndNumber.Batch;
    batchElement.NumbersRemaining = 4;
    batchElement.Aggregate = 12;
    batchElement.NumbersInBatch = new List<NumberInBatch>();

    NumberInBatch number = new NumberInBatch();
    number.Number = batchAndNumberFullInfo.BatchAndNumber.Number;
    batchElement.NumbersInBatch.Add(number);

    batch.BatchElements.Add(batchElement);
    batch.GrandTotal = 99;

    using (var ctx = new BatchContext(optionsBuilder.Options))
    {
        ctx.Batches.Add(batch);
        await ctx.SaveChangesAsync();
    }
}
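The comment in the method above mentions the difficulty of wiring BatchContext into EF Core's native dependency injection. A minimal sketch of that approach, assuming an ASP.NET Core style host (the BatchWriter class, the "BatchDb" connection-string name, and the placement in ConfigureServices are illustrative, not taken from the original project):

// Minimal sketch: register BatchContext with EF Core's DI instead of building options inline
public static void ConfigureServices(IServiceCollection services, IConfiguration configuration)
{
    services.AddDbContext<BatchContext>(options =>
        options.UseSqlServer(configuration.GetConnectionString("BatchDb")));
    services.AddScoped<BatchWriter>();
}

// The consumer then receives the context through its constructor,
// mirroring the repository and controller constructors further below
public class BatchWriter
{
    private readonly BatchContext _context;

    public BatchWriter(BatchContext context) => _context = context;

    public async Task WriteAsync(Batch batch)
    {
        _context.Batches.Add(batch);
        await _context.SaveChangesAsync();
    }
}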
public async Task Invoke(Func<BatchContext, Task> next, BatchContext context)
{
    _timer.Start();
    await next(context).ConfigureAwait(false);

    // Work out which rolling intervals to include in the rate calculation
    var currentInterval = _timer.ElapsedMilliseconds / _intervalInMilliSeconds;
    var startInterval = (currentInterval - _rollingIntervals > 0) ? currentInterval - _rollingIntervals : 0;

    // Sum the messages processed across the rolling window
    var messagesProcessedInLastInterval = 0;
    for (long i = startInterval; i <= currentInterval; i++)
    {
        var intervalFound = _messagesProcessed.FirstOrDefault(x => x.IntervalNumber == i);
        if (intervalFound != null)
        {
            messagesProcessedInLastInterval += intervalFound.MessagesProcessed;
        }
    }

    var elapsedSinceLastIntervalBegan = _timer.ElapsedMilliseconds - currentInterval * _intervalInMilliSeconds;
    var wholeIntervalsElapsed = currentInterval - startInterval;
    var elapsedTime = wholeIntervalsElapsed * _intervalInMilliSeconds + elapsedSinceLastIntervalBegan;
    double elapsedIntervals = (double)elapsedTime / _intervalInMilliSeconds;
    var processingRate = _intervalsPerMinute * messagesProcessedInLastInterval / elapsedIntervals;

    if (processingRate > _rateLimit)
    {
        // Over the rate limit: delay long enough to bring the average rate back under the limit
        var targetTimePerMessage = 1000 * 60 / _rateLimit;
        var targetTime = messagesProcessedInLastInterval * targetTimePerMessage;
        var delay = targetTime - elapsedTime;
        OnThrottling(new ThrottlingEventArgs(delay));
        await _throttler.Delay(delay).ConfigureAwait(false);
    }
}
public override LineQueueItem[] Process(LineQueueItem[] items)
{
    var ret = new List<LineQueueItem>();

    using (var context = new BatchContext())
    {
        foreach (var item in items)
        {
            bool itemSucceeded = false;

            var batchLine = context.BatchLines.Find(item.BatchLineId);
            if (batchLine == null)
            {
                if (item.Impl.AttemptCount >= this.Impl.MaxRetry - 1)
                {
                    // Max attempts reached - signal and remove anyway
                    ret.Add(item);
                }
                continue;
            }

            var partManager = new PartManager();
            var soegObject = batchLine.ToSoegObject();
            if (soegObject != null)
            {
                // Try to search locally first.
                // Since Mass PNR Lookup does some name-matching itself first, we need the local search.
                // There could be a local person found by CPR Broker that Mass PNR Lookup does not match,
                // therefore using "SourceUsageOrder.LocalThenExternal" is not enough
                if (search(partManager, soegObject, batchLine, SourceUsageOrder.LocalOnly))
                {
                    itemSucceeded = true;
                }
                else
                {
                    // If no local person was found, search externally
                    if (search(partManager, soegObject, batchLine, SourceUsageOrder.ExternalOnly))
                    {
                        itemSucceeded = true;
                    }
                }
            }
            else
            {
                batchLine.Error = "Invalid address";
            }

            lock ("BatchCounts")
            {
                context.Entry<Batch>(batchLine.Batch).Reload(); // Reload to avoid overwriting the counts
                if (itemSucceeded)
                {
                    batchLine.Batch.SucceededLines++;
                }

                // Queue management
                if (itemSucceeded)
                {
                    ret.Add(item);
                    // Decrement the wait count on the semaphore
                    batchLine.Batch.GenerationSemaphore().Signal();
                }
                else if (item.Impl.AttemptCount >= this.Impl.MaxRetry - 1)
                {
                    // Max attempts reached - signal and remove anyway
                    ret.Add(item);
                    batchLine.Batch.FailedLines++;
                    batchLine.Batch.GenerationSemaphore().Signal();
                }

                // Save the result at this point
                context.SaveChanges();
            }
        }
    }
    return ret.ToArray();
}
public NumbersAndBatchesData(BatchContext context) { _context = context; }
public BatchesController(BatchContext context) { _context = context; }
public BatchReportRepository(BatchContext context) { _context = context; }
public PcsActiveTempParametersRepository(BatchContext batchContext) { _batchContext = batchContext; }
public PcsScoringSettingsController(BatchContext context) { _context = context; }
protected override IQueryable<Batch> LoadBatches(BatchContext context)
{
    return context.Batches.Include("User");
}