// Background polling loop: repeatedly asks the Semantria engine for processed
// documents belonging to this worker's job id, records per-document retrieval
// metrics, and dispatches each result via OnOutputReceived.
// Runs until _docsInProgress (shared counter, decremented under _syncTracker
// as documents come back) reaches zero.
void DataRetrievingWorker()
{
    // Job id convention ("worker_<id>") must match the id used when queuing.
    string jobId = string.Format("worker_{0}", WorkerId);

    // NOTE(review): _docsInProgress is read here without holding _syncTracker,
    // while writers update it under the lock. Presumably a stale read only
    // delays loop exit by one iteration — confirm this is acceptable.
    while (_docsInProgress > 0)
    {
        dynamic results = null;
        Stopwatch watcher = new Stopwatch();
        DateTime retrieved = DateTime.MinValue; // sentinel: "not retrieved yet"
        try
        {
            watcher.Start();
            results = _engine.GetProcessedDocumentsByJobId(jobId);
            watcher.Stop();
            retrieved = DateTime.Now; // wall-clock timestamp of the successful pull
        }
        catch (Exception ex)
        {
            // Report the failure to subscribers; the loop keeps polling.
            SemantriaWorkerException workerException = new SemantriaWorkerException("An exception occured during retrieval of the documents from the server.", ex);
            OnErrorOccurred(workerException);
        }

        // Nothing ready yet (or the request failed, leaving results null):
        // back off before the next poll.
        if (results == null || results.Count == 0)
        {
            Thread.Sleep(PullingTimeout);
            continue;
        }

        // Publish request-level timing for this retrieval call.
        OnMethodExecuted(new RequestMetrics(SemantriaAPImethod.RetrieveResultsByJobId, watcher.Elapsed, results.Count));

        foreach (dynamic doc in results)
        {
            DocumentMetrics metrics = new DocumentMetrics();
            lock (_syncTracker)
            {
                // Skip documents we are not tracking (continue leaves the lock
                // and moves to the next result).
                if (!_docsTracker.ContainsKey(doc.id))
                {
                    continue;
                }
                _docsTracker[doc.id].Retrieved = retrieved;
                metrics = _docsTracker[doc.id];
                _docsInProgress--; // one fewer outstanding document; loop exits at 0
            }
            // Raised after the lock block, so user callbacks run without
            // holding _syncTracker.
            OnOutputReceived(doc, metrics);
        }
    }
}
// Queues a batch of documents on the Semantria server and tops the retrieval
// thread pool back up so the results get pulled.
// Returns the server status code, or -1 when the queuing call threw (the
// exception is reported via OnErrorOccurred rather than propagated).
int QueueBatch(List<dynamic> batch)
{
    Stopwatch watcher = new Stopwatch();
    int status = -1;
    try
    {
        watcher.Start();
        status = _engine.QueueBatchOfDocuments(batch, _configId);
    }
    catch (Exception ex)
    {
        SemantriaWorkerException workerException = new SemantriaWorkerException("An exception occured during document queuing.", ex);
        OnErrorOccurred(workerException);
    }
    finally
    {
        // Stop in finally so Elapsed is valid even if the call throws.
        watcher.Stop();
    }

    if (status != -1)
    {
        OnMethodExecuted(new RequestMetrics(SemantriaAPImethod.QueueBatch, watcher.Elapsed, batch.Count));

        // FIX: bump the in-progress counter BEFORE starting retrieval workers.
        // DataRetrievingWorker's loop exits as soon as _docsInProgress == 0, so
        // a worker started before this increment could observe 0 and terminate
        // immediately, leaving the batch's results unretrieved.
        lock (_syncTracker)
        {
            _docsInProgress += batch.Count;
        }

        // Prune dead workers and refill the pool to NumberOfRetrievalThreads.
        _dataRetrievingWorkers.RemoveAll(worker => !worker.IsAlive);
        for (int i = _dataRetrievingWorkers.Count; i < NumberOfRetrievalThreads; i++)
        {
            Thread worker = new Thread(DataRetrievingWorker);
            _dataRetrievingWorkers.Add(worker);
            worker.Start();
        }
    }
    return status;
}
// (Re)creates the Semantria session using the worker's API credentials and
// wires up the session error handler.
// Returns true on success; on failure reports a SemantriaWorkerException via
// OnErrorOccurred and returns false. The args parameter is currently unused.
public bool Initialize(params object[] args)
{
    if (_engine != null)
    {
        // FIX: detach our handler before disposing the old session so a
        // disposed engine can never call back into this worker.
        _engine.Error -= _engine_Error;
        _engine.Dispose();
        _engine = null;
    }
    try
    {
        _engine = Session.CreateSession(_apiKey, _apiSecret);
        _engine.Error += _engine_Error;
    }
    catch (Exception ex)
    {
        SemantriaWorkerException workerException = new SemantriaWorkerException("Semantria session initialization failed!", ex);
        OnErrorOccurred(workerException);
        return false;
    }
    return true;
}
// Forwards asynchronous error events from the Semantria session to this
// worker's error pipeline, wrapped as a SemantriaWorkerException carrying the
// response status.
void _engine_Error(object sender, ResponseErrorEventArgs ea)
{
    SemantriaWorkerException workerException = new SemantriaWorkerException(ea.Message, ea.Status);
    // FIX(consistency): route through OnErrorOccurred like every other error
    // path in this worker, instead of invoking the ErrorOccurredCallback
    // delegate directly — a direct invoke would throw NullReferenceException
    // if no callback has been registered.
    OnErrorOccurred(workerException);
}
// Queues a batch of documents on the Semantria server and ensures enough
// retrieval worker threads are alive to pull the processed results back.
// Returns the server status code, or -1 when the queuing call threw (the
// exception is reported via OnErrorOccurred rather than propagated).
int QueueBatch(List<dynamic> batch)
{
    Stopwatch watcher = new Stopwatch();
    int status = -1;
    try
    {
        watcher.Start();
        status = _engine.QueueBatchOfDocuments(batch, _configId);
    }
    catch (Exception ex)
    {
        SemantriaWorkerException workerException = new SemantriaWorkerException("An exception occured during document queuing.", ex);
        OnErrorOccurred(workerException);
    }
    finally
    {
        // Stop in finally so Elapsed is valid even if the call throws.
        watcher.Stop();
    }

    if (status != -1)
    {
        OnMethodExecuted(new RequestMetrics(SemantriaAPImethod.QueueBatch, watcher.Elapsed, batch.Count));

        // FIX: increment the in-progress counter BEFORE starting retrieval
        // workers. DataRetrievingWorker's loop exits as soon as
        // _docsInProgress == 0, so a worker started before this increment
        // could observe 0 and terminate immediately.
        lock (_syncTracker)
        {
            _docsInProgress += batch.Count;
        }

        // Prune dead workers and refill the pool to NumberOfRetrievalThreads.
        _dataRetrievingWorkers.RemoveAll(worker => !worker.IsAlive);
        for (int i = _dataRetrievingWorkers.Count; i < NumberOfRetrievalThreads; i++)
        {
            Thread worker = new Thread(DataRetrievingWorker);
            _dataRetrievingWorkers.Add(worker);
            worker.Start();
        }
    }
    return status;
}
// (Re)creates the Semantria session with the worker's API key/secret and
// subscribes the session error handler.
// Returns true on success; on failure reports a SemantriaWorkerException via
// OnErrorOccurred and returns false. The args parameter is currently unused.
public bool Initialize(params object[] args)
{
    if (_engine != null)
    {
        // FIX: unsubscribe before disposing so the old, disposed session can
        // never raise Error back into this worker.
        _engine.Error -= _engine_Error;
        _engine.Dispose();
        _engine = null;
    }
    try
    {
        _engine = Session.CreateSession(_apiKey, _apiSecret);
        _engine.Error += _engine_Error;
    }
    catch (Exception ex)
    {
        SemantriaWorkerException workerException = new SemantriaWorkerException("Semantria session initialization failed!", ex);
        OnErrorOccurred(workerException);
        return false;
    }
    return true;
}