/// <summary>
/// Executes the job's SPARQL update expression against the write store inside
/// a logged transaction, committing and invalidating the read store on success.
/// </summary>
/// <remarks>
/// On any failure the transaction is logged as failed, <c>ErrorMessage</c> is
/// populated with the failure detail, and the exception is rethrown to the caller.
/// </remarks>
public override void Run()
{
    try
    {
        Logging.LogInfo("SPARQL update job being run on expression '{0}'", _updateExpression);
        StoreWorker.TransactionLog.LogStartTransaction(this);
        var processor = new BrightstarUpdateProcessor(new BrightstarIOManager(StoreWorker.WriteStore));
        var cmds = _parser.ParseFromString(_updateExpression);
        processor.ProcessCommandSet(cmds);
        StoreWorker.WriteStore.Commit(JobId);
        // Force readers to pick up the newly committed state.
        StoreWorker.InvalidateReadStore();
        Logging.LogInfo("SPARQL update job completed successfully");
        StoreWorker.TransactionLog.LogEndSuccessfulTransaction(this);
    }
    catch (RdfException ex)
    {
        StoreWorker.TransactionLog.LogEndFailedTransaction(this);
        Logging.LogInfo("Error processing SPARQL update expression '{0}'. Error Message: {1} Stack Trace: {2}", _updateExpression, ex.Message, ex.StackTrace);
        ErrorMessage = String.Format("Error processing SPARQL update expression. {0}", ex.Message);
        throw;
    }
    catch (Exception ex)
    {
        StoreWorker.TransactionLog.LogEndFailedTransaction(this);
        Logging.LogInfo("Error processing SPARQL update expression '{0}'. Error Message: {1} Stack Trace: {2}", _updateExpression, ex.Message, ex.StackTrace);
        // FIX: this handler previously left ErrorMessage unset, so callers inspecting
        // the job saw no failure detail for non-RDF errors. Mirror the RdfException path.
        ErrorMessage = String.Format("Error processing SPARQL update expression. {0}", ex.Message);
        throw;
    }
}
/// <summary>
/// Runs an RDF import job: locates the content file in the store's sibling
/// "import" directory, streams it through the configured parser into the
/// write store, and commits the result as a single transaction.
/// </summary>
/// <exception cref="FileNotFoundException">
/// Thrown (and recorded in <c>ErrorMessage</c>) when the content file is not
/// present in the import directory.
/// </exception>
public override void Run()
{
    try
    {
        Logging.LogInfo("Import job being run on file " + _contentFileName);
        StoreWorker.TransactionLog.LogStartTransaction(this);
        var parser = GetParser(_contentFileName);
        var storeDirectory = StoreWorker.WriteStore.DirectoryPath;
        // Import files live in a directory alongside (not inside) the store directory.
        var filePath = Path.Combine(storeDirectory, ".." + Path.DirectorySeparatorChar + "import" + Path.DirectorySeparatorChar + _contentFileName);
        // FIX: resolves the TODO - only pay the profiling cost when profiling is
        // enabled, matching the sibling import job implementation.
        var profiler = Logging.IsProfilingEnabled ? new BrightstarProfiler("Import " + _contentFileName) : null;
        Logging.LogDebug("Import file path calculated as '{0}'", filePath);
        if (!File.Exists(filePath))
        {
            ErrorMessage = String.Format("Cannot find file {0} in import directory", _contentFileName);
            throw new FileNotFoundException(ErrorMessage);
        }
        using (_fileStream = new FileStream(filePath, FileMode.Open, FileAccess.Read))
        {
            _importTripleSink = new StoreTripleSink(StoreWorker.WriteStore, JobId, Configuration.TransactionFlushTripleCount, profiler: profiler);
            parser.Parse(_fileStream, this, _graphUri);
        }
        StoreWorker.WriteStore.Commit(JobId, profiler);
        // Force readers to pick up the newly committed state.
        StoreWorker.InvalidateReadStore();
        Logging.LogInfo("Import job completed successfully for " + _contentFileName);
        if (profiler != null)
        {
            Logging.LogInfo(profiler.GetLogString());
        }
        StoreWorker.TransactionLog.LogEndSuccessfulTransaction(this);
    }
    catch (RdfParserException parserException)
    {
        // FIX: the transaction was started above but never logged as ended on a
        // parser failure; close it as failed, consistent with the handler below.
        StoreWorker.TransactionLog.LogEndFailedTransaction(this);
        ErrorMessage = parserException.Message;
        ExceptionDetail = new ExceptionDetail(parserException);
        Logging.LogInfo("Parser error processing import job on file " + _contentFileName + ". " + parserException.Message);
        throw;
    }
    catch (Exception ex)
    {
        ErrorMessage = "Error importing file " + _contentFileName + ". " + ex.Message;
        StoreWorker.TransactionLog.LogEndFailedTransaction(this);
        Logging.LogInfo("Error processing import job on file " + _contentFileName + ". Error Message: " + ex.Message + " Stack trace: " + ex.StackTrace);
        throw;
    }
}
/// <summary>
/// Runs an RDF import job: resolves the content file inside the "import"
/// directory next to the store directory, streams it through the configured
/// parser into the write store, and commits the result as a single transaction.
/// </summary>
/// <remarks>
/// Profiling is only enabled when <c>Logging.IsProfilingEnabled</c> is set;
/// otherwise no profiler is allocated.
/// </remarks>
public override void Run()
{
    try
    {
        Logging.LogInfo("Import job being run on file " + _contentFileName);
        StoreWorker.TransactionLog.LogStartTransaction(this);
        var parser = GetParser(_contentFileName);
        var storeDirectory = StoreWorker.WriteStore.DirectoryPath;
        // The import directory is a sibling of the store directory.
        var importDirectory = Path.Combine(Path.GetDirectoryName(storeDirectory), "import");
        var filePath = Path.Combine(importDirectory, _contentFileName);
        var profiler = Logging.IsProfilingEnabled ? new BrightstarProfiler("Import " + _contentFileName) : null;
        Logging.LogDebug("Import file path calculated as '{0}'", filePath);
        using (_fileStream = GetImportFileStream(filePath))
        {
            _importTripleSink = new StoreTripleSink(StoreWorker.WriteStore, JobId, Configuration.TransactionFlushTripleCount, profiler: profiler);
            parser.Parse(_fileStream, this, _graphUri);
        }
        StoreWorker.WriteStore.Commit(JobId, profiler);
        // Force readers to pick up the newly committed state.
        StoreWorker.InvalidateReadStore();
        Logging.LogInfo("Import job completed successfully for " + _contentFileName);
        if (profiler != null)
        {
            Logging.LogInfo(profiler.GetLogString());
        }
        StoreWorker.TransactionLog.LogEndSuccessfulTransaction(this);
    }
    catch (RdfParserException parserException)
    {
        // FIX: the transaction was started above but never logged as ended on a
        // parser failure; close it as failed, consistent with the handler below.
        StoreWorker.TransactionLog.LogEndFailedTransaction(this);
        ErrorMessage = parserException.Message;
        ExceptionDetail = new ExceptionDetailObject(parserException);
        Logging.LogInfo("Parser error processing import job on file " + _contentFileName + ". " + parserException.Message);
        throw;
    }
    catch (Exception ex)
    {
        ErrorMessage = "Error importing file " + _contentFileName + ". " + ex.Message;
        StoreWorker.TransactionLog.LogEndFailedTransaction(this);
        Logging.LogInfo("Error processing import job on file " + _contentFileName + ". Error Message: " + ex.Message + " Stack trace: " + ex.StackTrace);
        throw;
    }
}
/// <summary>
/// Executes an update transaction in three phases - existence preconditions,
/// delete patterns, then triple inserts - and commits the result, logging the
/// start, each phase, and the end (success or failure) of the transaction.
/// </summary>
/// <exception cref="PreconditionFailedException">
/// Thrown when one or more precondition triples are not present in the store.
/// </exception>
/// <exception cref="BrightstarClientException">
/// Thrown when the precondition, delete, or insert N-Triples data fails to parse.
/// </exception>
public override void Run()
{
    try
    {
        StoreWorker.TransactionLog.LogStartTransaction(this);
        var writeStore = StoreWorker.WriteStore;

        // Phase 1: preconditions. Every triple in _preconditions must already
        // exist in the store or the whole transaction is rejected.
        Logging.LogInfo("UpdateTransaction {0} - processing preconditions", JobId);
        try
        {
            var preconditionSink = new PreconditionSink(writeStore, PreconditionSink.PreconditionType.ExistsPrecondition);
            var parser = new NTriplesParser();
            parser.Parse(new StringReader(_preconditions), preconditionSink, _defaultGraphUri);
            if (preconditionSink.FailedPreconditionCount > 0)
            {
                throw new PreconditionFailedException(preconditionSink.FailedPreconditionCount, preconditionSink.GetFailedPreconditions(), 0, String.Empty);
            }
        }
        catch (RdfParserException parserException)
        {
            throw new BrightstarClientException("Syntax error in preconditions.", parserException);
        }

        // Phase 2: apply delete patterns.
        Logging.LogInfo("UpdateTransaction {0} - processing deletes", JobId);
        try
        {
            var delSink = new DeletePatternSink(writeStore);
            var parser = new NTriplesParser();
            parser.Parse(new StringReader(_deletePatterns), delSink, _defaultGraphUri);
        }
        catch (RdfParserException parserException)
        {
            throw new BrightstarClientException("Syntax error in delete patterns.", parserException);
        }

        // Phase 3: insert new triples.
        try
        {
            Logging.LogInfo("UpdateTransaction {0} - processing inserts", JobId);
            var parser = new NTriplesParser();
            parser.Parse(new StringReader(_insertData), new StoreTripleSink(writeStore, JobId, Configuration.TransactionFlushTripleCount), _defaultGraphUri);
        }
        catch (RdfParserException parserException)
        {
            throw new BrightstarClientException("Syntax error in triples to add.", parserException);
        }

        // Commit the changes and make the new state visible to readers.
        Logging.LogInfo("UpdateTransaction {0} - committing changes", JobId);
        writeStore.Commit(JobId);
        Logging.LogInfo("UpdateTransaction {0} - invalidating read store", JobId);
        StoreWorker.InvalidateReadStore();
        Logging.LogInfo("UpdateTransaction {0} - logging completion", JobId);
        StoreWorker.TransactionLog.LogEndSuccessfulTransaction(this);
        Logging.LogInfo("UpdateTransaction {0} - done", JobId);
    }
    catch (PreconditionFailedException ex)
    {
        StoreWorker.TransactionLog.LogEndFailedTransaction(this);
        Logging.LogInfo("Preconditions failed in UpdateTransaction ({0}): Count={1}, Triples={2}", JobId, ex.ExistenceFailureCount, ex.ExistenceFailedTriples);
        throw;
    }
    catch (BrightstarClientException ex)
    {
        StoreWorker.TransactionLog.LogEndFailedTransaction(this);
        // FIX: guard against a null InnerException so the error handler cannot
        // throw NullReferenceException and mask the original client failure.
        Logging.LogError(BrightstarEventId.TransactionClientError, "Client error reported in UpdateTransaction ({0}): {1}", JobId, ex.InnerException != null ? ex.InnerException.ToString() : ex.ToString());
        throw;
    }
    catch (Exception ex)
    {
        StoreWorker.TransactionLog.LogEndFailedTransaction(this);
        Logging.LogError(BrightstarEventId.TransactionServerError, "Unexpected exception caught in UpdateTransaction ({0}): {1}", JobId, ex);
        throw;
    }
}