/// <summary>
/// Reconstructs a <see cref="TransactionPreconditionsFailedException"/> from the
/// precondition-failure strings stored in an <see cref="ExceptionDetailObject"/>.
/// </summary>
/// <remarks>Used to "deserialize" a precondition failure from the ExceptionInfo
/// field of a <see cref="IJobInfo"/> instance.</remarks>
/// <param name="exceptionDetail">The detail object whose Data dictionary may carry the
/// "existenceFailedTriples" and "nonexistenceFailedTriples" entries.</param>
/// <returns>A new <see cref="TransactionPreconditionsFailedException"/>.</returns>
internal static Exception FromExceptionDetail(ExceptionDetailObject exceptionDetail)
{
    // Either entry may be absent from the dictionary, in which case the
    // corresponding out variable is left as null.
    string failedExistence;
    string failedNonexistence;
    exceptionDetail.Data.TryGetValue("existenceFailedTriples", out failedExistence);
    exceptionDetail.Data.TryGetValue("nonexistenceFailedTriples", out failedNonexistence);
    return new TransactionPreconditionsFailedException(failedExistence, failedNonexistence);
}
/// <summary>
/// Walks the inner-exception chain of <paramref name="exceptionDetail"/> and, if any
/// entry records a dotNetRDF parse error, rethrows it as a local <see cref="RdfParseException"/>.
/// </summary>
/// <param name="exceptionDetail">Head of the exception-detail chain; may be null, in which
/// case the method returns without throwing.</param>
/// <exception cref="RdfParseException">Thrown when a detail entry has the type name
/// "VDS.RDF.Parsing.RdfParseException".</exception>
private static void ExtractSyntaxError(ExceptionDetailObject exceptionDetail)
{
    while (exceptionDetail != null)
    {
        // Literal-on-left comparison: the original called Equals on
        // exceptionDetail.Type, which throws NullReferenceException when a
        // detail entry has no recorded type name. Ordinal comparison is
        // correct for a fully-qualified CLR type name.
        if ("VDS.RDF.Parsing.RdfParseException".Equals(exceptionDetail.Type, StringComparison.Ordinal))
        {
            throw new RdfParseException(exceptionDetail.Message);
        }
        exceptionDetail = exceptionDetail.InnerException;
    }
}
/// <summary>
/// Executes the import job: logs the start of a transaction, parses the import file
/// into the write store, commits, invalidates the read store, and records success
/// (or failure) in the transaction log.
/// </summary>
public override void Run()
{
    try
    {
        Logging.LogInfo("Import job being run on file " + _contentFileName);
        StoreWorker.TransactionLog.LogStartTransaction(this);
        var parser = GetParser(_contentFileName);
        // Import files are expected in an "import" directory that is a sibling
        // of the store's data directory.
        var storeDirectory = StoreWorker.WriteStore.DirectoryPath;
        var importDirectory = Path.Combine(Path.GetDirectoryName(storeDirectory), "import");
        var filePath = Path.Combine(importDirectory, _contentFileName);
        // Profiler is only allocated when profiling is enabled; all consumers
        // below tolerate a null profiler.
        var profiler = Logging.IsProfilingEnabled ? new BrightstarProfiler("Import " + _contentFileName) : null;
        Logging.LogDebug("Import file path calculated as '{0}'", filePath);
        using (_fileStream = GetImportFileStream(filePath))
        {
            // Parsed triples are flushed to the write store in batches of
            // Configuration.TransactionFlushTripleCount.
            _importTripleSink = new StoreTripleSink(StoreWorker.WriteStore, JobId, Configuration.TransactionFlushTripleCount, profiler: profiler);
            parser.Parse(_fileStream, this, _graphUri);
        }
        StoreWorker.WriteStore.Commit(JobId, profiler);
        // Force readers to pick up the newly committed data.
        StoreWorker.InvalidateReadStore();
        Logging.LogInfo("Import job completed successfully for " + _contentFileName);
        if (profiler != null)
        {
            Logging.LogInfo(profiler.GetLogString());
        }
        StoreWorker.TransactionLog.LogEndSuccessfulTransaction(this);
    }
    catch (RdfParserException parserException)
    {
        // Surface the parse failure to the job's caller via ErrorMessage /
        // ExceptionDetail, then rethrow preserving the stack trace.
        // NOTE(review): unlike the general catch below, this path does not call
        // LogEndFailedTransaction — confirm whether that is intentional.
        ErrorMessage = parserException.Message;
        ExceptionDetail = new ExceptionDetailObject(parserException);
        Logging.LogInfo("Parser error processing import job on file " + _contentFileName + ". " + parserException.Message);
        throw;
    }
    catch (Exception ex)
    {
        // Any other failure marks the transaction as failed before rethrowing.
        ErrorMessage = "Error importing file " + _contentFileName + ". " + ex.Message;
        StoreWorker.TransactionLog.LogEndFailedTransaction(this);
        Logging.LogInfo("Error processing import job on file " + _contentFileName + ". Error Message: " + ex.Message + " Stack trace: " + ex.StackTrace);
        throw;
    }
}
/// <summary>
/// Concatenates the non-empty messages found along the inner-exception chain of
/// <paramref name="exceptionDetailObject"/>.
/// </summary>
/// <param name="exceptionDetailObject">Head of the detail chain; may be null.</param>
/// <param name="stopOnFirstDetailMessage">When true, returns the first non-empty
/// message encountered instead of concatenating them all.</param>
/// <returns>The combined (or first) message text; an empty string when no entry
/// in the chain carries a message.</returns>
public static string ExtractExceptionMessages(this ExceptionDetailObject exceptionDetailObject, bool stopOnFirstDetailMessage = false)
{
    var combined = string.Empty;
    for (var detail = exceptionDetailObject; detail != null; detail = detail.InnerException)
    {
        var message = detail.Message;
        if (string.IsNullOrEmpty(message))
        {
            continue;
        }
        if (stopOnFirstDetailMessage)
        {
            return message;
        }
        combined += message;
    }
    return combined;
}
/// <summary>
/// Creates a <see cref="SparqlUpdateException"/> whose message prefixes the
/// provided detail object's message with a standard SPARQL-update error header.
/// </summary>
/// <param name="innerDetail">Detail object whose Message is appended to the
/// exception text.</param>
public SparqlUpdateException(ExceptionDetailObject innerDetail)
    : base(string.Concat("An error occurred while executing the SPARQL update: ", innerDetail.Message))
{
}