/// <summary>
/// Handles a request to replay public events by creating and running the replay job,
/// then logging the job's execution status.
/// </summary>
/// <param name="signal">The replay request used to parameterize the job.</param>
public async Task HandleAsync(ReplayPublicEventsRequested signal)
{
    ReplayPublicEvents_Job job = jobFactory.CreateJob(signal);
    JobExecutionStatus result = await jobRunner.ExecuteAsync(job).ConfigureAwait(false);

    // Fixed log text: this handler replays public events; the previous message
    // ("Rebuild projection version") belonged to the projection-rebuild handler.
    logger.Debug(() => "Replay public events {@cronus_projection_rebuild}", result);
}
public void TestDeleteStoreAfterUpdate()
{
    // Create a store and start its worker.
    var sid = CreateStore();
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // Insert a single triple via a transaction job.
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");

    // Poll until the job reaches a terminal state. Previously the loop only
    // exited on CompletedOk, so a failed job would hang the test forever.
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
                    "Transaction failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // Shut down the worker, deleting the store directory once shutdown completes.
    storeWorker.Shutdown(true, () => _storeManager.DeleteStore(Configuration.StoreLocation + "\\" + sid));
}
public void TestExportJob()
{
    // Seed a brand-new store with two triples and commit them.
    var storeId = Guid.NewGuid().ToString();
    using (var store = _storeManager.CreateStore(Configuration.StoreLocation + "\\" + storeId))
    {
        store.InsertTriple("http://www.example.org/alice", "http://xmlns.org/foaf/0.1/knows", "http://www.example.org/bob", false, null, null, Constants.DefaultGraphUri);
        store.InsertTriple("http://www.example.org/bob", "http://xmlns.org/foaf/0.1/knows", "http://www.example.org/alice", false, null, null, Constants.DefaultGraphUri);
        store.Commit(Guid.NewGuid());
    }

    // Start a worker and queue an export job.
    var worker = new StoreWorker(Configuration.StoreLocation, storeId);
    worker.Start();
    var exportJobId = worker.Export(storeId + "_export.nt", null, RdfFormat.NQuads);

    // Poll until the export completes, failing fast if it hits a transaction error.
    var status = worker.GetJobStatus(exportJobId.ToString());
    while (status.JobStatus != JobStatus.CompletedOk)
    {
        Thread.Sleep(1000);
        status = worker.GetJobStatus(exportJobId.ToString());
        if (status.JobStatus == JobStatus.TransactionError)
        {
            Assert.Fail("Export job failed with a transaction error. Message={0}. Exception Detail={1}", status.Information, status.ExceptionDetail);
        }
    }
}
public void TestTransactionWithPreconditionFails()
{
    // Create a store and start its worker.
    var sid = CreateStore();
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // Precondition that cannot be satisfied by the freshly created (empty) store.
    const string preconds = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/core/version> ""1""^^<http://www.w3.org/2000/01/rdf-schema#integer>";
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction(preconds, "", "", data, Constants.DefaultGraphUri, "nt");

    // Poll until the job reaches a terminal state. Previously the loop only exited
    // on TransactionError, so an unexpected CompletedOk would hang the test forever.
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.TransactionError && jobStatus.JobStatus != JobStatus.CompletedOk)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }

    // NUnit convention is Assert.AreEqual(expected, actual); the original had them swapped.
    Assert.AreEqual(JobStatus.TransactionError, jobStatus.JobStatus);
}
public void TestGetErrorMessage()
{
    // Create a store and start its worker.
    var sid = CreateStore();
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // Deliberately malformed NTriples (unterminated final URI) to force a parse error.
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");

    // Poll until the job reaches a terminal state. Previously the loop only exited
    // on TransactionError, so an unexpected CompletedOk would hang the test forever.
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.TransactionError && jobStatus.JobStatus != JobStatus.CompletedOk)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.TransactionError, jobStatus.JobStatus);

    // The failed job should carry a descriptive message and exception detail.
    Assert.IsTrue(jobStatus.Information.Contains("Job Error"), "Unexpected job message: '{0}'", jobStatus.Information);
    Assert.IsTrue(jobStatus.ExceptionDetail.Message.Contains("Syntax error in triples to add."), "Unexpected job message: '{0}'", jobStatus.ExceptionDetail.Message);
}
public void TestImportPerformance25M()
{
    // Skip (inconclusive) when the large import fixture is not present locally.
    const string fileName = "bsbm_25m.nt";
    var importFilePath = BrightstarDB.Configuration.StoreLocation + "\\import\\" + fileName;
    if (!File.Exists(importFilePath))
    {
        Assert.Inconclusive("Cannot locate required test file at {0}. Test will not run.", importFilePath);
        return;
    }

    // Create a fresh store and a worker for it.
    var storeId = Guid.NewGuid().ToString();
    _storeManager.CreateStore(BrightstarDB.Configuration.StoreLocation + "\\" + storeId);
    var worker = new StoreWorker(BrightstarDB.Configuration.StoreLocation, storeId);
    worker.Start();

    // Time the import job from queueing to terminal state.
    var timer = new Stopwatch();
    timer.Start();
    var importJobId = worker.Import(fileName, Constants.DefaultGraphUri).ToString();
    var jobStatus = worker.GetJobStatus(importJobId);
    while (jobStatus.JobStatus == JobStatus.Pending || jobStatus.JobStatus == JobStatus.Started)
    {
        Thread.Sleep(100);
        jobStatus = worker.GetJobStatus(importJobId);
    }
    timer.Stop();
    Console.WriteLine("Time to import test file '" + fileName + "': " + timer.ElapsedMilliseconds);
}
public void TestQueryCaching()
{
    // Enable the global query cache for this test. Reset it in a finally block so
    // a failed assertion cannot leak the setting into subsequent tests (previously
    // the flag was only reset on the success path).
    Configuration.EnableQueryCache = true;
    try
    {
        // Create a store and start its worker.
        var sid = CreateStore();
        var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
        storeWorker.Start();

        // Insert a single triple and wait for the job to finish.
        const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
        var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");
        JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
        while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
        {
            Thread.Sleep(1000);
            jobStatus = storeWorker.GetJobStatus(jobId.ToString());
        }
        Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus, "Import failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

        // Cold query (populates the cache).
        var sw = new Stopwatch();
        sw.Start();
        var queryResult = storeWorker.Query("select * where { ?a ?b ?c }", SparqlResultsFormat.Xml, new[] { Constants.DefaultGraphUri });
        sw.Stop();
        Console.WriteLine("initial query took : " + sw.ElapsedMilliseconds);
        var initTime = sw.ElapsedMilliseconds;

        // Warm query.
        sw = new Stopwatch();
        sw.Start();
        var cachedResult = storeWorker.Query("select * where { ?a ?b ?c }", SparqlResultsFormat.Xml, new[] { Constants.DefaultGraphUri });
        sw.Stop();
        Console.WriteLine("warm query took : " + sw.ElapsedMilliseconds);

        Thread.Sleep(1000);

        // Fully cached query.
        sw = new Stopwatch();
        sw.Start();
        cachedResult = storeWorker.Query("select * where { ?a ?b ?c }", SparqlResultsFormat.Xml, new[] { Constants.DefaultGraphUri });
        sw.Stop();
        Console.WriteLine("cached query took : " + sw.ElapsedMilliseconds);
        var cachedTime = sw.ElapsedMilliseconds;

        // Cached result must be identical to the original result.
        Assert.AreEqual(queryResult, cachedResult);

        // Timing comparison is inherently flaky, so report Inconclusive rather than Fail.
        if (cachedTime >= initTime)
        {
            Assert.Inconclusive(
                "Expected time to read from cache ({0}ms) to be less than time to execute query ({1}ms).",
                cachedTime, initTime);
        }
    }
    finally
    {
        Configuration.EnableQueryCache = false;
    }
}
/// <summary>
/// Executes the transactions carried by <paramref name="request"/> via the proxy
/// executing service, forwards the resulting traces to the request's result
/// collector (if any), and returns a Completed status message.
/// </summary>
/// <param name="request">The job request holding the transactions, chain id and collector/router refs.</param>
/// <returns>A <see cref="JobExecutionStatus"/> marked Completed for the request.</returns>
private async Task <JobExecutionStatus> RunJob(JobExecutionRequest request)
{
    /*
     * Temporarily disabled.
     * TODO: https://github.com/AElfProject/AElf/issues/338
     * _state = State.Running;
     */
    // Execute the request's transactions; cancellation is driven by this worker's token source.
    var result = await _proxyExecutingService.ExecuteAsync(request.Transactions, request.ChainId, _cancellationTokenSource.Token);
    // Report the resulting traces back to the collector, if one was supplied.
    request.ResultCollector?.Tell(new TransactionTraceMessage(request.RequestId, result));
    // TODO: What if actor died in the middle
    var retMsg = new JobExecutionStatus(request.RequestId, JobExecutionStatus.RequestStatus.Completed);
    // TODO: tell requestor and router about the worker complete job,and set to idle state.
    /*
     * Temporarily disabled.
     * TODO: https://github.com/AElfProject/AElf/issues/338
     * request.ResultCollector?.Tell(retMsg);
     * request.Router?.Tell(retMsg);
     */
    // Mark this worker as no longer serving any request (-1 = idle sentinel).
    _servingRequestId = -1;
    /*
     * Temporarily disabled.
     * TODO: https://github.com/AElfProject/AElf/issues/338
     * _state = State.Idle;
     */
    return(retMsg);
}
/// <summary>
/// Creates a snapshot of job progress at a point in time.
/// </summary>
/// <param name="status">Current execution status of the job.</param>
/// <param name="consumed">Progress snapshot for the consuming stage.</param>
/// <param name="serialised">Progress snapshot for the serialising stage.</param>
/// <param name="totalEvents">Total number of events counted so far.</param>
/// <param name="isCalculatingTotalEvents">True while the total event count is still being calculated.</param>
public ProgressSnapshotMessage(JobExecutionStatus status, ProgressStageSnapshot consumed, ProgressStageSnapshot serialised, long totalEvents, bool isCalculatingTotalEvents)
{
    Status = status;
    Consumed = consumed;
    Serialised = serialised;
    TotalEvents = totalEvents;
    IsStillCalculatingTotalEvents = isCalculatingTotalEvents;
}
public void TestReadTransactionList()
{
    // Create a store and start its worker.
    var sid = CreateStore();
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // Execute the first transaction and wait for a terminal state. Previously the
    // loop only exited on CompletedOk, so a failed job would hang the test forever.
    var data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
                    "First transaction failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // The transaction log should now contain exactly one entry.
    var transactionLog = storeWorker.TransactionLog;
    var transactionList = transactionLog.GetTransactionList();
    var i = 0;
    while (transactionList.MoveNext())
    {
        i++;
    }
    Assert.AreEqual(1, i);

    // Execute a second transaction and wait for it the same way.
    data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");
    jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
                    "Second transaction failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // Re-enumerating the same list after Reset should now see two entries.
    transactionList.Reset();
    i = 0;
    while (transactionList.MoveNext())
    {
        i++;
    }
    Assert.AreEqual(2, i);
}
/// <summary>
/// Creates a JobInfoObject that copies information from an internal JobExecutionStatus object
/// </summary>
/// <param name="executionStatus">The internal job status whose identity, state,
/// message, exception detail and timestamps are copied into this object.</param>
public JobInfoObject(JobExecutionStatus executionStatus)
{
    JobId = executionStatus.JobId.ToString();
    JobStatus = executionStatus.JobStatus;
    StatusMessage = executionStatus.Information;
    ExceptionInfo = executionStatus.ExceptionDetail;
    QueuedTime = executionStatus.Queued;
    StartTime = executionStatus.Started;
    EndTime = executionStatus.Ended;
}
public void TestRecoverTransactionData()
{
    // Create a store and start its worker.
    var sid = CreateStore();
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // Insert a single triple and wait for a terminal state. Previously the loop
    // only exited on CompletedOk, so a failed job would hang the test forever.
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
                    "Transaction failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // Exactly one transaction should be recorded in the log.
    var transactionLog = storeWorker.TransactionLog;
    var transactionList = transactionLog.GetTransactionList();
    var i = 0;
    while (transactionList.MoveNext())
    {
        i++;
    }
    Assert.AreEqual(1, i);

    // Read the logged transaction metadata back and verify its header fields.
    var txnList = storeWorker.TransactionLog.GetTransactionList();
    txnList.MoveNext();
    var tinfo = txnList.Current;
    Assert.IsNotNull(tinfo);
    Assert.AreEqual(TransactionType.GuardedUpdateTransaction, tinfo.TransactionType);
    Assert.AreEqual(TransactionStatus.CompletedOk, tinfo.TransactionStatus);
    Assert.IsTrue(tinfo.TransactionStartTime < DateTime.UtcNow);

    // Rehydrate the transaction data from the log stream and check it round-trips.
    var job = new GuardedUpdateTransaction(Guid.NewGuid(), null, storeWorker);
    using (var tdStream = storeWorker.TransactionLog.GetTransactionData(tinfo.DataStartPosition))
    {
        job.ReadTransactionDataFromStream(tdStream);
    }
    Assert.IsNotNull(job);
    Assert.AreEqual(data, job.InsertData);
}
private void HandleExecutionStatus(JobExecutionStatus status)
{
    // Running and Completed statuses are not handled here; bail out early.
    if (status.Status == JobExecutionStatus.RequestStatus.Running ||
        status.Status == JobExecutionStatus.RequestStatus.Completed)
    {
        return;
    }

    // Any other (failure-like) status: resolve the pending task with an empty
    // trace list and clear all bookkeeping for the request.
    _requestIdToTaskCompleteSource[status.RequestId].TrySetResult(new List <TransactionTrace>());
    _requesteIdTransactionCounts.Remove(status.RequestId);
    _requestIdToTaskCompleteSource.Remove(status.RequestId);
    _requestIdToTraces.Remove(status.RequestId);
    // _requestIdToPendingTransactionIds.Remove(status.RequestId);
}
// Maps a JobExecutionStatus value to its serialized string form;
// unrecognised values serialize to null.
internal static string ToSerializedValue(this JobExecutionStatus value)
{
    if (value == JobExecutionStatus.Completed)
    {
        return "Completed";
    }
    if (value == JobExecutionStatus.Failed)
    {
        return "Failed";
    }
    if (value == JobExecutionStatus.Postponed)
    {
        return "Postponed";
    }
    return null;
}
public void TestImportAndLookupPerformance()
{
    // Skip (inconclusive) when the large import fixture is not present locally.
    if (!File.Exists(BrightstarDB.Configuration.StoreLocation + "\\import\\bsbm_5m.nt"))
    {
        Assert.Inconclusive("Cannot locate required test file at {0}. Test will not run.",
                            BrightstarDB.Configuration.StoreLocation + "\\import\\bsbm_5m.nt");
        return;
    }

    // Create a fresh store and a worker for it.
    var storeId = Guid.NewGuid().ToString();
    _storeManager.CreateStore(BrightstarDB.Configuration.StoreLocation + "\\" + storeId);
    var timer = new Stopwatch();
    var storeWorker = new StoreWorker(BrightstarDB.Configuration.StoreLocation, storeId);
    storeWorker.Start();

    // Time the import job from queueing to terminal state.
    // jobId is already a string, so no further ToString() calls are needed.
    timer.Start();
    var jobId = storeWorker.Import("bsbm_5m.nt", Constants.DefaultGraphUri).ToString();
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId);
    while (jobStatus.JobStatus == JobStatus.Pending || jobStatus.JobStatus == JobStatus.Started)
    {
        Thread.Sleep(100);
        jobStatus = storeWorker.GetJobStatus(jobId);
    }
    timer.Stop();
    Console.WriteLine("Time to import 5M triples test file: " + timer.ElapsedMilliseconds);

    // Validate that every triple in the source file is present in the store.
    var store = _storeManager.OpenStore(BrightstarDB.Configuration.StoreLocation + "\\" + storeId);
    var validator = new TriplesValidator(store, BrightstarDB.Configuration.StoreLocation + "\\import\\bsbm_5m.nt");
    timer.Reset();
    timer.Start();
    validator.Run();
    timer.Stop();
    Console.WriteLine("Time to validate 5M triples test file:" + timer.ElapsedMilliseconds);
    if (validator.UnmatchedTriples.Any())
    {
        // The format string previously lacked the {1} placeholder, so the joined
        // list of unmatched triples was passed but never printed.
        Assert.Fail("Validator failed to match {0} triples:\n{1}",
                    validator.UnmatchedTriples.Count,
                    String.Join("\n", validator.UnmatchedTriples));
    }
}
public void TestTransactionLogCreatedWhenLoggingEnabled()
{
    // Create a store with transaction logging enabled.
    var sid = CreateStore(withTransactionLog: true);
    var txnHeadersFile = Path.Combine(Configuration.StoreLocation, sid, "transactionheaders.bs");
    var txnLogFile = Path.Combine(Configuration.StoreLocation, sid, "transactions.bs");

    // Creating the store should create the log files immediately.
    Assert.IsTrue(File.Exists(txnHeadersFile), "Expected transactionheaders.bs file to be created when store is initially created");
    Assert.IsTrue(File.Exists(txnLogFile), "Expected transactions.bs file to be created when store is initially created");

    // Start a worker and execute a single transaction.
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");

    // Wait for a terminal state. Previously the loop only exited on CompletedOk,
    // so a failed job would hang the test forever.
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
                    "Transaction failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // Transaction files should still be there...
    Assert.IsTrue(File.Exists(txnHeadersFile));
    Assert.IsTrue(File.Exists(txnLogFile));

    // ...and both should now contain content.
    using (var txnStream = File.OpenRead(txnHeadersFile))
    {
        Assert.Greater(txnStream.Length, 0L);
    }
    using (var txnStream = File.OpenRead(txnLogFile))
    {
        Assert.Greater(txnStream.Length, 0L);
    }
}
public void TestTransactionWithPrecondition()
{
    // Create a store and start its worker.
    var storeId = CreateStore();
    var worker = new StoreWorker(Configuration.StoreLocation, storeId);
    worker.Start();

    // Seed the store with a triple plus a version triple the precondition will match.
    var data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np> .\n <http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/core/version> ""1""^^<http://www.w3.org/2000/01/rdf-schema#integer> .";
    var jobId = worker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");
    var status = worker.GetJobStatus(jobId.ToString());
    while (status.JobStatus != JobStatus.CompletedOk && status.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        status = worker.GetJobStatus(jobId.ToString());
    }
    Assert.IsTrue(status.JobStatus == JobStatus.CompletedOk,
                  "Initial insert failed: {0} : {1}", status.Information, status.ExceptionDetail);

    // Run a second transaction guarded by a precondition that the seed data satisfies.
    const string preconds = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/core/version> ""1""^^<http://www.w3.org/2000/01/rdf-schema#integer>";
    data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    jobId = worker.ProcessTransaction(preconds, "", "", data, Constants.DefaultGraphUri, "nt");
    status = worker.GetJobStatus(jobId.ToString());
    while (status.JobStatus != JobStatus.CompletedOk && status.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        status = worker.GetJobStatus(jobId.ToString());
    }
    Assert.IsTrue(status.JobStatus == JobStatus.CompletedOk,
                  "Transaction execution failed: {0} : {1}", status.Information, status.ExceptionDetail);
}
/// <summary>
/// Drives a projection rebuild via a saga timeout: runs (or polls) the rebuild job
/// and either re-schedules itself, cancels the version, or finalizes it.
/// </summary>
/// <param name="sagaTimeout">Timeout message carrying the projection version request.</param>
public async Task HandleAsync(CreateNewProjectionVersion sagaTimeout)
{
    RebuildProjection_Job job = jobFactory.CreateJob(sagaTimeout.ProjectionVersionRequest.Version, sagaTimeout.ProjectionVersionRequest.Timebox);
    JobExecutionStatus result = await jobRunner.ExecuteAsync(job).ConfigureAwait(false);

    // Fixed log text: this handler rebuilds a projection version; the previous
    // message ("Replay projection version") belonged to the replay handler.
    logger.Debug(() => "Rebuild projection version {@cronus_projection_rebuild}", result);

    if (result == JobExecutionStatus.Running)
    {
        // Job still in progress: poll again in 30 seconds via a new saga timeout.
        RequestTimeout(new CreateNewProjectionVersion(sagaTimeout.ProjectionVersionRequest, DateTime.UtcNow.AddSeconds(30)));
    }
    else if (result == JobExecutionStatus.Failed)
    {
        // Rebuild failed: request cancellation of the projection version.
        var cancel = new CancelProjectionVersionRequest(sagaTimeout.ProjectionVersionRequest.Id, sagaTimeout.ProjectionVersionRequest.Version, "Failed");
        commandPublisher.Publish(cancel);
    }
    else if (result == JobExecutionStatus.Completed)
    {
        // Rebuild finished: request finalization of the projection version.
        var finalize = new FinalizeProjectionVersionRequest(sagaTimeout.ProjectionVersionRequest.Id, sagaTimeout.ProjectionVersionRequest.Version);
        commandPublisher.Publish(finalize);
    }
}
public void TestTransactionWithPrecondition()
{
    // Create a store and start its worker.
    var sid = CreateStore();
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // Seed the store with a triple plus a version triple the precondition will match.
    var data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np> .\n <http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/core/version> ""1""^^<http://www.w3.org/2000/01/rdf-schema#integer> .";
    var jobId = storeWorker.ProcessTransaction("", "", data, "nt");

    // Wait for a terminal state. Previously the loop only exited on CompletedOk,
    // so a failed job would hang the test forever.
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
                    "Initial insert failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // Run a second transaction guarded by a precondition that the seed data satisfies.
    const string preconds = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/core/version> ""1""^^<http://www.w3.org/2000/01/rdf-schema#integer>";
    data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    jobId = storeWorker.ProcessTransaction(preconds, "", data, "nt");
    jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
                    "Transaction execution failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);
}
public void TestTransaction()
{
    // Create a store and start its worker.
    var sid = CreateStore();
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // Insert a single triple via a transaction job.
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");

    // Wait for a terminal state. Previously the loop only exited on CompletedOk,
    // so a failed job would hang the test forever.
    JobExecutionStatus status = storeWorker.GetJobStatus(jobId.ToString());
    while (status.JobStatus != JobStatus.CompletedOk && status.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        status = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, status.JobStatus,
                    "Transaction failed: {0} : {1}", status.Information, status.ExceptionDetail);
}
public void TestSparkJobInfo(
    [Values(JobExecutionStatus.Failed, JobExecutionStatus.Running, JobExecutionStatus.Succeeded, JobExecutionStatus.Unknown)]
    JobExecutionStatus status)
{
    // arrange: a proxy that returns a canned SparkJobInfo for any job id
    const int jobId = 65536;
    int[] stageIds = new[] { 100, 102, 104 };
    var proxyMock = new Mock <IStatusTrackerProxy>();
    var expectedJobInfo = new SparkJobInfo(jobId, stageIds, status);
    proxyMock.Setup(m => m.GetJobInfo(It.IsAny <int>())).Returns(expectedJobInfo);
    var tracker = new StatusTracker(proxyMock.Object);

    // act
    SparkJobInfo actualJobInfo = tracker.GetJobInfo(jobId);

    // assert: the tracker surfaces the proxy's job info unchanged
    Assert.IsNotNull(actualJobInfo);
    Assert.AreEqual(jobId, actualJobInfo.JobId);
    Assert.AreEqual(stageIds, actualJobInfo.StageIds);
    Assert.AreEqual(status, actualJobInfo.Status);
}
/// <summary>
/// Execute an update transaction.
/// </summary>
/// <param name="storeName">The name of the store to modify</param>
/// <param name="preconditions">NTriples that must be in the store in order for the transaction to execute</param>
/// <param name="deletePatterns">The delete patterns that will be removed from the store</param>
/// <param name="insertData">The NTriples data that will be inserted into the store.</param>
/// <param name="waitForCompletion">If set to true the method will block until the transaction completes</param>
/// <returns>Job Info</returns>
public IJobInfo ExecuteTransaction(string storeName, string preconditions, string deletePatterns, string insertData, bool waitForCompletion = true)
{
    try
    {
        // Queue the transaction once; previously both branches duplicated this call.
        var jobId = _serverCore.ProcessTransaction(storeName, preconditions, deletePatterns, insertData);
        if (!waitForCompletion)
        {
            return(new JobInfoWrapper(new JobInfo {
                JobId = jobId.ToString(), JobPending = true
            }));
        }
        // Poll until the job reaches a terminal state.
        JobExecutionStatus status = _serverCore.GetJobStatus(storeName, jobId.ToString());
        while (status.JobStatus != JobStatus.CompletedOk && status.JobStatus != JobStatus.TransactionError)
        {
            Thread.Sleep(50);
            status = _serverCore.GetJobStatus(storeName, jobId.ToString());
        }
        return(new JobInfoWrapper(new JobInfo {
            JobId = jobId.ToString(),
            StatusMessage = status.Information,
            JobCompletedOk = (status.JobStatus == JobStatus.CompletedOk),
            JobCompletedWithErrors = (status.JobStatus == JobStatus.TransactionError)
        }));
    }
    catch (Exception ex)
    {
        // "Queing" -> "queuing": corrected misspelling in log and exception messages.
        Logging.LogError(BrightstarEventId.ServerCoreException, "Error queuing transaction {0} {1} {2}", storeName, deletePatterns, insertData);
        throw new BrightstarClientException("Error queuing transaction in store " + storeName + ". " + ex.Message, ex);
    }
}
/// <summary>
/// Execute a SPARQL Update expression against a store
/// </summary>
/// <param name="storeName">The name of the store to be updated</param>
/// <param name="updateExpression">The SPARQL Update expression to be applied</param>
/// <param name="waitForCompletion">If set to true, the method will block until the transaction completes</param>
/// <returns>A <see cref="JobInfo"/> instance for monitoring the status of the job</returns>
public IJobInfo ExecuteUpdate(string storeName, string updateExpression, bool waitForCompletion = true)
{
    try
    {
        // Queue the update once; previously both branches duplicated this call.
        var jobId = _serverCore.ExecuteUpdate(storeName, updateExpression);
        if (!waitForCompletion)
        {
            return(new JobInfoWrapper(new JobInfo {
                JobId = jobId.ToString(), JobPending = true
            }));
        }
        // Poll until the job reaches a terminal state.
        JobExecutionStatus status = _serverCore.GetJobStatus(storeName, jobId.ToString());
        while (status.JobStatus != JobStatus.CompletedOk && status.JobStatus != JobStatus.TransactionError)
        {
            Thread.Sleep(50);
            status = _serverCore.GetJobStatus(storeName, jobId.ToString());
        }
        return(new JobInfoWrapper(new JobInfo {
            JobId = jobId.ToString(),
            StatusMessage = status.Information,
            JobCompletedOk = (status.JobStatus == JobStatus.CompletedOk),
            JobCompletedWithErrors = (status.JobStatus == JobStatus.TransactionError)
        }));
    }
    catch (Exception ex)
    {
        // "queing" -> "queuing": corrected misspelling in log and exception messages.
        Logging.LogError(BrightstarEventId.ServerCoreException, "Error queuing SPARQL update {0} {1}", storeName, updateExpression);
        throw new BrightstarClientException("Error queuing SPARQL update in store " + storeName + ". " + ex.Message, ex);
    }
}
// Builds a status entry for every job the scheduler is currently executing.
public List <JobExecutionStatus> GetCurrentJobStatus()
{
    var statuses = new List <JobExecutionStatus>();
    foreach (var executing in scheduler.GetCurrentlyExecutingJobs())
    {
        statuses.Add(new JobExecutionStatus
        {
            JobKey = executing.JobDetail.Key.Name,
            TriggerName = executing.Trigger.Key.Name,
            FireTime = GetLocalDateTime(executing.FireTimeUtc),
            PreviousFireTime = GetLocalDateTime(executing.PreviousFireTimeUtc),
            ScheduledFireTime = GetLocalDateTime(executing.ScheduledFireTimeUtc),
            NextFireTime = GetLocalDateTime(executing.NextFireTimeUtc),
            JobRunTime = executing.JobRunTime.TotalSeconds,
            State = scheduler.GetTriggerState(executing.Trigger.Key).ToString()
        });
    }
    return statuses;
}
// Builds a fire-time entry for every trigger of every known job.
public List <JobExecutionStatus> GetJobFireTimes()
{
    //TODO: Get next fire time for jobs which are not currently running
    var statuses = new List <JobExecutionStatus>();
    foreach (var jobKey in scheduler.GetJobKeys(GroupMatcher <JobKey> .AnyGroup()))
    {
        foreach (var trigger in scheduler.GetTriggersOfJob(jobKey))
        {
            statuses.Add(new JobExecutionStatus
            {
                JobKey = jobKey.Name,
                TriggerName = trigger.Key.Name,
                PreviousFireTime = GetLocalDateTime(trigger.GetPreviousFireTimeUtc()),
                NextFireTime = GetLocalDateTime(trigger.GetNextFireTimeUtc())
            });
        }
    }
    return statuses;
}
/// <summary>
/// Drives an event-store index rebuild via a saga timeout: picks the job factory
/// matching the requested index type, runs (or polls) the job, and either
/// re-schedules itself or finalizes the index.
/// </summary>
/// <param name="sagaTimeout">Timeout message carrying the index rebuild request.</param>
public async Task HandleAsync(RebuildIndexInternal sagaTimeout)
{
    ICronusJob <object> job = null; // we need to redesign the job factories
    // Select the factory based on the contract id of the requested index type.
    var theId = sagaTimeout.EventStoreIndexRequest.Id.Id;
    if (theId.Equals(typeof(EventToAggregateRootId).GetContractId(), StringComparison.OrdinalIgnoreCase))
    {
        job = jobFactory.CreateJob(sagaTimeout.EventStoreIndexRequest.Timebox);
    }
    else if (theId.Equals(typeof(MessageCounterIndex).GetContractId(), StringComparison.OrdinalIgnoreCase))
    {
        job = messageCounterJobFactory.CreateJob(sagaTimeout.EventStoreIndexRequest.Timebox);
    }
    else
    {
        // Unknown index type: nothing to rebuild.
        return;
    }
    JobExecutionStatus result = await jobRunner.ExecuteAsync(job).ConfigureAwait(false);
    if (result == JobExecutionStatus.Running)
    {
        // Job still in progress: poll again in 30 seconds via a new saga timeout.
        RequestTimeout(new RebuildIndexInternal(sagaTimeout.EventStoreIndexRequest, DateTime.UtcNow.AddSeconds(30)));
    }
    else if (result == JobExecutionStatus.Failed)
    {
        // log error
        // NOTE(review): a failed job is retried indefinitely every 30 seconds and the
        // "log error" TODO above is not implemented — confirm this is intended.
        RequestTimeout(new RebuildIndexInternal(sagaTimeout.EventStoreIndexRequest, DateTime.UtcNow.AddSeconds(30)));
    }
    else if (result == JobExecutionStatus.Completed)
    {
        // Rebuild finished: request finalization of the index.
        var finalize = new FinalizeEventStoreIndexRequest(sagaTimeout.EventStoreIndexRequest.Id);
        commandPublisher.Publish(finalize);
    }
}
public void JobWasExecuted_ForOnCompletionTriggerAndCompletedState_WillTriggerNextJobs(JobExecutionStatus status)
{
    // Arrange: JobA is chained to trigger both JobB and JobC on completion.
    var jobA = new JobKey("JobA");
    var jobB = new JobKey("JobB");
    var jobC = new JobKey("JobC");
    _jobDetail.Expect(j => j.Key).Return(jobA);
    _context.Expect(c => c.JobDetail).Return(_jobDetail);
    _context.Expect(c => c.Result).Return(status);
    _context.Scheduler.Expect(s => s.TriggerJob(jobB));
    _context.Scheduler.Expect(s => s.TriggerJob(jobC));
    _listener.AddJobChainLink(jobA, JobResultCriteria.OnCompletion, jobB);
    _listener.AddJobChainLink(jobA, JobResultCriteria.OnCompletion, jobC);

    // Act
    _listener.JobWasExecuted(_context, null);

    // Assert: both downstream triggers were logged.
    _testLogger.AssertInfoMessagesLogged(
        $"Completion of Job 'DEFAULT.{jobA.Name}' will now trigger Job 'DEFAULT.{jobB.Name}'",
        $"Completion of Job 'DEFAULT.{jobA.Name}' will now trigger Job 'DEFAULT.{jobC.Name}'");
}
public void JobWasExecuted_ForOnFailureTriggerAndNonFailedState_WillNotTriggerNextJob(JobExecutionStatus status)
{
    // Arrange: JobB should only fire if JobA fails, and the supplied status is not a failure.
    var jobA = new JobKey("JobA");
    var jobB = new JobKey("JobB");
    _jobDetail.Expect(j => j.Key).Return(jobA);
    _context.Expect(c => c.JobDetail).Return(_jobDetail);
    _context.Expect(c => c.Result).Return(status);
    _listener.AddJobChainLink(jobA, JobResultCriteria.OnFailure, jobB);

    // Act
    _listener.JobWasExecuted(_context, null);

    // Assert: no chain activity should have been logged.
    _testLogger.AssertNoMessagesLogged();
}
/// <summary>
/// Queues <paramref name="jobsToAdd"/> jobs on a max-concurrency queue and, for
/// <paramref name="iterations"/> rounds, releases each wave of concurrently-started
/// jobs through two gate events so the caller can observe how many start at once.
/// The final wave is intentionally left paused when this method returns.
/// </summary>
/// <param name="jobsToAdd">Total number of jobs to enqueue.</param>
/// <param name="toStart">Requested number of jobs to start concurrently.</param>
/// <param name="iterations">Number of release rounds to drive.</param>
/// <param name="jobShouldThrow">When true every job throws ArgumentException instead of returning a result.</param>
/// <returns>The status object holding the queue, pauser events and remaining-job count.</returns>
private JobExecutionStatus WaitForMaxConcurrentJobsToStart(int jobsToAdd, int toStart, int iterations, bool jobShouldThrow)
{
    // Shared state handed back to the caller: the queue under test plus the events
    // used to pause/release jobs between the two gates.
    var status = new JobExecutionStatus()
    {
        Queue = _maxConcurrentJobQueueFactory(Scheduler.Immediate, AutoJobExecutionQueue <TJobInput, TJobOutput> .DefaultConcurrent, toStart) as IJobExecutionQueue <TJobInput, TJobOutput>,
        AllJobsForIterationLaunched = new ManualResetEventSlim(false),
        PrimaryJobPauser = new ManualResetEventSlim(false),
        SecondaryJobPauser = new ManualResetEventSlim(false),
        RemainingJobs = jobsToAdd
    };

    using (var firstGateCrossedByAllJobs = new ManualResetEventSlim(false))
        using (var secondGateCrossedByAllJobs = new ManualResetEventSlim(false))
        {
            // Counters incremented by the jobs themselves: one per checkpoint.
            int jobCounter = 0, firstGateCounter = 0, secondGateCounter = 0;
            for (int i = 0; i < jobsToAdd; i++)
            {
                status.Queue.Add(A.Dummy <TJobInput>(), jobInput =>
                {
                    // How many jobs we expect to run concurrently in this wave: bounded by
                    // the queue's concurrency limit, the requested start count, and the
                    // number of jobs still left to run.
                    int expectedCounterValue = Math.Min(status.Queue.MaxConcurrent, Math.Min(toStart, status.RemainingJobs));
                    // Signal the driver once the whole wave has launched.
                    if (Interlocked.Increment(ref jobCounter) == expectedCounterValue)
                    {
                        status.AllJobsForIterationLaunched.Set();
                    }
                    //we have to get a bit complex here by controlling automatic job execution with 2 locks
                    // Gate 1: wait until the driver releases the wave.
                    status.PrimaryJobPauser.Wait();
                    if (Interlocked.Increment(ref firstGateCounter) == expectedCounterValue)
                    {
                        firstGateCrossedByAllJobs.Set();
                    }
                    // Gate 2: wait again so the driver can re-arm gate 1 before jobs finish.
                    status.SecondaryJobPauser.Wait();
                    if (Interlocked.Increment(ref secondGateCounter) == expectedCounterValue)
                    {
                        secondGateCrossedByAllJobs.Set();
                    }
                    if (!jobShouldThrow)
                    {
                        return(A.Dummy <TJobOutput>());
                    }
                    throw new ArgumentException();
                });
            }
            for (int j = 0; j < iterations; j++)
            {
                //wait for all jobs to start, then reset counter, allow them to complete, and reset allJobsLaunched
                status.AllJobsForIterationLaunched.Wait(TimeSpan.FromSeconds(5));
                Interlocked.Exchange(ref jobCounter, 0);
                status.AllJobsForIterationLaunched.Reset();
                status.RemainingJobs = status.Queue.QueuedCount;
                //let the next set begin to flow through, by unlocking the first gate
                status.PrimaryJobPauser.Set();
                //wait until all the jobs have received it before resetting the pauser, which will hold the next set of jobs as they're pumped in
                firstGateCrossedByAllJobs.Wait(TimeSpan.FromSeconds(5));
                firstGateCrossedByAllJobs.Reset();
                status.PrimaryJobPauser.Reset();
                //reset the gate counter...
                Interlocked.Exchange(ref firstGateCounter, 0);
                //if it's our last iteration, leave the jobs paused at the second gate
                if (j != iterations - 1)
                {
                    status.SecondaryJobPauser.Set();
                    //wait again, this time for the second gate
                    secondGateCrossedByAllJobs.Wait(TimeSpan.FromSeconds(5));
                    secondGateCrossedByAllJobs.Reset();
                    Interlocked.Exchange(ref secondGateCounter, 0);
                }
                status.SecondaryJobPauser.Reset();
            }
            return(status);
        }
}
/// <summary>
/// Verifies that a <c>ConsolidateJob</c> preserves store contents: after inserting one triple and
/// consolidating, the triple is still queryable; after deleting it and consolidating again, the
/// store is empty.
/// </summary>
public void TestConsolidateStore()
{
    // create a store
    var sid = CreateStore();

    // initialise and start the store worker
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // insert a single triple via a transaction job
    var data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    // Exit the poll loop on TransactionError as well as CompletedOk, otherwise a failed
    // transaction would spin here forever (same pattern as TestExportJob).
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus);

    // run a consolidate job and wait for it to finish
    jobId = Guid.NewGuid();
    storeWorker.QueueJob(new ConsolidateJob(jobId, null, storeWorker));
    jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus);

    // the consolidated store should still contain exactly the one triple
    var results = storeWorker.Query("select * where { ?a ?b ?c }", SparqlResultsFormat.Xml, new[] { Constants.DefaultGraphUri });
    var doc = XDocument.Parse(results);
    XNamespace sparqlNs = "http://www.w3.org/2005/sparql-results#";
    Assert.AreEqual(1, doc.Descendants(sparqlNs + "result").Count());

    // delete the triple (data is passed as the delete-patterns argument)
    data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    jobId = storeWorker.ProcessTransaction("", "", data, "", Constants.DefaultGraphUri, "nt");
    jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    // Guard against TransactionError here too so the test cannot hang.
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus);

    // consolidate again
    jobId = Guid.NewGuid();
    storeWorker.QueueJob(new ConsolidateJob(jobId, null, storeWorker));
    jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus);

    // the store should now be empty
    results = storeWorker.Query("select * where { ?a ?b ?c }", SparqlResultsFormat.Xml, new[] { Constants.DefaultGraphUri });
    doc = XDocument.Parse(results);
    Assert.AreEqual(0, doc.Descendants(sparqlNs + "result").Count());
}
public void TestFailedTransactionAppearsInTransactionList() { // create a store var sid = CreateStore(); // initialise and start the store worker var storeWorker = new StoreWorker(Configuration.StoreLocation, sid); storeWorker.Start(); // execute transaction with bad data var data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np"; var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt"); JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString()); while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError) { Thread.Sleep(1000); jobStatus = storeWorker.GetJobStatus(jobId.ToString()); } var transactionLog = storeWorker.TransactionLog; var transactionList = transactionLog.GetTransactionList(); var i = 0; while (transactionList.MoveNext()) { i++; } Assert.AreEqual(1, i); var txnList = storeWorker.TransactionLog.GetTransactionList(); txnList.MoveNext(); var tinfo = txnList.Current; Assert.IsNotNull(tinfo); Assert.AreEqual(TransactionType.GuardedUpdateTransaction, tinfo.TransactionType); Assert.AreEqual(TransactionStatus.Failed, tinfo.TransactionStatus); data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>"; jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt"); jobStatus = storeWorker.GetJobStatus(jobId.ToString()); while (jobStatus.JobStatus != JobStatus.CompletedOk) { Thread.Sleep(1000); jobStatus = storeWorker.GetJobStatus(jobId.ToString()); } transactionList.Reset(); i = 0; while (transactionList.MoveNext()) { i++; } Assert.AreEqual(2, i); }