/// <summary>
/// Verifies that a store can be deleted after an update transaction has been
/// applied, by deleting it from the store worker's shutdown callback.
/// </summary>
public void TestDeleteStoreAfterUpdate()
{
    // create a store
    var sid = CreateStore();

    // initialise and start the store worker
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // execute a transaction that inserts a single triple
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");

    // Poll for a terminal state. The original loop waited only for CompletedOk,
    // which would spin forever if the job failed with TransactionError.
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
        "Update transaction failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // Shut the worker down cleanly and delete the store once it has stopped.
    storeWorker.Shutdown(true, () => _storeManager.DeleteStore(Configuration.StoreLocation + "\\" + sid));
}
/// <summary>
/// Verifies that an export job over a small two-triple store runs to completion,
/// failing fast if the job reports a transaction error.
/// </summary>
public void TestExportJob()
{
    // Seed a fresh store with two foaf:knows triples linking alice and bob.
    var storeId = Guid.NewGuid().ToString();
    using (var store = _storeManager.CreateStore(Configuration.StoreLocation + "\\" + storeId))
    {
        store.InsertTriple("http://www.example.org/alice", "http://xmlns.org/foaf/0.1/knows", "http://www.example.org/bob", false, null, null, Constants.DefaultGraphUri);
        store.InsertTriple("http://www.example.org/bob", "http://xmlns.org/foaf/0.1/knows", "http://www.example.org/alice", false, null, null, Constants.DefaultGraphUri);
        store.Commit(Guid.NewGuid());
    }

    // Start a worker and submit an export job for the store.
    var worker = new StoreWorker(Configuration.StoreLocation, storeId);
    worker.Start();
    var exportJobId = worker.Export(storeId + "_export.nt", null, RdfFormat.NQuads);

    // Poll until the export completes; bail out with a failure on error.
    var jobStatus = worker.GetJobStatus(exportJobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk)
    {
        Thread.Sleep(1000);
        jobStatus = worker.GetJobStatus(exportJobId.ToString());
        if (jobStatus.JobStatus == JobStatus.TransactionError)
        {
            Assert.Fail("Export job failed with a transaction error. Message={0}. Exception Detail={1}", jobStatus.Information, jobStatus.ExceptionDetail);
        }
    }
}
/// <summary>
/// Verifies that a transaction guarded by a non-existence precondition is
/// rejected when matching triples are already present in the store.
/// </summary>
public void TestTransactionWithNonExistsancePreconditionFails()
{
    // Start a worker over a freshly-created store.
    var sid = CreateStore();
    var worker = new StoreWorker(Configuration.StoreLocation, sid);
    worker.Start();

    // Seed data for the "gra" resource.
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np> .\n <http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/core/version> ""1""^^<http://www.w3.org/2000/01/rdf-schema#integer> .";

    // Precondition asserting "gra" has NO triples (wildcard predicate/object),
    // which the seed data above makes false.
    const string notExistsPrecondition = @"<http://www.networkedplanet.com/people/gra> <" + Constants.WildcardUri + "> <" + Constants.WildcardUri + ">.";

    const string insertData = @"<http://www.networkedplanet.com/people/kal> <http://www.newtorkedplanet.com/types/worksfor> <http://wwww.networkedplanet.com/companies/np> .\n <http://www.networkedplanet.com/people/kal> <http://www.networkedplanet.com/core/version> ""1""^^<http://wwww.w3.org/2000/01/rdf-schema#integer> .";

    // The unguarded seed transaction must succeed.
    var jobId = worker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt", "UpdateTransaction");
    AssertJobCompleted(worker, jobId, JobStatus.CompletedOk);

    // The guarded transaction must fail its non-existence precondition.
    jobId = worker.ProcessTransaction("", notExistsPrecondition, "", insertData, Constants.DefaultGraphUri, "nt", "UpdateTransaction2");
    var failedStatus = AssertJobCompleted(worker, jobId, JobStatus.TransactionError);
    Assert.IsTrue(failedStatus.ExceptionDetail.Message.Contains("Transaction preconditions failed"),
        "Unexpected job exception message: {0}", failedStatus.ExceptionDetail.Message);
}
/// <summary>
/// Imports a 25M-triple BSBM fixture file into a fresh store and reports the
/// elapsed import time; inconclusive when the fixture is absent.
/// </summary>
public void TestImportPerformance25M()
{
    const string fileName = "bsbm_25m.nt";
    var importFilePath = BrightstarDB.Configuration.StoreLocation + "\\import\\" + fileName;

    // Skip rather than fail when the large fixture file is not available.
    if (!File.Exists(importFilePath))
    {
        Assert.Inconclusive("Cannot locate required test file at {0}. Test will not run.", importFilePath);
        return;
    }

    var storeId = Guid.NewGuid().ToString();
    _storeManager.CreateStore(BrightstarDB.Configuration.StoreLocation + "\\" + storeId);

    var storeWorker = new StoreWorker(BrightstarDB.Configuration.StoreLocation, storeId);
    storeWorker.Start();

    // Time the import job from submission until it leaves Pending/Started.
    var stopwatch = new Stopwatch();
    stopwatch.Start();
    var jobId = storeWorker.Import(fileName, Constants.DefaultGraphUri).ToString();
    var jobStatus = storeWorker.GetJobStatus(jobId);
    while (jobStatus.JobStatus == JobStatus.Pending || jobStatus.JobStatus == JobStatus.Started)
    {
        Thread.Sleep(100);
        jobStatus = storeWorker.GetJobStatus(jobId);
    }
    stopwatch.Stop();
    Console.WriteLine("Time to import test file '" + fileName + "': " + stopwatch.ElapsedMilliseconds);
}
/// <summary>
/// Verifies that a transaction whose existence precondition is not satisfied
/// ends with a TransactionError status.
/// </summary>
public void TestTransactionWithPreconditionFails()
{
    // create a store
    var sid = CreateStore();

    // initialise and start the store worker
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // Precondition requiring a triple that was never inserted, so the transaction must fail.
    const string preconds = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/core/version> ""1""^^<http://www.w3.org/2000/01/rdf-schema#integer>";
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction(preconds, "", "", data, Constants.DefaultGraphUri, "nt");

    // Poll until the job reaches ANY terminal state. The original loop waited
    // only for TransactionError and would spin forever if the job completed OK.
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.TransactionError && jobStatus.JobStatus != JobStatus.CompletedOk)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }

    // Assert.AreEqual takes the EXPECTED value first (the original had them swapped).
    Assert.AreEqual(JobStatus.TransactionError, jobStatus.JobStatus);
}
/// <summary>
/// Loads the warning-order list and binds it to the StoreWorker grid.
/// </summary>
private void BindGrid()
{
    var warningOrders = XMOrderInfoProductDetailsService.GetWarningOrderList();
    StoreWorker.DataSource = warningOrders;
    StoreWorker.DataBind();
}
/// <summary>
/// Verifies that a transaction with malformed NTriples data fails and surfaces
/// a useful error message and exception detail.
/// </summary>
public void TestGetErrorMessage()
{
    // create a store
    var sid = CreateStore();

    // initialise and start the store worker
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // Deliberately malformed NTriples (unterminated URI) to force a job error.
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");

    // Poll until the job reaches ANY terminal state. The original loop waited
    // only for TransactionError and would hang forever if the job completed OK.
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.TransactionError && jobStatus.JobStatus != JobStatus.CompletedOk)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.TransactionError, jobStatus.JobStatus, "Expected the malformed data to fail the job");

    Assert.IsTrue(jobStatus.Information.Contains("Job Error"), "Unexpected job message: '{0}'", jobStatus.Information);
    Assert.IsTrue(jobStatus.ExceptionDetail.Message.Contains("Syntax error in triples to add."), "Unexpected job message: '{0}'", jobStatus.ExceptionDetail.Message);
}
/// <summary>
/// Loads the logistics-cost list and binds it to the StoreWorker grid.
/// </summary>
private void BindGrid()
{
    var costs = XMLogisticsCostService.GetXMLogisticsCostList();
    StoreWorker.DataSource = costs;
    StoreWorker.DataBind();
}
/// <summary>
/// Demonstrates multicast events: a worker observed by a manager and a family
/// member, then a store-visit publisher observed by two stores.
/// </summary>
static void Main(string[] args)
{
    // Wire two subscribers to both the progress and completion events.
    var worker = new Worker();
    var manager = new Manager();
    var fam = new Fam();
    worker.WorkPerformed += fam.Worker_WorkPerformed;
    worker.WorkPerformed += manager.Worker_WorkPerformed;
    worker.WorkCompleted += manager.Worker_Completed;
    worker.WorkCompleted += fam.Worker_Completed;
    worker.DoWork(work: new WorkerEventArgs(5, WorkType.GOLF));
    Console.WriteLine("Called first");

    // Second publisher/subscriber demo: two stores observing visits.
    var storeWorker = new StoreWorker();
    var storeA = new StoreA();
    var storeB = new StoreB();
    storeWorker.storeVisited += storeA.StoreVisitedPerformed;
    storeWorker.storeVisited += storeB.StoreVisitedPerformed;
    storeWorker.VisitStore(5);
}
/// <summary>
/// Loads unusual orders, filters them to the current project, and binds the
/// result to the StoreWorker grid.
/// </summary>
private void BindGrid()
{
    var unusualOrders = XMOrderInfoService.getUnusualOrder();
    StoreWorker.DataSource = unusualOrders.Where(order => order.ProjectId == projectID);
    StoreWorker.DataBind();
}
/// <summary>
/// Loads products of the given brand type that have no minimum price set and
/// binds them to the StoreWorker grid.
/// </summary>
private void BindGrid()
{
    var products = XMProductService.getProductByMinPriceUnSet(brandTypeId);
    StoreWorker.DataSource = products;
    StoreWorker.DataBind();
}
/// <summary>
/// Verifies that no transaction log files are created for a store that was
/// created with logging disabled, even after an update has been processed.
/// </summary>
public void TestNoTransactionLogWhenLoggingDisabled()
{
    // create a store with transaction logging turned off
    var sid = CreateStore(withTransactionLog: false);

    // initialise and start the store worker
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // Neither log file should exist immediately after store creation
    Assert.IsFalse(File.Exists(Path.Combine(Configuration.StoreLocation, sid, "transactionheaders.bs")));
    Assert.IsFalse(File.Exists(Path.Combine(Configuration.StoreLocation, sid, "transactions.bs")));

    // execute a transaction
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");

    // Poll for a terminal state. The original loop waited only for CompletedOk,
    // which would spin forever if the job failed.
    var jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
        "Update transaction failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // The completed transaction must still not have produced any log files
    Assert.IsFalse(File.Exists(Path.Combine(Configuration.StoreLocation, sid, "transactionheaders.bs")));
    Assert.IsFalse(File.Exists(Path.Combine(Configuration.StoreLocation, sid, "transactions.bs")));
}
/// <summary>
/// Verifies that with the query cache enabled, repeated queries return identical
/// results and the cached read is faster than the initial execution.
/// </summary>
public void TestQueryCaching()
{
    Configuration.EnableQueryCache = true;
    // The original code reset the flag with a plain trailing statement, so a
    // failed assertion left the cache enabled for every subsequent test.
    // try/finally guarantees the global flag is always restored.
    try
    {
        // create a store
        var sid = CreateStore();

        // initialise and start the store worker
        var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
        storeWorker.Start();

        // execute a transaction inserting a single triple
        const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
        var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");
        JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
        while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
        {
            Thread.Sleep(1000);
            jobStatus = storeWorker.GetJobStatus(jobId.ToString());
        }
        Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus, "Import failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

        // Initial (cold) query execution.
        var sw = new Stopwatch();
        sw.Start();
        var queryResult = storeWorker.Query("select * where { ?a ?b ?c }", SparqlResultsFormat.Xml, new[] { Constants.DefaultGraphUri });
        sw.Stop();
        Console.WriteLine("initial query took : " + sw.ElapsedMilliseconds);
        var initTime = sw.ElapsedMilliseconds;

        // Immediate warm repeat of the same query.
        sw = new Stopwatch();
        sw.Start();
        var cachedResult = storeWorker.Query("select * where { ?a ?b ?c }", SparqlResultsFormat.Xml, new[] { Constants.DefaultGraphUri });
        sw.Stop();
        Console.WriteLine("warm query took : " + sw.ElapsedMilliseconds);

        // Cached repeat after a pause.
        Thread.Sleep(1000);
        sw = new Stopwatch();
        sw.Start();
        cachedResult = storeWorker.Query("select * where { ?a ?b ?c }", SparqlResultsFormat.Xml, new[] { Constants.DefaultGraphUri });
        sw.Stop();
        Console.WriteLine("cached query took : " + sw.ElapsedMilliseconds);
        var cachedTime = sw.ElapsedMilliseconds;

        // Cache must not change the result; timing claim is only Inconclusive
        // because wall-clock comparisons are inherently flaky.
        Assert.AreEqual(queryResult, cachedResult);
        if (cachedTime >= initTime)
        {
            Assert.Inconclusive(
                "Expected time to read from cache ({0}ms) to be less than time to execute query ({1}ms).",
                cachedTime, initTime);
        }
    }
    finally
    {
        Configuration.EnableQueryCache = false;
    }
}
/// <summary>
/// Verifies that the transaction log lists one entry per committed transaction
/// and that Reset() allows re-enumeration after further commits.
/// </summary>
public void TestReadTransactionList()
{
    // create a store
    var sid = CreateStore();

    // initialise and start the store worker
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // execute the first transaction
    var data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");

    // Poll for a terminal state. The original loops waited only for CompletedOk,
    // which would spin forever if a job failed.
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
        "First transaction failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // The log should now enumerate exactly one transaction
    var transactionLog = storeWorker.TransactionLog;
    var transactionList = transactionLog.GetTransactionList();
    var i = 0;
    while (transactionList.MoveNext())
    {
        i++;
    }
    Assert.AreEqual(1, i);

    // execute a second transaction
    data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");
    jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
        "Second transaction failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // After Reset(), re-enumeration should see both transactions
    transactionList.Reset();
    i = 0;
    while (transactionList.MoveNext())
    {
        i++;
    }
    Assert.AreEqual(2, i);
}
/// <summary>
/// Verifies that the data of a committed transaction can be read back from the
/// transaction log and replayed into a GuardedUpdateTransaction job.
/// </summary>
public void TestRecoverTransactionData()
{
    // create a store
    var sid = CreateStore();

    // initialise and start the store worker
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // execute a transaction
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");

    // Poll for a terminal state. The original loop waited only for CompletedOk,
    // which would spin forever if the job failed.
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
        "Update transaction failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // Exactly one transaction should be recorded in the log
    var transactionLog = storeWorker.TransactionLog;
    var transactionList = transactionLog.GetTransactionList();
    var i = 0;
    while (transactionList.MoveNext())
    {
        i++;
    }
    Assert.AreEqual(1, i);

    // now get txn data and check the recorded metadata
    var txnList = storeWorker.TransactionLog.GetTransactionList();
    txnList.MoveNext();
    var tinfo = txnList.Current;
    Assert.IsNotNull(tinfo);
    Assert.AreEqual(TransactionType.GuardedUpdateTransaction, tinfo.TransactionType);
    Assert.AreEqual(TransactionStatus.CompletedOk, tinfo.TransactionStatus);
    Assert.IsTrue(tinfo.TransactionStartTime < DateTime.UtcNow);

    // Replay the logged data into a fresh job; the insert payload must round-trip
    var job = new GuardedUpdateTransaction(Guid.NewGuid(), null, storeWorker);
    using (var tdStream = storeWorker.TransactionLog.GetTransactionData(tinfo.DataStartPosition))
    {
        job.ReadTransactionDataFromStream(tdStream);
    }
    Assert.IsNotNull(job);
    Assert.AreEqual(data, job.InsertData);
}
/// <summary>
/// Executes a SPARQL query against the store worker and returns the serialized
/// results as a UTF-8 string.
/// </summary>
/// <param name="storeWorker">The worker to query.</param>
/// <param name="sparqlExpression">The SPARQL query text.</param>
/// <param name="resultsFormat">The serialization format for the results.</param>
/// <param name="defaultGraphUris">Graphs forming the default graph of the query.</param>
/// <returns>The query results decoded from the in-memory result stream.</returns>
public static string Query(this StoreWorker storeWorker, string sparqlExpression, SparqlResultsFormat resultsFormat, string[] defaultGraphUris)
{
    var parsedQuery = ParseSparql(sparqlExpression);
    using (var buffer = new MemoryStream())
    {
        // Serialize without a BOM so the returned string starts with the payload.
        storeWorker.Query(parsedQuery, resultsFormat.WithEncoding(new UTF8Encoding(false)), buffer, defaultGraphUris);
        return Encoding.UTF8.GetString(buffer.ToArray());
    }
}
/// <summary>
/// Reads the search filters from the form fields and binds the matching worker
/// records to the StoreWorker grid.
/// </summary>
private void BindGrid()
{
    // Trim all user-entered filter values before querying.
    var name = txtUserName.Text.Trim();
    var province = txtProvince.Text.Trim();
    var city = txtCity.Text.Trim();
    var region = txtRegion.Text.Trim();

    var workers = XMWorkerInfoService.GetXMWorkerInfoList(name, province, city, region);
    StoreWorker.DataSource = workers;
    StoreWorker.DataBind();
}
/// <summary>
/// Polls the given job until it reaches a terminal state (CompletedOk or
/// TransactionError) and asserts that the final status matches the expectation.
/// </summary>
/// <returns>The final job status for further inspection by the caller.</returns>
private static JobExecutionStatus AssertJobCompleted(StoreWorker storeWorker, Guid jobId, JobStatus expectedStatus)
{
    var id = jobId.ToString();
    JobExecutionStatus jobStatus;
    do
    {
        // Short poll interval keeps the wait responsive for fast jobs.
        Thread.Sleep(250);
        jobStatus = storeWorker.GetJobStatus(id);
    } while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError);

    Assert.AreEqual(expectedStatus, jobStatus.JobStatus, "Job completed with an unexpected status");
    return jobStatus;
}
/// <summary>
/// Polls the given job until it reaches a terminal state and asserts that it
/// completed successfully, including the job's error details on failure.
/// </summary>
private static void AssertJobCompletedOk(StoreWorker storeWorker, Guid jobId)
{
    var id = jobId.ToString();
    var jobStatus = storeWorker.GetJobStatus(id);
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(id);
    }
    Assert.That(jobStatus.JobStatus, Is.EqualTo(JobStatus.CompletedOk),
        "Unexpected job failure: " + jobStatus.Information + " - " + jobStatus.ExceptionDetail);
}
/// <summary>
/// Asynchronously stores the DICOM data set to the configured SCPs, if any.
/// </summary>
/// <param name="ds">The data set to transmit.</param>
/// <param name="scps">Target SCPs; nothing is sent when the list is empty.</param>
private static void Store(DicomDataSet ds, List<DicomScp> scps)
{
    try
    {
        if (scps.Count > 0)
        {
            var worker = new StoreWorker(scps, ds);
            worker.RunWorkerAsync();
        }
    }
    finally
    {
        // NOTE(review): this only clears the local parameter copy; it does not
        // release the caller's reference to the data set.
        ds = null;
    }
}
/// <summary>
/// Verifies that deleting the transactionheaders.bs file of a store created with
/// logging enabled stops further transactions from being logged: the headers file
/// is not recreated and the transactions.bs file stops growing.
/// </summary>
public void TestDeletingTransactionHeadersDisablesLogging()
{
    // create a store
    var sid = CreateStore(withTransactionLog: true);
    var txnHeadersFile = Path.Combine(Configuration.StoreLocation, sid, "transactionheaders.bs");
    var txnLogFile = Path.Combine(Configuration.StoreLocation, sid, "transactions.bs");

    // Creating the store should create the files
    Assert.IsTrue(File.Exists(txnHeadersFile), "Expected transactionheaders.bs file to be created when store is initially created");
    Assert.IsTrue(File.Exists(txnLogFile), "Expected transactions.bs file to be created when store is initially created");

    // initialise and start the store worker
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // execute transactions
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");
    AssertJobCompletedOk(storeWorker, jobId);

    // Capture the log length after the first (logged) transaction so the second
    // transaction's effect on the file can be detected below.
    long logLength;
    using (var txnStream = File.OpenRead(txnLogFile))
    {
        logLength = txnStream.Length;
        Assert.Greater(logLength, 0L);
    }

    // Remove the transaction headers file
    File.Delete(txnHeadersFile);

    // Execute a second transaction
    const string data2 = @"<http://www.networkedplanet.com/people/kal> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    jobId = storeWorker.ProcessTransaction("", "", "", data2, Constants.DefaultGraphUri, "nt");
    AssertJobCompletedOk(storeWorker, jobId);

    // With the headers file gone, logging should be disabled: the headers file must
    // not reappear and the log file must be byte-for-byte the same size as before.
    Assert.IsFalse(File.Exists(txnHeadersFile), "Did not expect transactionheaders.bs to reappear after second transaction");
    Assert.IsTrue(File.Exists(txnLogFile), "Expected transactions.bs file to remain untouched after second transaction");
    using (var txnStream = File.OpenRead(txnLogFile))
    {
        Assert.AreEqual(logLength, txnStream.Length, "Expected transaction log file to be unchanged in size by second transaction");
    }
}
/// <summary>
/// Imports a 5M-triple BSBM fixture into a fresh store, then validates that
/// every triple in the source file is present, reporting timings for both phases.
/// </summary>
public void TestImportAndLookupPerformance()
{
    // Skip (inconclusive) when the large fixture file is not available.
    if (!File.Exists(BrightstarDB.Configuration.StoreLocation + "\\import\\bsbm_5m.nt"))
    {
        Assert.Inconclusive("Cannot locate required test file at {0}. Test will not run.",
            BrightstarDB.Configuration.StoreLocation + "\\import\\bsbm_5m.nt");
        return;
    }

    var storeId = Guid.NewGuid().ToString();
    _storeManager.CreateStore(BrightstarDB.Configuration.StoreLocation + "\\" + storeId);

    var timer = new Stopwatch();
    var storeWorker = new StoreWorker(BrightstarDB.Configuration.StoreLocation, storeId);
    storeWorker.Start();

    // Time the import job from submission until it leaves Pending/Started.
    timer.Start();
    var jobId = storeWorker.Import("bsbm_5m.nt", Constants.DefaultGraphUri).ToString();
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId);
    while (jobStatus.JobStatus == JobStatus.Pending || jobStatus.JobStatus == JobStatus.Started)
    {
        Thread.Sleep(100);
        jobStatus = storeWorker.GetJobStatus(jobId);
    }
    timer.Stop();
    Console.WriteLine("Time to import 5M triples test file: " + timer.ElapsedMilliseconds);

    // Validate every triple in the source file against the imported store.
    var store = _storeManager.OpenStore(BrightstarDB.Configuration.StoreLocation + "\\" + storeId);
    var validator = new TriplesValidator(store, BrightstarDB.Configuration.StoreLocation + "\\import\\bsbm_5m.nt");
    timer.Reset();
    timer.Start();
    validator.Run();
    timer.Stop();
    Console.WriteLine("Time to validate 5M triples test file:" + timer.ElapsedMilliseconds);

    if (validator.UnmatchedTriples.Any())
    {
        // BUGFIX: the format string previously had no {1} placeholder, so the
        // joined list of unmatched triples was never shown in the failure message.
        Assert.Fail("Validator failed to match {0} triples:\n{1}",
            validator.UnmatchedTriples.Count,
            String.Join("\n", validator.UnmatchedTriples));
    }
}
/// <summary>
/// Verifies that a store created with transaction logging enabled creates both
/// log files up front and writes content to them when a transaction is processed.
/// </summary>
public void TestTransactionLogCreatedWhenLoggingEnabled()
{
    // create a store with logging enabled
    var sid = CreateStore(withTransactionLog: true);
    var txnHeadersFile = Path.Combine(Configuration.StoreLocation, sid, "transactionheaders.bs");
    var txnLogFile = Path.Combine(Configuration.StoreLocation, sid, "transactions.bs");

    // Creating the store should create the files
    Assert.IsTrue(File.Exists(txnHeadersFile), "Expected transactionheaders.bs file to be created when store is initially created");
    Assert.IsTrue(File.Exists(txnLogFile), "Expected transactions.bs file to be created when store is initially created");

    // initialise and start the store worker
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // execute a transaction
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");

    // Poll for a terminal state. The original loop waited only for CompletedOk,
    // which would spin forever if the job failed.
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
        "Update transaction failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // Transaction files should still be there
    Assert.IsTrue(File.Exists(txnHeadersFile));
    Assert.IsTrue(File.Exists(txnLogFile));

    // There should also be some content in both files
    using (var txnStream = File.OpenRead(txnHeadersFile))
    {
        Assert.Greater(txnStream.Length, 0L);
    }
    using (var txnStream = File.OpenRead(txnLogFile))
    {
        Assert.Greater(txnStream.Length, 0L);
    }
}
/// <summary>
/// Asynchronously stores the DICOM data set to the configured SCPs with retry
/// behaviour enabled on the worker.
/// </summary>
/// <param name="ds">The data set to transmit.</param>
/// <param name="scps">Target SCPs; nothing is sent when the list is empty.</param>
/// <param name="retries">Number of retry attempts configured on the worker.</param>
/// <param name="retryTimeout">Timeout value configured on the worker between retries.</param>
private static void Store(DicomDataSet ds, List<DicomScp> scps, int retries, int retryTimeout)
{
    try
    {
        if (scps.Count > 0)
        {
            // NOTE(review): 'server' is never used below. The Retrieve call may exist
            // only for a side effect of the service locator — confirm before removing.
            DicomServer server = ServiceLocator.Retrieve<DicomServer>();
            StoreWorker sw = new StoreWorker(scps, ds);
            sw.EnableRetry = true;
            sw.NumberOfRetries = retries;
            sw.Timeout = retryTimeout;
            sw.RunWorkerAsync();
        }
    }
    finally
    {
        // NOTE(review): assigning null here only clears the local parameter copy;
        // it does not release the caller's reference to the data set.
        ds = null;
    }
}
/// <summary>
/// Verifies that a transaction guarded by an existence precondition succeeds
/// when the required triple is already present in the store.
/// </summary>
public void TestTransactionWithPrecondition()
{
    // Start a worker over a freshly-created store.
    var storeId = CreateStore();
    var worker = new StoreWorker(Configuration.StoreLocation, storeId);
    worker.Start();

    // Insert the triples that the precondition below will match.
    var data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np> .\n <http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/core/version> ""1""^^<http://www.w3.org/2000/01/rdf-schema#integer> .";
    var jobId = worker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");
    var jobStatus = worker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = worker.GetJobStatus(jobId.ToString());
    }
    Assert.IsTrue(jobStatus.JobStatus == JobStatus.CompletedOk, "Initial insert failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // Run a second insert guarded by a precondition on the version triple.
    const string preconds = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/core/version> ""1""^^<http://www.w3.org/2000/01/rdf-schema#integer>";
    data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    jobId = worker.ProcessTransaction(preconds, "", "", data, Constants.DefaultGraphUri, "nt");
    jobStatus = worker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = worker.GetJobStatus(jobId.ToString());
    }
    Assert.IsTrue(jobStatus.JobStatus == JobStatus.CompletedOk, "Transaction execution failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);
}
/// <summary>
/// Verifies that creating ("touching") an empty transactionheaders.bs file on a
/// store created without logging causes transaction logging to be enabled.
/// </summary>
public void TestTouchingTransactionHeadersEnablesLogging()
{
    // create a store with logging disabled
    var storeId = CreateStore(withTransactionLog: false);
    var headersPath = Path.Combine(Configuration.StoreLocation, storeId, "transactionheaders.bs");
    var logPath = Path.Combine(Configuration.StoreLocation, storeId, "transactions.bs");

    // initialise and start the store worker
    var worker = new StoreWorker(Configuration.StoreLocation, storeId);
    worker.Start();

    // Should be no transaction files because we created the store with logging disabled
    Assert.IsFalse(File.Exists(headersPath));
    Assert.IsFalse(File.Exists(logPath));

    // But now "touch" the header file to create it
    File.Create(headersPath).Close();

    // execute a transaction that logs data
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = worker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");
    AssertJobCompletedOk(worker, jobId);

    // Both transaction files should now exist and contain data.
    Assert.IsTrue(File.Exists(headersPath));
    Assert.IsTrue(File.Exists(logPath));
    using (var headerStream = File.OpenRead(headersPath))
    {
        Assert.Greater(headerStream.Length, 0L);
    }
    using (var logStream = File.OpenRead(logPath))
    {
        Assert.Greater(logStream.Length, 0L);
    }
}
/// <summary>
/// Verifies (via the short ProcessTransaction overload) that a transaction
/// guarded by a satisfied existence precondition completes successfully.
/// </summary>
public void TestTransactionWithPrecondition()
{
    // create a store
    var sid = CreateStore();

    // initialise and start the store worker
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // insert the triples that the precondition below will match
    var data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np> .\n <http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/core/version> ""1""^^<http://www.w3.org/2000/01/rdf-schema#integer> .";
    var jobId = storeWorker.ProcessTransaction("", "", data, "nt");

    // Poll for a terminal state. The original loops waited only for CompletedOk,
    // which would spin forever if a job failed.
    JobExecutionStatus jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
        "Initial insert failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);

    // now test precondition
    const string preconds = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/core/version> ""1""^^<http://www.w3.org/2000/01/rdf-schema#integer>";
    data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    jobId = storeWorker.ProcessTransaction(preconds, "", data, "nt");
    jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus,
        "Guarded transaction failed: {0} : {1}", jobStatus.Information, jobStatus.ExceptionDetail);
}
/// <summary>
/// Verifies that a simple single-triple insert transaction completes successfully.
/// </summary>
public void TestTransaction()
{
    // create a store
    var sid = CreateStore();

    // initialise and start the store worker
    var storeWorker = new StoreWorker(Configuration.StoreLocation, sid);
    storeWorker.Start();

    // execute a transaction inserting a single triple
    const string data = @"<http://www.networkedplanet.com/people/gra> <http://www.networkedplanet.com/types/worksfor> <http://www.networkedplanet.com/companies/np>";
    var jobId = storeWorker.ProcessTransaction("", "", "", data, Constants.DefaultGraphUri, "nt");

    // Poll for a terminal state. The original loop waited only for CompletedOk,
    // which would spin forever if the job failed.
    JobExecutionStatus status = storeWorker.GetJobStatus(jobId.ToString());
    while (status.JobStatus != JobStatus.CompletedOk && status.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        status = storeWorker.GetJobStatus(jobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, status.JobStatus,
        "Transaction failed: {0} : {1}", status.Information, status.ExceptionDetail);
}
/// <summary>
/// Runs the UpdateStatistics job over a small three-triple store and checks the
/// generated triple count and per-predicate counts.
/// </summary>
public void TestStatsJob()
{
    // Seed a store with three triples: two foaf:knows and one foaf:name.
    var storeId = "StatsJob_" + DateTime.Now.Ticks;
    using (var store = _storeManager.CreateStore(Configuration.StoreLocation + "\\" + storeId))
    {
        store.InsertTriple("http://www.example.org/alice", "http://xmlns.org/foaf/0.1/knows", "http://www.example.org/bob", false, null, null, Constants.DefaultGraphUri);
        store.InsertTriple("http://www.example.org/alice", "http://xmlns.org/foaf/0.1/name", "Alice", true, RdfDatatypes.String, null, Constants.DefaultGraphUri);
        store.InsertTriple("http://www.example.org/bob", "http://xmlns.org/foaf/0.1/knows", "http://www.example.org/alice", false, null, null, Constants.DefaultGraphUri);
        store.Commit(Guid.NewGuid());
    }

    // Start a worker and submit the statistics-update job.
    var worker = new StoreWorker(Configuration.StoreLocation, storeId);
    worker.Start();
    var statsJobId = worker.UpdateStatistics();

    // Poll until the job reaches a terminal state.
    var jobStatus = worker.GetJobStatus(statsJobId.ToString());
    while (jobStatus.JobStatus != JobStatus.CompletedOk && jobStatus.JobStatus != JobStatus.TransactionError)
    {
        Thread.Sleep(1000);
        jobStatus = worker.GetJobStatus(statsJobId.ToString());
    }
    Assert.AreEqual(JobStatus.CompletedOk, jobStatus.JobStatus, "Expected UpdateStatsJob to complete OK");

    // The latest snapshot must reflect the three inserted triples.
    var latestStats = worker.StoreStatistics.GetStatistics().FirstOrDefault();
    Assert.IsNotNull(latestStats);
    Assert.AreEqual(3, latestStats.TripleCount);
    Assert.AreEqual(2, latestStats.PredicateTripleCounts.Count);
    Assert.IsTrue(latestStats.PredicateTripleCounts.ContainsKey("http://xmlns.org/foaf/0.1/knows"));
    Assert.AreEqual(2, latestStats.PredicateTripleCounts["http://xmlns.org/foaf/0.1/knows"]);
    Assert.IsTrue(latestStats.PredicateTripleCounts.ContainsKey("http://xmlns.org/foaf/0.1/name"));
    Assert.AreEqual(1, latestStats.PredicateTripleCounts["http://xmlns.org/foaf/0.1/name"]);
}
/// <summary>
/// One-off recovery diagnostic: walks the commit points of a damaged local store
/// to find the most recent one where a test query still works, dumps the
/// transactions recorded since, reverts to a hard-coded known-good commit point,
/// and replays the last ten transactions oldest-first, re-running the query after
/// each to find where it breaks.
/// NOTE(review): paths, the commit-point offset (242472899) and the resource id
/// (1518601251) are hard-coded for a specific incident; this is not a general test.
/// </summary>
public void FindWorkingTransaction()
{
    Store store = new Store("c:\\brightstar\\twitteringtest\\", false);
    FileStoreManager fsm = new FileStoreManager(StoreConfiguration.DefaultStoreConfiguration);

    // Walk commit points newest-first; txnCount ends up as the number of commit
    // points (and hence transactions) since the last one where the query worked.
    int txnCount = 0;
    foreach (var cp in store.GetCommitPoints())
    {
        var oldStore = fsm.OpenStore("c:\\brightstar\\twitteringtest\\", cp.LocationOffset);
        try
        {
            oldStore.ExecuteSparqlQuery(TestQuery, SparqlResultsFormat.Xml);
            Console.WriteLine("Query worked for commit point : {0} @ {1}", cp.LocationOffset, cp.CommitTime);
            break;
        }
        catch (Exception)
        {
            Console.WriteLine("Query failed for commit point : {0} @ {1}", cp.LocationOffset, cp.CommitTime);
            txnCount++;
        }
    }

    // Dump the metadata of the transactions recorded since the last good commit.
    var txnLog = fsm.GetTransactionLog("c:\\brightstar\\twitteringtest");
    var txnList = txnLog.GetTransactionList();
    for (int i = 0; i <= txnCount; i++)
    {
        txnList.MoveNext();
        var txnInfo = txnList.Current;
        Console.WriteLine("Transaction #{0}: Start: {1}, Status: {2}, JobId: {3}", i, txnInfo.TransactionStartTime, txnInfo.TransactionStatus, txnInfo.JobId);
    }

    // Going back to last known good
    store.RevertToCommitPoint(new CommitPoint(242472899, 0, DateTime.UtcNow, Guid.Empty));

    // Collect the ten most recent transactions (list enumerates newest-first).
    var toReplay = new List <ITransactionInfo>();
    txnList = txnLog.GetTransactionList();
    for (int i = 0; i < 10; i++)
    {
        txnList.MoveNext();
        toReplay.Add(txnList.Current);
    }

    // Replay oldest-first (index 9 down to 0), re-checking the query after each.
    var storeWorker = new StoreWorker("c:\\brightstar", "twitteringtest");
    for (int i = 9; i >= 0; i--)
    {
        Console.WriteLine("Applying transaction : {0}", toReplay[i].JobId);
        txnLog.GetTransactionData(toReplay[i].DataStartPosition);
        var jobId = Guid.NewGuid();
        var updateJob = new UpdateTransaction(jobId, storeWorker);
        updateJob.ReadTransactionDataFromStream(txnLog.GetTransactionData(toReplay[i].DataStartPosition));
        updateJob.Run();

        // Sanity-check that a known resource id still resolves after the replay.
        var readStore = storeWorker.ReadStore as Store;
        var resource = readStore.Resolve(1518601251);
        Assert.IsNotNull(resource);

        try
        {
            var query = StoreExtensions.ParseSparql(TestQuery);
            using (var resultStream = new MemoryStream())
            {
                storeWorker.Query(query, SparqlResultsFormat.Xml, resultStream, new[] { Constants.DefaultGraphUri });
            }
            Console.WriteLine("Query succeeded");
        }
        catch (Exception ex)
        {
            // The first transaction whose replay breaks the query fails the run here.
            Console.WriteLine("Query failed: " + ex.Message);
            Assert.Fail();
        }
    }
}