Example no. 1
        public void TestImportAndValidateSingleFile()
        {
            const string fileName = "bsbm_1M.nt";
            const string storeName = "ImportAndValidate";

            if (_storeManager.DoesStoreExist(storeName))
            {
                _storeManager.DeleteStore(storeName);
                while (_storeManager.DoesStoreExist(storeName))
                {
                    Thread.Sleep(10);
                }
            }
            using (var store = _storeManager.CreateStore(storeName))
            {
                var jobId = Guid.NewGuid();
                using (var triplesStream = File.OpenRead(fileName))
                {
                    store.Import(jobId, triplesStream);
                }
                store.Commit(jobId);                
            }

            using(var triplesStream = File.OpenRead(fileName))
            {
                using (var store = _storeManager.OpenStore(storeName))
                {
                    var validatorSink = new ValidatorSink(store);
                    var parser = new NTriplesParser();
                    parser.Parse(triplesStream, validatorSink, Constants.DefaultGraphUri);
                    Console.WriteLine("Validated {0} triples in store", validatorSink.ValidationCount);
                }
            }

        }
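
The ValidatorSink, NoopParser and LoggingTripleSink classes used throughout these examples are triple-sink callbacks handed to NTriplesParser.Parse. As a rough sketch only (the Triple parameter list below is an assumption and should be checked against the ITripleSink interface in the BrightstarDB sources), a minimal counting sink could look like this:

        // Hypothetical counting sink for illustration. The Triple(...) parameter
        // list is assumed, not copied from the library.
        internal class CountingSink : ITripleSink
        {
            public int Count { get; private set; }

            public void Triple(string subject, bool subjectIsBNode,
                               string predicate, bool predicateIsBNode,
                               string obj, bool objectIsBNode, bool objectIsLiteral,
                               string dataType, string langCode, string graphUri)
            {
                // One callback per parsed triple; just tally it.
                Count++;
            }
        }
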
Example no. 2
 public void TestBasicNtriples()
 {
     var ntp = new NTriplesParser();
     using (var fs = new FileStream(TestPaths.DataPath+"simple.txt", FileMode.Open))
     {
         ntp.Parse(fs, new NoopParser(), Constants.DefaultGraphUri);
     }
 }
Example no. 3
 public void TestBrightstarParser()
 {
     var t = new Stopwatch();
     t.Start();
     using (var fs = new FileStream("BSBM_370k.nt", FileMode.Open))
     {
         var parser = new NTriplesParser();
         parser.Parse(fs, new NoopParser(), Constants.DefaultGraphUri);                
     }
     t.Stop();
     Console.WriteLine("Time for Brightstar Parser is " + t.ElapsedMilliseconds);
 }
Example no. 4
 internal TransactionPreconditionsFailedException(string failedTriples)
     : base("Transaction preconditions were not met.")
 {
     FailedPreconditions = failedTriples;
     try
     {
         _invalidSubjects = new List<string>();
         var p = new NTriplesParser();
         using (var rdr = new StringReader(failedTriples))
         {
             p.Parse(rdr, this, Constants.DefaultGraphUri);
         }
     }
     catch
     {
         // Ignore any errors when trying to parse the failed preconditions
     }
 }
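
Because the constructor passes this as the sink, the exception class itself must implement the parser's triple-sink callback. A plausible implementation (assumed for illustration, using the same hypothetical Triple signature as the CountingSink sketch above) that records each failed subject would be:

     // Assumed callback: each precondition triple reported by the parser has its
     // subject collected so it can be surfaced alongside FailedPreconditions.
     void ITripleSink.Triple(string subject, bool subjectIsBNode,
                             string predicate, bool predicateIsBNode,
                             string obj, bool objectIsBNode, bool objectIsLiteral,
                             string dataType, string langCode, string graphUri)
     {
         if (!_invalidSubjects.Contains(subject))
         {
             _invalidSubjects.Add(subject);
         }
     }
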
Example no. 5
        public void TestImportEscaping()
        {
            var parser = new NTriplesParser();
            var sink = new LoggingTripleSink();
            using (
                var stream = new FileStream(TestPaths.DataPath + "escaping.nt", FileMode.Open, FileAccess.Read,
                                            FileShare.ReadWrite))
            {
                parser.Parse(stream, sink, "http://example.org/g");
            }

            Assert.That(sink.Triples, Has.Count.EqualTo(8));
            Assert.That(sink.Triples, Has.Some.Property("Object").EqualTo("simple literal"));
            Assert.That(sink.Triples, Has.Some.Property("Object").EqualTo("backslash:\\"));
            Assert.That(sink.Triples, Has.Some.Property("Object").EqualTo("dquote:\""));
            Assert.That(sink.Triples, Has.Some.Property("Object").EqualTo("newline:\n"));
            Assert.That(sink.Triples, Has.Some.Property("Object").EqualTo("tab:\t"));
            Assert.That(sink.Triples, Has.Some.Property("Object").EqualTo("\u00E9"));
            Assert.That(sink.Triples, Has.Some.Property("Object").EqualTo("\u20AC"));
        }
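
For context, the assertions above imply that escaping.nt exercises the standard N-Triples string escapes (backslash, double quote, newline, tab and \uXXXX sequences). A hypothetical fragment with the same shape, embedded here as a C# literal, might read as follows (the real test file contains eight triples and is not reproduced here):

            // Illustrative only: not the actual contents of escaping.nt.
            const string escapingSample =
                "<http://example.org/s> <http://example.org/p> \"simple literal\" .\n" +
                "<http://example.org/s> <http://example.org/p> \"backslash:\\\\\" .\n" +
                "<http://example.org/s> <http://example.org/p> \"dquote:\\\"\" .\n" +
                "<http://example.org/s> <http://example.org/p> \"newline:\\n\" .\n" +
                "<http://example.org/s> <http://example.org/p> \"tab:\\t\" .\n" +
                "<http://example.org/s> <http://example.org/p> \"\\u00E9\" .\n" +
                "<http://example.org/s> <http://example.org/p> \"\\u20AC\" .";
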
Example no. 6
 public void Run()
 {
     try
     {
         var jobGuid = Guid.Parse(_jobId);
         using (_importStream = _importSource.OpenRead())
         {
             var parser = new NTriplesParser();
             _importTripleSink = new StoreTripleSink(_worker.WriteStore, jobGuid,
                                                     Configuration.TransactionFlushTripleCount);
             parser.Parse(_importStream, this, _graphUri);
             _importStream.Close();
         }
         _worker.WriteStore.Commit(jobGuid);
         _worker.InvalidateReadStore();
     }
     catch (RdfParserException parserException)
     {
         Logging.LogError(
             BrightstarEventId.ImportDataError,
             "Encountered parser error : {0}", parserException);
         _statusCallback(_jobId,
                         String.Format("Import failed due to parser error: {0}", parserException));
         Errors = true;
         ErrorMessage = parserException.HaveLineNumber
                            ? String.Format("Parser error at line {0}: {1}", parserException.LineNumber,
                                            parserException.Message)
                            : String.Format("Parser error: {0}", parserException.Message);
     }
     catch (Exception ex)
     {
         Logging.LogError(BrightstarEventId.JobProcessingError,
                          "Error processing import job on source " + _importSource + ". Error Message: " +
                          ex.Message + " Stack trace: " + ex.StackTrace);
         throw;
     }
 }
Example no. 7
        public void TestBrightstarParserStillFaster()
        {
            var t = new Stopwatch();
            t.Start();
            using (var fs = new FileStream(TestPaths.DataPath + "BSBM_370k.nt", FileMode.Open))
            {
                var parser = new NTriplesParser();
                parser.Parse(fs, new NoopParser(), Constants.DefaultGraphUri);                
            }
            t.Stop();
            Console.WriteLine("Time for Brightstar Parser is " + t.ElapsedMilliseconds);

            var t2 = new Stopwatch();
            t2.Start();
            using (var fs = new FileStream(TestPaths.DataPath + "BSBM_370k.nt", FileMode.Open))
            {
                var parser = new VDS.RDF.Parsing.NTriplesParser();
                parser.Load(new NoopParser(), new StreamReader(fs));
            }
            t2.Stop();
            Console.WriteLine("Time for dotNetRDF Parser is " + t2.ElapsedMilliseconds);

            Assert.That(t.ElapsedMilliseconds, Is.LessThan(t2.ElapsedMilliseconds));
        }
Example no. 8
        public void TestExportWhileWriting()
        {
            const int firstBatchSize = 50000;
            var storeName = Guid.NewGuid().ToString();
            var client = GetClient();
            client.CreateStore(storeName);
            var batch1 = MakeTriples(0, firstBatchSize);
            var batch2 = MakeTriples(firstBatchSize, firstBatchSize+1000);
            var batch3 = MakeTriples(firstBatchSize+1000, firstBatchSize+2000);
            var batch4 = MakeTriples(firstBatchSize+2000, firstBatchSize+3000);

            // Verify batch size
            var p = new NTriplesParser();
            var counterSink = new CounterTripleSink();
            p.Parse(new StringReader(batch1), counterSink, Constants.DefaultGraphUri);
            Assert.AreEqual(firstBatchSize, counterSink.Count);

            var jobInfo = client.ExecuteTransaction(storeName, new UpdateTransactionData {InsertData = batch1});
            Assert.AreEqual(true, jobInfo.JobCompletedOk);

            // Second export with parallel store writes
            var exportJobInfo = client.StartExport(storeName, storeName + "_export.nt", label:"Export Data");
            Assert.That(exportJobInfo.Label, Is.EqualTo("Export Data"));
            jobInfo = client.ExecuteTransaction(storeName, new UpdateTransactionData{InsertData = batch2});
            Assert.AreEqual(true, jobInfo.JobCompletedOk);
            exportJobInfo = client.GetJobInfo(storeName, exportJobInfo.JobId);
            if (exportJobInfo.JobCompletedWithErrors)
            {
                Assert.Fail("Export job completed with errors: {0} : {1}", exportJobInfo.StatusMessage, exportJobInfo.ExceptionInfo);
            }
            if (exportJobInfo.JobCompletedOk)
            {
                Assert.Inconclusive("Export job completed before end of first concurrent import job.");
            }
            Assert.That(exportJobInfo.Label, Is.EqualTo("Export Data"));
            jobInfo = client.ExecuteTransaction(storeName, new UpdateTransactionData{InsertData= batch3});
            Assert.AreEqual(true, jobInfo.JobCompletedOk);
            jobInfo = client.ExecuteTransaction(storeName, new UpdateTransactionData{InsertData = batch4});
            Assert.AreEqual(true, jobInfo.JobCompletedOk);
            while (!exportJobInfo.JobCompletedOk)
            {
                Assert.IsFalse(exportJobInfo.JobCompletedWithErrors);
                Thread.Sleep(1000);
                exportJobInfo = client.GetJobInfo(storeName, exportJobInfo.JobId);
            }

            var exportFile = new FileInfo(Path.Combine(Configuration.StoreLocation, "import", storeName + "_export.nt"));
            Assert.IsTrue(exportFile.Exists);
            var lineCount = File.ReadAllLines(exportFile.FullName).Count(x => !String.IsNullOrEmpty(x));
            Assert.AreEqual(firstBatchSize, lineCount);
        }
Example no. 9
 public void Run()
 {
     var p = new NTriplesParser();
     using (var fileReader = new StreamReader(_srcPath))
     {
         p.Parse(fileReader, this, Constants.DefaultGraphUri);
     }
 }
Example no. 10
        public override void Run()
        {
            try
            {
                StoreWorker.TransactionLog.LogStartTransaction(this);

                var writeStore = StoreWorker.WriteStore;

                // process preconditions
                Logging.LogInfo("UpdateTransaction {0} - processing preconditions", JobId);
                try
                {
                    var preconditionSink = new PreconditionSink(writeStore);
                    var parser = new NTriplesParser();
                    parser.Parse(new StringReader(_preconditions), preconditionSink, _defaultGraphUri);
                    if (preconditionSink.FailedPreconditionCount > 0)
                    {
                        throw new PreconditionFailedException(preconditionSink.FailedPreconditionCount, preconditionSink.GetFailedPreconditions());
                    }
                }
                catch (RdfParserException parserException)
                {
                    throw new BrightstarClientException("Syntax error in preconditions.", parserException);
                }

                // process deletes 
                Logging.LogInfo("UpdateTransaction {0} - processing deletes", JobId);
                try
                {
                    var delSink = new DeletePatternSink(writeStore);
                    var parser = new NTriplesParser();
                    parser.Parse(new StringReader(_deletePatterns), delSink, _defaultGraphUri);
                }
                catch (RdfParserException parserException)
                {
                    throw new BrightstarClientException("Syntax error in delete patterns.", parserException);
                }

                try
                {
                    // insert data
                    Logging.LogInfo("UpdateTransaction {0} - processing inserts", JobId);
                    var parser = new NTriplesParser();
                    parser.Parse(new StringReader(_insertData),
                                 new StoreTripleSink(writeStore, JobId, Configuration.TransactionFlushTripleCount),
                                 _defaultGraphUri);
                }
                catch (RdfParserException parserException)
                {
                    throw new BrightstarClientException("Syntax error in triples to add.", parserException);
                }

                // commit changes
                Logging.LogInfo("UpdateTransaction {0} - committing changes", JobId);
                writeStore.Commit(JobId);

                // change read store
                Logging.LogInfo("UpdateTransaction {0} - invalidating read store", JobId);
                StoreWorker.InvalidateReadStore();

                // log txn completed 
                Logging.LogInfo("UpdateTransaction {0} - logging completion", JobId);
                StoreWorker.TransactionLog.LogEndSuccessfulTransaction(this);
                Logging.LogInfo("UpdateTransaction {0} - done", JobId);
            }
            catch (PreconditionFailedException ex)
            {
                StoreWorker.TransactionLog.LogEndFailedTransaction(this);
                Logging.LogInfo("Preconditions failed in UpdateTransaction ({0}): Count={1}, Triples={2}", JobId, ex.FailureCount, ex.FailedTriples);
                throw;
            }
            catch (BrightstarClientException ex)
            {
                StoreWorker.TransactionLog.LogEndFailedTransaction(this);
                Logging.LogError(BrightstarEventId.TransactionClientError,
                                 "Client error reported in UpdateTransaction ({0}): {1}", JobId, ex.InnerException.ToString());
                throw;
            }
            catch (Exception ex)
            {
                StoreWorker.TransactionLog.LogEndFailedTransaction(this);
                Logging.LogError(BrightstarEventId.TransactionServerError,
                                 "Unexpected exception caught in UpdateTransaction ({0}): {1}",JobId, ex);
                throw;
            }
        }
Example no. 11
        public void TestBackslashEscape()
        {
            const string ntriples = @"<http://example.org/s> <http://example.org/p1> ""c:\\users""
<http://example.org/s> <http://example.org/p2> ""\\users\\tom""";
            var parser = new NTriplesParser();
            var sink = new LoggingTripleSink();
            parser.Parse(new StringReader(ntriples), sink, "http://example.org/g" );

            Assert.That(sink.Triples, Has.Count.EqualTo(2));
            var triple1 = sink.Triples.FirstOrDefault(t => t.Predicate.Equals("http://example.org/p1"));
            var triple2 = sink.Triples.FirstOrDefault(t => t.Predicate.Equals("http://example.org/p2"));
            Assert.That(triple1, Is.Not.Null);
            Assert.That(triple1.IsLiteral);
            Assert.That(triple1.Object, Is.EqualTo(@"c:\users"));
            Assert.That(triple2, Is.Not.Null);
            Assert.That(triple2.IsLiteral);
            Assert.That(triple2.Object, Is.EqualTo(@"\users\tom"));
        }
Example no. 12
 public void TestBasicNQuads()
 {
     var ntp = new NTriplesParser();
     using (var fs = new FileStream(TestPaths.DataPath + "nquads.txt", FileMode.Open))
     {
         ntp.Parse(fs, new NoopParser(), Constants.DefaultGraphUri);
     }
 }
Example no. 13
 /// <summary>
 /// Imports triples from the stream provided
 /// </summary>
 /// <param name="store">The store to import triples into</param>
 /// <param name="jobId">The GUID identifier for the import job</param>
 /// <param name="triples">The stream to read the triples from</param>
 /// <param name="graphUri">The URI of the graph to import the triples into, or null to import into the default graph</param>
 public static void Import(this IStore store, Guid jobId, Stream triples, Uri graphUri = null)
 {
     var tripleParser = new NTriplesParser();
     tripleParser.Parse(triples, new StoreTripleSink(store, jobId, 500000),
                        graphUri == null ? Constants.DefaultGraphUri : graphUri.ToString());
 }
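
A typical call site for this extension method, following the same pattern as Example no. 1 (the file name and store variable below are placeholders), would be:

  // Hypothetical usage of the Import extension shown above; "data.nt" and the
  // open IStore instance are placeholders, not values taken from the source.
  var jobId = Guid.NewGuid();
  using (var triples = File.OpenRead("data.nt"))
  {
      store.Import(jobId, triples);   // null graphUri => default graph
  }
  store.Commit(jobId);                // commit the import job, as in Example no. 1
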
Example no. 14
        public void TestExportWhileWriting()
        {
            const int firstBatchSize = 50000;
            var storeName = Guid.NewGuid().ToString();
            var client = GetClient();
            client.CreateStore(storeName);
            var batch1 = MakeTriples(0, firstBatchSize);
            var batch2 = MakeTriples(firstBatchSize, firstBatchSize+1000);
            var batch3 = MakeTriples(firstBatchSize+1000, firstBatchSize+2000);
            var batch4 = MakeTriples(firstBatchSize+2000, firstBatchSize+3000);

            // Verify batch size
            var p = new NTriplesParser();
            var counterSink = new CounterTripleSink();
            p.Parse(new StringReader(batch1), counterSink, Constants.DefaultGraphUri);
            Assert.AreEqual(firstBatchSize, counterSink.Count);

            var jobInfo = client.ExecuteTransaction(storeName, new UpdateTransactionData
                {
                    ExistencePreconditions = String.Empty,
                    DeletePatterns = String.Empty,
                    InsertData = batch1
                });
            jobInfo = WaitForJob(jobInfo, client, storeName);
            Assert.AreEqual(true, jobInfo.JobCompletedOk);

            // Second export with parallel store writes
            var exportJobInfo = client.StartExport(storeName, storeName + "_export.nt");

            jobInfo = client.ExecuteTransaction(storeName,
                                                new UpdateTransactionData
                                                    {
                                                        ExistencePreconditions = null,
                                                        DeletePatterns = null,
                                                        InsertData = batch2
                                                    });
            jobInfo = WaitForJob(jobInfo, client, storeName);
            Assert.AreEqual(true, jobInfo.JobCompletedOk);

            exportJobInfo = client.GetJobInfo(storeName, exportJobInfo.JobId);
            if (exportJobInfo.JobCompletedWithErrors)
            {
                Assert.Fail("Export job completed with errors: {0} : {1}", exportJobInfo.StatusMessage, exportJobInfo.ExceptionInfo);
            }
            if (exportJobInfo.JobCompletedOk)
            {
                Assert.Inconclusive("Export job completed before end of first concurrent import job.");
            }
            
            jobInfo = client.ExecuteTransaction(storeName, new UpdateTransactionData{InsertData= batch3});
            jobInfo = WaitForJob(jobInfo, client, storeName);
            Assert.AreEqual(true, jobInfo.JobCompletedOk);

            jobInfo = client.ExecuteTransaction(storeName, new UpdateTransactionData{InsertData = batch4});
            jobInfo = WaitForJob(jobInfo, client, storeName);
            Assert.AreEqual(true, jobInfo.JobCompletedOk);

            exportJobInfo = WaitForJob(exportJobInfo, client, storeName);
            Assert.That(exportJobInfo.JobCompletedOk);

            var exportFile = new FileInfo("c:\\brightstar\\import\\" + storeName + "_export.nt");
            Assert.IsTrue(exportFile.Exists);
            var lineCount = File.ReadAllLines(exportFile.FullName).Count(x => !String.IsNullOrEmpty(x));
            Assert.AreEqual(firstBatchSize, lineCount);
        }
Example no. 15
        public void TestExportWhileWriting()
        {
            var storeName = Guid.NewGuid().ToString();
            var client = GetClient();
            client.CreateStore(storeName);
            var batch1 = MakeTriples(0, 50000);
            var batch2 = MakeTriples(50000, 51000);
            var batch3 = MakeTriples(51000, 52000);
            var batch4 = MakeTriples(52000, 53000);

            // Verify batch size
            var p = new NTriplesParser();
            var counterSink = new CounterTripleSink();
            p.Parse(new StringReader(batch1), counterSink, Constants.DefaultGraphUri);
            Assert.AreEqual(50000, counterSink.Count);

            var jobInfo = client.ExecuteTransaction(storeName, String.Empty, String.Empty, batch1);
            Assert.AreEqual(true, jobInfo.JobCompletedOk);

            // Second export with parallel store writes
            var exportJobInfo = client.StartExport(storeName, storeName + "_export.nt");
            jobInfo = client.ExecuteTransaction(storeName, null, null, batch2);
            Assert.AreEqual(true, jobInfo.JobCompletedOk);
            exportJobInfo = client.GetJobInfo(storeName, exportJobInfo.JobId);
            Assert.IsTrue(exportJobInfo.JobStarted, "Test inconclusive - export job completed before end of first concurrent import job."); // This is just to check that the export is still running while at least one commit occurs
            jobInfo = client.ExecuteTransaction(storeName, null, null, batch3);
            Assert.AreEqual(true, jobInfo.JobCompletedOk);
            jobInfo = client.ExecuteTransaction(storeName, null, null, batch4);
            Assert.AreEqual(true, jobInfo.JobCompletedOk);
            while (!exportJobInfo.JobCompletedOk)
            {
                Assert.IsFalse(exportJobInfo.JobCompletedWithErrors);
                Thread.Sleep(1000);
                exportJobInfo = client.GetJobInfo(storeName, exportJobInfo.JobId);
            }

            var exportFile = new FileInfo("c:\\brightstar\\import\\" + storeName + "_export.nt");
            Assert.IsTrue(exportFile.Exists);
            var lineCount = File.ReadAllLines(exportFile.FullName).Count(x => !String.IsNullOrEmpty(x));
            Assert.AreEqual(50000, lineCount);
        }
Example no. 16
        public void TestExportWhileWriting()
        {
            int firstBatchSize = 50000;
            var storeName = Guid.NewGuid().ToString();
            var client = GetClient();
            client.CreateStore(storeName);
            var batch1 = MakeTriples(0, firstBatchSize);
            var batch2 = MakeTriples(firstBatchSize, firstBatchSize+1000);
            var batch3 = MakeTriples(firstBatchSize+1000, firstBatchSize+2000);
            var batch4 = MakeTriples(firstBatchSize+2000, firstBatchSize+3000);

            // Verify batch size
            var p = new NTriplesParser();
            var counterSink = new CounterTripleSink();
            p.Parse(new StringReader(batch1), counterSink, Constants.DefaultGraphUri);
            Assert.AreEqual(firstBatchSize, counterSink.Count);

            var jobInfo = client.ExecuteTransaction(storeName, String.Empty, String.Empty, batch1);
            Assert.AreEqual(true, jobInfo.JobCompletedOk);

            // Second export with parallel store writes
            var exportJobInfo = client.StartExport(storeName, storeName + "_export.nt");
            jobInfo = client.ExecuteTransaction(storeName, null, null, batch2);
            Assert.AreEqual(true, jobInfo.JobCompletedOk);
            exportJobInfo = client.GetJobInfo(storeName, exportJobInfo.JobId);
            if (exportJobInfo.JobCompletedWithErrors)
            {
                Assert.Fail("Export job completed with errors: {0} : {1}", exportJobInfo.StatusMessage, exportJobInfo.ExceptionInfo);
            }
            if (exportJobInfo.JobCompletedOk)
            {
                Assert.Inconclusive("Export job completed before end of first concurrent import job.");
            }
            jobInfo = client.ExecuteTransaction(storeName, null, null, batch3);
            Assert.AreEqual(true, jobInfo.JobCompletedOk);
            jobInfo = client.ExecuteTransaction(storeName, null, null, batch4);
            Assert.AreEqual(true, jobInfo.JobCompletedOk);
            while (!exportJobInfo.JobCompletedOk)
            {
                Assert.IsFalse(exportJobInfo.JobCompletedWithErrors);
                Thread.Sleep(1000);
                exportJobInfo = client.GetJobInfo(storeName, exportJobInfo.JobId);
            }

            FileInfo exportFile;
            if (client is BrightstarRestClient)
            {
                exportFile = new FileInfo("c:\\brightstar\\import\\" + storeName + "_export.nt");
            }
            else
            {
                exportFile = new FileInfo("brightstar\\import\\" + storeName + "_export.nt");
            }
            Assert.IsTrue(exportFile.Exists);
            var lineCount = File.ReadAllLines(exportFile.FullName).Count(x => !String.IsNullOrEmpty(x));
            Assert.AreEqual(firstBatchSize, lineCount);
        }