/// <summary>
/// Downloads the graph for every Package item in the batch and hands the
/// combined triple store to ProcessStore.
/// </summary>
protected override async Task ProcessBatch(CollectorHttpClient client, IList<JObject> items, JObject context)
{
    // Kick off a graph download for each item that is a Package.
    var downloads = new List<Task<IGraph>>();
    foreach (JObject item in items)
    {
        if (!Utils.IsType(context, item, Constants.Package))
        {
            continue;
        }
        var itemUri = item["url"].ToObject<Uri>();
        downloads.Add(client.GetGraphAsync(itemUri));
    }

    if (downloads.Count == 0)
    {
        return;
    }

    await Task.WhenAll(downloads.ToArray());

    // Merge every downloaded graph into a single store for batch processing.
    var store = new TripleStore();
    foreach (var download in downloads)
    {
        store.Add(download.Result, true);
    }
    await ProcessStore(store);
}
/// <summary>
/// Verifies that a SELECT * with BIND and LIMIT is optimised to a lazy BGP
/// and that evaluation yields exactly 10 fully-bound results.
/// </summary>
public void SparqlBindLazy2()
{
    String query = "PREFIX fn: <" + XPathFunctionFactory.XPathFunctionsNamespace + "> SELECT * WHERE { ?s ?p ?o . BIND(fn:concat(STR(?s), ' ', STR(?p), ' ', STR(?o)) AS ?triple) } LIMIT 10";

    TripleStore store = new TripleStore();
    Graph graph = new Graph();
    FileLoader.Load(graph, "InferenceTest.ttl");
    store.Add(graph);

    SparqlQueryParser parser = new SparqlQueryParser();
    SparqlQuery parsedQuery = parser.ParseFromString(query);
    Console.WriteLine(parsedQuery.ToAlgebra().ToString());
    Assert.IsTrue(parsedQuery.ToAlgebra().ToString().Contains("LazyBgp"), "Should have been optimised to use a Lazy BGP");
    Console.WriteLine();

    Object results = parsedQuery.Evaluate(store);
    if (results is SparqlResultSet)
    {
        SparqlResultSet resultSet = (SparqlResultSet)results;
        foreach (SparqlResult result in resultSet)
        {
            Console.WriteLine(result.ToString());
        }
        Assert.IsTrue(resultSet.Count == 10, "Expected exactly 10 results");
        Assert.IsTrue(resultSet.All(row => row.HasValue("s") && row.HasValue("p") && row.HasValue("o") && row.HasValue("triple")), "Expected ?s, ?p, ?o and ?triple values for every result");
    }
    else
    {
        Assert.Fail("Expected a SPARQL Result Set");
    }
}
/// <summary>
/// Creates a dynamic SPARQL wrapper over the supplied RDF data, backed either
/// by a quad store (useStore) or a single in-memory graph.
/// </summary>
public static dynamic GetDyno(string data, bool autoquotation = true, bool treatUri = true, bool skipTriplesWithEmptyObject = false, bool mindAsterisk = false, bool useStore = false, string defaultGraphUri = "http://test.org/defaultgraph")
{
    DynamicSPARQLSpace.dotNetRDF.Connector connector;
    if (useStore)
    {
        // Quad store: the parsed data may span multiple graphs.
        var tripleStore = new VDS.RDF.TripleStore();
        tripleStore.LoadFromString(data);
        connector = new Connector(new InMemoryDataset(tripleStore, new Uri(defaultGraphUri)));
    }
    else
    {
        // Single default graph.
        var singleGraph = new VDS.RDF.Graph();
        singleGraph.LoadFromString(data);
        connector = new Connector(new InMemoryDataset(singleGraph));
    }

    // Wire both the querying and updating functions into the dynamic wrapper.
    return DynamicSPARQL.CreateDyno(
        connector.GetQueryingFunction(),
        updateFunc: connector.GetUpdateFunction(),
        autoquotation: autoquotation,
        treatUri: treatUri,
        skipTriplesWithEmptyObject: skipTriplesWithEmptyObject,
        mindAsterisk: mindAsterisk);
}
/// <summary>
/// Builds the query processor from the options: a remote SPARQL endpoint when
/// one is configured, otherwise an in-memory store loaded from a source file.
/// </summary>
/// <param name="opts">Parsed command line options</param>
/// <exception cref="Exception">Thrown when neither an endpoint nor a source file is specified</exception>
public MetadataSource(Options opts)
{
    if (!String.IsNullOrEmpty(opts.EndpointUri))
    {
        // Remote endpoint, optionally scoped to a default graph.
        var endpointUri = new Uri(opts.EndpointUri);
        SparqlRemoteEndpoint endpoint = String.IsNullOrEmpty(opts.DefaultGraphUri)
            ? new SparqlRemoteEndpoint(endpointUri)
            : new SparqlRemoteEndpoint(endpointUri, opts.DefaultGraphUri);
        this._processor = new RemoteQueryProcessor(endpoint);
    }
    else if (!String.IsNullOrEmpty(opts.SourceFile))
    {
        // Local file: load it into an in-memory store queried via Leviathan.
        var localStore = new TripleStore();
        var localGraph = new Graph();
        FileLoader.Load(localGraph, opts.SourceFile);
        localStore.Add(localGraph);
        this._processor = new LeviathanQueryProcessor(localStore);
    }
    else
    {
        throw new Exception("Must specify an endpoint or a file to query");
    }
}
/// <summary>
/// Applies the deletions described by the store: whole registrations first,
/// then individual package versions. Always disposes the store.
/// </summary>
async Task ProcessStore(TripleStore store)
{
    try
    {
        string baseAddress = _storage.BaseAddress + _storage.Container + "/resolver/";

        // Delete whole registration resources.
        SparqlResultSet registrationDeletes = SparqlHelpers.Select(store, Utils.GetResource("sparql.SelectDeleteRegistration.rq"));
        foreach (SparqlResult row in registrationDeletes)
        {
            string id = row["id"].ToString();
            await _storage.Delete(new Uri(baseAddress + id + ".json"));
        }

        // Delete individual package versions from their registration resource.
        SparqlResultSet packageDeletes = SparqlHelpers.Select(store, Utils.GetResource("sparql.SelectDeletePackage.rq"));
        foreach (SparqlResult row in packageDeletes)
        {
            string id = row["id"].ToString();
            string version = row["version"].ToString();
            await DeletePackage(new Uri(baseAddress + id + ".json"), version);
        }
    }
    finally
    {
        store.Dispose();
    }
}
/// <summary>
/// Downloads the graph for every batch item matching the collector's types and
/// forwards the combined store to ProcessStore. Always reports success.
/// </summary>
protected override async Task<bool> OnProcessBatch(CollectorHttpClient client, IEnumerable<JToken> items, JToken context, DateTime commitTimeStamp, bool isLastBatch, CancellationToken cancellationToken)
{
    // Start a graph download per matching item.
    var downloads = new List<Task<IGraph>>();
    foreach (JObject item in items)
    {
        if (!Utils.IsType((JObject)context, item, _types))
        {
            continue;
        }
        var itemUri = item["@id"].ToObject<Uri>();
        downloads.Add(client.GetGraphAsync(itemUri));
    }

    if (downloads.Count > 0)
    {
        await Task.WhenAll(downloads.ToArray());

        // Combine all downloaded graphs into a single store for processing.
        var store = new TripleStore();
        foreach (var download in downloads)
        {
            store.Add(download.Result, true);
        }
        await ProcessStore(store, cancellationToken);
    }
    return true;
}
/// <summary>
/// Verifies that a query projecting a BIND-computed variable evaluates to a
/// non-empty result set over the test data.
/// </summary>
public void SparqlBind()
{
    String query = "PREFIX fn: <" + XPathFunctionFactory.XPathFunctionsNamespace + "> SELECT ?triple WHERE { ?s ?p ?o . BIND(fn:concat(STR(?s), ' ', STR(?p), ' ', STR(?o)) AS ?triple) }";

    TripleStore store = new TripleStore();
    Graph graph = new Graph();
    FileLoader.Load(graph, "InferenceTest.ttl");
    store.Add(graph);

    SparqlQueryParser parser = new SparqlQueryParser();
    SparqlQuery parsedQuery = parser.ParseFromString(query);
    Object results = parsedQuery.Evaluate(store);
    if (results is SparqlResultSet)
    {
        SparqlResultSet resultSet = (SparqlResultSet)results;
        foreach (SparqlResult result in resultSet)
        {
            Console.WriteLine(result.ToString());
        }
        Assert.IsTrue(resultSet.Count > 0, "Expected 1 or more results");
    }
    else
    {
        Assert.Fail("Expected a SPARQL Result Set");
    }
}
/// <summary>
/// Parses and runs a SPARQL query against the store using the Leviathan
/// in-memory engine, returning the raw result object.
/// </summary>
static object Execute(TripleStore store, string sparql)
{
    var dataset = new InMemoryDataset(store);
    ISparqlQueryProcessor processor = new LeviathanQueryProcessor(dataset);
    SparqlQuery parsedQuery = new SparqlQueryParser().ParseFromString(sparql);
    return processor.ProcessQuery(parsedQuery);
}
/// <summary>
/// Constructor for MyLoFacebookContextReader.
/// </summary>
/// <param name="gpsl">A GPSLookup instance implementing IGPSlookup</param>
public MyLoFacebookContextReader(IGPSlookup gpsl)
{
    // Back the reader with an in-memory store containing a single graph
    // rooted at the MyLo Facebook base URI.
    _store = new TripleStore();
    _g = new Graph();
    _g.BaseUri = new Uri("http://facebook.MyLo.com");
    _store.Add(_g);
    gpsLookup = gpsl;
}
/// <summary>
/// Creates an in-memory store seeded with the music ontology configured in
/// the test settings.
/// </summary>
protected static IInMemoryQueryableStore CreateMemoryStore()
{
    string ontologyPath = Settings.Default.testStoreLocation;
    var memoryStore = new TripleStore();
    var ontologyGraph = new Graph();
    FileLoader.Load(ontologyGraph, ontologyPath);
    memoryStore.Add(ontologyGraph);
    return memoryStore;
}
/// <summary>
/// Creates an in-memory store seeded with the serialised graph configured in
/// the test settings.
/// </summary>
private static IInMemoryQueryableStore CreateMemoryStore()
{
    string sourceLocation = Settings.Default.testStoreLocation;
    var graph = new Graph();
    FileLoader.Load(graph, sourceLocation);

    var memoryStore = new TripleStore();
    memoryStore.Add(graph);
    return memoryStore;
}
/// <summary>
/// Loads the blank-node test TriG fixture and projects the triples of the
/// target graph to entity quads.
/// </summary>
private static IEnumerable<EntityQuad> GetGraphWithBlankNodes()
{
    var store = new TripleStore();
    store.LoadFromString(Resource.AsString("TestGraphs.BlankNodes.trig"));

    var triples = store.Graphs[GraphUri].Triples;
    Debug.WriteLine("Loading original graph with {0} triples", triples.Count());

    // Deferred projection: each triple becomes an EntityQuad for EntityId.
    return triples.Select(triple => triple.ToEntityQuad(EntityId));
}
/// <summary>
/// Currently the referenced instances are not contained in the resource graph.
/// The shacl validator would therefore throw an error, which is suppressed by this function to the metadata graph.
/// </summary>
/// <param name="store">InMemory store with metadata</param>
private void ModifyShapesForShaclClass(VDS.RDF.TripleStore store)
{
    // Remove every sh:class constraint from every named graph so that
    // class-membership validation is skipped for instances living outside
    // the resource graph.
    store.ExecuteUpdate(@"
        PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
        PREFIX sh: <http://www.w3.org/ns/shacl#>

        DELETE { graph ?g { ?s sh:class ?o } }
        WHERE { graph ?g { ?s sh:class ?o } }
    ");
}
/// <summary>
/// Classes must be referenced as target class. This must be added to the shacls.
/// </summary>
/// <param name="store">InMemory store with metadata</param>
// NOTE(review): method name is misspelled ("Modifiy"); renaming would break
// callers outside this view, so it is left as-is.
private void ModifiyShapesForTargetClass(VDS.RDF.TripleStore store)
{
    // For every typed subject in every named graph, add a reflexive
    // sh:targetClass triple so the shape applies to the class itself.
    store.ExecuteUpdate(@"
        PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
        PREFIX sh: <http://www.w3.org/ns/shacl#>

        INSERT { graph ?g { ?subclass sh:targetClass ?subclass } }
        WHERE { graph ?g { ?subclass a ?type } }
    ");
}
/// <summary>
/// Collects the distinct dependency ids found in the batch's store into
/// <c>Result</c>.
/// </summary>
protected override async Task ProcessStore(TripleStore store, CancellationToken cancellationToken)
{
    string sparql = Utils.GetResource("sparql.SelectDistinctDependency.rq");
    foreach (SparqlResult row in SparqlHelpers.Select(store, sparql))
    {
        Result.Add(row["id"].ToString());
    }

    // Previously this awaited Task.Run(() => { }, cancellationToken), which
    // scheduled a no-op thread-pool work item purely to observe the token.
    // Observe it directly instead (same OperationCanceledException surface).
    cancellationToken.ThrowIfCancellationRequested();
    await Task.CompletedTask;
}
/// <summary>
/// Builds a triple store and, if a serialised graph exists at the given
/// location, loads it into the store.
/// </summary>
/// <param name="storeLocation">Path to a previously serialised graph file</param>
// NOTE(review): the TripleStore built here is a local variable and nothing
// retains a reference to it when the method returns — presumably it was meant
// to be assigned to a field. Confirm intent before relying on this method.
public void InitialiseStore(string storeLocation)
{
    TripleStore store = new TripleStore();
    //store.AddReasoner(new Euler(new N3Reader(MusicConstants.OntologyURL)));
    if (File.Exists(storeLocation))
    {
        // Seed the store with the previously serialised graph.
        Graph g = new Graph();
        FileLoader.Load(g, storeLocation);
        store.Add(g);
    }
}
/// <summary>
/// Builds a nuspec XML document for every package found in the batch's store
/// and saves them all. Errors are logged rather than propagated.
/// </summary>
protected override async Task ProcessStore(TripleStore store, CancellationToken cancellationToken)
{
    try
    {
        // Find every package resource present in this batch.
        SparqlResultSet packages = SparqlHelpers.Select(store, Utils.GetResource("sparql.SelectPackage.rq"));
        IList<Uri> packageUris = new List<Uri>();
        foreach (SparqlResult row in packages)
        {
            Uri packageUri = ((IUriNode)row["package"]).Uri;
            packageUris.Add(packageUri);
        }

        // Assemble a nuspec document per package from the store contents.
        IList<XDocument> nuspecs = new List<XDocument>();
        foreach (Uri packageUri in packageUris)
        {
            XDocument nuspec = new XDocument();
            XElement metadata = CreateNuspecMetadata(store, packageUri);

            // Optional sections are only added when present in the store.
            XElement tags = CreateNuspecMetadataTags(store, packageUri);
            if (tags != null)
            {
                metadata.Add(tags);
            }
            XElement dependencies = CreateNuspecMetadataDependencies(store, packageUri);
            if (dependencies != null)
            {
                metadata.Add(dependencies);
            }
            //TODO: references, reference groups etc.
            XElement frameworkAssemblies = CreateNuspecMetadataFrameworkAssembly(store, packageUri);
            if (frameworkAssemblies != null)
            {
                metadata.Add(frameworkAssemblies);
            }

            nuspec.Add(new XElement(nuget.GetName("package"), metadata));
            nuspecs.Add(nuspec);
        }

        await SaveAllNuspecs(nuspecs, cancellationToken);
    }
    catch (Exception e)
    {
        // Log the full exception (type + stack trace); logging only e.Message
        // previously made failures here very hard to diagnose.
        Trace.TraceError(e.ToString());
    }
}
/// <summary>
/// Copies every triple of <paramref name="targetStore"/> into per-subject
/// graphs of <paramref name="store"/>, keyed off the meta graph.
/// </summary>
/// <param name="store">Destination store (extension target)</param>
/// <param name="targetStore">Store whose triples are expanded</param>
/// <param name="metaGraphUri">URI of the meta graph created in the destination</param>
private static void ExpandGraphs(this ITripleStore store, TripleStore targetStore, Uri metaGraphUri)
{
    // Ensure the meta graph exists in the destination before expanding.
    IGraph metaGraph = store.AddGraph(metaGraphUri);
    foreach (Triple triple in targetStore.Triples)
    {
        // Blank-node subjects are resolved back to their owning URI subject;
        // triples whose owner cannot be found are skipped (subject == null).
        IUriNode subject = (triple.Subject is IBlankNode
            ? targetStore.FindOwningSubject((IBlankNode)triple.Subject)
            : (IUriNode)triple.Subject);
        if (subject != null)
        {
            // Route the triple into the graph dedicated to its subject.
            IGraph graph = store.GetGraph(metaGraphUri, subject.Uri);
            graph.Assert(graph.Import(triple));
        }
    }
}
/// <summary>
/// Builds one resolver registration graph per distinct package id in the
/// batch's store and merges them into storage. Always disposes the store and
/// logs batch start/end events.
/// </summary>
protected override async Task ProcessStore(TripleStore store)
{
    ResolverCollectorEventSource.Log.ProcessingBatch(BatchCount);
    try
    {
        SparqlResultSet distinctIds = SparqlHelpers.Select(store, Utils.GetResource("sparql.SelectDistinctPackage.rq"));

        IDictionary<Uri, IGraph> resolverResources = new Dictionary<Uri, IGraph>();
        foreach (SparqlResult row in distinctIds)
        {
            string id = row["id"].ToString();

            // Parameterise the CONSTRUCT query for this package id.
            SparqlParameterizedString sparql = new SparqlParameterizedString();
            sparql.CommandText = Utils.GetResource("sparql.ConstructResolverGraph.rq");
            string baseAddress = _storage.BaseAddress.ToString();
            sparql.SetLiteral("id", id);
            sparql.SetLiteral("base", baseAddress);
            sparql.SetLiteral("extension", ".json");
            sparql.SetLiteral("galleryBase", GalleryBaseAddress);
            sparql.SetLiteral("contentBase", ContentBaseAddress);

            IGraph packageRegistration = SparqlHelpers.Construct(store, sparql.ToString());
            // An empty construct means the batch data is inconsistent; abort.
            if (packageRegistration.Triples.Count == 0)
            {
                throw new Exception("packageRegistration.Triples.Count == 0");
            }

            Uri registrationUri = new Uri(baseAddress + id.ToLowerInvariant() + ".json");
            resolverResources.Add(registrationUri, packageRegistration);
        }

        // Sanity check: every distinct id must have produced a resource.
        if (resolverResources.Count != distinctIds.Count)
        {
            throw new Exception("resolverResources.Count != distinctIds.Count");
        }

        await MergeAll(resolverResources);
    }
    finally
    {
        ResolverCollectorEventSource.Log.ProcessedBatch(BatchCount);
        store.Dispose();
    }
}
/// <summary>
/// Loads the graph into a triple store and promotes every existing catalog
/// item found in it to a registration entry.
/// </summary>
static IDictionary<RegistrationEntryKey, RegistrationCatalogEntry> GetResources(IGraph graph)
{
    var store = new TripleStore();
    store.Add(graph);

    var resources = new Dictionary<RegistrationEntryKey, RegistrationCatalogEntry>();
    foreach (Uri existingItem in ListExistingItems(store))
    {
        AddExistingItem(resources, store, existingItem);
    }
    return resources;
}
/// <summary>
/// Returns the URIs of all inline package catalog entries present in the
/// store.
/// </summary>
static IList<Uri> ListExistingItems(TripleStore store)
{
    string sparql = Utils.GetResource("sparql.SelectInlinePackage.rq");
    SparqlResultSet resultSet = SparqlHelpers.Select(store, sparql);

    IList<Uri> itemUris = new List<Uri>();
    foreach (SparqlResult result in resultSet)
    {
        itemUris.Add(((IUriNode)result["catalogPackage"]).Uri);
    }

    Trace.TraceInformation("RegistrationPersistence.ListExistingItems results = {0}", itemUris.Count);
    return itemUris;
}
/// <summary>
/// Builds a triple store containing one graph per entry of
/// <paramref name="graphs"/> (key = Turtle file name under Setup/Graphs,
/// value = graph base URI).
/// </summary>
private VDS.RDF.TripleStore CreateNewTripleStore(IDictionary<string, string> graphs)
{
    var store = new VDS.RDF.TripleStore();
    foreach (var graph in graphs)
    {
        var g = new VDS.RDF.Graph();
        g.BaseUri = new Uri(graph.Value);
        var ttlparser = new TurtleParser();
        // Path.Combine is safer than concatenating the base directory with a
        // hard-coded separator string.
        ttlparser.Load(g, System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Setup", "Graphs", graph.Key));
        store.Add(g);
    }
    // (Removed a stray empty statement ';' that followed the loop.)
    // TODO: Check if usesGraph is in graphGraph
    return store;
}
/// <summary>
/// Test fixture: loads the triple fixture Turtle file into an in-memory graph
/// and exposes a dynamic SPARQL wrapper with the common prefixes registered.
/// </summary>
public TripleFixture()
{
    var store = new TripleStore();
    string path = System.IO.Path.Combine(Directory.GetParent(Environment.CurrentDirectory).Parent.FullName, "TestTripleStores\\triple.Fixture.ttl");
    store.LoadFromFile(path);
    var graph = store.Graphs.First();
    var connector = new DynamicSPARQLSpace.dotNetRDF.Connector(new InMemoryDataset(graph));

    dyno = DynamicSPARQL.CreateDyno(connector.GetQueryingFunction(), autoquotation: true);

    // The "rdf:" prefix was previously registered twice; duplicate removed.
    dyno.Prefixes = new[]
    {
        SPARQL.Prefix(prefix: "rdf:", iri: "http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
        SPARQL.Prefix(prefix: "cat:", iri: "http://my.web/catalogues#"),
        SPARQL.Prefix(prefix: "cp:", iri: "http://my.web/catalogues/predicates/")
    };
}
/// <summary>
/// Test that on-demand loading does not kick in for pre-existing graphs: an
/// empty graph added up-front must not be replaced by a web-loaded one.
/// </summary>
public void GraphCollectionWebDemand2()
{
    TripleStore store = new TripleStore(new WebDemandGraphCollection());

    // Load the reference graph directly from the web.
    Uri u = new Uri("http://www.dotnetrdf.org/configuration#");
    Graph webGraph = new Graph();
    webGraph.LoadFromUri(u);
    webGraph.BaseUri = u;

    // Pre-register an empty graph under the same URI.
    Graph empty = new Graph();
    empty.BaseUri = webGraph.BaseUri;
    store.Add(empty);

    Assert.IsTrue(store.HasGraph(webGraph.BaseUri), "Graph Collection should contain the Graph");
    Assert.AreNotEqual(webGraph, store[webGraph.BaseUri], "Graphs should not be equal");
}
/// <summary>
/// Computes the diff between two graphs, renders it as a SPARQL Update
/// script, applies the script to the older graph in-memory and returns the
/// (now updated) older graph.
/// </summary>
private IGraph ExecuteDiff(IGraph older, IGraph newer, Uri graphUri = null)
{
    var diff = new GraphDiff().Difference(older, newer);
    var update = diff.AsUpdate(graphUri);
    var sparql = update.ToString();
    output.WriteLine(sparql);

    older = older ?? new Graph();
    var ts = new TripleStore();
    ts.Add(older);

    // Direct cast instead of the previous 'as' cast with no null check: a
    // cast failure should surface immediately as InvalidCastException rather
    // than a NullReferenceException on the Update call.
    var store = (IUpdateableStorage)new InMemoryManager(ts);
    store.Update(sparql);
    return older;
}
/// <summary>
/// Verifies that graph-level events (add, change, remove) bubble up to
/// handlers attached on the containing triple store.
/// </summary>
public void GraphEventBubbling()
{
    // (Removed a useless 'try { ... } catch (Exception ex) { throw; }' wrapper
    // that only rethrew and left 'ex' unused.)
    this._graphAdded = false;
    this._graphRemoved = false;
    this._graphChanged = false;

    //Create Store and Graph add attach handlers to Store
    TripleStore store = new TripleStore();
    Graph g = new Graph();
    store.GraphAdded += this.HandleGraphAdded;
    store.GraphRemoved += this.HandleGraphRemoved;
    store.GraphChanged += this.HandleGraphChanged;

    //Add the Graph to the Store which should fire the GraphAdded event
    store.Add(g);
    Assert.IsTrue(this._graphAdded, "GraphAdded event of the Triple Store should have fired");

    //Assert a Triple
    INode s = g.CreateBlankNode();
    INode p = g.CreateUriNode("rdf:type");
    INode o = g.CreateUriNode("rdfs:Class");
    Triple t = new Triple(s, p, o);
    g.Assert(t);
    Assert.IsTrue(this._graphChanged, "GraphChanged event of the Triple Store should have fired");

    //Retract the Triple
    this._graphChanged = false;
    g.Retract(t);
    Assert.IsTrue(this._graphChanged, "GraphChanged event of the Triple Store should have fired");

    //Remove the Graph from the Store which should fire the GraphRemoved event
    store.Remove(g.BaseUri);
    Assert.IsTrue(this._graphRemoved, "GraphRemoved event of the Triple Store should have fired");
}
/// <summary>
/// Builds one resolver registration graph per distinct package id in the
/// batch's store and merges them into storage. Always disposes the store.
/// </summary>
async Task ProcessStore(TripleStore store)
{
    try
    {
        SparqlResultSet distinctIds = SparqlHelpers.Select(store, Utils.GetResource("sparql.SelectDistinctPackage.rq"));

        IDictionary<Uri, IGraph> resolverResources = new Dictionary<Uri, IGraph>();
        foreach (SparqlResult row in distinctIds)
        {
            string id = row["id"].ToString();

            // Parameterise the CONSTRUCT query for this package id.
            SparqlParameterizedString sparql = new SparqlParameterizedString();
            sparql.CommandText = Utils.GetResource("sparql.ConstructResolverGraph.rq");
            string baseAddress = _storage.BaseAddress + _storage.Container + "/resolver/";
            sparql.SetLiteral("id", id);
            sparql.SetLiteral("base", baseAddress);
            sparql.SetLiteral("extension", ".json");

            IGraph packageRegistration = SparqlHelpers.Construct(store, sparql.ToString());
            Uri registrationUri = new Uri(baseAddress + id.ToLowerInvariant() + ".json");
            resolverResources.Add(registrationUri, packageRegistration);
        }

        // Sanity check: every distinct id must have produced a resource.
        if (resolverResources.Count != distinctIds.Count)
        {
            throw new Exception("resolverResources.Count != distinctIds.Count");
        }

        await MergeAll(resolverResources);
    }
    finally
    {
        store.Dispose();
    }
}
/// <summary>
/// Verifies that binding to an already-bound variable (BIND(?s AS ?p)) is
/// rejected: either the parser or the query engine must throw, and the query
/// must still be optimised to a lazy BGP when it parses.
/// </summary>
public void SparqlBindToExistingVariableLazy()
{
    String query = "PREFIX fn: <" + XPathFunctionFactory.XPathFunctionsNamespace + "> SELECT * WHERE { ?s ?p ?o . BIND(?s AS ?p) } LIMIT 1";
    TripleStore store = new TripleStore();
    Graph g = new Graph();
    FileLoader.Load(g, "InferenceTest.ttl");
    store.Add(g);
    SparqlQueryParser parser = new SparqlQueryParser();
    try
    {
        SparqlQuery q = parser.ParseFromString(query);
        Console.WriteLine(q.ToAlgebra().ToString());
        Assert.IsTrue(q.ToAlgebra().ToString().Contains("LazyBgp"), "Should have been optimised to use a Lazy BGP");
        Console.WriteLine();
        // Executing the query must fail; reaching the next line is an error.
        store.ExecuteQuery(q);
        Assert.Fail("Expected a RdfParseException/RdfQueryException to be thrown");
    }
    catch (RdfParseException parseEx)
    {
        // Expected outcome 1: the parser rejects the rebinding.
        Console.WriteLine("Parsing Error thrown as expected");
        TestTools.ReportError("Parser Error", parseEx);
    }
    catch (RdfQueryException queryEx)
    {
        // Expected outcome 2: the engine rejects it at evaluation time.
        Console.WriteLine("Query Error thrown as expected");
        TestTools.ReportError("Query Error", queryEx);
    }
    catch (Exception ex)
    {
        // Any other exception type (including the Assert.Fail above) fails
        // the test.
        TestTools.ReportError("Unexpected Error", ex);
        Assert.Fail("Did not get a RdfParseException/RdfQueryException as expected");
    }
}
/// <summary>
/// Creates a typed SPARQL query wrapper over the supplied RDF data, backed
/// either by a quad store (useStore) or a single in-memory graph.
/// </summary>
// NOTE(review): when useStore is true the data string is parsed directly
// (LoadFromString), but when false it is treated as a file path
// (LoadFromFile). The sibling GetDyno helper uses LoadFromString in both
// branches — confirm whether this asymmetry is intentional.
public static ISPARQLQueryable<T> GetQuerable<T>(string data, bool autoquotation = true, bool treatUri = true, IEnumerable<Prefix> prefixes = null, bool skipTriplesWithEmptyObject = false, bool mindAsterisk = false, bool useStore = false, string defaultGraphUri = "http://test.org/defaultgraph")
{
    DynamicSPARQLSpace.dotNetRDF.Connector connector = null;
    if (useStore)
    {
        var store = new VDS.RDF.TripleStore();
        store.LoadFromString(data);
        connector = new Connector(new InMemoryDataset(store, new Uri(defaultGraphUri)));
        // var store2 = new VDS.RDF.Query.SparqlRemoteEndpoint(new Uri("http://10.109.219.4:9999/blazegraph/"));
        //store2.SetCredentials("");
    }
    else
    {
        var graph = new VDS.RDF.Graph();
        graph.LoadFromFile(data);
        connector = new Connector(new InMemoryDataset(graph));
    }

    // Wire querying and updating functions into the dynamic wrapper.
    dynamic dyno = DynamicSPARQL.CreateDyno(connector.GetQueryingFunction(),
        updateFunc: connector.GetUpdateFunction(),
        autoquotation: autoquotation,
        treatUri: treatUri,
        prefixes: prefixes,
        skipTriplesWithEmptyObject: skipTriplesWithEmptyObject,
        mindAsterisk: mindAsterisk);

    return (new SPARQLQuery<T>(dyno));
}
/// <summary>
/// Repeatedly applies owl:inverseOf inference over graph + schema, merging
/// newly constructed triples into the graph until a fixed point is reached.
/// </summary>
public static void ApplyInference(IGraph graph, IGraph schema)
{
    string inverseOf = @"
PREFIX owl: <http://www.w3.org/2002/07/owl#>

CONSTRUCT
{
    ?y ?q ?x
}
WHERE
{
    ?p owl:inverseOf ?q .
    ?x ?p ?y .
}
";

    var parser = new SparqlQueryParser();
    var rules = new List<SparqlQuery> { parser.ParseFromString(inverseOf) };

    var store = new TripleStore();
    store.Add(graph, true);
    store.Add(schema, true);
    var queryProcessor = new LeviathanQueryProcessor(store);

    // Fixed-point loop: stop once a full pass adds no triples to the store.
    while (true)
    {
        int before = store.Triples.Count();
        foreach (var rule in rules)
        {
            IGraph inferred = (IGraph)queryProcessor.ProcessQuery(rule);
            graph.Merge(inferred);
        }
        if (store.Triples.Count() == before)
        {
            break;
        }
    }
}
/// <summary>
/// Verifies that a FILTER + LIMIT query over live DBPedia data is optimised
/// to a lazy BGP and yields exactly 5 results.
/// </summary>
// NOTE(review): this test performs a live HTTP request to dbpedia.org and
// will fail without network access or if the resource changes.
public void SparqlFilterLazyDBPedia()
{
    SparqlParameterizedString query = new SparqlParameterizedString();
    query.Namespaces.AddNamespace("rdfs", new Uri(NamespaceMapper.RDFS));
    query.CommandText = "SELECT * WHERE {?s ?p ?label . FILTER(ISLITERAL(?label) && LANGMATCHES(LANG(?label), \"en\")) } LIMIT 5";
    TripleStore store = new TripleStore();
    Graph g = new Graph();
    UriLoader.Load(g, new Uri("http://dbpedia.org/resource/Southampton"));
    store.Add(g);
    SparqlQueryParser parser = new SparqlQueryParser();
    SparqlQuery q = parser.ParseFromString(query);
    Console.WriteLine(q.ToAlgebra().ToString());
    Assert.IsTrue(q.ToAlgebra().ToString().Contains("LazyBgp"), "Should have been optimised to use a Lazy BGP");
    Console.WriteLine();
    LeviathanQueryProcessor processor = new LeviathanQueryProcessor(AsDataset(store));
    Object results = processor.ProcessQuery(q);
    if (results is SparqlResultSet)
    {
        SparqlResultSet rset = (SparqlResultSet)results;
        foreach (SparqlResult r in rset)
        {
            Console.WriteLine(r.ToString());
        }
        Assert.IsTrue(rset.Count == 5, "Expected exactly 5 results");
    }
    else
    {
        Assert.Fail("Expected a SPARQL Result Set");
    }
}
/// <summary>
/// Constructs the catalog entry graph for the given entry URI and promotes it
/// into the resources dictionary.
/// </summary>
static void AddExistingItem(IDictionary<RegistrationEntryKey, RegistrationCatalogEntry> resources, TripleStore store, Uri catalogEntry)
{
    Trace.TraceInformation("RegistrationPersistence.AddExistingItem: catalogEntry = {0}", catalogEntry);

    var sparql = new SparqlParameterizedString();
    sparql.CommandText = Utils.GetResource("sparql.ConstructCatalogEntryGraph.rq");
    sparql.SetUri("catalogEntry", catalogEntry);

    IGraph entryGraph = SparqlHelpers.Construct(store, sparql.ToString());
    resources.Add(RegistrationCatalogEntry.Promote(catalogEntry.AbsoluteUri, entryGraph));
}
/// <summary>
/// Fake repository for tests: builds an in-memory store from the given
/// file-name/base-URI map, then layers a dataset and a Leviathan query
/// processor on top of it.
/// </summary>
/// <param name="graphs">Map of Turtle file name to graph base URI</param>
public FakeTripleStoreRepository(Dictionary<string, string> graphs)
{
    _store = CreateNewTripleStore(graphs);
    _dataset = new InMemoryDataset(_store);
    _processor = new LeviathanQueryProcessor(_dataset);
}
/// <summary>
/// Entry point for the conversion run: parses command line options, reads each
/// input (file or -uri:), and either converts inputs individually as they are
/// read or accumulates them and outputs a merged graph/dataset at the end.
/// Inputs that fail graph parsing are retried as datasets.
/// </summary>
/// <param name="args">Raw command line arguments</param>
public void RunConvert(String[] args)
{
    //Set the Options
    if (!this.SetOptions(args))
    {
        //If SetOptions returns false then some options were invalid and errors have been output to the error stream
        return;
    }

    //Then we'll read in our inputs
    foreach (String input in this._inputs)
    {
        try
        {
            Graph g = new Graph();
            if (input.StartsWith("-uri:"))
            {
                UriLoader.Load(g, new Uri(input.Substring(input.IndexOf(':') + 1)));
            }
            else
            {
                FileLoader.Load(g, input);
            }

            //If not merging we'll output now
            if (!this._merge)
            {
                String destFile;
                if (input.StartsWith("-uri:"))
                {
                    if (this._inputs.Count == 1)
                    {
                        //For a single URI input we require a Filename
                        if (this._output.Equals(String.Empty))
                        {
                            Console.Error.WriteLine("rdfConvert: When converting a single URI you must specify an output file with the -out:filename argument");
                            return;
                        }
                        destFile = Path.GetFileNameWithoutExtension(this._output) + this._outExt;
                    }
                    else
                    {
                        //For multiple inputs where some are URIs the output file is the SHA256 hash of the URI plus the extension
                        destFile = new Uri(input.Substring(input.IndexOf(':') + 1)).GetSha256Hash() + this._outExt;
                    }
                }
                else
                {
                    if (this._inputs.Count == 1 && !this._output.Equals(String.Empty))
                    {
                        //For a single input we'll just change the extension as appropriate
                        if (!this._outExt.Equals(String.Empty))
                        {
                            destFile = Path.GetFileNameWithoutExtension(this._output) + this._outExt;
                        }
                        else
                        {
                            destFile = this._output;
                        }
                    }
                    else
                    {
                        destFile = Path.GetFileNameWithoutExtension(input) + this._outExt;
                    }
                }

                if (File.Exists(destFile) && !this._overwrite)
                {
                    Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' because a file already exists at this location and the -overwrite argument was not used");
                }
                else
                {
                    try
                    {
                        this._writer.Save(g, destFile);
                    }
                    catch (Exception ex)
                    {
                        Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' due to the following error:");
                        Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
                        if (this._debug) this.DebugErrors(ex);
                    }
                }
            }
            else
            {
                //Add to the Store and we'll merge it all together later and output it at the end
                this._store.Add(g);
            }
        }
        catch (RdfParserSelectionException parseEx)
        {
            //If this happens then this may be a datset instead of a graph
            try
            {
                if (input.StartsWith("-uri:"))
                {
                    UriLoader.Load(this._store, new Uri(input.Substring(input.IndexOf(':') + 1)));
                }
                else
                {
                    FileLoader.Load(this._store, input);
                }

                //If not merging we'll output now
                if (!this._merge)
                {
                    foreach (IGraph g in this._store.Graphs)
                    {
                        // Each graph goes to a file named by its base URI hash.
                        String destFile = (g.BaseUri == null) ? "default-graph" : g.BaseUri.GetSha256Hash();
                        destFile += this._outExt;
                        if (File.Exists(destFile))
                        {
                            Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' because a file already exists at this location and the -overwrite argument was not used");
                        }
                        else
                        {
                            try
                            {
                                this._writer.Save(g, destFile);
                            }
                            catch (Exception ex)
                            {
                                Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' due to the following error:");
                                Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
                                if (this._debug) this.DebugErrors(ex);
                            }
                        }
                    }

                    //Reset the Triple Store after outputting
                    this._store.Dispose();
                    this._store = new TripleStore();
                }
            }
            catch (Exception ex)
            {
                Console.Error.WriteLine("rdfConvert: Unable to read from input '" + input + "' due to the following error:");
                Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
                if (this._debug) this.DebugErrors(ex);
            }
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine("rdfConvert: Unable to read from input '" + input + "' due to the following error:");
            Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
            if (this._debug) this.DebugErrors(ex);
        }
    }

    //Then we'll apply merging if applicable
    //If merge was false then we've already done the outputting as we had no need to keep
    //stuff in memory
    if (this._merge)
    {
        if (this._storeWriter != null && (this._writer == null || this._dataset))
        {
            //We only have a StoreWriter so we output a Dataset rather than merging
            if (!this._output.Equals(String.Empty))
            {
                if (File.Exists(this._output) && !this._overwrite)
                {
                    Console.Error.WriteLine("rdfConvert: Unable to output to '" + this._output + "' because a file already exists at this location and the -overwrite argument was not used");
                }
                else
                {
                    try
                    {
                        this._storeWriter.Save(this._store, new VDS.RDF.Storage.Params.StreamParams(this._output));
                    }
                    catch (Exception ex)
                    {
                        Console.Error.WriteLine("rdfConvert: Unable to output to '" + this._output + "' due to the following error:");
                        Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
                        if (this._debug) this.DebugErrors(ex);
                    }
                }
            }
            else
            {
                // No explicit output: derive a name from the single file input
                // or fall back to "dataset".
                String destFile = (this._inputs.Count == 1 && !this._inputs[0].StartsWith("-uri:")) ? Path.GetFileNameWithoutExtension(this._inputs[0]) + this._outExt : "dataset" + this._outExt;
                if (File.Exists(destFile) && !this._overwrite)
                {
                    Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' because a file already exists at this location and the -overwrite argument was not used");
                }
                else
                {
                    try
                    {
                        this._storeWriter.Save(this._store, new VDS.RDF.Storage.Params.StreamParams(destFile));
                    }
                    catch (Exception ex)
                    {
                        Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' due to the following error:");
                        Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
                        if (this._debug) this.DebugErrors(ex);
                    }
                }
            }
        }
        else
        {
            //Merge all the Graphs together and produce a single Graph
            Graph mergedGraph = new Graph();
            foreach (IGraph g in this._store.Graphs)
            {
                mergedGraph.Merge(g);
            }

            //Work out the output file and output the Graph
            String destFile;
            if (!this._output.Equals(String.Empty))
            {
                destFile = this._output;
            }
            else
            {
                destFile = "merged-graph" + this._outExt;
            }
            if (File.Exists(destFile) && !this._overwrite)
            {
                Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' because a file already exists at this location and the -overwrite argument was not used");
            }
            else
            {
                try
                {
                    this._writer.Save(mergedGraph, destFile);
                }
                catch (Exception ex)
                {
                    Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' due to the following error:");
                    Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
                    if (this._debug) this.DebugErrors(ex);
                }
            }
        }
    }
}
/// <summary>
/// TriX round-trip test suite: for each XML file under trix_tests, parses it
/// into a store, serialises it back out, re-parses the output and compares
/// graph and triple counts. All console output is redirected to
/// TriXTestSuite.txt.
/// </summary>
public static void Main(String[] args)
{
    StreamWriter output = new StreamWriter("TriXTestSuite.txt");
    Console.SetOut(output);
    Console.WriteLine("## TriX Test Suite");
    Console.WriteLine();
    try
    {
        foreach (String file in Directory.GetFiles("trix_tests"))
        {
            if (Path.GetExtension(file) == ".xml")
            {
                Console.WriteLine("## Testing File " + Path.GetFileName(file));
                try
                {
                    //Parse in
                    TriXParser parser = new TriXParser();
                    TripleStore store = new TripleStore();
                    parser.Load(store, new StreamParams(file));
                    Console.WriteLine("# Parsed OK");
                    Console.WriteLine();
                    foreach (Triple t in store.Triples)
                    {
                        Console.WriteLine(t.ToString() + " from Graph <" + t.GraphUri.ToString() + ">");
                    }
                    Console.WriteLine();

                    //Serialize out
                    Console.WriteLine("# Attempting reserialization");
                    TriXWriter writer = new TriXWriter();
                    writer.Save(store, new StreamParams(file + ".out"));
                    Console.WriteLine("# Serialized OK");
                    Console.WriteLine();

                    //Now Parse back in
                    TripleStore store2 = new TripleStore();
                    parser.Load(store2, new StreamParams(file + ".out"));
                    Console.WriteLine("# Parsed back in again");

                    // Round-trip comparison: graph count then triple count.
                    if (store.Graphs.Count == store2.Graphs.Count)
                    {
                        Console.WriteLine("Correct number of Graphs");
                    }
                    else
                    {
                        Console.WriteLine("Incorrect number of Graphs - Expected " + store.Graphs.Count + " - Actual " + store2.Graphs.Count);
                    }
                    if (store.Triples.Count() == store2.Triples.Count())
                    {
                        Console.WriteLine("Correct number of Triples");
                    }
                    else
                    {
                        Console.WriteLine("Incorrect number of Triples - Expected " + store.Triples.Count() + " - Actual " + store2.Triples.Count());
                    }
                }
                catch (RdfParseException parseEx)
                {
                    HandleError("Parser Error", parseEx);
                }
                catch (RdfException rdfEx)
                {
                    HandleError("RDF Error", rdfEx);
                }
                catch (Exception ex)
                {
                    HandleError("Other Error", ex);
                }
                finally
                {
                    Console.WriteLine();
                }
            }
        }
    }
    catch (RdfParseException parseEx)
    {
        HandleError("Parser Error", parseEx);
    }
    catch (Exception ex)
    {
        HandleError("Other Error", ex);
    }
    finally
    {
        output.Close();
    }
}
/// <summary>
/// Runs a SELECT query against the store and returns the result set.
/// </summary>
public static SparqlResultSet Select(TripleStore store, string sparql)
{
    object result = Execute(store, sparql);
    return (SparqlResultSet)result;
}
/// <summary>
/// Imports an RDF dataset file into the SQL-backed store, reporting success
/// or failure via a cross-thread message box.
/// </summary>
// NOTE(review): the data is loaded from this.txtImportFile.Text while the
// 'filename' parameter is only used to pick the parser by extension —
// confirm this asymmetry is intentional.
private void ImportStore(String filename)
{
    try
    {
        // Choose a dataset parser from the file extension's MIME type.
        IStoreReader storeParser = MimeTypesHelper.GetStoreParser(MimeTypesHelper.GetMimeType(Path.GetExtension(filename)));
        TripleStore store = new TripleStore();
        storeParser.Load(store, new StreamParams(this.txtImportFile.Text));

        SqlStoreWriter writer = new SqlStoreWriter();
        writer.Save(store, new SqlIOParams(this._connection.Manager, true));

        this.CrossThreadMessage("Successfully imported " + store.Graphs.Count + " Graphs into the Store", "Import Completed", MessageBoxIcon.Information);
    }
    catch (RdfStorageException storeEx)
    {
        this.CrossThreadMessage("An error occurred during import:\n" + storeEx.Message, "Import Failed", MessageBoxIcon.Error);
    }
}
/// <summary>
/// Runs a single SPARQL evaluation test: parses the query, loads default and
/// named graphs, executes the query against an in-memory dataset, and
/// compares the outcome with the expected result file.
/// </summary>
/// <returns>1 if the test passed, -1 if it failed, 0 if indeterminate.</returns>
private int ProcessEvaluationTest(SparqlQueryParser parser, Triple commentDef, String queryFile, String dataFile, List<String> dataFiles, String resultFile)
{
    Console.WriteLine("# Processing Evaluation Test " + Path.GetFileName(queryFile));
    if (commentDef != null)
    {
        Console.WriteLine(commentDef.Object.ToString());
        Console.WriteLine();
    }
    // The default graph must not also be loaded as a named graph.
    if (dataFiles.Contains(dataFile)) dataFiles.Remove(dataFile);

    // Strip file:/// URI prefixes so plain filesystem paths remain.
    if (queryFile.StartsWith("file:///")) queryFile = queryFile.Substring(8);
    if (dataFile != null && dataFile.StartsWith("file:///")) dataFile = dataFile.Substring(8);
    if (resultFile.StartsWith("file:///")) resultFile = resultFile.Substring(8);

    Console.WriteLine("Query File is " + queryFile);
    // Tests in the override list are counted as passed without evaluation.
    if (evaluationTestOverride.Any(x => queryFile.EndsWith(x)))
    {
        Console.WriteLine();
        Console.WriteLine("# Test Result = Manually overridden to Pass (Test Passed)");
        testsPassed++;
        testsEvaluationPassed++;
        return 1;
    }
    if (dataFile != null) Console.WriteLine("Default Graph File is " + dataFile);
    foreach (String file in dataFiles)
    {
        Console.WriteLine("Uses Named Graph File " + file);
    }
    Console.WriteLine("Expected Result File is " + resultFile);
    Console.WriteLine();

    // Parse the query, echoing raw, formatted and algebra forms to the log.
    SparqlQuery query;
    try
    {
        query = parser.ParseFromFile(queryFile);
        Console.WriteLine(query.ToString());
        Console.WriteLine();
        Console.WriteLine("Formatted with SparqlFormatter");
        SparqlFormatter formatter = new SparqlFormatter(query.NamespaceMap);
        Console.WriteLine(formatter.Format(query));
        Console.WriteLine();
        try
        {
            Console.WriteLine(query.ToAlgebra().ToString());
            Console.WriteLine();
        }
        catch
        {
            //Do Nothing
        }
    }
    catch (RdfParseException parseEx)
    {
        this.ReportError("Query Parser Error", parseEx);
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result = Unable to parse query (Test Failed)");
        return -1;
    }

    // With no explicit data file, graphs are fetched on demand from the web.
    IInMemoryQueryableStore store;
    if (dataFile != null)
    {
        store = new TripleStore();
    }
    else
    {
        store = new WebDemandTripleStore();
    }

    //Load Default Graph
    Graph defaultGraph = new Graph();
    try
    {
        if (dataFile != null)
        {
            FileLoader.Load(defaultGraph, dataFile);
        }
        store.Add(defaultGraph);
    }
    catch (RdfParseException parseEx)
    {
        this.ReportError("Parser Error", parseEx);
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result = Unable to parse Default Graph (Test Failed)");
        return -1;
    }

    //Load Named Graphs
    try
    {
        foreach (String graphFile in dataFiles)
        {
            Graph namedGraph = new Graph();
            if (graphFile.StartsWith("file:///"))
            {
                FileLoader.Load(namedGraph, graphFile.Substring(8));
            }
            else
            {
                FileLoader.Load(namedGraph, graphFile);
            }
            store.Add(namedGraph);
        }
    }
    catch (RdfParseException parseEx)
    {
        this.ReportError("Parser Error", parseEx);
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result - Unable to parse Named Graph (Test Failed)");
        return -1;
    }

    //Create a Dataset and then Set Graphs
    // Only fill in default/named graphs when the query itself did not
    // specify FROM / FROM NAMED clauses.
    InMemoryDataset dataset = new InMemoryDataset(store);
    if (!query.DefaultGraphs.Any())
    {
        query.AddDefaultGraph(defaultGraph.BaseUri);
        //dataset.SetActiveGraph(defaultGraph.BaseUri);
    }
    if (!query.NamedGraphs.Any())
    {
        foreach (String namedGraphUri in dataFiles)
        {
            query.AddNamedGraph(new Uri(namedGraphUri));
        }
    }

    //Try and get the result
    Object results = null;
    try
    {
        results = query.Evaluate(dataset);
    }
    catch (RdfQueryException queryEx)
    {
        this.ReportError("Query Error", queryEx);
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result - Query execution failed (Test Failed)");
        return -1;
    }
    catch (Exception ex)
    {
        this.ReportError("Other Error", ex);
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result - Query failed (Test Failed)");
        return -1;
    }

    if (results == null)
    {
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result - No result was returned from the Query (Test Failed)");
        return -1;
    }

    //Load in the expected results
    if (results is SparqlResultSet)
    {
        //Save our Results so we can manually compare as needed
        SparqlResultSet ourResults = (SparqlResultSet)results;
        SparqlXmlWriter writer = new SparqlXmlWriter();
        writer.Save(ourResults, resultFile + ".out");

        // Expected result sets may be SPARQL XML (.srx) or RDF serialisations.
        SparqlResultSet expectedResults = new SparqlResultSet();
        if (resultFile.EndsWith(".srx"))
        {
            try
            {
                SparqlXmlParser resultSetParser = new SparqlXmlParser();
                resultSetParser.Load(expectedResults, resultFile);
            }
            catch (RdfParseException parseEx)
            {
                this.ReportError("Result Set Parser Error", parseEx);
                testsIndeterminate++;
                testsEvaluationIndeterminate++;
                Console.WriteLine("# Test Result - Error loading expected Result Set (Test Indeterminate)");
                return 0;
            }
        }
        else if (resultFile.EndsWith(".ttl") || resultFile.EndsWith(".rdf"))
        {
            try
            {
                SparqlRdfParser resultSetParser = new SparqlRdfParser();
                resultSetParser.Load(expectedResults, resultFile);
            }
            catch (RdfParseException parseEx)
            {
                this.ReportError("Result Set Parser Error", parseEx);
                testsIndeterminate++;
                testsEvaluationIndeterminate++;
                Console.WriteLine("# Test Result - Error loading expected Result Set (Test Indeterminate)");
                return 0;
            }
        }
        else
        {
            testsIndeterminate++;
            testsEvaluationIndeterminate++;
            Console.WriteLine("# Test Result - Unable to load the expected Result Set (Test Indeterminate)");
            return 0;
        }

        try
        {
            // Trim both sets before comparing so equality is not affected
            // by unbound columns.
            ourResults.Trim();
            expectedResults.Trim();
            if (ourResults.Equals(expectedResults))
            {
                testsPassed++;
                testsEvaluationPassed++;
                Console.WriteLine("# Test Result - Result Set as expected (Test Passed)");
                return 1;
            }
            else
            {
                Console.WriteLine("Final Query");
                Console.WriteLine(query.ToString());
                Console.WriteLine();
                this.ShowTestData(store);
                this.ShowResultSets(ourResults, expectedResults);
                testsFailed++;
                testsEvaluationFailed++;
                Console.WriteLine("# Test Result - Result Set not as expected (Test Failed)");
                return -1;
            }
        }
        catch (NotImplementedException)
        {
            this.ShowResultSets(ourResults, expectedResults);
            testsIndeterminate++;
            testsEvaluationIndeterminate++;
            Console.WriteLine("# Test Result - Unable to establish if Result Set was as expected (Test Indeterminate)");
            return 0;
        }
    }
    else if (results is Graph)
    {
        // CONSTRUCT/DESCRIBE queries produce a Graph to compare instead.
        if (resultFile.EndsWith(".ttl"))
        {
            //Save our Results so we can manually compare as needed
            Graph ourResults = (Graph)results;
            CompressingTurtleWriter writer = new CompressingTurtleWriter();
            writer.Save(ourResults, resultFile + ".out");
            try
            {
                Graph expectedResults = new Graph();
                TurtleParser ttlparser = new TurtleParser();
                ttlparser.Load(expectedResults, resultFile);
                try
                {
                    if (ourResults.Equals(expectedResults))
                    {
                        testsPassed++;
                        testsEvaluationPassed++;
                        Console.WriteLine("# Test Result - Graph as expected (Test Passed)");
                        return 1;
                    }
                    else
                    {
                        this.ShowTestData(store);
                        this.ShowGraphs(ourResults, expectedResults);
                        testsFailed++;
                        testsEvaluationFailed++;
                        Console.WriteLine("# Test Result - Graph not as expected (Test Failed)");
                        return -1;
                    }
                }
                catch (NotImplementedException)
                {
                    this.ShowGraphs(ourResults, expectedResults);
                    testsIndeterminate++;
                    testsEvaluationIndeterminate++;
                    Console.WriteLine("# Test Result - Unable to establish if Graph was as expected (Test Indeterminate)");
                    return 0;
                }
            }
            catch (RdfParseException parseEx)
            {
                this.ReportError("Graph Parser Error", parseEx);
                testsIndeterminate++;
                testsEvaluationIndeterminate++;
                Console.WriteLine("# Test Result - Error loading expected Graph (Test Indeterminate)");
                return 0;
            }
        }
        else
        {
            testsIndeterminate++;
            testsEvaluationIndeterminate++;
            Console.WriteLine("# Test Result - Unable to load expected Graph (Test Indeterminate)");
            return 0;
        }
    }
    else
    {
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result - Didn't produce a Graph as expected (Test Failed)");
        return -1;
    }
}
/// <summary>
/// Executes a SPARQL query that yields a graph (CONSTRUCT/DESCRIBE) against the given store.
/// </summary>
/// <param name="store">Store to query.</param>
/// <param name="sparql">SPARQL query text.</param>
/// <returns>The resulting graph.</returns>
public static IGraph Construct(TripleStore store, string sparql)
{
    object result = Execute(store, sparql);
    return (IGraph)result;
}
/// <summary>
/// Creates a fake repository backed by an in-memory triple store built from the supplied graphs.
/// </summary>
/// <param name="graphs">Map of graph names to their serialized content.</param>
public FakeTripleStoreRepository(Dictionary<string, string> graphs)
{
    var store = CreateNewTripleStore(graphs);
    _store = store;
    _dataset = new InMemoryDataset(store);
}
/// <summary>
/// Scans the catalog graph for inline package entries and records those not
/// already present in the known versions set into the items dictionary.
/// </summary>
static void AddExistingItems(IGraph graph, IDictionary<string, IGraph> items)
{
    // Load the graph into a store so it can be queried with SPARQL.
    TripleStore tripleStore = new TripleStore();
    tripleStore.Add(graph, true);

    string selectSparql = Utils.GetResource("sparql.SelectInlinePackage.rq");
    SparqlResultSet resultSet = SparqlHelpers.Select(tripleStore, selectSparql);

    foreach (SparqlResult result in resultSet)
    {
        string catalogPackageUri = ((IUriNode)result["catalogPackage"]).Uri.AbsoluteUri;
        // The trailing path segment of the package URI is its JSON file name.
        string jsonFileName = catalogPackageUri.Substring(catalogPackageUri.LastIndexOf("/") + 1);

        //If items already has that version, then skip it
        //Add only the new ones
        if (!existingVersionsWithID.Contains(jsonFileName))
        {
            items[catalogPackageUri] = graph;
        }
    }
}
/// <summary>
/// Verifies that an ORDER BY + LIMIT query is optimised to a Lazy BGP, then
/// times query execution with and without algebra optimisation, checking
/// that both runs return exactly 5 results.
/// </summary>
public void SparqlOrderByComplexLazyPerformance()
{
    String query = "SELECT * WHERE { ?s ?p ?o . } ORDER BY ?s DESC(?p) LIMIT 5";
    TripleStore store = new TripleStore();
    Graph g = new Graph();
    FileLoader.Load(g, "dataset_50.ttl.gz");
    store.Add(g);
    SparqlQueryParser parser = new SparqlQueryParser();

    //First do with Optimisation
    Stopwatch timer = new Stopwatch();
    SparqlQuery q = parser.ParseFromString(query);
    Console.WriteLine(q.ToAlgebra().ToString());
    Assert.IsTrue(q.ToAlgebra().ToString().Contains("LazyBgp"), "Should have been optimised to use a Lazy BGP");
    Console.WriteLine();

    timer.Start();
    LeviathanQueryProcessor processor = new LeviathanQueryProcessor(AsDataset(store));
    Object results = processor.ProcessQuery(q);
    timer.Stop();
    Console.WriteLine("Took " + timer.Elapsed + " to execute when Optimised");
    timer.Reset();
    if (results is SparqlResultSet)
    {
        SparqlResultSet rset = (SparqlResultSet)results;
        foreach (SparqlResult r in rset)
        {
            Console.WriteLine(r.ToString());
        }
        Assert.IsTrue(rset.Count == 5, "Expected exactly 5 results");
    }
    else
    {
        Assert.Fail("Expected a SPARQL Result Set");
    }

    //Then do without optimisation
    // BUGFIX: Options.AlgebraOptimisation is a global setting; restore it in
    // a finally block so a failing assertion or exception during the
    // unoptimised run cannot leak the disabled flag into other tests.
    try
    {
        Options.AlgebraOptimisation = false;
        timer.Start();
        results = processor.ProcessQuery(q);
        timer.Stop();
        Console.WriteLine("Took " + timer.Elapsed + " to execute when Unoptimised");
        if (results is SparqlResultSet)
        {
            SparqlResultSet rset = (SparqlResultSet)results;
            foreach (SparqlResult r in rset)
            {
                Console.WriteLine(r.ToString());
            }
            Assert.IsTrue(rset.Count == 5, "Expected exactly 5 results");
        }
        else
        {
            Assert.Fail("Expected a SPARQL Result Set");
        }
    }
    finally
    {
        Options.AlgebraOptimisation = true;
    }
}