public StorageContent CreateContent(CatalogContext context)
{
    // Build an RDF graph describing this container and each of its items,
    // then serialize it as framed JSON-LD storage content.
    IGraph graph = new Graph();

    INode typePredicate = graph.CreateUriNode(Schema.Predicates.Type);
    INode timeStampPredicate = graph.CreateUriNode(Schema.Predicates.CatalogTimestamp);
    INode commitIdPredicate = graph.CreateUriNode(Schema.Predicates.CatalogCommitId);

    INode containerNode = graph.CreateUriNode(_resourceUri);

    // Container-level metadata: type, timestamp (round-trip "O" format) and commit id.
    graph.Assert(containerNode, typePredicate, graph.CreateUriNode(GetContainerType()));
    graph.Assert(containerNode, timeStampPredicate, graph.CreateLiteralNode(_timeStamp.ToString("O"), Schema.DataTypes.DateTime));
    graph.Assert(containerNode, commitIdPredicate, graph.CreateLiteralNode(_commitId.ToString()));

    if (_parent != null)
    {
        graph.Assert(containerNode, graph.CreateUriNode(Schema.Predicates.Parent), graph.CreateUriNode(_parent));
    }

    // Let derived classes contribute additional statements about the container.
    AddCustomContent(containerNode, graph);

    INode itemPredicate = graph.CreateUriNode(Schema.Predicates.CatalogItem);
    INode countPredicate = graph.CreateUriNode(Schema.Predicates.CatalogCount);

    foreach (KeyValuePair<Uri, CatalogContainerItem> entry in _items)
    {
        CatalogContainerItem details = entry.Value;
        INode itemNode = graph.CreateUriNode(entry.Key);

        graph.Assert(containerNode, itemPredicate, itemNode);
        graph.Assert(itemNode, typePredicate, graph.CreateUriNode(details.Type));

        // Fold any pre-built page content for this item into the container graph.
        if (details.PageContent != null)
        {
            graph.Merge(details.PageContent);
        }

        graph.Assert(itemNode, timeStampPredicate, graph.CreateLiteralNode(details.TimeStamp.ToString("O"), Schema.DataTypes.DateTime));
        graph.Assert(itemNode, commitIdPredicate, graph.CreateLiteralNode(details.CommitId.ToString()));

        if (details.Count != null)
        {
            graph.Assert(itemNode, countPredicate, graph.CreateLiteralNode(details.Count.ToString(), Schema.DataTypes.Integer));
        }
    }

    JObject frame = context.GetJsonLdContext("context.Container.json", GetContainerType());

    // The below code could be used to compact data storage by using relative URIs.
    //frame = (JObject)frame.DeepClone();
    //frame["@context"]["@base"] = _resourceUri.ToString();

    return new StringStorageContent(Utils.CreateJson(graph, frame), "application/json", "no-store");
}
/// <summary>
/// Builds an RDF graph for this container and its items and returns it as framed JSON-LD text.
/// </summary>
/// <param name="context">Catalog context supplying the JSON-LD frame.</param>
/// <returns>The container content serialized as a JSON string.</returns>
public string CreateContent(CatalogContext context)
{
    IGraph graph = new Graph();
    graph.NamespaceMap.AddNamespace("rdf", new Uri("http://www.w3.org/1999/02/22-rdf-syntax-ns#"));
    graph.NamespaceMap.AddNamespace("catalog", new Uri("http://nuget.org/catalog#"));

    INode rdfTypePredicate = graph.CreateUriNode("rdf:type");
    INode timeStampPredicate = graph.CreateUriNode("catalog:timeStamp");

    Uri dateTimeDatatype = new Uri("http://www.w3.org/2001/XMLSchema#dateTime");
    // Hoisted out of the item loop below: the datatype URI is loop-invariant.
    Uri integerDatatype = new Uri("http://www.w3.org/2001/XMLSchema#integer");

    INode container = graph.CreateUriNode(_resourceUri);

    graph.Assert(container, rdfTypePredicate, graph.CreateUriNode(GetContainerType()));
    // FIX: use the round-trip ("O") format. The parameterless DateTime.ToString() is
    // culture-sensitive and does not produce a valid xsd:dateTime lexical form.
    graph.Assert(container, timeStampPredicate, graph.CreateLiteralNode(_timeStamp.ToString("O"), dateTimeDatatype));

    if (_parent != null)
    {
        graph.Assert(container, graph.CreateUriNode("catalog:parent"), graph.CreateUriNode(_parent));
    }

    // Let derived classes contribute additional statements about the container.
    AddCustomContent(container, graph);

    INode itemPredicate = graph.CreateUriNode("catalog:item");
    INode countPredicate = graph.CreateUriNode("catalog:count");

    // Item tuple layout (from GetItems): Item1 = type URI, Item2 = optional page graph,
    // Item3 = timestamp, Item4 = optional item count — TODO confirm against GetItems.
    foreach (KeyValuePair<Uri, Tuple<Uri, IGraph, DateTime, int?>> item in GetItems())
    {
        INode itemNode = graph.CreateUriNode(item.Key);

        graph.Assert(container, itemPredicate, itemNode);
        graph.Assert(itemNode, rdfTypePredicate, graph.CreateUriNode(item.Value.Item1));

        if (item.Value.Item2 != null)
        {
            graph.Merge(item.Value.Item2);
        }

        // FIX: round-trip format here as well (see container timestamp above).
        graph.Assert(itemNode, timeStampPredicate, graph.CreateLiteralNode(item.Value.Item3.ToString("O"), dateTimeDatatype));

        if (item.Value.Item4 != null)
        {
            graph.Assert(itemNode, countPredicate, graph.CreateLiteralNode(item.Value.Item4.ToString(), integerDatatype));
        }
    }

    JObject frame = context.GetJsonLdContext("context.Container.json", GetContainerType());

    string content = Utils.CreateJson(graph, frame);
    return content;
}
// Verifies that merging two Turtle test graphs yields 8 triples, both when merging
// h into a loaded g and when merging both into an initially empty graph.
// FIX: removed the original "try { ... } catch (Exception ex) { throw; }" wrapper —
// a bare rethrow adds nothing and left an unused exception variable (compiler warning).
public void GraphMerging()
{
    //Load the Test RDF
    TurtleParser ttlparser = new TurtleParser();
    Graph g = new Graph();
    Graph h = new Graph();
    Assert.NotNull(g);
    Assert.NotNull(h);
    ttlparser.Load(g, "resources\\MergePart1.ttl");
    ttlparser.Load(h, "resources\\MergePart2.ttl");

    Console.WriteLine("Merge Test Data Loaded OK");
    Console.WriteLine();

    Console.WriteLine("Graph 1 Contains");
    foreach (Triple t in g.Triples)
    {
        Console.WriteLine(t.ToString());
    }
    Console.WriteLine();

    Console.WriteLine("Graph 2 Contains");
    foreach (Triple t in h.Triples)
    {
        Console.WriteLine(t.ToString());
    }
    Console.WriteLine();

    Console.WriteLine("Attempting Graph Merge");
    g.Merge(h);
    Console.WriteLine();
    foreach (Triple t in g.Triples)
    {
        Console.WriteLine(t.ToString());
    }
    Assert.Equal(8, g.Triples.Count);

    //Same merge into an Empty Graph
    Console.WriteLine();
    Console.WriteLine("Combining the two Graphs with two Merge operations into an Empty Graph");
    Graph i = new Graph();

    //Need to reload g from disk (the merge above mutated it)
    g = new Graph();
    ttlparser.Load(g, "resources\\MergePart1.ttl");

    //Do the actual merge
    i.Merge(g);
    i.Merge(h);
    Console.WriteLine();
    foreach (Triple t in i.Triples)
    {
        Console.WriteLine(t.ToString());
    }
    Assert.Equal(8, i.Triples.Count);
}
/// <summary>
/// Executes a SPARQL query against the merged contents of all files in FilesToQuery.
/// </summary>
/// <param name="sparqlCommandText">The SPARQL query text.</param>
/// <returns>The query result rendered as text (also stored in QueryResult).</returns>
/// <exception cref="Exception">Thrown when one of the input files cannot be parsed.</exception>
public string ExecuteQuery(string sparqlCommandText)
{
    using (Graph baseGraph = new Graph())
    {
        RdfXmlParser fileParser = new RdfXmlParser();

        // Merge every queryable file into one base graph, skipping blank entries.
        foreach (string fileName in FilesToQuery)
        {
            if (String.IsNullOrWhiteSpace(fileName)) continue;
            using (Graph g = new Graph())
            {
                try
                {
                    fileParser.Load(g, fileName);
                    baseGraph.Merge(g);
                }
                catch (Exception ex)
                {
                    // Surface which file failed; message text is user-facing (Russian UI).
                    throw new Exception(String.Format("Ошибка при обработке файла {0}\r\n{1}", fileName, ex.Message), ex);
                }
            }
        }

        var resultSet = baseGraph.ExecuteQuery(sparqlCommandText);

        // FIX: the original tested the type twice ("is X" followed by "as X");
        // a single "as" cast with a null check does the same work once.
        SparqlResultSet outputSet = resultSet as SparqlResultSet;
        Graph resultGraph = resultSet as Graph;
        if (outputSet != null)
        {
            // SELECT/ASK style result: render each row as text.
            if (outputSet.IsEmpty)
                QueryResult = "Пустой результат";
            else
            {
                StringBuilder outputString = new StringBuilder();
                foreach (SparqlResult result in outputSet.Results)
                    outputString.AppendLine(result.ToString());
                QueryResult = outputString.ToString();
            }
        }
        else if (resultGraph != null)
        {
            // CONSTRUCT/DESCRIBE style result: serialize the graph as RDF/XML.
            if (resultGraph.IsEmpty)
                QueryResult = "Пустой граф";
            else
                QueryResult = VDS.RDF.Writing.StringWriter.Write(resultGraph, new RdfXmlWriter());
        }
        else
        {
            QueryResult = string.Format("Неизвестный результат: {0}", resultSet.GetType());
        }

        return QueryResult;
    }
}
// Runs the merge test suite, redirecting console output to MergeTest.txt and
// producing an SVG visualization of the merged graph via GraphViz.
// FIX: the StreamWriter is now wrapped in a using block so it is flushed and
// closed even if an unexpected error escapes; the manual Close() is gone.
public static void Main(string[] args)
{
    using (StreamWriter output = new StreamWriter("MergeTest.txt"))
    {
        try
        {
            //Set Output
            Console.SetOut(output);
            Console.WriteLine("## Merge Test Suite");

            //Load the Test RDF
            TurtleParser ttlparser = new TurtleParser();
            Graph g = new Graph();
            Graph h = new Graph();
            ttlparser.Load(g, "MergePart1.ttl");
            ttlparser.Load(h, "MergePart2.ttl");

            Console.WriteLine("Merge Test Data Loaded OK");
            Console.WriteLine();

            Console.WriteLine("Graph 1 Contains");
            foreach (Triple t in g.Triples)
            {
                Console.WriteLine(t.ToString());
            }
            Console.WriteLine();

            Console.WriteLine("Graph 2 Contains");
            foreach (Triple t in h.Triples)
            {
                Console.WriteLine(t.ToString());
            }
            Console.WriteLine();

            Console.WriteLine("Attempting Graph Merge");
            g.Merge(h);
            Console.WriteLine();
            foreach (Triple t in g.Triples)
            {
                Console.WriteLine(t.ToString());
            }

            //Use a GraphViz Generator to picture this
            Console.WriteLine();
            Console.WriteLine("Visualizing Merged Graph as SVG with GraphViz");
            GraphVizGenerator gvzgen = new GraphVizGenerator("svg");
            gvzgen.Generate(g, "MergeTest.svg", false);
            Console.WriteLine("Visualisation created as MergeTest.svg");

            //Same merge into an Empty Graph
            Console.WriteLine();
            Console.WriteLine("Combining the two Graphs with two Merge operations into an Empty Graph");
            Graph i = new Graph();

            //Need to reload g from disk (the merge above mutated it)
            g = new Graph();
            ttlparser.Load(g, "MergePart1.ttl");

            //Do the actual merge
            i.Merge(g);
            i.Merge(h);
            Console.WriteLine();
            foreach (Triple t in i.Triples)
            {
                Console.WriteLine(t.ToString());
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
            Console.WriteLine(ex.StackTrace);
        }
    }
}
// Assembles an RDF graph describing an index resource and its entries
// (commit metadata, per-entry summaries, optional parent/extra/additional types)
// and persists it via SaveGraph.
protected async Task SaveIndexResource(Uri resourceUri, Uri typeUri, Guid commitId, DateTime commitTimeStamp, IDictionary<string, CatalogItemSummary> entries, Uri parent, IGraph extra, Uri[] additionalResourceTypes, CancellationToken cancellationToken)
{
    IGraph graph = new Graph();

    INode subject = graph.CreateUriNode(resourceUri);
    INode itemPredicate = graph.CreateUriNode(Schema.Predicates.CatalogItem);
    INode typePredicate = graph.CreateUriNode(Schema.Predicates.Type);
    INode timeStampPredicate = graph.CreateUriNode(Schema.Predicates.CatalogTimeStamp);
    INode commitIdPredicate = graph.CreateUriNode(Schema.Predicates.CatalogCommitId);
    INode countPredicate = graph.CreateUriNode(Schema.Predicates.CatalogCount);

    // Resource-level commit metadata and total entry count.
    graph.Assert(subject, typePredicate, graph.CreateUriNode(typeUri));
    graph.Assert(subject, commitIdPredicate, graph.CreateLiteralNode(commitId.ToString()));
    graph.Assert(subject, timeStampPredicate, graph.CreateLiteralNode(commitTimeStamp.ToString("O"), Schema.DataTypes.DateTime));
    graph.Assert(subject, countPredicate, graph.CreateLiteralNode(entries.Count.ToString(), Schema.DataTypes.Integer));

    // One node per entry, keyed by the entry's URI string.
    foreach (KeyValuePair<string, CatalogItemSummary> entry in entries)
    {
        CatalogItemSummary summary = entry.Value;
        INode entryNode = graph.CreateUriNode(new Uri(entry.Key));

        graph.Assert(subject, itemPredicate, entryNode);
        graph.Assert(entryNode, typePredicate, graph.CreateUriNode(summary.Type));
        graph.Assert(entryNode, commitIdPredicate, graph.CreateLiteralNode(summary.CommitId.ToString()));
        graph.Assert(entryNode, timeStampPredicate, graph.CreateLiteralNode(summary.CommitTimeStamp.ToString("O"), Schema.DataTypes.DateTime));

        if (summary.Count != null)
        {
            graph.Assert(entryNode, countPredicate, graph.CreateLiteralNode(summary.Count.ToString(), Schema.DataTypes.Integer));
        }

        // Fold any pre-built content graph for this entry into the index graph
        // (second argument true = dotNetRDF's keepOriginalGraphUri).
        if (summary.Content != null)
        {
            graph.Merge(summary.Content, true);
        }
    }

    if (parent != null)
    {
        graph.Assert(subject, graph.CreateUriNode(Schema.Predicates.CatalogParent), graph.CreateUriNode(parent));
    }

    if (extra != null)
    {
        graph.Merge(extra, true);
    }

    if (additionalResourceTypes != null)
    {
        foreach (Uri additionalType in additionalResourceTypes)
        {
            graph.Assert(subject, typePredicate, graph.CreateUriNode(additionalType));
        }
    }

    await SaveGraph(resourceUri, graph, typeUri, cancellationToken);
}
// Converts each input (file or -uri:) to the configured output format. When -merge is
// not set each input is written out immediately; otherwise inputs accumulate in the
// triple store and are written as a dataset or a single merged graph at the end.
// FIX 1: the dataset-graphs branch now honours -overwrite (it previously refused to
//        overwrite even when -overwrite was given, contradicting its own error message).
// FIX 2: removed the unused 'parseEx' catch variable (compiler warning).
public void RunConvert(String[] args)
{
    //Set the Options
    if (!this.SetOptions(args))
    {
        //If SetOptions returns false then some options were invalid and errors have been output to the error stream
        return;
    }

    //Then we'll read in our inputs
    foreach (String input in this._inputs)
    {
        try
        {
            Graph g = new Graph();
            if (input.StartsWith("-uri:"))
            {
                UriLoader.Load(g, new Uri(input.Substring(input.IndexOf(':') + 1)));
            }
            else
            {
                FileLoader.Load(g, input);
            }

            //If not merging we'll output now
            if (!this._merge)
            {
                String destFile;
                if (input.StartsWith("-uri:"))
                {
                    if (this._inputs.Count == 1)
                    {
                        //For a single URI input we require a Filename
                        if (this._output.Equals(String.Empty))
                        {
                            Console.Error.WriteLine("rdfConvert: When converting a single URI you must specify an output file with the -out:filename argument");
                            return;
                        }
                        destFile = Path.GetFileNameWithoutExtension(this._output) + this._outExt;
                    }
                    else
                    {
                        //For multiple inputs where some are URIs the output file is the SHA256 hash of the URI plus the extension
                        destFile = new Uri(input.Substring(input.IndexOf(':') + 1)).GetSha256Hash() + this._outExt;
                    }
                }
                else
                {
                    if (this._inputs.Count == 1 && !this._output.Equals(String.Empty))
                    {
                        //For a single input we'll just change the extension as appropriate
                        if (!this._outExt.Equals(String.Empty))
                        {
                            destFile = Path.GetFileNameWithoutExtension(this._output) + this._outExt;
                        }
                        else
                        {
                            destFile = this._output;
                        }
                    }
                    else
                    {
                        destFile = Path.GetFileNameWithoutExtension(input) + this._outExt;
                    }
                }

                if (File.Exists(destFile) && !this._overwrite)
                {
                    Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' because a file already exists at this location and the -overwrite argument was not used");
                }
                else
                {
                    try
                    {
                        this._writer.Save(g, destFile);
                    }
                    catch (Exception ex)
                    {
                        Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' due to the following error:");
                        Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
                        if (this._debug) this.DebugErrors(ex);
                    }
                }
            }
            else
            {
                //Add to the Store and we'll merge it all together later and output it at the end
                this._store.Add(g);
            }
        }
        catch (RdfParserSelectionException)
        {
            //If this happens then this may be a dataset instead of a graph
            try
            {
                if (input.StartsWith("-uri:"))
                {
                    UriLoader.Load(this._store, new Uri(input.Substring(input.IndexOf(':') + 1)));
                }
                else
                {
                    FileLoader.Load(this._store, input);
                }

                //If not merging we'll output now
                if (!this._merge)
                {
                    foreach (IGraph g in this._store.Graphs)
                    {
                        String destFile = (g.BaseUri == null) ? "default-graph" : g.BaseUri.GetSha256Hash();
                        destFile += this._outExt;

                        //FIX 1: previously just File.Exists(destFile), ignoring -overwrite
                        if (File.Exists(destFile) && !this._overwrite)
                        {
                            Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' because a file already exists at this location and the -overwrite argument was not used");
                        }
                        else
                        {
                            try
                            {
                                this._writer.Save(g, destFile);
                            }
                            catch (Exception ex)
                            {
                                Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' due to the following error:");
                                Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
                                if (this._debug) this.DebugErrors(ex);
                            }
                        }
                    }

                    //Reset the Triple Store after outputting
                    this._store.Dispose();
                    this._store = new TripleStore();
                }
            }
            catch (Exception ex)
            {
                Console.Error.WriteLine("rdfConvert: Unable to read from input '" + input + "' due to the following error:");
                Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
                if (this._debug) this.DebugErrors(ex);
            }
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine("rdfConvert: Unable to read from input '" + input + "' due to the following error:");
            Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
            if (this._debug) this.DebugErrors(ex);
        }
    }

    //Then we'll apply merging if applicable
    //If merge was false then we've already done the outputting as we had no need to keep
    //stuff in memory
    if (this._merge)
    {
        if (this._storeWriter != null && (this._writer == null || this._dataset))
        {
            //We only have a StoreWriter so we output a Dataset rather than merging
            if (!this._output.Equals(String.Empty))
            {
                if (File.Exists(this._output) && !this._overwrite)
                {
                    Console.Error.WriteLine("rdfConvert: Unable to output to '" + this._output + "' because a file already exists at this location and the -overwrite argument was not used");
                }
                else
                {
                    try
                    {
                        this._storeWriter.Save(this._store, new VDS.RDF.Storage.Params.StreamParams(this._output));
                    }
                    catch (Exception ex)
                    {
                        Console.Error.WriteLine("rdfConvert: Unable to output to '" + this._output + "' due to the following error:");
                        Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
                        if (this._debug) this.DebugErrors(ex);
                    }
                }
            }
            else
            {
                String destFile = (this._inputs.Count == 1 && !this._inputs[0].StartsWith("-uri:")) ? Path.GetFileNameWithoutExtension(this._inputs[0]) + this._outExt : "dataset" + this._outExt;
                if (File.Exists(destFile) && !this._overwrite)
                {
                    Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' because a file already exists at this location and the -overwrite argument was not used");
                }
                else
                {
                    try
                    {
                        this._storeWriter.Save(this._store, new VDS.RDF.Storage.Params.StreamParams(destFile));
                    }
                    catch (Exception ex)
                    {
                        Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' due to the following error:");
                        Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
                        if (this._debug) this.DebugErrors(ex);
                    }
                }
            }
        }
        else
        {
            //Merge all the Graphs together and produce a single Graph
            Graph mergedGraph = new Graph();
            foreach (IGraph g in this._store.Graphs)
            {
                mergedGraph.Merge(g);
            }

            //Work out the output file and output the Graph
            String destFile;
            if (!this._output.Equals(String.Empty))
            {
                destFile = this._output;
            }
            else
            {
                destFile = "merged-graph" + this._outExt;
            }

            if (File.Exists(destFile) && !this._overwrite)
            {
                Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' because a file already exists at this location and the -overwrite argument was not used");
            }
            else
            {
                try
                {
                    this._writer.Save(mergedGraph, destFile);
                }
                catch (Exception ex)
                {
                    Console.Error.WriteLine("rdfConvert: Unable to output to '" + destFile + "' due to the following error:");
                    Console.Error.WriteLine("rdfConvert: Error: " + ex.Message);
                    if (this._debug) this.DebugErrors(ex);
                }
            }
        }
    }
}
// Builds the catalog entry graph for this item — infrastructure fields, publication
// and ownership details, visibility scope, listed flag, and the nuspec metadata —
// and returns it as JSON storage content.
// FIX: the nuspec triples are materialized into a list before the copy loop;
// the original asserted into catalogEntry while enumerating GetTriplesWithSubject
// over that same graph (modifying the collection being enumerated).
public override StorageContent CreateContent(CatalogContext context)
{
    IGraph catalogEntry = new Graph();

    INode catalogEntrySubject = catalogEntry.CreateUriNode(GetItemAddress());

    // catalog infrastructure fields
    catalogEntry.Assert(catalogEntrySubject, catalogEntry.CreateUriNode(Schema.Predicates.Type), catalogEntry.CreateUriNode(GetItemType()));
    catalogEntry.Assert(catalogEntrySubject, catalogEntry.CreateUriNode(Schema.Predicates.Type), catalogEntry.CreateUriNode(_itemType));
    catalogEntry.Assert(catalogEntrySubject, catalogEntry.CreateUriNode(Schema.Predicates.Type), catalogEntry.CreateUriNode(Schema.DataTypes.Permalink));
    catalogEntry.Assert(catalogEntrySubject, catalogEntry.CreateUriNode(Schema.Predicates.CatalogTimeStamp), catalogEntry.CreateLiteralNode(TimeStamp.ToString("O"), Schema.DataTypes.DateTime));
    catalogEntry.Assert(catalogEntrySubject, catalogEntry.CreateUriNode(Schema.Predicates.CatalogCommitId), catalogEntry.CreateLiteralNode(CommitId.ToString()));
    catalogEntry.Assert(catalogEntrySubject, catalogEntry.CreateUriNode(Schema.Predicates.Published), catalogEntry.CreateLiteralNode(_publicationDetails.Published.ToString("O"), Schema.DataTypes.DateTime));
    catalogEntry.Assert(catalogEntrySubject, catalogEntry.CreateUriNode(Schema.Predicates.TenantId), catalogEntry.CreateLiteralNode(_publicationDetails.TenantId));
    catalogEntry.Assert(catalogEntrySubject, catalogEntry.CreateUriNode(Schema.Predicates.Tenant), catalogEntry.CreateLiteralNode(_publicationDetails.TenantName));

    // owner details (Name/Iss intentionally left commented out, as in the original)
    Uri ownerUri = _publicationDetails.Owner.GetUri(GetItemAddress());
    INode ownerSubject = catalogEntry.CreateUriNode(ownerUri);
    catalogEntry.Assert(ownerSubject, catalogEntry.CreateUriNode(Schema.Predicates.NameIdentifier), catalogEntry.CreateLiteralNode(_publicationDetails.Owner.NameIdentifier));
    //catalogEntry.Assert(ownerSubject, catalogEntry.CreateUriNode(Schema.Predicates.Name), catalogEntry.CreateLiteralNode(_publicationDetails.Owner.Name));
    catalogEntry.Assert(ownerSubject, catalogEntry.CreateUriNode(Schema.Predicates.GivenName), catalogEntry.CreateLiteralNode(_publicationDetails.Owner.GivenName));
    catalogEntry.Assert(ownerSubject, catalogEntry.CreateUriNode(Schema.Predicates.Surname), catalogEntry.CreateLiteralNode(_publicationDetails.Owner.Surname));
    //catalogEntry.Assert(ownerSubject, catalogEntry.CreateUriNode(Schema.Predicates.Iss), catalogEntry.CreateLiteralNode(_publicationDetails.Owner.Iss));
    catalogEntry.Assert(catalogEntrySubject, catalogEntry.CreateUriNode(Schema.Predicates.Owner), ownerSubject);

    // visibility
    catalogEntry.Assert(catalogEntrySubject, catalogEntry.CreateUriNode(Schema.Predicates.Visibility), catalogEntry.CreateLiteralNode(_publicationDetails.Visibility.Visibility.ToString()));
    switch (_publicationDetails.Visibility.Visibility)
    {
        case PublicationVisibility.VisibilityScope.Organization:
            catalogEntry.Assert(catalogEntrySubject, catalogEntry.CreateUriNode(Schema.Predicates.Organization), catalogEntry.CreateLiteralNode(_publicationDetails.Visibility.Organization));
            break;
        case PublicationVisibility.VisibilityScope.Subscription:
            catalogEntry.Assert(catalogEntrySubject, catalogEntry.CreateUriNode(Schema.Predicates.Subscription), catalogEntry.CreateLiteralNode(_publicationDetails.Visibility.Subscription));
            break;
    }

    // listed
    catalogEntry.Assert(catalogEntrySubject, catalogEntry.CreateUriNode(Schema.Predicates.Listed), catalogEntry.CreateLiteralNode(_isListed.ToString(), Schema.DataTypes.Boolean));

    // add the nuspec metadata
    Uri nuspecSubject = _nuspec["@id"].ToObject<Uri>();
    IGraph nuspecGraph = Utils.CreateGraph(_nuspec);

    // Any statements made about this @id in the nuspec we want to make about the catalog items @id
    // - catalog readers can then apply this logic in reverse
    // - by so doing the catalog entry becomes an audit entry for the data
    catalogEntry.Merge(nuspecGraph, false);

    // Snapshot the matching triples before asserting copies of them (see FIX above).
    List<Triple> nuspecTriples = new List<Triple>(catalogEntry.GetTriplesWithSubject(catalogEntry.CreateUriNode(nuspecSubject)));
    foreach (Triple triple in nuspecTriples)
    {
        catalogEntry.Assert(catalogEntrySubject, triple.Predicate.CopyNode(catalogEntry), triple.Object.CopyNode(catalogEntry));
    }

    GraphHelpers.MaterializeInference(catalogEntry);

    SetIdVersionFromGraph(catalogEntry);

    // create JSON content
    string json = Utils.CreateJson(catalogEntry, _context);

    StorageContent content = new StringStorageContent(json, "application/json", "no-store");

    return content;
}
// Loads the manifest file referenced by a URI node and merges it into g.
// Non-URI nodes are ignored (same behavior as before).
// FIX: the original repeated "(n as UriNode)" three times after an "is" check;
// a single cast with an early return performs the type test once.
private static void ProcessInclude(INode n, Graph g)
{
    UriNode uriNode = n as UriNode;
    if (uriNode == null) return;

    Console.WriteLine("Loading manifest file: {0}", uriNode.Uri.LocalPath);
    var includeGraph = new Graph();
    FileLoader.Load(includeGraph, uriNode.Uri.LocalPath);
    g.Merge(includeGraph);
}