/// <summary>
/// Saves a RDF Dataset as GZipped output.
/// </summary>
/// <param name="store">Store to save.</param>
/// <param name="output">Writer to save to; must be a <see cref="StreamWriter"/>.</param>
/// <exception cref="RdfOutputException">Thrown if either argument is null or if <paramref name="output"/> is not a StreamWriter.</exception>
public void Save(ITripleStore store, TextWriter output)
{
    if (store == null)
    {
        throw new RdfOutputException("Cannot output a null Triple Store");
    }
    if (output == null)
    {
        throw new RdfOutputException("Cannot output to a null writer");
    }
    if (output is StreamWriter)
    {
        StreamWriter writer = (StreamWriter)output;
        if (writer.BaseStream is GZipStream)
        {
            // Underlying stream is already GZip compressed so write to it directly
            _writer.Save(store, writer);
        }
        else
        {
            // Wrap the underlying stream in a GZipStream. The using block guarantees
            // the StreamWriter buffer is flushed and the GZip footer written even if
            // the inner writer does not close the stream itself (the original code
            // never disposed this writer, which could truncate the compressed output).
            using (StreamWriter compressed = new StreamWriter(new GZipStream(writer.BaseStream, CompressionMode.Compress)))
            {
                _writer.Save(store, compressed);
            }
        }
    }
    else
    {
        throw new RdfOutputException("GZip Dataset Writers can only write to StreamWriter instances");
    }
}
/// <summary>
/// Writes the given Triple Store to a String and returns the output in your chosen concrete RDF dataset syntax
/// </summary>
/// <param name="store">Triple Store</param>
/// <param name="writer">Writer to use to generate concrete RDF Syntax</param>
/// <returns>The serialized dataset as a string.</returns>
public static String Write(ITripleStore store, IStoreWriter writer)
{
    // Capture the writer's output in an in-memory buffer
    var buffer = new System.IO.StringWriter();
    writer.Save(store, buffer);
    return buffer.ToString();
}
/// <summary>
/// Writes the given Triple Store to a String and returns the output in your chosen concrete RDF dataset syntax
/// </summary>
/// <param name="store">Triple Store</param>
/// <param name="writer">Writer to use to generate concrete RDF Syntax</param>
/// <returns>The serialized dataset as a string.</returns>
public static String Write(ITripleStore store, IStoreWriter writer)
{
    // Serialize into an in-memory buffer via the writer-params wrapper
    var buffer = new System.IO.StringWriter();
    var target = new TextWriterParams(buffer);
    writer.Save(store, target);
    return buffer.ToString();
}
/// <summary>
/// Saves a message. (Decorator: persists via the wrapped FileStore, then updates the cache.)
/// </summary>
public void Save(int id, string message)
{
    // Decorator => call FileStore first so the message is durably written
    writer.Save(id, message);
    // Then refresh the cache entry; the same Maybe instance is reused for
    // both the add and the update paths
    var cached = new Maybe<string>(message);
    cache.AddOrUpdate(id, cached, (key, existing) => cached);
}
/// <summary>
/// Saves a Triple Store to a file, choosing the dataset writer from the file's extension.
/// </summary>
/// <param name="store">Triple Store to save.</param>
/// <param name="fileName">Target file name; its (true, non-.gz) extension selects the writer.</param>
public static void SaveToFile(this ITripleStore store, string fileName)
{
    using (var target = new StreamWriter(fileName))
    {
        // Resolve the writer from the file's true extension (ignores .gz wrappers)
        var extension = MimeTypesHelper.GetTrueFileExtension(fileName);
        IStoreWriter storeWriter = MimeTypesHelper.GetStoreWriterByFileExtension(extension);
        storeWriter.Save(store, target);
    }
}
/// <summary>
/// Saves a Triple Store to the named file using this writer.
/// </summary>
/// <param name="writer">Writer to serialize with.</param>
/// <param name="store">Triple Store to save.</param>
/// <param name="filename">Path of the file to write.</param>
public static void Save(this IStoreWriter writer, ITripleStore store, string filename)
{
    // Disposing the StreamWriter (via using) flushes and closes it, so the
    // explicit Flush()/Close() calls in the original were redundant.
    using (var output = new StreamWriter(filename))
    {
        writer.Save(store, output);
    }
}
/// <summary>
/// Serializes the current store contents back to its backing file or stream.
/// </summary>
private void WriteStore()
{
    if (_filePath != null)
    {
        // Suspend the watcher so our own write does not trigger a change event
        _watcher.EnableRaisingEvents = false;
        try
        {
            // FileMode.Create truncates the file first; the original used
            // FileMode.Open, which leaves stale trailing bytes behind when
            // the new serialization is shorter than the old one.
            using (var fileStream = new FileStream(_filePath, FileMode.Create, FileAccess.Write, FileShare.Read))
            using (var streamWriter = new StreamWriter(fileStream, System.Text.UTF8Encoding.UTF8))
            {
                _storeWriter.Save(this, streamWriter);
            }
        }
        finally
        {
            // Re-enable the watcher even if serialization throws
            _watcher.EnableRaisingEvents = true;
        }
    }
    else
    {
        _fileStream.SetLength(0);
        // leaveOpen: true keeps _fileStream usable after the writer is disposed;
        // disposing the writer flushes its buffer (the original never flushed it,
        // so buffered output could be lost).
        using (TextWriter fileWriter = new StreamWriter(_fileStream, System.Text.UTF8Encoding.UTF8, 4096, true))
        {
            _storeWriter.Save(this, fileWriter);
        }
        _fileStream.Flush();
    }
}
/// <summary>
/// Round-trip test helper: serializes a three-graph store with the given writer
/// and verifies the reader parses the output back to equal graphs.
/// </summary>
private void TestWriter(IStoreWriter writer, IStoreReader reader, bool useMultiThreaded, int compressionLevel)
{
    // Build a store containing a default graph plus two named graphs
    var store = new TripleStore();

    var configGraph = new Graph();
    configGraph.LoadFromEmbeddedResource("VDS.RDF.Configuration.configuration.ttl");
    configGraph.BaseUri = null;
    store.Add(configGraph);

    var inferenceGraph = new Graph();
    inferenceGraph.LoadFromFile("resources\\InferenceTest.ttl");
    inferenceGraph.BaseUri = new Uri("http://example.org/graph");
    store.Add(inferenceGraph);

    var cyrillicGraph = new Graph();
    cyrillicGraph.LoadFromFile(@"resources\cyrillic.rdf");
    cyrillicGraph.BaseUri = new Uri("http://example.org/cyrillic");
    store.Add(cyrillicGraph);

    // Apply optional writer settings when supported
    if (writer is ICompressingWriter compressingWriter)
    {
        compressingWriter.CompressionLevel = compressionLevel;
    }
#if !NETCOREAPP2_0
    if (writer is IMultiThreadedWriter multiThreadedWriter)
    {
        multiThreadedWriter.UseMultiThreadedWriting = useMultiThreaded;
    }
#endif

    // Serialize to a string and sanity-check that something was produced
    var serialized = new System.IO.StringWriter();
    writer.Save(store, serialized);
    Console.WriteLine(serialized.ToString());
    Assert.NotEqual(serialized.ToString(), String.Empty);

    // Parse the serialization back and compare graph-by-graph
    var roundTripped = new TripleStore();
    reader.Load(roundTripped, new System.IO.StringReader(serialized.ToString()));
    foreach (IGraph graph in store.Graphs)
    {
        Assert.True(roundTripped.HasGraph(graph.BaseUri), "Parsed Stored should have contained serialized graph");
        Assert.Equal(graph, roundTripped[graph.BaseUri]);
    }
}
/// <summary>
/// Round-trip test helper: serializes a two-graph store with the given writer
/// and verifies the reader parses the output back to equal graphs.
/// </summary>
private void TestWriter(IStoreWriter writer, IStoreReader reader, bool useMultiThreaded, int compressionLevel)
{
    // Assemble a store with one default graph and one named graph
    TripleStore store = new TripleStore();

    Graph configGraph = new Graph();
    configGraph.LoadFromEmbeddedResource("VDS.RDF.Configuration.configuration.ttl");
    configGraph.BaseUri = null;
    store.Add(configGraph);

    Graph namedGraph = new Graph();
    namedGraph.LoadFromFile("InferenceTest.ttl");
    namedGraph.BaseUri = new Uri("http://example.org/graph");
    store.Add(namedGraph);

    // Apply optional writer settings when supported
    ICompressingWriter compressingWriter = writer as ICompressingWriter;
    if (compressingWriter != null)
    {
        compressingWriter.CompressionLevel = compressionLevel;
    }
    IMultiThreadedWriter multiThreadedWriter = writer as IMultiThreadedWriter;
    if (multiThreadedWriter != null)
    {
        multiThreadedWriter.UseMultiThreadedWriting = useMultiThreaded;
    }

    // Serialize to a string and sanity-check that something was produced
    System.IO.StringWriter serialized = new System.IO.StringWriter();
    writer.Save(store, serialized);
    Console.WriteLine(serialized.ToString());
    Assert.IsFalse(serialized.ToString().Equals(String.Empty));

    // Parse the serialization back and compare graph-by-graph
    TripleStore roundTripped = new TripleStore();
    reader.Load(roundTripped, new System.IO.StringReader(serialized.ToString()));
    foreach (IGraph graph in store.Graphs)
    {
        Assert.IsTrue(roundTripped.HasGraph(graph.BaseUri), "Parsed Stored should have contained serialized graph");
        Assert.AreEqual(graph, roundTripped[graph.BaseUri], "Parsed Graph should be equal to original graph");
    }
}
/// <inheritdoc />
public void Save(IGraph g, string filename)
{
    // A single graph is exported by wrapping it as a one-graph triple store
    var wrapped = g.AsTripleStore();
    _storeWriter.Save(wrapped, filename);
}
/// <summary>
/// Test fixture setup: for every registered MIME type definition that supports
/// round-tripping, writes this fixture's graph / dataset / SPARQL results out as
/// GZipped files, both "manually" (explicit GZipStream, filename suffixed .gz)
/// and "automatically" (relying on a .gz-aware writer), recording the filenames
/// in the corresponding lists for later tests.
/// </summary>
public void Setup()
{
    this._g = new Graph();
    this._g.LoadFromEmbeddedResource("VDS.RDF.Configuration.configuration.ttl");
    this._results = this._g.ExecuteQuery("SELECT * WHERE { ?s ?p ?o }") as SparqlResultSet;
    foreach (MimeTypeDefinition def in MimeTypesHelper.Definitions)
    {
        // Omit CSV since that is a lossy format that does not round trip
        if (def.CanonicalMimeType.Equals("text/csv"))
        {
            continue;
        }
        if (def.CanWriteRdf && def.CanParseRdf)
        {
            // Graph formats: a definition whose canonical extension already ends in
            // .gz is handled "automatically" by the writer; otherwise we gzip the
            // output ourselves ("manual") and append .gz to the filename
            IRdfWriter writer = def.GetRdfWriter();
            bool isManual = !def.CanonicalFileExtension.EndsWith(".gz");
            String filename = "gzip-tests" + (isManual ? String.Empty : "-auto") + "." + def.CanonicalFileExtension + (isManual ? ".gz" : String.Empty);
            if (isManual)
            {
                using (StreamWriter output = new StreamWriter(new GZipStream(new FileStream(filename, FileMode.Create, FileAccess.Write), CompressionMode.Compress)))
                {
                    writer.Save(this._g, output);
                    output.Close();
                }
                this._manualTestFiles.Add(filename);
            }
            else
            {
                writer.Save(this._g, filename);
                this._autoTestFiles.Add(filename);
            }
        }
        else if (def.CanParseRdfDatasets && def.CanWriteRdfDatasets)
        {
            // Dataset formats: wrap the single graph in a TripleStore first
            IStoreWriter writer = def.GetRdfDatasetWriter();
            bool isManual = !def.CanonicalFileExtension.EndsWith(".gz");
            String filename = "gzip-tests-datasets" + (isManual ? String.Empty : "-auto") + "." + def.CanonicalFileExtension + (isManual ? ".gz" : String.Empty);
            TripleStore store = new TripleStore();
            store.Add(this._g);
            if (isManual)
            {
                using (Stream output = new GZipStream(new FileStream(filename, FileMode.Create, FileAccess.Write), CompressionMode.Compress))
                {
                    writer.Save(store, new StreamWriter(output));
                    output.Close();
                }
                this._manualDatasetTestFiles.Add(filename);
            }
            else
            {
                // NOTE(review): this StreamWriter is never disposed here — presumably
                // the dataset writer closes it internally; confirm before relying on
                // the file being fully flushed
                writer.Save(store, new StreamWriter(new FileStream(filename, FileMode.Create, FileAccess.Write)));
                this._autoDatasetTestFiles.Add(filename);
            }
        }
        else if (def.CanParseSparqlResults && def.CanWriteSparqlResults)
        {
            // SPARQL results formats
            ISparqlResultsWriter writer = def.GetSparqlResultsWriter();
            bool isManual = !def.CanonicalFileExtension.EndsWith(".gz");
            String filename = "gzip-tests-results" + (isManual ? String.Empty : "-auto") + "." + def.CanonicalFileExtension + (isManual ? ".gz" : String.Empty);
            if (isManual)
            {
                using (StreamWriter output = new StreamWriter(new GZipStream(new FileStream(filename, FileMode.Create, FileAccess.Write), CompressionMode.Compress)))
                {
                    writer.Save(this._results, output);
                    output.Close();
                }
                this._manualResultsTestFiles.Add(filename);
            }
            else
            {
                writer.Save(this._results, new StreamWriter(filename));
                this._autoResultsTestFiles.Add(filename);
            }
        }
    }
}
/// <summary>
/// Runs the task: exports every graph of the store to the target file, choosing a
/// dataset writer from the file extension. TriX is written in one shot from memory;
/// all other formats are appended to the file one graph at a time.
/// </summary>
/// <returns>A successful <see cref="TaskResult"/>, also returned early on cancellation.</returns>
protected override TaskResult RunTaskInternal()
{
    // Choose a dataset writer from the export file's (true, non-.gz) extension
    MimeTypeDefinition def = MimeTypesHelper.GetDefinitionsByFileExtension(MimeTypesHelper.GetTrueFileExtension(this._file)).FirstOrDefault(d => d.CanWriteRdfDatasets);
    if (def == null)
    {
        throw new RdfOutputException("Cannot Export the Store to the selected File since dotNetRDF was unable to select a writer to use based on the File Extension");
    }
    IStoreWriter writer = def.GetRdfDatasetWriter();
    // Multi-threaded writing is turned off so graphs are written sequentially
    if (writer is IMultiThreadedWriter)
    {
        ((IMultiThreadedWriter)writer).UseMultiThreadedWriting = false;
    }
    TripleStore store = new TripleStore();
    if (writer is TriXWriter)
    {
        //For TriX must load all into memory and then write out all at once
        foreach (Uri u in this.ListGraphs())
        {
            Graph g = new Graph();
            this._manager.LoadGraph(g, u);
            g.BaseUri = u;
            store.Add(g);
            this.Information = "Loading into memory prior to export, loaded " + store.Graphs.Sum(x => x.Triples.Count) + " Triple(s) in " + store.Graphs.Count + " Graph(s) so far...";
            // Check for cancellation after each graph load
            if (this.HasBeenCancelled)
            {
                this.Information = "Export Cancelled";
                return(new TaskResult(true));
            }
        }
        this.Information = "Exporting Data all at once, have " + store.Graphs.Sum(x => x.Triples.Count) + " Triple(s) in " + store.Graphs.Count + " Graph(s) to export...";
        writer.Save(store, new StreamWriter(this._file));
        this.Information = "Exported " + store.Graphs.Sum(x => x.Triples.Count) + " Triple(s) in " + store.Graphs.Count + " Graph(s)";
    }
    else
    {
        // Start from an empty file since each graph is appended in turn below
        if (File.Exists(this._file))
        {
            File.Delete(this._file);
        }
        //For non-TriX formats assume it is safe to append one Graph at a time to the file
        int graphCount = 0, tripleCount = 0;
        foreach (Uri u in this.ListGraphs())
        {
            using (FileStream stream = new FileStream(this._file, FileMode.Append))
            {
                if (writer is IFormatterBasedWriter)
                {
                    //Stream via a WriteThroughHandler so the graph never has to be fully in memory
                    this.Information = "Stream Exporting Graph " + (u != null ? u.AbsoluteUri : "Default");
                    WriteThroughHandler handler = new WriteThroughHandler(((IFormatterBasedWriter)writer).TripleFormatterType, new StreamWriter(stream), true);
                    ExportProgressHandler progHandler = new ExportProgressHandler(handler, this, tripleCount);
                    this._manager.LoadGraph(progHandler, u);
                    graphCount++;
                    tripleCount = progHandler.TripleCount;
                    this.Information = "Finished Stream Exporting Graph " + (u != null ? u.AbsoluteUri : "Default") + ", exported " + tripleCount + " Triple(s) in " + graphCount + " Graph(s) so far...";
                }
                else
                {
                    //Load Graph into memory
                    Graph g = new Graph();
                    g.BaseUri = u;
                    this.Information = "Loading Graph " + (u != null ? u.AbsoluteUri : "Default");
                    this._manager.LoadGraph(g, u);
                    g.BaseUri = u;
                    if (this.HasBeenCancelled)
                    {
                        stream.Close();
                        this.Information = "Export Cancelled, exported " + tripleCount + " Triple(s) in " + graphCount + " Graph(s)";
                        return(new TaskResult(true));
                    }
                    graphCount++;
                    tripleCount += g.Triples.Count;
                    //Save it - the graph is added to the store, written out, then removed again
                    store.Add(g);
                    writer.Save(store, new StreamWriter(stream, def.Encoding));
                    store.Remove(u);
                    this.Information = "Exporting Data graph by graph, exported " + tripleCount + " Triple(s) in " + graphCount + " Graph(s) so far...";
                }
                //Check for cancellation
                if (this.HasBeenCancelled)
                {
                    stream.Close();
                    this.Information = "Export Cancelled, exported " + tripleCount + " Triple(s) in " + graphCount + " Graph(s)";
                    return(new TaskResult(true));
                }
            }
        }
        this.Information = "Exported " + tripleCount + " Triple(s) in " + graphCount + " Graph(s)";
    }
    return(new TaskResult(true));
}
/// <summary>
/// HTTP handler entry point: expands the URI given in the 'uri' querystring
/// parameter (optionally under a custom 'profile') and writes the resulting
/// store back to the client in a content-negotiated RDF dataset format.
/// </summary>
/// <param name="context">Context of the HTTP request.</param>
public void ProcessRequest(HttpContext context)
{
    //Turn on Response Buffering
    context.Response.Buffer = true;
    //Prepare the Cache Directories
    if (!Path.IsPathRooted(this._cacheDir))
    {
        this._cacheDir = context.Server.MapPath(this._cacheDir);
    }
    if (this._loader == null)
    {
        this._loader = new ExpansionLoader(this._cacheDir);
    }
    //Add our Custom Headers
    try
    {
        context.Response.Headers.Add("X-dotNetRDF-Version", Assembly.GetAssembly(typeof(VDS.RDF.IGraph)).GetName().Version.ToString());
    }
    catch (PlatformNotSupportedException)
    {
        // The Headers collection is unsupported on some hosts - fall back to AddHeader
        context.Response.AddHeader("X-dotNetRDF-Version", Assembly.GetAssembly(typeof(VDS.RDF.IGraph)).GetName().Version.ToString());
    }
    try
    {
        //Retrieve the desired URI and Profile URI from Querystring parameters
        String uri = context.Request.QueryString["uri"];
        String profile = context.Request.QueryString["profile"];
        if (uri == null)
        {
            // The two branches differ only in whether an existing query string is
            // stripped from the example URL shown in the error message
            if (context.Request.Url.Query.Equals(String.Empty))
            {
                throw new ArgumentNullException("uri", "Required uri parameter used to designate the URI you wish to expand was not found. Your request must use a URI of the form " + context.Request.Url.ToString() + "?uri=" + Uri.EscapeDataString("http://example.org"));
            }
            else
            {
                throw new ArgumentNullException("uri", "Required uri parameter used to designate the URI you wish to expand was not found. Your request must use a URI of the form " + context.Request.Url.ToString().Replace(context.Request.Url.Query, String.Empty) + "?uri=" + Uri.EscapeDataString("http://example.org"));
            }
        }
        //Note that the ExpansionLoader class automatically handles all the Caching for us
        IInMemoryQueryableStore store;
        // NOTE(review): uriHash appears unused in this method - verify whether it can be removed
        String uriHash = new Uri(uri).GetSha256Hash();
        if (profile == null)
        {
            //Use Default Profile
            store = this._loader.Load(new Uri(uri));
        }
        else
        {
            //Use Custom Profile
            store = this._loader.Load(new Uri(uri), new Uri(profile));
        }
        // Content-negotiate an RDF dataset writer from the request's Accept types
        String ctype;
        IStoreWriter writer = MimeTypesHelper.GetStoreWriter(context.Request.AcceptTypes, out ctype);
        context.Response.ContentType = ctype;
        writer.Save(store, new StreamParams(context.Response.OutputStream));
    }
    catch (ArgumentNullException argNull)
    {
        HandleErrors(context, "Missing Argument", argNull);
    }
    catch (RdfParseException parseEx)
    {
        HandleErrors(context, "RDF Parser Error", parseEx);
    }
    catch (RdfException rdfEx)
    {
        HandleErrors(context, "RDF Error", rdfEx);
    }
    catch (Exception ex)
    {
        HandleErrors(context, "Error", ex);
    }
}
/// <summary>
/// Helper function which returns the Results (Graph/Triple Store/SPARQL Results) back to the Client in one of their accepted formats
/// </summary>
/// <param name="context">Context of the HTTP Request</param>
/// <param name="result">Results of the Sparql Query</param>
/// <param name="config">Handler Configuration</param>
public static void SendToClient(HttpContext context, Object result, BaseHandlerConfiguration config)
{
    MimeTypeDefinition definition = null;
    String ctype = "text/plain";
    String[] acceptTypes = HandlerHelper.GetAcceptTypes(context);
    //Return the Results
    if (result is SparqlResultSet)
    {
        ISparqlResultsWriter sparqlWriter = null;
        //Try and get a MIME Type Definition using the HTTP Requests Accept Header
        if (acceptTypes != null)
        {
            definition = MimeTypesHelper.GetDefinitions(acceptTypes).FirstOrDefault(d => d.CanWriteSparqlResults);
        }
        //Try and get the registered Definition for SPARQL Results XML
        if (definition == null)
        {
            definition = MimeTypesHelper.GetDefinitions(MimeTypesHelper.SparqlXml[0]).FirstOrDefault();
        }
        //If Definition is still null create a temporary definition
        if (definition == null)
        {
            definition = new MimeTypeDefinition("SPARQL Results XML", MimeTypesHelper.SparqlXml, Enumerable.Empty <String>());
            definition.SparqlResultsWriterType = typeof(VDS.RDF.Writing.SparqlXmlWriter);
        }
        //Set up the Writer appropriately
        sparqlWriter = definition.GetSparqlResultsWriter();
        context.Response.ContentType = definition.CanonicalMimeType;
        HandlerHelper.ApplyWriterOptions(sparqlWriter, config);
        //Clear any existing Response
        context.Response.Clear();
        //Send Result Set to Client
        context.Response.ContentEncoding = definition.Encoding;
        sparqlWriter.Save((SparqlResultSet)result, new StreamWriter(context.Response.OutputStream, definition.Encoding));
    }
    else if (result is IGraph)
    {
        IRdfWriter rdfWriter = null;
        //Try and get a MIME Type Definition using the HTTP Requests Accept Header
        if (acceptTypes != null)
        {
            definition = MimeTypesHelper.GetDefinitions(acceptTypes).FirstOrDefault(d => d.CanWriteRdf);
        }
        if (definition == null)
        {
            //If no appropriate definition then use the GetWriter method instead
            rdfWriter = MimeTypesHelper.GetWriter(acceptTypes, out ctype);
        }
        else
        {
            rdfWriter = definition.GetRdfWriter();
        }
        //Setup the writer
        if (definition != null)
        {
            ctype = definition.CanonicalMimeType;
        }
        context.Response.ContentType = ctype;
        HandlerHelper.ApplyWriterOptions(rdfWriter, config);
        //Clear any existing Response
        context.Response.Clear();
        //Send Graph to Client
        if (definition != null)
        {
            context.Response.ContentEncoding = definition.Encoding;
            rdfWriter.Save((IGraph)result, new StreamWriter(context.Response.OutputStream, definition.Encoding));
        }
        else
        {
            // No definition means no declared encoding - use the StreamWriter default
            rdfWriter.Save((IGraph)result, new StreamWriter(context.Response.OutputStream));
        }
    }
    else if (result is ITripleStore)
    {
        IStoreWriter storeWriter = null;
        //Try and get a MIME Type Definition using the HTTP Requests Accept Header
        if (acceptTypes != null)
        {
            definition = MimeTypesHelper.GetDefinitions(acceptTypes).FirstOrDefault(d => d.CanWriteRdfDatasets);
        }
        if (definition == null)
        {
            //If no appropriate definition then use the GetStoreWriter method instead
            storeWriter = MimeTypesHelper.GetStoreWriter(acceptTypes, out ctype);
        }
        else
        {
            storeWriter = definition.GetRdfDatasetWriter();
        }
        //Setup the writer
        if (definition != null)
        {
            ctype = definition.CanonicalMimeType;
        }
        context.Response.ContentType = ctype;
        HandlerHelper.ApplyWriterOptions(storeWriter, config);
        //Clear any existing Response
        context.Response.Clear();
        //Send Triple Store to Client
        if (definition != null)
        {
            context.Response.ContentEncoding = definition.Encoding;
            storeWriter.Save((ITripleStore)result, new VDS.RDF.Storage.Params.StreamParams(context.Response.OutputStream, definition.Encoding));
        }
        else
        {
            storeWriter.Save((ITripleStore)result, new VDS.RDF.Storage.Params.StreamParams(context.Response.OutputStream));
        }
    }
    else if (result is ISparqlDataset)
    {
        //Wrap in a Triple Store and then call self so the Triple Store writing branch of this if gets called instead
        TripleStore store = new TripleStore(new DatasetGraphCollection((ISparqlDataset)result));
        HandlerHelper.SendToClient(context, store, config);
    }
    else
    {
        throw new RdfOutputException("Unexpected Result Object of Type '" + result.GetType().ToString() + "' returned - unable to write Objects of this Type to the HTTP Response");
    }
}
/// <summary>
/// Saves a graph to the named file via the dataset writer.
/// </summary>
public void Save(IGraph g, string filename)
{
    // Resolve the store that holds this graph, then delegate to the dataset writer
    var store = GetStore(g);
    storeWriter.Save(store, filename);
}
/// <summary>
/// Saves a message, logging before and after the write.
/// (Logging decorator: delegates the actual save to the wrapped cache layer.)
/// </summary>
public void Save(int id, string message) // Decorator
{
    log.Information("Saving message {id}.", id);
    writer.Save(id, message); // => call Cache
    log.Information("Saved message {id}.", id);
}