/// <summary>
/// Creates a repository that persists capacity data keyed by <see cref="DateTime"/> as JSON.
/// </summary>
/// <param name="dateWriter">Store writer used to persist entries.</param>
/// <param name="dateReader">Store reader used to load entries.</param>
/// <param name="quickenings">Quickenings applied when replaying stored data.</param>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public JsonCapacityRepository(IStoreWriter<DateTime> dateWriter, IStoreReader<DateTime> dateReader, IEnumerable<IQuickening> quickenings)
{
    // Guard clauses added for consistency with the other repository
    // constructors in this codebase (see JsonCmdApplicationConfigurationRepository).
    if (dateWriter == null)
        throw new ArgumentNullException(nameof(dateWriter));
    if (dateReader == null)
        throw new ArgumentNullException(nameof(dateReader));
    if (quickenings == null)
        throw new ArgumentNullException(nameof(quickenings));

    this.writer = dateWriter;
    this.reader = dateReader;
    this.quickenings = quickenings;
    this.serializer = new JsonSerializer();
}
/// <summary>Creates a new instance of the file triple store.</summary>
/// <param name="fileStream">Stream to read/write.</param>
/// <param name="storeReader">Store reader used to read the file.</param>
/// <param name="storeWriter">Store writer to write the file.</param>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public FileTripleStore(Stream fileStream, IStoreReader storeReader, IStoreWriter storeWriter)
{
    // Validate arguments up front: Read() below would otherwise fail with a
    // less helpful NullReferenceException.
    if (fileStream == null)
        throw new ArgumentNullException(nameof(fileStream));
    if (storeReader == null)
        throw new ArgumentNullException(nameof(storeReader));
    if (storeWriter == null)
        throw new ArgumentNullException(nameof(storeWriter));

    _fileStream = fileStream;
    _storeReader = storeReader;
    _storeWriter = storeWriter;

    // Populate the in-memory store from the stream immediately.
    Read();
}
/// <summary>
/// Applies the high-speed option to the given writer, if the writer supports it.
/// </summary>
/// <param name="writer">Writer to configure; writers that do not implement <see cref="IHighSpeedWriter"/> are left untouched.</param>
public void Apply(IStoreWriter writer)
{
    // Pattern match replaces the is-check followed by a second cast.
    if (writer is IHighSpeedWriter highSpeedWriter)
    {
        highSpeedWriter.HighSpeedModePermitted = this._hiSpeedAllowed;
    }
}
/// <summary>
/// Default constructor: forces configuration initialization and resolves the store writer.
/// </summary>
protected StoreManager()
{
    // Touching the singleton forces the Configuration Manager to initialize.
    ConfigurationManager configuration = ConfigurationManager.Instance;

    // Resolve the writer through the factory rather than constructing it directly.
    storeWriter = Factory<IStoreWriter>.Instance.GetNewObject();
}
/// <summary>
/// Applies the pretty-printing option to the given writer, if the writer supports it.
/// </summary>
/// <param name="writer">Writer to configure; writers that do not implement <see cref="IPrettyPrintingWriter"/> are left untouched.</param>
public void Apply(IStoreWriter writer)
{
    // Pattern match replaces the is-check followed by a second cast.
    if (writer is IPrettyPrintingWriter prettyPrintingWriter)
    {
        prettyPrintingWriter.PrettyPrintMode = this._prettyPrint;
    }
}
/// <summary>
/// Writes the given Triple Store to a String and returns the output in your chosen concrete RDF dataset syntax
/// </summary>
/// <param name="store">Triple Store</param>
/// <param name="writer">Writer to use to generate concrete RDF Syntax</param>
/// <returns>The serialized dataset as a string.</returns>
public static String Write(ITripleStore store, IStoreWriter writer)
{
    // Dispose the StringWriter deterministically; ToString() remains valid
    // after disposal since the backing StringBuilder is unaffected.
    using (System.IO.StringWriter sw = new System.IO.StringWriter())
    {
        writer.Save(store, sw);
        return sw.ToString();
    }
}
/// <summary>
/// Applies the stylesheet option to the given writer, if the writer supports it.
/// </summary>
/// <param name="writer">Writer to configure; writers that do not implement <see cref="IHtmlWriter"/> are left untouched.</param>
public void Apply(IStoreWriter writer)
{
    // Pattern match replaces the is-check followed by a second cast.
    if (writer is IHtmlWriter htmlWriter)
    {
        htmlWriter.Stylesheet = this._stylesheet;
    }
}
/// <summary>
/// Applies the compression-level option to the given writer, if the writer supports it.
/// </summary>
/// <param name="writer">Writer to configure; writers that do not implement <see cref="ICompressingWriter"/> are left untouched.</param>
public void Apply(IStoreWriter writer)
{
    // Pattern match replaces the is-check followed by a second cast.
    if (writer is ICompressingWriter compressingWriter)
    {
        compressingWriter.CompressionLevel = this._compressionLevel;
    }
}
/// <summary>
/// Writes the given Triple Store to a String and returns the output in your chosen concrete RDF dataset syntax
/// </summary>
/// <param name="store">Triple Store</param>
/// <param name="writer">Writer to use to generate concrete RDF Syntax</param>
/// <returns>The serialized dataset as a string.</returns>
public static String Write(ITripleStore store, IStoreWriter writer)
{
    // Dispose the StringWriter deterministically; ToString() remains valid
    // after disposal since the backing StringBuilder is unaffected.
    using (System.IO.StringWriter sw = new System.IO.StringWriter())
    {
        writer.Save(store, new TextWriterParams(sw));
        return sw.ToString();
    }
}
/// <summary>
/// Creates a handler which, once RDF handling completes, saves the collected
/// Triple Store through the supplied dataset writer to the supplied text writer.
/// </summary>
/// <param name="writer">Dataset writer used for serialization (required).</param>
/// <param name="textWriter">Destination text writer (required).</param>
public SaveStoreOnCompletionHandler(IStoreWriter writer, TextWriter textWriter)
    : base(new TripleStore())
{
    if (writer == null)
    {
        throw new ArgumentNullException("writer", "Must specify a RDF Dataset Writer to use when the Handler completes RDF handling");
    }
    if (textWriter == null)
    {
        throw new ArgumentNullException("textWriter", "Cannot save RDF to a null TextWriter");
    }

    this._writer = writer;
    this._textWriter = textWriter;
}
/// <summary>
/// Creates a handler which, once RDF handling completes, saves the collected
/// Triple Store through the supplied dataset writer to the named file.
/// </summary>
/// <param name="writer">Dataset writer used for serialization (required).</param>
/// <param name="file">Destination file path (required).</param>
public SaveStoreOnCompletionHandler(IStoreWriter writer, String file)
    : base(new TripleStore())
{
    if (writer == null)
    {
        throw new ArgumentNullException("writer", "Must specify a RDF Dataset Writer to use when the Handler completes RDF handling");
    }
    if (file == null)
    {
        throw new ArgumentNullException("file", "Cannot save RDF to a null file");
    }

    this._writer = writer;
    this._file = file;
}
/// <summary>
/// Saves a Triple Store to a file, choosing the dataset writer from the file extension.
/// </summary>
/// <param name="store">Triple Store to save.</param>
/// <param name="fileName">Target file; its extension determines the serialization format.</param>
public static void SaveToFile(this ITripleStore store, string fileName)
{
    // Resolve the writer BEFORE opening the output stream: previously the
    // StreamWriter created/truncated the file first, so an unrecognised
    // extension destroyed any existing file content before throwing.
    IStoreWriter writer = MimeTypesHelper.GetStoreWriterByFileExtension(MimeTypesHelper.GetTrueFileExtension(fileName));
    using (var output = new StreamWriter(fileName))
    {
        writer.Save(store, output);
    }
}
/// <summary>
/// Saves a Triple Store to the named file using the given dataset writer.
/// </summary>
/// <param name="writer">Dataset writer used for serialization.</param>
/// <param name="store">Triple Store to save.</param>
/// <param name="filename">Target file path.</param>
public static void Save(this IStoreWriter writer, ITripleStore store, string filename)
{
    // Disposal via the using block already flushes and closes the stream,
    // so the explicit Flush()/Close() calls were redundant and removed.
    using (var output = new StreamWriter(filename))
    {
        writer.Save(store, output);
    }
}
/// <summary>
/// Creates a new GZipped dataset writer wrapping the given underlying writer.
/// </summary>
/// <param name="writer">Underlying writer (required).</param>
public BaseGZipDatasetWriter(IStoreWriter writer)
{
    if (writer == null) throw new ArgumentNullException(nameof(writer));

    this._writer = writer;
    // Forward warnings raised by the inner writer through this writer's event.
    this._writer.Warning += this.RaiseWarning;
}
/// <summary>Creates a new instance of the file triple store.</summary>
/// <param name="filePath">Path of the file to read/write.</param>
/// <param name="storeReader">Store reader used to read the file.</param>
/// <param name="storeWriter">Store writer to write the file.</param>
public FileTripleStore(string filePath, IStoreReader storeReader, IStoreWriter storeWriter)
{
    // Record the path, then make sure the backing file exists before the
    // initial read (the embedded assignment in the original condition is
    // split out for readability).
    _filePath = filePath;
    if (!File.Exists(_filePath))
    {
        File.Create(_filePath).Close();
    }

    _storeReader = storeReader;
    _storeWriter = storeWriter;
    Read();
}
/// <summary>
/// Test fixture setup: wires an AutoFakeItEasy fixture, a mocked store writer
/// and a fresh unit of work under test.
/// </summary>
public void SetUp()
{
    // Record of operations in the order the store receives them.
    _orderedOperations = new List<KeyValuePair<TestEntity, Operation>>();

    // AutoFixture with FakeItEasy auto-mocking.
    _fixture = new Fixture().Customize(new AutoFakeItEasyCustomization());
    _storeWriter = MockStoreWriter();

    // System under test, with the mocked store registered for TestEntity.
    _sut = _fixture.Freeze<BestEffortUnitOfWork>();
    _sut.RegisterStoreFor<TestEntity>(_storeWriter);

    _rawEntity = new TestEntity();
}
/// <summary>Creates a new instance of the file triple store.</summary>
/// <param name="filePath">Path of the file to read/write.</param>
/// <param name="storeReader">Store reader used to read the file.</param>
/// <param name="storeWriter">Store writer to write the file.</param>
public FileTripleStore(string filePath, IStoreReader storeReader, IStoreWriter storeWriter)
{
    // Normalize to an absolute path, then make sure the backing file exists
    // (the embedded assignment in the original condition is split out).
    _filePath = EnsureAbsolute(filePath);
    if (!File.Exists(_filePath))
    {
        File.Create(_filePath).Close();
    }

    // Watch the file for external changes.
    _watcher = CreateFileHooks(_filePath);

    _storeReader = storeReader;
    _storeWriter = storeWriter;
    Read();
}
/// <summary>
/// Creates a handler which saves the completed Triple Store to the named file
/// using the supplied dataset writer.
/// </summary>
/// <param name="writer">Dataset writer used for serialization (required).</param>
/// <param name="file">Destination file path (required).</param>
public SaveStoreOnCompletionHandler(IStoreWriter writer, String file) : base(new TripleStore())
{
    // Single-line guard clauses; messages unchanged.
    if (writer == null) throw new ArgumentNullException("writer", "Must specify a RDF Dataset Writer to use when the Handler completes RDF handling");
    if (file == null) throw new ArgumentNullException("file", "Cannot save RDF to a null file");

    this._writer = writer;
    this._file = file;
}
/// <summary>
/// Creates a handler which saves the completed Triple Store to the supplied
/// text writer using the supplied dataset writer.
/// </summary>
/// <param name="writer">Dataset writer used for serialization (required).</param>
/// <param name="textWriter">Destination text writer (required).</param>
public SaveStoreOnCompletionHandler(IStoreWriter writer, TextWriter textWriter) : base(new TripleStore())
{
    // Single-line guard clauses; messages unchanged.
    if (writer == null) throw new ArgumentNullException("writer", "Must specify a RDF Dataset Writer to use when the Handler completes RDF handling");
    if (textWriter == null) throw new ArgumentNullException("textWriter", "Cannot save RDF to a null TextWriter");

    this._writer = writer;
    this._textWriter = textWriter;
}
// Constructor: all collaborators are required.
/// <summary>
/// Creates a message store over the given writer, reader and file locator.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public MessageStore(IStoreWriter writer, IStoreReader reader, IFileLocator fileLocator)
{
    // BUG FIX: the original passed "Writer"/"Reader"/"FileLocator" as the
    // ArgumentNullException paramName, which does not match the actual
    // (camelCase) parameter names. nameof keeps them in sync.
    if (writer == null)
        throw new ArgumentNullException(nameof(writer));
    if (reader == null)
        throw new ArgumentNullException(nameof(reader));
    if (fileLocator == null)
        throw new ArgumentNullException(nameof(fileLocator));

    this.fileLocator = fileLocator;
    this.writer = writer;
    this.reader = reader;
}
/// <summary>
/// Creates the reader/writer pair matching a file extension. Quad formats
/// (.nq, .trig, .trix) populate the store reader/writer; triple formats
/// (.ttl, .xml, .n3, .json) populate the RDF reader/writer.
/// </summary>
/// <param name="extension">File extension including the leading dot.</param>
/// <exception cref="ArgumentOutOfRangeException">The extension is not a recognized RDF serialization.</exception>
private void CreateIOHandlers(string extension)
{
    switch (extension)
    {
        case ".nq":
            _storeReader = new NQuadsParser();
            _storeWriter = new NQuadsWriter();
            break;
        case ".ttl":
            _rdfReader = new TurtleParser();
            _rdfWriter = new CompressingTurtleWriter();
            break;
        case ".trig":
            _storeReader = new TriGParser();
            // -1 requests the writer's default compression behaviour.
            _storeWriter = new TriGWriter() { CompressionLevel = -1 };
            break;
        case ".xml":
            _rdfReader = new RdfXmlParser();
            _rdfWriter = new RdfXmlWriter();
            break;
        case ".n3":
            _rdfReader = new Notation3Parser();
            _rdfWriter = new Notation3Writer();
            break;
        case ".trix":
            _storeReader = new TriXParser();
            _storeWriter = new TriXWriter();
            break;
        case ".json":
            _rdfReader = new RdfJsonParser();
            _rdfWriter = new RdfJsonWriter();
            break;
        default:
            // BUG FIX: the single-string ArgumentOutOfRangeException constructor
            // treats its argument as the PARAMETER NAME, not the message, and the
            // String.Format call had no format arguments. Use the
            // (paramName, actualValue, message) overload instead.
            throw new ArgumentOutOfRangeException(
                nameof(extension),
                extension,
                "Provided file path does not allow to detect a type of the RDF serialization type.");
    }
}
/// <summary>
/// Creates a JSON-backed repository for command application configurations.
/// </summary>
/// <param name="fileStoreWriter">Writer used to persist configurations (required).</param>
/// <param name="fileStoreReader">Reader used to load configurations (required).</param>
public JsonCmdApplicationConfigurationRepository(
    IStoreWriter<CmdApplicationConfiguration> fileStoreWriter,
    IStoreReader<CmdApplicationConfiguration> fileStoreReader)
{
    // Null checks folded into the assignments via throw expressions;
    // the check order (writer then reader) is unchanged.
    this.FileStoreWriter = fileStoreWriter ?? throw new ArgumentNullException(nameof(fileStoreWriter));
    this.FileStoreReader = fileStoreReader ?? throw new ArgumentNullException(nameof(fileStoreReader));

    // Serializer configured for polymorphic round-tripping of Name values.
    this.serializer = new JsonSerializer();
    this.serializer.Converters.Add(new NameJsonConverter());
    this.serializer.TypeNameHandling = TypeNameHandling.Auto;
}
/// <summary>
/// Round-trip test helper: builds a three-graph store (configuration resource,
/// InferenceTest.ttl, cyrillic.rdf), serializes it with the given dataset writer
/// (applying compression / multi-threading options where the writer supports them),
/// then parses the output back and asserts every graph survived intact.
/// Multi-threaded writing is skipped on .NET Core 2.0 via the #if guard.
/// </summary>
/// <param name="writer">Dataset writer under test.</param>
/// <param name="reader">Matching dataset parser used to read the output back.</param>
/// <param name="useMultiThreaded">Whether to enable multi-threaded writing, if supported.</param>
/// <param name="compressionLevel">Compression level to apply, if supported.</param>
private void TestWriter(IStoreWriter writer, IStoreReader reader, bool useMultiThreaded, int compressionLevel) { TripleStore store = new TripleStore(); Graph g = new Graph(); g.LoadFromEmbeddedResource("VDS.RDF.Configuration.configuration.ttl"); g.BaseUri = null; store.Add(g); g = new Graph(); g.LoadFromFile("resources\\InferenceTest.ttl"); g.BaseUri = new Uri("http://example.org/graph"); store.Add(g); g = new Graph(); g.LoadFromFile(@"resources\cyrillic.rdf"); g.BaseUri = new Uri("http://example.org/cyrillic"); store.Add(g); if (writer is ICompressingWriter) { ((ICompressingWriter)writer).CompressionLevel = compressionLevel; } #if !NETCOREAPP2_0 if (writer is IMultiThreadedWriter) { ((IMultiThreadedWriter)writer).UseMultiThreadedWriting = useMultiThreaded; } #endif System.IO.StringWriter strWriter = new System.IO.StringWriter(); writer.Save(store, strWriter); Console.WriteLine(strWriter.ToString()); Assert.NotEqual(strWriter.ToString(), String.Empty); TripleStore store2 = new TripleStore(); reader.Load(store2, new System.IO.StringReader(strWriter.ToString())); foreach (IGraph graph in store.Graphs) { Assert.True(store2.HasGraph(graph.BaseUri), "Parsed Stored should have contained serialized graph"); Assert.Equal(graph, store2[graph.BaseUri]); } }
/// <summary>
/// Creates a message store over the given file locator, writer and reader.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public MessageStore(IFileLocator fileLocator, IStoreWriter writer, IStoreReader reader)
{
    // Null checks folded into the assignments via throw expressions;
    // check order (fileLocator, writer, reader) matches the original guards.
    this.fileLocator = fileLocator ?? throw new ArgumentNullException(nameof(fileLocator));
    this.writer = writer ?? throw new ArgumentNullException(nameof(writer));
    this.reader = reader ?? throw new ArgumentNullException(nameof(reader));
}
/// <summary>
/// Creates a JSON-backed repository for command application configurations.
/// </summary>
/// <param name="fileStoreWriter">Writer used to persist configurations (required).</param>
/// <param name="fileStoreReader">Reader used to load configurations (required).</param>
public JsonCmdApplicationConfigurationRepository(
    IStoreWriter<CmdApplicationConfiguration> fileStoreWriter,
    IStoreReader<CmdApplicationConfiguration> fileStoreReader)
{
    // Guard clauses: both collaborators are mandatory.
    if (fileStoreWriter == null) throw new ArgumentNullException(nameof(fileStoreWriter));
    if (fileStoreReader == null) throw new ArgumentNullException(nameof(fileStoreReader));

    this.FileStoreWriter = fileStoreWriter;
    this.FileStoreReader = fileStoreReader;

    // Serializer configured for polymorphic round-tripping of Name values.
    this.serializer = new JsonSerializer();
    this.serializer.Converters.Add(new NameJsonConverter());
    this.serializer.TypeNameHandling = TypeNameHandling.Auto;
}
/// <summary>
/// Creates a message store over the given writer, reader and file locator.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public MessageStore(IStoreWriter writer, IStoreReader reader, IFileLocator fileLocator)
{
    // Protecting invariance with guard clauses.
    // BUG FIX: the original reported "fileLocator" as the parameter name for
    // ALL THREE checks; each guard now names the parameter it actually tests.
    if (writer == null)
    {
        throw new ArgumentNullException(nameof(writer));
    }
    if (reader == null)
    {
        throw new ArgumentNullException(nameof(reader));
    }
    if (fileLocator == null)
    {
        throw new ArgumentNullException(nameof(fileLocator));
    }

    this.fileLocator = fileLocator;
    this.writer = writer;
    this.reader = reader;
}
/// <summary>
/// Dependency-injection constructor: stores every collaborator (logger, unit-of-work
/// properties for context/connection/transaction, a second context, and the various
/// reader/writer/storage abstractions for Student, CourseTest and Log entities),
/// rejecting nulls via throw expressions. No other work is performed here.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public StudentController( ILogger <StudentController> logger, UnitOfWorkProperty <SchoolContext> context, UnitOfWorkProperty <DbConnection> connection, UnitOfWorkProperty <DbTransaction> transaction, SchoolContext context2, IStoreReader <Student> readerStore, ICountableStorage <Student> countableStorage, ICountableStorage <SchoolContext, Student> countableStorage2, IAcquirableStorage <Student> acquirableStorage, IAcquirableStorage <SchoolContext, Student> acquirableStorage2, IStoreWriter <SchoolContext, Student> writerStore, IStoreBulkWriter <Student> bulkWriterStore, IStoreReader <CourseTest> storeCourseTest, ILazy <ICreatableStorage <Log> > creatableStorage) { _logger = logger ?? throw new ArgumentNullException(nameof(logger)); _context = context ?? throw new ArgumentNullException(nameof(context)); _connection = connection ?? throw new ArgumentNullException(nameof(connection)); _transaction = transaction ?? throw new ArgumentNullException(nameof(transaction)); _context2 = context2 ?? throw new ArgumentNullException(nameof(context2)); _readerStore = readerStore ?? throw new ArgumentNullException(nameof(readerStore)); _countableStorage = countableStorage ?? throw new ArgumentNullException(nameof(countableStorage)); _countableStorage2 = countableStorage2 ?? throw new ArgumentNullException(nameof(countableStorage2)); _acquirableStorage = acquirableStorage ?? throw new ArgumentNullException(nameof(acquirableStorage)); _acquirableStorage2 = acquirableStorage2 ?? throw new ArgumentNullException(nameof(acquirableStorage2)); _writerStore = writerStore ?? throw new ArgumentNullException(nameof(writerStore)); _bulkWriterStore = bulkWriterStore ?? throw new ArgumentNullException(nameof(bulkWriterStore)); _storeCourseTest = storeCourseTest ?? throw new ArgumentNullException(nameof(storeCourseTest)); _creatableStorage = creatableStorage ?? throw new ArgumentNullException(nameof(creatableStorage)); }
/// <summary>
/// Round-trip test helper (NUnit variant): builds a two-graph store (configuration
/// resource plus InferenceTest.ttl), serializes it with the given dataset writer
/// (applying compression / multi-threading options where supported), then parses
/// the output back and asserts every graph survived intact.
/// </summary>
/// <param name="writer">Dataset writer under test.</param>
/// <param name="reader">Matching dataset parser used to read the output back.</param>
/// <param name="useMultiThreaded">Whether to enable multi-threaded writing, if supported.</param>
/// <param name="compressionLevel">Compression level to apply, if supported.</param>
private void TestWriter(IStoreWriter writer, IStoreReader reader, bool useMultiThreaded, int compressionLevel) { TripleStore store = new TripleStore(); Graph g = new Graph(); g.LoadFromEmbeddedResource("VDS.RDF.Configuration.configuration.ttl"); g.BaseUri = null; store.Add(g); g = new Graph(); g.LoadFromFile("InferenceTest.ttl"); g.BaseUri = new Uri("http://example.org/graph"); store.Add(g); if (writer is ICompressingWriter) { ((ICompressingWriter)writer).CompressionLevel = compressionLevel; } if (writer is IMultiThreadedWriter) { ((IMultiThreadedWriter)writer).UseMultiThreadedWriting = useMultiThreaded; } System.IO.StringWriter strWriter = new System.IO.StringWriter(); writer.Save(store, strWriter); Console.WriteLine(strWriter.ToString()); Assert.IsFalse(strWriter.ToString().Equals(String.Empty)); TripleStore store2 = new TripleStore(); reader.Load(store2, new System.IO.StringReader(strWriter.ToString())); foreach (IGraph graph in store.Graphs) { Assert.IsTrue(store2.HasGraph(graph.BaseUri), "Parsed Stored should have contained serialized graph"); Assert.AreEqual(graph, store2[graph.BaseUri], "Parsed Graph should be equal to original graph"); } }
/// <summary>
/// HTTP handler entry point: expands the URI given by the "uri" query parameter
/// (optionally with a custom expansion profile from the "profile" parameter) via
/// the cached ExpansionLoader, then serializes the resulting store to the client
/// using a dataset writer negotiated from the request's Accept types. Errors are
/// routed to HandleErrors with a category string.
/// NOTE(review): the error-message string literal below is split across two source
/// lines in this file — preserved verbatim here; confirm against the original source.
/// </summary>
/// <param name="context">HTTP context of the current request.</param>
public void ProcessRequest(HttpContext context) { //Turn on Response Buffering context.Response.Buffer = true; //Prepare the Cache Directories if (!Path.IsPathRooted(this._cacheDir)) { this._cacheDir = context.Server.MapPath(this._cacheDir); } if (this._loader == null) { this._loader = new ExpansionLoader(this._cacheDir); } //Add our Custom Headers try { context.Response.Headers.Add("X-dotNetRDF-Version", Assembly.GetAssembly(typeof(VDS.RDF.IGraph)).GetName().Version.ToString()); } catch (PlatformNotSupportedException) { context.Response.AddHeader("X-dotNetRDF-Version", Assembly.GetAssembly(typeof(VDS.RDF.IGraph)).GetName().Version.ToString()); } try { //Retrieve the desired URI and Profile URI from Querystring parameters String uri = context.Request.QueryString["uri"]; String profile = context.Request.QueryString["profile"]; if (uri == null) { if (context.Request.Url.Query.Equals(String.Empty)) { throw new ArgumentNullException("uri", "Required uri parameter used to designate the URI you wish to expand was not found. Your request must use a URI of the form " + context.Request.Url.ToString() + "?uri=" + Uri.EscapeDataString("http://example.org")); } else { throw new ArgumentNullException("uri", "Required uri parameter used to designate the URI you wish to expand was not found. 
Your request must use a URI of the form " + context.Request.Url.ToString().Replace(context.Request.Url.Query, String.Empty) + "?uri=" + Uri.EscapeDataString("http://example.org")); } } //Note that the ExpansionLoader class automatically handles all the Caching for us IInMemoryQueryableStore store; String uriHash = new Uri(uri).GetSha256Hash(); if (profile == null) { //Use Default Profile store = this._loader.Load(new Uri(uri)); } else { //Use Custom Profile store = this._loader.Load(new Uri(uri), new Uri(profile)); } String ctype; IStoreWriter writer = MimeTypesHelper.GetStoreWriter(context.Request.AcceptTypes, out ctype); context.Response.ContentType = ctype; writer.Save(store, new StreamParams(context.Response.OutputStream)); } catch (ArgumentNullException argNull) { HandleErrors(context, "Missing Argument", argNull); } catch (RdfParseException parseEx) { HandleErrors(context, "RDF Parser Error", parseEx); } catch (RdfException rdfEx) { HandleErrors(context, "RDF Error", rdfEx); } catch (Exception ex) { HandleErrors(context, "Error", ex); } }
/// <summary>
/// Creates a caching decorator over the supplied store writer/reader pair.
/// </summary>
/// <param name="writer">Underlying store writer.</param>
/// <param name="reader">Underlying store reader.</param>
public StoreCache(IStoreWriter writer, IStoreReader reader)
{
    // Backing cache: id -> possibly-absent value.
    this.cache = new ConcurrentDictionary<int, Maybe<string>>();

    this.writer = writer;
    this.reader = reader;
}
/// <summary>
/// Creates a new GZipped dataset writer wrapping the given underlying writer.
/// </summary>
/// <param name="writer">Underlying writer (required).</param>
public BaseGZipDatasetWriter(IStoreWriter writer)
{
    if (writer == null)
    {
        throw new ArgumentNullException("writer");
    }

    this._writer = writer;
    // Forward warnings raised by the inner writer through this writer's event.
    this._writer.Warning += this.RaiseWarning;
}
/// <summary>
/// Convenience overload: round-trips with the default compression level.
/// </summary>
private void TestWriter(IStoreWriter writer, IStoreReader reader, bool useMultiThreaded) =>
    this.TestWriter(writer, reader, useMultiThreaded, Options.DefaultCompressionLevel);
/// <summary>
/// GZip test fixture setup: loads the configuration graph and a sample SPARQL
/// result set, then for every registered MIME type definition (except the lossy
/// CSV format) writes gzip-compressed test files. Definitions whose canonical
/// extension already ends in ".gz" are written directly ("auto" files); the rest
/// are wrapped in a GZipStream manually. File names are collected into the
/// manual/auto lists for graphs, datasets and SPARQL results respectively.
/// NOTE(review): a ternary expression below is split across two source lines in
/// this file — preserved verbatim; confirm against the original source.
/// </summary>
public void Setup() { this._g = new Graph(); this._g.LoadFromEmbeddedResource("VDS.RDF.Configuration.configuration.ttl"); this._results = this._g.ExecuteQuery("SELECT * WHERE { ?s ?p ?o }") as SparqlResultSet; foreach (MimeTypeDefinition def in MimeTypesHelper.Definitions) { // Omit CSV since that is a lossy format that does not round trip if (def.CanonicalMimeType.Equals("text/csv")) { continue; } if (def.CanWriteRdf && def.CanParseRdf) { IRdfWriter writer = def.GetRdfWriter(); bool isManual = !def.CanonicalFileExtension.EndsWith(".gz"); String filename = "gzip-tests" + (isManual ? String.Empty : "-auto") + "." + def.CanonicalFileExtension + (isManual ? ".gz" : String.Empty); if (isManual) { using (StreamWriter output = new StreamWriter(new GZipStream(new FileStream(filename, FileMode.Create, FileAccess.Write), CompressionMode.Compress))) { writer.Save(this._g, output); output.Close(); } this._manualTestFiles.Add(filename); } else { writer.Save(this._g, filename); this._autoTestFiles.Add(filename); } } else if (def.CanParseRdfDatasets && def.CanWriteRdfDatasets) { IStoreWriter writer = def.GetRdfDatasetWriter(); bool isManual = !def.CanonicalFileExtension.EndsWith(".gz"); String filename = "gzip-tests-datasets" + (isManual ? String.Empty : "-auto") + "." + def.CanonicalFileExtension + (isManual ? 
".gz" : String.Empty); TripleStore store = new TripleStore(); store.Add(this._g); if (isManual) { using (Stream output = new GZipStream(new FileStream(filename, FileMode.Create, FileAccess.Write), CompressionMode.Compress)) { writer.Save(store, new StreamWriter(output)); output.Close(); } this._manualDatasetTestFiles.Add(filename); } else { writer.Save(store, new StreamWriter(new FileStream(filename, FileMode.Create, FileAccess.Write))); this._autoDatasetTestFiles.Add(filename); } } else if (def.CanParseSparqlResults && def.CanWriteSparqlResults) { ISparqlResultsWriter writer = def.GetSparqlResultsWriter(); bool isManual = !def.CanonicalFileExtension.EndsWith(".gz"); String filename = "gzip-tests-results" + (isManual ? String.Empty : "-auto") + "." + def.CanonicalFileExtension + (isManual ? ".gz" : String.Empty); if (isManual) { using (StreamWriter output = new StreamWriter(new GZipStream(new FileStream(filename, FileMode.Create, FileAccess.Write), CompressionMode.Compress))) { writer.Save(this._results, output); output.Close(); } this._manualResultsTestFiles.Add(filename); } else { writer.Save(this._results, new StreamWriter(filename)); this._autoResultsTestFiles.Add(filename); } } } }
// NOTE(review): content-negotiation helper. Dispatches on the runtime type of
// `result`: SparqlResultSet -> SPARQL results writer (falling back to SPARQL
// Results XML, constructing a temporary MIME definition if necessary);
// IGraph -> RDF writer; ITripleStore -> dataset writer (via StreamParams);
// ISparqlDataset -> wrapped in a TripleStore and recursively re-dispatched.
// Any other type throws RdfOutputException. In each branch the response is
// cleared, content type/encoding set from the negotiated definition, and
// writer options applied from the handler configuration. The logic is long but
// order-sensitive (definition lookup before fallback, ContentType before
// Clear/write), so it is documented rather than restructured here.
public static void SendToClient(HttpContext context, Object result, BaseHandlerConfiguration config) { MimeTypeDefinition definition = null; String ctype = "text/plain"; String[] acceptTypes = HandlerHelper.GetAcceptTypes(context); //Return the Results if (result is SparqlResultSet) { ISparqlResultsWriter sparqlWriter = null; //Try and get a MIME Type Definition using the HTTP Requests Accept Header if (acceptTypes != null) { definition = MimeTypesHelper.GetDefinitions(acceptTypes).FirstOrDefault(d => d.CanWriteSparqlResults); } //Try and get the registered Definition for SPARQL Results XML if (definition == null) { definition = MimeTypesHelper.GetDefinitions(MimeTypesHelper.SparqlXml[0]).FirstOrDefault(); } //If Definition is still null create a temporary definition if (definition == null) { definition = new MimeTypeDefinition("SPARQL Results XML", MimeTypesHelper.SparqlXml, Enumerable.Empty <String>()); definition.SparqlResultsWriterType = typeof(VDS.RDF.Writing.SparqlXmlWriter); } //Set up the Writer appropriately sparqlWriter = definition.GetSparqlResultsWriter(); context.Response.ContentType = definition.CanonicalMimeType; HandlerHelper.ApplyWriterOptions(sparqlWriter, config); //Clear any existing Response context.Response.Clear(); //Send Result Set to Client context.Response.ContentEncoding = definition.Encoding; sparqlWriter.Save((SparqlResultSet)result, new StreamWriter(context.Response.OutputStream, definition.Encoding)); } else if (result is IGraph) { IRdfWriter rdfWriter = null; //Try and get a MIME Type Definition using the HTTP Requests Accept Header if (acceptTypes != null) { definition = 
MimeTypesHelper.GetDefinitions(acceptTypes).FirstOrDefault(d => d.CanWriteRdf); } if (definition == null) { //If no appropriate definition then use the GetWriter method instead rdfWriter = MimeTypesHelper.GetWriter(acceptTypes, out ctype); } else { rdfWriter = definition.GetRdfWriter(); } //Setup the writer if (definition != null) { ctype = definition.CanonicalMimeType; } context.Response.ContentType = ctype; HandlerHelper.ApplyWriterOptions(rdfWriter, config); //Clear any existing Response context.Response.Clear(); //Send Graph to Client if (definition != null) { context.Response.ContentEncoding = definition.Encoding; rdfWriter.Save((IGraph)result, new StreamWriter(context.Response.OutputStream, definition.Encoding)); } else { rdfWriter.Save((IGraph)result, new StreamWriter(context.Response.OutputStream)); } } else if (result is ITripleStore) { IStoreWriter storeWriter = null; //Try and get a MIME Type Definition using the HTTP Requests Accept Header if (acceptTypes != null) { definition = MimeTypesHelper.GetDefinitions(acceptTypes).FirstOrDefault(d => d.CanWriteRdfDatasets); } if (definition == null) { //If no appropriate definition then use the GetStoreWriter method instead storeWriter = MimeTypesHelper.GetStoreWriter(acceptTypes, out ctype); } else { storeWriter = definition.GetRdfDatasetWriter(); } //Setup the writer if (definition != null) { ctype = definition.CanonicalMimeType; } context.Response.ContentType = ctype; HandlerHelper.ApplyWriterOptions(storeWriter, config); //Clear any existing Response context.Response.Clear(); //Send Triple Store to Client if (definition != null) { context.Response.ContentEncoding = definition.Encoding; storeWriter.Save((ITripleStore)result, new VDS.RDF.Storage.Params.StreamParams(context.Response.OutputStream, definition.Encoding)); } else { storeWriter.Save((ITripleStore)result, new VDS.RDF.Storage.Params.StreamParams(context.Response.OutputStream)); } } else if (result is ISparqlDataset) { //Wrap in a Triple Store and then 
call self so the Triple Store writing branch of this if gets called instead TripleStore store = new TripleStore(new DatasetGraphCollection((ISparqlDataset)result)); HandlerHelper.SendToClient(context, store, config); } else { throw new RdfOutputException("Unexpected Result Object of Type '" + result.GetType().ToString() + "' returned - unable to write Objects of this Type to the HTTP Response"); } }
// NOTE(review): command-line option parser for rdfConvert. Recognizes -uri:,
// -hs / -pp / -c (with optional :value), -stylesheet:, -merge, -overwrite,
// -dataset, -out:/-output:, -outformat:, -outext:, -debug, -help, -nocache,
// -nobom and -warnings; anything else is treated as an input file. Resolves
// graph and/or store writers from the output file extension or explicit format,
// then validates that at least one input and one writer were configured, and
// finally applies accumulated conversion options to the chosen writers.
// NOTE(review): in the -outformat: branch the condition
// (!format.Equals("nt") || !format.Equals(".nt") || !format.Equals("text/plain"))
// is always true (a value cannot equal all three strings), so the NTriples
// writer is unconditionally discarded there — almost certainly should use &&
// (De Morgan error). Left as-is pending confirmation of intended behavior.
private bool SetOptions(String[] args) { if (args.Length == 0 || (args.Length == 1 && args[0].Equals("-help"))) { this.ShowUsage(); return false; } //Look through the arguments to see what we've been asked to do foreach (String arg in args) { if (arg.StartsWith("-uri:")) { this._inputs.Add(arg); } else if (arg.StartsWith("-hs")) { if (arg.Contains(':')) { bool hs; if (Boolean.TryParse(arg.Substring(arg.IndexOf(':') + 1), out hs)) { this._options.Add(new HighSpeedOption(hs)); } else { this._options.Add(new HighSpeedOption(true)); } } else { this._options.Add(new HighSpeedOption(true)); } } else if (arg.StartsWith("-pp")) { if (arg.Contains(':')) { bool pp; if (Boolean.TryParse(arg.Substring(arg.IndexOf(':') + 1), out pp)) { this._options.Add(new PrettyPrintingOption(pp)); } else { this._options.Add(new PrettyPrintingOption(true)); } } else { this._options.Add(new PrettyPrintingOption(true)); } } else if (arg.StartsWith("-c")) { if (arg.Contains(':')) { int c; if (Int32.TryParse(arg.Substring(arg.IndexOf(':') + 1), out c)) { this._options.Add(new CompressionLevelOption(c)); } else { this._options.Add(new CompressionLevelOption(WriterCompressionLevel.Default)); } } else { this._options.Add(new CompressionLevelOption(WriterCompressionLevel.Default)); } } else if (arg.StartsWith("-stylesheet:")) { String stylesheet = arg.Substring(arg.IndexOf(':') + 1); this._options.Add(new StylesheetOption(stylesheet)); } else if (arg.Equals("-merge")) { this._merge = true; } else if (arg.Equals("-overwrite")) { this._overwrite = true; } else if (arg.Equals("-dataset")) { this._dataset = true; this._merge = true; } else if (arg.StartsWith("-out:") || arg.StartsWith("-output:")) { this._output = arg.Substring(arg.IndexOf(':') + 1); //If the Writers have not been set then we'll set them now if (this._writer == null && this._storeWriter == null) { String format; try { format = MimeTypesHelper.GetMimeType(Path.GetExtension(this._output)); } catch (RdfException) { 
Console.Error.WriteLine("rdfConvert: The File Extension '" + Path.GetExtension(this._output) + "' is not permissible since dotNetRDF cannot infer a MIME type from the extension"); return false; } try { this._writer = MimeTypesHelper.GetWriter(format); } catch (RdfException) { //Supress this error } try { this._storeWriter = MimeTypesHelper.GetStoreWriter(format); if (this._writer == null) { this._merge = true; } else if (this._writer is NTriplesWriter && !Path.GetExtension(this._output).Equals(".nt")) { this._writer = null; this._merge = true; } } catch (RdfException) { //Suppress this error } if (this._writer == null && this._storeWriter == null) { Console.Error.WriteLine("rdfConvert: The MIME Type '" + format + "' is not permissible since dotNetRDF does not support outputting in that format"); return false; } } } else if (arg.StartsWith("-outformat:")) { String format = arg.Substring(arg.IndexOf(':') + 1); if (!format.Contains("/")) { try { format = MimeTypesHelper.GetMimeType(format); } catch (RdfException) { Console.Error.WriteLine("rdfConvert: The File Extension '" + format + "' is not permissible since dotNetRDF cannot infer a MIME type from the extension"); return false; } } //Validate the MIME Type if (!IsValidMimeType(format)) { Console.Error.WriteLine("rdfConvert: The MIME Type '" + format + "' is not permissible since dotNetRDF does not support outputting in that format"); return false; } try { this._writer = MimeTypesHelper.GetWriter(format); this._outExt = MimeTypesHelper.GetFileExtension(this._writer); } catch (RdfException) { //Supress this error } try { this._storeWriter = MimeTypesHelper.GetStoreWriter(format); if (this._writer == null) { //In the event that we can't get a valid Writer then individual graphs //will be put into a Store and output as a Dataset this._merge = true; this._outExt = MimeTypesHelper.GetFileExtension(this._storeWriter); } else if (this._writer is NTriplesWriter && (!format.Equals("nt") || !format.Equals(".nt") || 
!format.Equals("text/plain"))) { this._writer = null; this._merge = true; this._outExt = MimeTypesHelper.GetFileExtension(this._storeWriter); } } catch (RdfException) { //Suppress this error } if (this._writer == null && this._storeWriter == null) { Console.Error.WriteLine("rdfConvert: The MIME Type '" + format + "' is not permissible since dotNetRDF does not support outputting in that format"); return false; } } else if (arg.StartsWith("-outext:")) { this._outExt = arg.Substring(arg.IndexOf(':') + 1); if (!this._outExt.StartsWith(".")) this._outExt = "." + this._outExt; } else if (arg.Equals("-debug")) { this._debug = true; } else if (arg.Equals("-help")) { //Ignore help argument if other arguments are present } else if (arg.Equals("-nocache")) { Options.UriLoaderCaching = false; } else if (arg.Equals("-nobom")) { Options.UseBomForUtf8 = false; } else if (arg.Equals("-warnings")) { this._warnings = true; UriLoader.Warning += this.ShowWarning; UriLoader.StoreWarning += this.ShowWarning; FileLoader.Warning += this.ShowWarning; FileLoader.StoreWarning += this.ShowWarning; } else { //Anything else is treated as an input file this._inputs.Add(arg); } } //If there are no this._inputs then we'll abort if (this._inputs.Count == 0) { Console.Error.WriteLine("rdfConvert: No Inputs were provided - please provide one/more files or URIs you wish to convert"); return false; } //If there are no writers specified then we'll abort if (this._writer == null && this._storeWriter == null) { Console.Error.WriteLine("rdfConvert: Aborting since no output options have been specified, use the -out:filename or -outformat: arguments to specify output format"); return false; } if (!this._outExt.Equals(String.Empty)) { if (!this._outExt.StartsWith(".")) this._outExt = "." 
+ this._outExt; } else if (!this._output.Equals(String.Empty)) { this._outExt = Path.GetExtension(this._output); } //Apply the Options to the Writers foreach (IConversionOption option in this._options) { if (this._writer != null) option.Apply(this._writer); if (this._storeWriter != null) option.Apply(this._storeWriter); } return true; }
// NOTE(review): export task. Resolves a dataset writer from the target file's
// extension (failing if none can write RDF datasets), disables multi-threaded
// writing, then exports either (a) all graphs at once for TriX — which must be
// written in a single pass — or (b) graph-by-graph, appending to the file, using
// a streaming WriteThroughHandler for formatter-based writers and an in-memory
// Graph otherwise. Progress strings and cancellation checks are interleaved
// throughout; cancellation returns TaskResult(true) after closing the stream.
// The ordering of load / append / cancel checks is intricate, so the body is
// documented rather than restructured here.
/// <summary>
/// Runs the task
/// </summary>
/// <returns></returns>
protected override TaskResult RunTaskInternal() { MimeTypeDefinition def = MimeTypesHelper.GetDefinitionsByFileExtension(MimeTypesHelper.GetTrueFileExtension(this._file)).FirstOrDefault(d => d.CanWriteRdfDatasets); if (def == null) { throw new RdfOutputException("Cannot Export the Store to the selected File since dotNetRDF was unable to select a writer to use based on the File Extension"); } IStoreWriter writer = def.GetRdfDatasetWriter(); if (writer is IMultiThreadedWriter) { ((IMultiThreadedWriter)writer).UseMultiThreadedWriting = false; } TripleStore store = new TripleStore(); if (writer is TriXWriter) { //For TriX must load all into memory and then write out all at once foreach (Uri u in this.ListGraphs()) { Graph g = new Graph(); this._manager.LoadGraph(g, u); g.BaseUri = u; store.Add(g); this.Information = "Loading into memory prior to export, loaded " + store.Graphs.Sum(x => x.Triples.Count) + " Triple(s) in " + store.Graphs.Count + " Graph(s) so far..."; if (this.HasBeenCancelled) { this.Information = "Export Cancelled"; return(new TaskResult(true)); } } this.Information = "Exporting Data all at once, have " + store.Graphs.Sum(x => x.Triples.Count) + " Triple(s) in " + store.Graphs.Count + " Graph(s) to export..."; writer.Save(store, new StreamWriter(this._file)); this.Information = "Exported " + store.Graphs.Sum(x => x.Triples.Count) + " Triple(s) in " + store.Graphs.Count + " Graph(s)"; } else { if (File.Exists(this._file)) { File.Delete(this._file); } //For non-TriX formats assume it is safe to append one Graph at a time to the file int graphCount = 0, tripleCount = 0; foreach (Uri u in this.ListGraphs()) { using (FileStream stream = new FileStream(this._file, FileMode.Append)) { if (writer is IFormatterBasedWriter) { //Stream via a WriteThroughHandler this.Information = "Stream Exporting Graph " + (u != null ? 
u.AbsoluteUri : "Default"); WriteThroughHandler handler = new WriteThroughHandler(((IFormatterBasedWriter)writer).TripleFormatterType, new StreamWriter(stream), true); ExportProgressHandler progHandler = new ExportProgressHandler(handler, this, tripleCount); this._manager.LoadGraph(progHandler, u); graphCount++; tripleCount = progHandler.TripleCount; this.Information = "Finished Stream Exporting Graph " + (u != null ? u.AbsoluteUri : "Default") + ", exported " + tripleCount + " Triple(s) in " + graphCount + " Graph(s) so far..."; } else { //Load Graph into memory Graph g = new Graph(); g.BaseUri = u; this.Information = "Loading Graph " + (u != null ? u.AbsoluteUri : "Default"); this._manager.LoadGraph(g, u); g.BaseUri = u; if (this.HasBeenCancelled) { stream.Close(); this.Information = "Export Cancelled, exported " + tripleCount + " Triple(s) in " + graphCount + " Graph(s)"; return(new TaskResult(true)); } graphCount++; tripleCount += g.Triples.Count; //Save it store.Add(g); writer.Save(store, new StreamWriter(stream, def.Encoding)); store.Remove(u); this.Information = "Exporting Data graph by graph, exported " + tripleCount + " Triple(s) in " + graphCount + " Graph(s) so far..."; } //Check for cancellation if (this.HasBeenCancelled) { stream.Close(); this.Information = "Export Cancelled, exported " + tripleCount + " Triple(s) in " + graphCount + " Graph(s)"; return(new TaskResult(true)); } } } this.Information = "Exported " + tripleCount + " Triple(s) in " + graphCount + " Graph(s)"; } return(new TaskResult(true)); }
/// <summary>
/// Creates a logging decorator over the supplied store writer and reader.
/// </summary>
/// <param name="log">Logger that records store operations.</param>
/// <param name="writer">Underlying store writer being decorated.</param>
/// <param name="reader">Underlying store reader being decorated.</param>
public StoreLogger(ILogger log, IStoreWriter writer, IStoreReader reader)
{
    // Plain field capture; the decorated components are invoked elsewhere
    this.reader = reader;
    this.writer = writer;
    this.log = log;
}
/// <summary>
/// Selects the appropriate File Extension for the given Store Writer
/// </summary>
/// <param name="writer">Store Writer</param>
/// <returns>The canonical file extension registered for the writer's type</returns>
public static String GetFileExtension(IStoreWriter writer)
{
    if (!_init) Init();

    // Match on the concrete runtime type of the supplied writer
    Type writerType = writer.GetType();
    MimeTypeDefinition match = MimeTypesHelper.Definitions.FirstOrDefault(definition => writerType.Equals(definition.RdfDatasetWriterType));
    if (match != null)
    {
        return match.CanonicalFileExtension;
    }

    throw new RdfException("Unable to determine the appropriate File Extension for the Store Writer '" + writer.GetType().ToString() + "'");
}
/// <summary>
/// Runs the conversion of all configured inputs to the requested output format(s)
/// </summary>
/// <param name="args">Command line arguments used to configure the conversion options</param>
/// <remarks>
/// BUG FIX: in the dataset-writer branch (taken only when no graph writer definition was found,
/// i.e. <c>graphDef == null</c>) the streaming case previously read <c>graphDef.Encoding</c>,
/// which is a guaranteed NullReferenceException; it now correctly uses <c>storeDef.Encoding</c>.
/// </remarks>
public void RunConvert(String[] args)
{
    //Set the Options
    if (!this.SetOptions(args))
    {
        //If SetOptions returns false then some options were invalid and errors have been output to the error stream
        return;
    }

    //First grab the MIME Type Definitions for the conversion
    List<MimeTypeDefinition> defs = MimeTypesHelper.GetDefinitions(this._outFormats).ToList();

    //Process each input to determine the Conversion Handler to use
    foreach (IConversionInput input in this._inputs)
    {
        String outFile;
        String ext = this._outExt;

        //First determine the writer we'll use
        MimeTypeDefinition graphDef = defs.FirstOrDefault(d => d.CanWriteRdf);
        if (graphDef != null)
        {
            //Then generate the output filename
            if (ext.Equals(String.Empty))
            {
                ext = "." + graphDef.CanonicalFileExtension;
            }
            // A single input with an explicit output filename maps directly to that filename
            if (this._inputs.Count == 1 && !this._outputFilename.Equals(String.Empty))
            {
                outFile = this._outputFilename;
            }
            else
            {
                outFile = input.GetFilename(this._outputFilename, ext);
            }

            //Check it doesn't already exist or overwrite is enabled
            if (File.Exists(outFile) && !this._overwrite)
            {
                Console.Error.WriteLine("rdfConvert: Warning: Skipping Conversion of Input " + input.ToString() + " as this would generate the Output File '" + outFile + "' which already exists and the -overwrite option was not specified");
                continue;
            }

            //Get the Writer and apply Conversion Options
            IRdfWriter writer = graphDef.GetRdfWriter();
            foreach (IConversionOption option in this._options)
            {
                option.Apply(writer);
            }

            //If -best always use SaveOnCompletionHandler
            if (this._best)
            {
                if (this._verbose)
                {
                    Console.WriteLine("rdfConvert: Using Best Quality data conversion subject to user specified compression options");
                }
                input.ConversionHandler = new SaveOnCompletionHandler(writer, new StreamWriter(outFile, false, graphDef.Encoding));
            }
            else
            {
                //Use the fast WriteThroughHandler where possible
                if (writer is IFormatterBasedWriter)
                {
                    if (this._verbose)
                    {
                        Console.WriteLine("rdfConvert: Using Streaming Conversion with formatter " + ((IFormatterBasedWriter)writer).TripleFormatterType.Name);
                    }
                    input.ConversionHandler = new WriteToFileHandler(outFile, graphDef.Encoding, ((IFormatterBasedWriter)writer).TripleFormatterType);
                }
                else
                {
                    //Can't use it in this case
                    if (this._verbose)
                    {
                        Console.WriteLine("rdfConvert: Warning: Target Format not suitable for streaming conversion, input data will be loaded into memory prior to conversion");
                    }
                    input.ConversionHandler = new SaveOnCompletionHandler(writer, new StreamWriter(outFile, false, graphDef.Encoding));
                }
            }
        }
        else
        {
            // No graph writer available - fall back to a dataset (store) writer if one was requested
            MimeTypeDefinition storeDef = defs.FirstOrDefault(d => d.CanWriteRdfDatasets);
            if (storeDef != null)
            {
                //Then generate the output filename
                if (ext.Equals(String.Empty))
                {
                    ext = "." + storeDef.CanonicalFileExtension;
                }
                outFile = input.GetFilename(this._outputFilename, ext);

                //Get the Writer and apply conversion options
                IStoreWriter writer = storeDef.GetRdfDatasetWriter();
                foreach (IConversionOption option in this._options)
                {
                    option.Apply(writer);
                }

                //If -best always use SaveOnCompletionHandler
                if (this._best)
                {
                    if (this._verbose)
                    {
                        Console.WriteLine("rdfConvert: Using Best Quality data conversion subject to user specified compression options");
                    }
                    input.ConversionHandler = new SaveStoreOnCompletionHandler(writer, new StreamWriter(outFile, false, storeDef.Encoding));
                }
                else
                {
                    //Use the fast WriteThroughHandler where possible
                    if (writer is IFormatterBasedWriter)
                    {
                        if (this._verbose)
                        {
                            Console.WriteLine("rdfConvert: Using Streaming Conversion with formatter " + ((IFormatterBasedWriter)writer).TripleFormatterType.Name);
                        }
                        // FIXED: was graphDef.Encoding which is always null in this branch
                        input.ConversionHandler = new WriteToFileHandler(outFile, storeDef.Encoding, ((IFormatterBasedWriter)writer).TripleFormatterType);
                    }
                    else
                    {
                        if (this._verbose)
                        {
                            Console.WriteLine("rdfConvert: Warning: Target Format not suitable for streaming conversion, input data will be loaded into memory prior to conversion");
                        }
                        input.ConversionHandler = new SaveStoreOnCompletionHandler(writer, new StreamWriter(outFile, false, storeDef.Encoding));
                    }
                }
            }
            else
            {
                Console.Error.WriteLine("rdfConvert: Warning: Skipping Conversion of Input " + input.ToString() + " as unable to determine how to convert it");
                continue;
            }
        }

        //Then do the Conversion
        Console.WriteLine("rdfConvert: Converting Input " + input.ToString() + " to '" + outFile + "'...");
        try
        {
            if (this._verbose)
            {
                // Wrap the handler so progress is reported as the conversion runs
                input.ConversionHandler = new ConversionProgressHandler(input.ConversionHandler);
                Console.WriteLine("rdfConvert: Debug: Conversion Handler is " + input.ConversionHandler.GetType().FullName);
            }
            input.Convert();
            Console.WriteLine("rdfConvert: Converted Input " + input.ToString() + " to '" + outFile + "' OK");
        }
        catch (RdfParseException parseEx)
        {
            Console.Error.WriteLine("rdfConvert: Error: Error Converting Input " + input.ToString() + " due to a RDF Parse Exception");
            Console.Error.WriteLine(parseEx.Message);
            if (this._debug)
            {
                this.DebugErrors(parseEx);
            }
        }
        catch (RdfException rdfEx)
        {
            Console.Error.WriteLine("rdfConvert: Error: Error Converting Input " + input.ToString() + " due to a RDF Exception");
            Console.Error.WriteLine(rdfEx.Message);
            if (this._debug)
            {
                this.DebugErrors(rdfEx);
            }
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine("rdfConvert: Error: Error Converting Input " + input.ToString() + " due to a Unexpected Exception");
            Console.Error.WriteLine(ex.Message);
            if (this._debug)
            {
                this.DebugErrors(ex);
            }
        }
    }
}
/// <summary>
/// Registers a writer as the default RDF Dataset Writer for all the given MIME types and updates relevant definitions to include the MIME types and file extensions
/// </summary>
/// <param name="writer">RDF Dataset Writer</param>
/// <param name="mimeTypes">MIME Types</param>
/// <param name="fileExtensions">File Extensions</param>
public static void RegisterWriter(IStoreWriter writer, IEnumerable<String> mimeTypes, IEnumerable<String> fileExtensions)
{
    if (!_init) Init();

    if (!mimeTypes.Any()) throw new RdfException("Cannot register a writer without specifying at least 1 MIME Type");

    Type writerType = writer.GetType();

    // Update every definition that already covers one of the MIME types
    // (enumeration is deliberately kept lazy, matching how definitions are filtered)
    foreach (MimeTypeDefinition existingDef in GetDefinitions(mimeTypes))
    {
        foreach (String mimeType in mimeTypes)
        {
            existingDef.AddMimeType(mimeType);
        }
        foreach (String fileExt in fileExtensions)
        {
            existingDef.AddFileExtension(fileExt);
        }
        existingDef.RdfDatasetWriterType = writerType;
    }

    // Any MIME types still unknown get a brand new definition
    IEnumerable<String> unregistered = mimeTypes.Where(t => !GetDefinitions(t).Any());
    if (unregistered.Any())
    {
        MimeTypeDefinition freshDef = new MimeTypeDefinition(String.Empty, unregistered, fileExtensions);
        freshDef.RdfDatasetWriterType = writerType;
        AddDefinition(freshDef);
    }
}
/// <summary>
/// Creates a caching decorator over the supplied store writer and reader.
/// </summary>
/// <param name="writer">Underlying store writer being decorated.</param>
/// <param name="reader">Underlying store reader being decorated.</param>
public StoreCache(IStoreWriter writer, IStoreReader reader)
{
    this.writer = writer;
    this.reader = reader;
    // Thread-safe backing cache; Maybe<string> distinguishes "cached as absent" from "not cached"
    this.cache = new ConcurrentDictionary<int, Maybe<string>>();
}
/// <summary>
/// Creates a logging decorator over the supplied store writer and reader.
/// </summary>
/// <param name="writer">Underlying store writer being decorated.</param>
/// <param name="reader">Underlying store reader being decorated.</param>
public StoreLogger(IStoreWriter writer, IStoreReader reader)
{
    this.reader = reader;
    this.writer = writer;
}
/// <summary>
/// Create a new writer instance that wraps the specified <see cref="IStoreWriter"/> instance.
/// </summary>
/// <param name="storeWriter">The <see cref="IStoreWriter"/> instance that will do the writing.</param>
/// <exception cref="ArgumentNullException">Raised if <paramref name="storeWriter"/> is null.</exception>
public SingleGraphWriter(IStoreWriter storeWriter)
{
    // Fail fast with a clear exception rather than a NullReferenceException
    // at the event subscription below
    if (storeWriter == null) throw new ArgumentNullException(nameof(storeWriter));
    _storeWriter = storeWriter;
    // Forward the inner writer's warnings through this wrapper's own event
    _storeWriter.Warning += RaiseGraphWriterWarning;
}
/// <summary>
/// Configures the reader/writer pair used for I/O based on the file extension.
/// Quad (dataset) formats populate the store reader/writer fields; triple formats
/// populate the RDF (graph) reader/writer fields.
/// </summary>
/// <param name="extension">File extension including the leading dot, e.g. ".ttl".</param>
/// <exception cref="ArgumentOutOfRangeException">Raised if the extension does not map to a known RDF serialization.</exception>
private void CreateIOHandlers(string extension)
{
    switch (extension)
    {
        case ".nq":
            _storeReader = new NQuadsParser();
            _storeWriter = new NQuadsWriter();
            break;
        case ".ttl":
            _rdfReader = new TurtleParser();
            _rdfWriter = new CompressingTurtleWriter();
            break;
        case ".trig":
            _storeReader = new TriGParser();
            _storeWriter = new TriGWriter();
            break;
        case ".xml":
            _rdfReader = new RdfXmlParser();
            _rdfWriter = new RdfXmlWriter();
            break;
        case ".n3":
            _rdfReader = new Notation3Parser();
            _rdfWriter = new Notation3Writer();
            break;
        case ".trix":
            _storeReader = new TriXParser();
            _storeWriter = new TriXWriter();
            break;
        case ".json":
            _rdfReader = new RdfJsonParser();
            _rdfWriter = new RdfJsonWriter();
            break;
        default:
            // FIXED: the single-string ArgumentOutOfRangeException ctor treats its argument as
            // the *parameter name*, so the old message appeared as a garbled parameter name;
            // the no-arg String.Format wrapper was also pointless. Use the
            // (paramName, actualValue, message) overload instead.
            throw new ArgumentOutOfRangeException(nameof(extension), extension, "Provided file path does not allow to detect a type of the RDF serialization type.");
    }
}