/// <summary>
/// Outputs the dotNetRDF configuration vocabulary to the file named in args[1].
/// </summary>
/// <param name="args">Command line arguments; args[1] is the target file path</param>
/// <remarks>
/// Refuses to overwrite an existing file. The output writer is selected from the
/// file extension, falling back to compressed Turtle when no writer matches.
/// </remarks>
public void RunVocab(String[] args)
{
    if (args.Length < 2)
    {
        Console.Error.WriteLine("rdfWebDeploy: Error: 2 Arguments are required in order to use the -vocab mode");
        return;
    }
    if (File.Exists(args[1]))
    {
        Console.Error.WriteLine("rdfWebDeploy: Error: Cannot output the configuration vocabulary to " + args[1] + " as a file already exists at that location");
        return;
    }

    // Load the vocabulary from the embedded resource in the dotNetRDF assembly.
    // Fix: the StreamReader (and underlying manifest resource stream) was never
    // disposed previously; a using block releases it deterministically.
    Graph g = new Graph();
    TurtleParser ttlparser = new TurtleParser();
    using (StreamReader reader = new StreamReader(Assembly.GetAssembly(typeof(IGraph)).GetManifestResourceStream("VDS.RDF.Configuration.configuration.ttl")))
    {
        ttlparser.Load(g, reader);
    }

    IRdfWriter writer;
    try
    {
        writer = MimeTypesHelper.GetWriter(MimeTypesHelper.GetMimeType(Path.GetExtension(args[1])));
    }
    catch (RdfWriterSelectionException)
    {
        // No writer registered for this extension - fall back to compressed Turtle
        writer = new CompressingTurtleWriter(WriterCompressionLevel.High);
    }
    writer.Save(g, args[1]);
    Console.WriteLine("rdfWebDeploy: Configuration Vocabulary output to " + args[1]);
}
/// <summary>
/// Writes the given graph to standard output as pretty-printed Turtle at
/// compression level 10, with the "nuget" namespace prefix registered.
/// </summary>
/// <param name="graph">Graph to dump to the console</param>
public static void Dump(IGraph graph)
{
    var writer = new CompressingTurtleWriter
    {
        PrettyPrintMode = true,
        CompressionLevel = 10
    };
    writer.DefaultNamespaces.AddNamespace("nuget", new Uri("http://nuget.org/schema#"));
    writer.Save(graph, Console.Out);
}
/// <summary>
/// Loads the SWRC ontology from the local resources folder and writes it to
/// the console as highly compressed Turtle.
/// </summary>
public void WritingRdfCollections()
{
    var graph = new Graph();
    FileLoader.Load(graph, "resources\\swrc.owl");

    var turtleWriter = new CompressingTurtleWriter(WriterCompressionLevel.High);
    turtleWriter.Save(graph, Console.Out);
}
/// <summary>
/// Retracts every triple whose subject is this object's node and, if a backing
/// file is configured, persists the updated graph to it as Turtle.
/// </summary>
public void Remove()
{
    this._g.Retract(this._g.GetTriplesWithSubject(this._objNode));

    // No backing file means the change is in-memory only
    if (this._file == null) return;

    var turtleWriter = new CompressingTurtleWriter();
    turtleWriter.Save(this._g, this._file);
}
/// <summary>
/// Fetches the OM Kelvin scale vocabulary from the web (bypassing the URI
/// loader cache) and writes it to the console as highly compressed Turtle.
/// </summary>
public void WritingCollections()
{
    var graph = new Graph();
    Options.UriLoaderCaching = false;
    UriLoader.Load(graph, new Uri("http://www.wurvoc.org/vocabularies/om-1.6/Kelvin_scale"));

    var turtleWriter = new CompressingTurtleWriter(WriterCompressionLevel.High);
    turtleWriter.Save(graph, Console.Out);
}
/// <summary>
/// Loads Test6.json, converts the JSON-LD into a graph, and dumps it to the
/// console as uncompressed Turtle (compression level 0).
/// </summary>
static void Test6()
{
    var json = (JObject)Load("Test6.json");

    IGraph graph = new Graph();
    JsonLd2Graph.Load(graph, json);

    var writer = new CompressingTurtleWriter { CompressionLevel = 0 };
    writer.Save(graph, Console.Out);
}
/// <summary>
/// Fetches the OM Kelvin scale vocabulary from the web and serializes it as
/// highly compressed Turtle. On PORTABLE builds (no Console) the output goes
/// to an in-memory stream instead of the console.
/// </summary>
public void WritingCollections()
{
    var graph = new Graph();
#if !NO_URICACHE
    // Bypass the URI loader cache so the resource is fetched fresh
    Options.UriLoaderCaching = false;
#endif
    UriLoader.Load(graph, new Uri("http://www.wurvoc.org/vocabularies/om-1.6/Kelvin_scale"));

    var turtleWriter = new CompressingTurtleWriter(WriterCompressionLevel.High);
#if PORTABLE
    var memoryWriter = new StreamWriter(new MemoryStream());
    turtleWriter.Save(graph, memoryWriter);
#else
    turtleWriter.Save(graph, Console.Out);
#endif
}
/// <summary>
/// Parses a deliberately broken RDFa file and dumps the triples that survive,
/// then saves them to test.ttl as compressed Turtle (high speed mode disabled
/// so collection compression is attempted).
/// </summary>
public void ParsingRdfABadSyntax()
{
    // Fix: removed the unused local RdfAParser - FileLoader.Load performs the
    // parsing itself (selecting a parser from the file extension).
    Graph g = new Graph();
    Console.WriteLine("Tests parsing a file which has much invalid RDFa syntax in it, some triples will be produced (6-8) but most of the triples are wrongly encoded and will be ignored");
    g.BaseUri = new Uri("http://www.wurvoc.org/vocabularies/om-1.6/Kelvin_scale");
    FileLoader.Load(g, "bad_rdfa.html");

    Console.WriteLine(g.Triples.Count + " Triples");
    foreach (Triple t in g.Triples)
    {
        Console.WriteLine(t.ToString());
    }
    Console.WriteLine();

    CompressingTurtleWriter ttlwriter = new CompressingTurtleWriter(WriterCompressionLevel.High);
    ttlwriter.HighSpeedModePermitted = false;
    ttlwriter.Save(g, "test.ttl");
}
/// <summary>
/// JSON-LD round-trip integration check: loads a Turtle file, flattens it to
/// JSON-LD, frames and compacts it with a context file, flattens it again,
/// reads it back into a second graph, and dumps that graph as Turtle.
/// </summary>
static void Test0()
{
    Console.WriteLine("JsonLDIntegrationTests.Test0");

    // Source graph from Turtle
    IGraph g = new Graph();
    new TurtleParser().Load(g, "datatypes.test.ttl");

    // Serialize the graph to flattened JSON-LD
    var jsonBuffer = new System.IO.StringWriter();
    new JsonLdWriter().Save(g, jsonBuffer);
    JToken flattened = JToken.Parse(jsonBuffer.ToString());

    // Frame definition comes from the context file
    JToken frame;
    using (JsonReader reader = new JsonTextReader(new StreamReader("datatypes.context.json")))
    {
        frame = JToken.Load(reader);
    }

    JObject framed = JsonLdProcessor.Frame(flattened, frame, new JsonLdOptions());
    JObject compacted = JsonLdProcessor.Compact(framed, framed["@context"], new JsonLdOptions());
    Console.WriteLine(compacted);

    // Round-trip the compacted document back into a graph
    JToken reflattened = JsonLdProcessor.Flatten(compacted, new JsonLdOptions());
    IGraph g2 = new Graph();
    new JsonLdReader().Load(g2, new StringReader(reflattened.ToString()));

    var turtleWriter = new CompressingTurtleWriter();
    turtleWriter.DefaultNamespaces.AddNamespace("ns", new Uri("http://tempuri.org/schema#"));
    turtleWriter.Save(g2, Console.Out);
}
// NOTE(review): dead code kept for reference - previously created catalog index
// entries; consider deleting if no longer needed.
//protected virtual Uri CreateIndexEntry(CatalogItem item, Uri resourceUri, Guid commitId, DateTime commitTimeStamp)
//{
//    Uri tmpUri = GetTempUri("catalogindexpage", "ttl");
//    using (IGraph pageContent = item.CreatePageContent(Context))
//    {
//        AddCatalogEntryData(pageContent, item.GetItemType(), resourceUri, commitId, commitTimeStamp);
//        SaveGraph(pageContent, tmpUri).Wait();
//    }
//    return tmpUri;
//}

/// <summary>
/// Serializes the graph as compressed Turtle into an in-memory string and
/// saves it to the given storage URI with cache control "no-store".
/// </summary>
/// <param name="graph">Graph to persist</param>
/// <param name="uri">Target storage URI</param>
/// <param name="cancellationToken">Token to cancel the storage write</param>
private async Task SaveGraph(IGraph graph, Uri uri, CancellationToken cancellationToken)
{
    StringBuilder sb = new StringBuilder();
    using (var stringWriter = new System.IO.StringWriter(sb))
    {
        CompressingTurtleWriter turtleWriter = new CompressingTurtleWriter();
        turtleWriter.Save(graph, stringWriter);
    }
    // NOTE(review): the payload is Turtle but the declared content type is
    // "application/json" - looks wrong; confirm downstream consumers before changing.
    StorageContent content = new StringStorageContent(sb.ToString(), "application/json", "no-store");
    await Storage.Save(uri, content, cancellationToken);
}
/// <summary>
/// Tests collection compression when RDF list structure is built from named
/// (URI) list nodes rather than blank nodes: builds the list manually, checks
/// how many collections the writer detects, round-trips the graph through
/// compressed Turtle, and asserts graph equality.
/// </summary>
public void WritingCollectionCompressionNamedListNodes3()
{
    Graph g = new Graph();
    INode data1 = g.CreateBlankNode();
    g.Assert(data1, g.CreateUriNode(new Uri("http://property")), g.CreateLiteralNode("test1"));
    INode data2 = g.CreateBlankNode();
    g.Assert(data2, g.CreateUriNode(new Uri("http://property")), g.CreateLiteralNode("test2"));

    // Hand-built rdf:List whose cons cells are URI nodes (not blank nodes)
    INode listEntry1 = g.CreateUriNode(new Uri("http://test/1"));
    INode rdfFirst = g.CreateUriNode(new Uri(RdfSpecsHelper.RdfListFirst));
    INode rdfRest = g.CreateUriNode(new Uri(RdfSpecsHelper.RdfListRest));
    INode rdfNil = g.CreateUriNode(new Uri(RdfSpecsHelper.RdfListNil));
    g.Assert(listEntry1, rdfFirst, data1);
    g.Assert(listEntry1, rdfRest, rdfNil);
    INode listEntry2 = g.CreateUriNode(new Uri("http://test/2"));
    g.Assert(listEntry2, rdfFirst, data2);
    g.Assert(listEntry2, rdfRest, listEntry1);
    INode root = g.CreateUriNode(new Uri("http://root"));
    g.Assert(root, g.CreateUriNode(new Uri("http://list")), listEntry2);

    NTriplesFormatter formatter = new NTriplesFormatter();
    Console.WriteLine("Original Graph");
    foreach (Triple t in g.Triples)
    {
        Console.WriteLine(t.ToString(formatter));
    }
    Console.WriteLine();

    // Inspect what the writer's collection detection finds for this graph
    CompressingTurtleWriterContext context = new CompressingTurtleWriterContext(g, Console.Out);
    WriterHelper.FindCollections(context);
    Console.WriteLine(context.Collections.Count + " Collections Found");
    Console.WriteLine();

    System.IO.StringWriter strWriter = new System.IO.StringWriter();
    CompressingTurtleWriter writer = new CompressingTurtleWriter();
    writer.CompressionLevel = WriterCompressionLevel.High;
    writer.Save(g, strWriter);
    Console.WriteLine("Compressed Turtle");
    Console.WriteLine(strWriter.ToString());
    Console.WriteLine();

    // Round-trip: parse the compressed output back and compare graphs.
    // Fix: removed the unused local TurtleParser - StringParser.Parse selects
    // its own parser.
    Graph h = new Graph();
    StringParser.Parse(h, strWriter.ToString());
    Console.WriteLine("Graph after Round Trip to Compressed Turtle");
    foreach (Triple t in h.Triples)
    {
        Console.WriteLine(t.ToString(formatter));
    }
    Assert.AreEqual(g, h, "Graphs should be equal");
}
/// <summary>
/// Tests collection compression when RDF list structure is built from named
/// (URI) list nodes rather than blank nodes: builds the list manually, checks
/// how many collections the writer detects, round-trips the graph through
/// compressed Turtle, and asserts graph equality (xUnit variant, logging via
/// ITestOutputHelper).
/// </summary>
public void WritingCollectionCompressionNamedListNodes3()
{
    Graph g = new Graph();
    INode data1 = g.CreateBlankNode();
    g.Assert(data1, g.CreateUriNode(new Uri("http://property")), g.CreateLiteralNode("test1"));
    INode data2 = g.CreateBlankNode();
    g.Assert(data2, g.CreateUriNode(new Uri("http://property")), g.CreateLiteralNode("test2"));

    // Hand-built rdf:List whose cons cells are URI nodes (not blank nodes)
    INode listEntry1 = g.CreateUriNode(new Uri("http://test/1"));
    INode rdfFirst = g.CreateUriNode(new Uri(RdfSpecsHelper.RdfListFirst));
    INode rdfRest = g.CreateUriNode(new Uri(RdfSpecsHelper.RdfListRest));
    INode rdfNil = g.CreateUriNode(new Uri(RdfSpecsHelper.RdfListNil));
    g.Assert(listEntry1, rdfFirst, data1);
    g.Assert(listEntry1, rdfRest, rdfNil);
    INode listEntry2 = g.CreateUriNode(new Uri("http://test/2"));
    g.Assert(listEntry2, rdfFirst, data2);
    g.Assert(listEntry2, rdfRest, listEntry1);
    INode root = g.CreateUriNode(new Uri("http://root"));
    g.Assert(root, g.CreateUriNode(new Uri("http://list")), listEntry2);

    NTriplesFormatter formatter = new NTriplesFormatter();
    _output.WriteLine("Original Graph");
    foreach (Triple t in g.Triples)
    {
        _output.WriteLine(t.ToString(formatter));
    }
    _output.WriteLine("");

    // Inspect what the writer's collection detection finds for this graph
    var sw = new System.IO.StringWriter();
    CompressingTurtleWriterContext context = new CompressingTurtleWriterContext(g, sw);
    WriterHelper.FindCollections(context);
    _output.WriteLine(sw.ToString());
    _output.WriteLine(context.Collections.Count + " Collections Found");
    _output.WriteLine("");

    System.IO.StringWriter strWriter = new System.IO.StringWriter();
    CompressingTurtleWriter writer = new CompressingTurtleWriter();
    writer.CompressionLevel = WriterCompressionLevel.High;
    writer.Save(g, strWriter);
    _output.WriteLine("Compressed Turtle");
    _output.WriteLine(strWriter.ToString());
    _output.WriteLine("");

    // Round-trip: parse the compressed output back and compare graphs.
    // Fix: removed the unused local TurtleParser - StringParser.Parse selects
    // its own parser.
    Graph h = new Graph();
    StringParser.Parse(h, strWriter.ToString());
    _output.WriteLine("Graph after Round Trip to Compressed Turtle");
    foreach (Triple t in h.Triples)
    {
        _output.WriteLine(t.ToString(formatter));
    }
    Assert.Equal(g, h);
}
/// <summary>
/// Fetches the OM Kelvin scale vocabulary from the web (bypassing the URI
/// loader cache), serializes it as highly compressed Turtle via the
/// StringWriter helper, and prints the result to the console.
/// </summary>
public void WritingCollections()
{
    var graph = new Graph();
    Options.UriLoaderCaching = false;
    UriLoader.Load(graph, new Uri("http://www.wurvoc.org/vocabularies/om-1.6/Kelvin_scale"));

    var turtleWriter = new CompressingTurtleWriter(WriterCompressionLevel.High);
    Console.WriteLine(StringWriter.Write(graph, turtleWriter));
}
/// <summary>
/// Runs a single SPARQL evaluation test: parses the query, loads the default
/// and named graphs into a store, evaluates the query against an in-memory
/// dataset, and compares the actual result (result set or graph) with the
/// expected result file.
/// </summary>
/// <param name="parser">Parser used to read the query file</param>
/// <param name="commentDef">Optional triple whose object is a human-readable test description</param>
/// <param name="queryFile">Path/URI of the query to run</param>
/// <param name="dataFile">Path/URI of the default graph data (may be null)</param>
/// <param name="dataFiles">Paths/URIs of named graph data files</param>
/// <param name="resultFile">Path/URI of the expected results (.srx, .ttl or .rdf)</param>
/// <returns>1 = passed, -1 = failed, 0 = indeterminate; also increments the matching test counters</returns>
private int ProcessEvaluationTest(SparqlQueryParser parser, Triple commentDef, String queryFile, String dataFile, List<String> dataFiles, String resultFile)
{
    Console.WriteLine("# Processing Evaluation Test " + Path.GetFileName(queryFile));
    if (commentDef != null)
    {
        Console.WriteLine(commentDef.Object.ToString());
        Console.WriteLine();
    }

    // The default graph must not also be loaded as a named graph
    if (dataFiles.Contains(dataFile)) dataFiles.Remove(dataFile);
    // Strip the file:/// scheme prefix (8 chars) to get local paths
    if (queryFile.StartsWith("file:///")) queryFile = queryFile.Substring(8);
    if (dataFile != null && dataFile.StartsWith("file:///")) dataFile = dataFile.Substring(8);
    if (resultFile.StartsWith("file:///")) resultFile = resultFile.Substring(8);
    Console.WriteLine("Query File is " + queryFile);

    // Some tests are known-unsupported and manually overridden to pass
    if (evaluationTestOverride.Any(x => queryFile.EndsWith(x)))
    {
        Console.WriteLine();
        Console.WriteLine("# Test Result = Manually overridden to Pass (Test Passed)");
        testsPassed++;
        testsEvaluationPassed++;
        return 1;
    }
    if (dataFile != null) Console.WriteLine("Default Graph File is " + dataFile);
    foreach (String file in dataFiles)
    {
        Console.WriteLine("Uses Named Graph File " + file);
    }
    Console.WriteLine("Expected Result File is " + resultFile);
    Console.WriteLine();

    // Parse the query; a parse failure fails the test outright
    SparqlQuery query;
    try
    {
        query = parser.ParseFromFile(queryFile);
        Console.WriteLine(query.ToString());
        Console.WriteLine();
        Console.WriteLine("Formatted with SparqlFormatter");
        SparqlFormatter formatter = new SparqlFormatter(query.NamespaceMap);
        Console.WriteLine(formatter.Format(query));
        Console.WriteLine();
        try
        {
            Console.WriteLine(query.ToAlgebra().ToString());
            Console.WriteLine();
        }
        catch
        {
            //Do Nothing - algebra printing is diagnostic only
        }
    }
    catch (RdfParseException parseEx)
    {
        this.ReportError("Query Parser Error", parseEx);
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result = Unable to parse query (Test Failed)");
        return -1;
    }

    // With no local data file the store fetches graphs on demand from the web
    IInMemoryQueryableStore store;
    if (dataFile != null)
    {
        store = new TripleStore();
    }
    else
    {
        store = new WebDemandTripleStore();
    }

    //Load Default Graph
    Graph defaultGraph = new Graph();
    try
    {
        if (dataFile != null)
        {
            FileLoader.Load(defaultGraph, dataFile);
        }
        store.Add(defaultGraph);
    }
    catch (RdfParseException parseEx)
    {
        this.ReportError("Parser Error", parseEx);
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result = Unable to parse Default Graph (Test Failed)");
        return -1;
    }

    //Load Named Graphs
    try
    {
        foreach (String graphFile in dataFiles)
        {
            Graph namedGraph = new Graph();
            if (graphFile.StartsWith("file:///"))
            {
                FileLoader.Load(namedGraph, graphFile.Substring(8));
            }
            else
            {
                FileLoader.Load(namedGraph, graphFile);
            }
            store.Add(namedGraph);
        }
    }
    catch (RdfParseException parseEx)
    {
        this.ReportError("Parser Error", parseEx);
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result - Unable to parse Named Graph (Test Failed)");
        return -1;
    }

    //Create a Dataset and then Set Graphs
    // Only inject default/named graphs when the query itself doesn't declare them
    InMemoryDataset dataset = new InMemoryDataset(store);
    if (!query.DefaultGraphs.Any())
    {
        query.AddDefaultGraph(defaultGraph.BaseUri);
        //dataset.SetActiveGraph(defaultGraph.BaseUri);
    }
    if (!query.NamedGraphs.Any())
    {
        foreach (String namedGraphUri in dataFiles)
        {
            query.AddNamedGraph(new Uri(namedGraphUri));
        }
    }

    //Try and get the result
    Object results = null;
    try
    {
        results = query.Evaluate(dataset);
    }
    catch (RdfQueryException queryEx)
    {
        this.ReportError("Query Error", queryEx);
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result - Query execution failed (Test Failed)");
        return -1;
    }
    catch (Exception ex)
    {
        this.ReportError("Other Error", ex);
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result - Query failed (Test Failed)");
        return -1;
    }
    if (results == null)
    {
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result - No result was returned from the Query (Test Failed)");
        return -1;
    }

    //Load in the expected results
    if (results is SparqlResultSet)
    {
        //Save our Results so we can manually compare as needed
        SparqlResultSet ourResults = (SparqlResultSet)results;
        SparqlXmlWriter writer = new SparqlXmlWriter();
        writer.Save(ourResults, resultFile + ".out");

        // Expected results may be SPARQL XML (.srx) or RDF-encoded (.ttl/.rdf)
        SparqlResultSet expectedResults = new SparqlResultSet();
        if (resultFile.EndsWith(".srx"))
        {
            try
            {
                SparqlXmlParser resultSetParser = new SparqlXmlParser();
                resultSetParser.Load(expectedResults, resultFile);
            }
            catch (RdfParseException parseEx)
            {
                this.ReportError("Result Set Parser Error", parseEx);
                testsIndeterminate++;
                testsEvaluationIndeterminate++;
                Console.WriteLine("# Test Result - Error loading expected Result Set (Test Indeterminate)");
                return 0;
            }
        }
        else if (resultFile.EndsWith(".ttl") || resultFile.EndsWith(".rdf"))
        {
            try
            {
                SparqlRdfParser resultSetParser = new SparqlRdfParser();
                resultSetParser.Load(expectedResults, resultFile);
            }
            catch (RdfParseException parseEx)
            {
                this.ReportError("Result Set Parser Error", parseEx);
                testsIndeterminate++;
                testsEvaluationIndeterminate++;
                Console.WriteLine("# Test Result - Error loading expected Result Set (Test Indeterminate)");
                return 0;
            }
        }
        else
        {
            testsIndeterminate++;
            testsEvaluationIndeterminate++;
            Console.WriteLine("# Test Result - Unable to load the expected Result Set (Test Indeterminate)");
            return 0;
        }

        try
        {
            // Trim before comparing; equality may throw NotImplementedException
            // for result set forms the comparison doesn't support
            ourResults.Trim();
            expectedResults.Trim();
            if (ourResults.Equals(expectedResults))
            {
                testsPassed++;
                testsEvaluationPassed++;
                Console.WriteLine("# Test Result - Result Set as expected (Test Passed)");
                return 1;
            }
            else
            {
                Console.WriteLine("Final Query");
                Console.WriteLine(query.ToString());
                Console.WriteLine();
                this.ShowTestData(store);
                this.ShowResultSets(ourResults, expectedResults);
                testsFailed++;
                testsEvaluationFailed++;
                Console.WriteLine("# Test Result - Result Set not as expected (Test Failed)");
                return -1;
            }
        }
        catch (NotImplementedException)
        {
            this.ShowResultSets(ourResults, expectedResults);
            testsIndeterminate++;
            testsEvaluationIndeterminate++;
            Console.WriteLine("# Test Result - Unable to establish if Result Set was as expected (Test Indeterminate)");
            return 0;
        }
    }
    else if (results is Graph)
    {
        if (resultFile.EndsWith(".ttl"))
        {
            //Save our Results so we can manually compare as needed
            Graph ourResults = (Graph)results;
            CompressingTurtleWriter writer = new CompressingTurtleWriter();
            writer.Save(ourResults, resultFile + ".out");
            try
            {
                Graph expectedResults = new Graph();
                TurtleParser ttlparser = new TurtleParser();
                ttlparser.Load(expectedResults, resultFile);
                try
                {
                    if (ourResults.Equals(expectedResults))
                    {
                        testsPassed++;
                        testsEvaluationPassed++;
                        Console.WriteLine("# Test Result - Graph as expected (Test Passed)");
                        return 1;
                    }
                    else
                    {
                        this.ShowTestData(store);
                        this.ShowGraphs(ourResults, expectedResults);
                        testsFailed++;
                        testsEvaluationFailed++;
                        Console.WriteLine("# Test Result - Graph not as expected (Test Failed)");
                        return -1;
                    }
                }
                catch (NotImplementedException)
                {
                    this.ShowGraphs(ourResults, expectedResults);
                    testsIndeterminate++;
                    testsEvaluationIndeterminate++;
                    Console.WriteLine("# Test Result - Unable to establish if Graph was as expected (Test Indeterminate)");
                    return 0;
                }
            }
            catch (RdfParseException parseEx)
            {
                this.ReportError("Graph Parser Error", parseEx);
                testsIndeterminate++;
                testsEvaluationIndeterminate++;
                Console.WriteLine("# Test Result - Error loading expected Graph (Test Indeterminate)");
                return 0;
            }
        }
        else
        {
            testsIndeterminate++;
            testsEvaluationIndeterminate++;
            Console.WriteLine("# Test Result - Unable to load expected Graph (Test Indeterminate)");
            return 0;
        }
    }
    else
    {
        testsFailed++;
        testsEvaluationFailed++;
        Console.WriteLine("# Test Result - Didn't produce a Graph as expected (Test Failed)");
        return -1;
    }
}
/// <summary>
/// Saves a Graph to a 4store instance (Warning: Completely replaces any existing Graph with the same URI)
/// </summary>
/// <param name="g">Graph to save</param>
/// <remarks>
/// <para>
/// Completely replaces any existing Graph with the same Uri in the store
/// </para>
/// <para>
/// Attempting to save a Graph which doesn't have a Base Uri will result in an error
/// </para>
/// </remarks>
/// <exception cref="RdfStorageException">Thrown if you try and save a Graph without a Base Uri or if there is an error communicating with the 4store instance</exception>
public void SaveGraph(IGraph g)
{
    try
    {
#if !NO_RWLOCK
        // Serialize writers: only one graph upload at a time
        this._lockManager.EnterWriteLock();
#endif
        //Set up the Request
        // The graph's Base URI is part of the 4store endpoint path, so it is mandatory
        HttpWebRequest request;
        if (g.BaseUri != null)
        {
            request = (HttpWebRequest)WebRequest.Create(this._baseUri + "data/" + Uri.EscapeUriString(g.BaseUri.ToString()));
        }
        else
        {
            throw new RdfStorageException("Cannot save a Graph without a Base URI to a 4store Server");
        }
        request.Method = "PUT";
        request.ContentType = MimeTypesHelper.Turtle[0];

        //Write the Graph as Turtle to the Request Stream
        // NOTE(review): the StreamWriter is not explicitly disposed here - presumably
        // the writer's Save closes it; confirm against the dotNetRDF version in use.
        CompressingTurtleWriter writer = new CompressingTurtleWriter(WriterCompressionLevel.High);
        writer.Save(g, new StreamWriter(request.GetRequestStream()));

#if DEBUG
        if (Options.HttpDebugging)
        {
            Tools.HttpDebugRequest(request);
        }
#endif

        //Make the Request
        using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
        {
#if DEBUG
            if (Options.HttpDebugging)
            {
                Tools.HttpDebugResponse(response);
            }
#endif
            //If we get here then it was OK
            response.Close();
        }
    }
    catch (WebException webEx)
    {
#if DEBUG
        if (Options.HttpDebugging)
        {
            if (webEx.Response != null) Tools.HttpDebugResponse((HttpWebResponse)webEx.Response);
        }
#endif
        // Wrap HTTP failures in the storage exception type callers expect
        throw new RdfStorageException("A HTTP error occurred while communicating with the 4store Server", webEx);
    }
    finally
    {
#if !NO_RWLOCK
        this._lockManager.ExitWriteLock();
#endif
    }
}
/// <summary>
/// Serializes a connection's configuration into the given graph and optionally
/// persists the graph to disk as Turtle.
/// </summary>
/// <param name="config">Graph to serialize the connection into (may be null)</param>
/// <param name="manager">Connection manager; must be IConfigurationSerializable to be saved</param>
/// <param name="persistentFile">File to persist the graph to, or null to skip persistence</param>
/// <returns>The node representing the connection, or null if nothing was serialized</returns>
private INode AddConnection(IGraph config, IGenericIOManager manager, String persistentFile)
{
    if (config == null) return null;

    ConfigurationSerializationContext context = new ConfigurationSerializationContext(config);
    if (manager is IConfigurationSerializable)
    {
        // NOTE(review): "hhmmss" is a 12-hour clock format, so connections created
        // 12 hours apart on the same day get colliding URIs - "HHmmss" was likely
        // intended; confirm before changing persisted identifiers.
        INode objNode = context.Graph.CreateUriNode(new Uri("dotnetrdf:storemanager:" + DateTime.Now.ToString("yyyyMMddhhmmss")));
        context.NextSubject = objNode;
        ((IConfigurationSerializable)manager).SerializeConfiguration(context);

        if (persistentFile != null)
        {
            try
            {
                //Persist the graph to disk
                CompressingTurtleWriter ttlwriter = new CompressingTurtleWriter();
                ttlwriter.Save(config, persistentFile);
            }
            catch (Exception)
            {
                // Fix: catch variable was declared but unused (CS0168); persistence
                // failure is best-effort so we only inform the user
                MessageBox.Show("Unable to persist a Connections File to disk", "Internal Error", MessageBoxButtons.OK, MessageBoxIcon.Information);
            }
        }
        return objNode;
    }
    return null;
}
/// <summary>
/// Records a connection in the Recent Connections graph/menu, pruning the
/// oldest entries when more than MaxRecentConnections are stored.
/// </summary>
/// <param name="manager">Connection manager to record</param>
private void AddRecentConnection(IGenericIOManager manager)
{
    INode objNode = this.AddConnection(this._recentConnections, manager, this._recentConnectionsFile);
    if (objNode != null)
    {
        ToolStripMenuItem item = new ToolStripMenuItem();
        item.Text = manager.ToString();
        item.Tag = new QuickConnect(this._recentConnections, objNode);
        item.Click += new EventHandler(QuickConnectClick);
        this.mnuRecentConnections.DropDownItems.Add(item);
    }

    //Check the number of Recent Connections and delete the Oldest if more than 9
    List<INode> conns = this._recentConnections.GetTriplesWithPredicateObject(this._recentConnections.CreateUriNode(new Uri(RdfSpecsHelper.RdfType)), this._recentConnections.CreateUriNode(new Uri(ConfigurationLoader.ConfigurationNamespace + ConfigurationLoader.ClassGenericManager.Substring(ConfigurationLoader.ClassGenericManager.IndexOf(':') + 1)))).Select(t => t.Subject).ToList();
    if (conns.Count > MaxRecentConnections)
    {
        // Sort descending, keep the first MaxRecentConnections entries and retract
        // the rest (presumably newest-first because the node URIs embed a timestamp
        // - TODO confirm node ordering matches creation order)
        conns.Sort();
        conns.Reverse();
        conns.RemoveRange(0, MaxRecentConnections);
        foreach (INode obj in conns)
        {
            this._recentConnections.Retract(this._recentConnections.GetTriplesWithSubject(obj));
            this.RemoveFromConnectionsMenu(this.mnuRecentConnections, obj);
        }
        try
        {
            //Persist the graph to disk
            CompressingTurtleWriter ttlwriter = new CompressingTurtleWriter();
            ttlwriter.Save(this._recentConnections, this._recentConnectionsFile);
        }
        catch (Exception)
        {
            // Fix: catch variable was declared but unused (CS0168); persistence
            // failure is best-effort so we only inform the user
            MessageBox.Show("Unable to persist a Connections File to disk", "Internal Error", MessageBoxButtons.OK, MessageBoxIcon.Information);
        }
    }
}
/// <summary>
/// Handles the Save Connection menu item: serializes the active MDI child's
/// connection manager to an RDF configuration file chosen by the user,
/// appending to or overwriting an existing file as the user decides.
/// </summary>
/// <param name="sender">Event source</param>
/// <param name="e">Event arguments</param>
private void mnuSaveConnection_Click(object sender, EventArgs e)
{
    if (this.ActiveMdiChild != null)
    {
        if (this.ActiveMdiChild is fclsGenericStoreManager || this.ActiveMdiChild is fclsSQLStoreManager)
        {
            Object manager;
            if (this.ActiveMdiChild is fclsGenericStoreManager)
            {
                manager = ((fclsGenericStoreManager)this.ActiveMdiChild).Manager;
            }
            else if (this.ActiveMdiChild is fclsSQLStoreManager)
            {
                manager = ((fclsSQLStoreManager)this.ActiveMdiChild).Manager;
                // SQL store windows may not have an open connection yet
                if (manager == null) return;
            }
            else
            {
                return;
            }
            if (manager is IConfigurationSerializable)
            {
                this.sfdConnection.Filter = Constants.RdfFilter;
                if (this.sfdConnection.ShowDialog() == DialogResult.OK)
                {
                    //Append to existing configuration file or overwrite?
                    ConfigurationSerializationContext context;
                    if (File.Exists(this.sfdConnection.FileName))
                    {
                        DialogResult result = MessageBox.Show("The selected connection file already exists - would you like to append this connection to that file? Click Yes to append to this file, No to overwrite and Cancel to abort", "Append Connection?", MessageBoxButtons.YesNoCancel, MessageBoxIcon.Question);
                        switch (result)
                        {
                            case DialogResult.Yes:
                                // Append: load the existing file so new config is added to it
                                Graph g = new Graph();
                                FileLoader.Load(g, this.sfdConnection.FileName);
                                context = new ConfigurationSerializationContext(g);
                                break;
                            case DialogResult.No:
                                // Overwrite: start from an empty configuration graph
                                context = new ConfigurationSerializationContext();
                                break;
                            default:
                                // Cancel: abort the save entirely
                                return;
                        }
                    }
                    else
                    {
                        //Create new configuration file
                        context = new ConfigurationSerializationContext();
                    }

                    //Save the Connection
                    ((IConfigurationSerializable)manager).SerializeConfiguration(context);
                    try
                    {
                        // Pick a writer from the chosen file's extension
                        IRdfWriter writer = MimeTypesHelper.GetWriter(MimeTypesHelper.GetMimeType(Path.GetExtension(this.sfdConnection.FileName)));
                        writer.Save(context.Graph, this.sfdConnection.FileName);
                    }
                    catch (RdfWriterSelectionException)
                    {
                        // Unknown extension - fall back to compressed Turtle
                        CompressingTurtleWriter ttlwriter = new CompressingTurtleWriter(WriterCompressionLevel.High);
                        ttlwriter.Save(context.Graph, this.sfdConnection.FileName);
                    }
                }
            }
            else
            {
                MessageBox.Show("Unable to save the current connection as it does not support this feature", "Save Unavailable", MessageBoxButtons.OK, MessageBoxIcon.Information);
            }
        }
        else
        {
            this.mnuSaveConnection.Enabled = false;
        }
    }
    else
    {
        this.mnuSaveConnection.Enabled = false;
    }
}