public void SparqlSequenceUpdateThenQuery2()
{
    // Both processors share one in-memory dataset so the update's effects
    // are visible to the subsequent query.
    var dataset = new InMemoryDataset();
    var updateProcessor = new LeviathanUpdateProcessor(dataset);
    var queryProcessor = new LeviathanQueryProcessor(dataset);
    Assert.Equal(1, dataset.Graphs.Count());

    // Apply the update file and verify the expected graphs/triples appeared
    var updates = this._updateParser.ParseFromFile("resources\\sparql\\protocol\\update_dataset_default_graphs.ru");
    updateProcessor.ProcessCommandSet(updates);
    Assert.Equal(5, dataset.Graphs.Count());
    Assert.Equal(2, dataset[UriFactory.Create("http://example.org/protocol-update-dataset-graphs-test/")].Triples.Count());

    // Run the companion query; dump its algebra for diagnostics first
    var query = this._queryParser.ParseFromFile("resources\\sparql\\protocol\\update_dataset_default_graphs.rq");
    ISparqlAlgebra algebra = query.ToAlgebra();
    Console.WriteLine(algebra.ToString());

    // The query is boolean (ASK-style) and must evaluate to true
    var results = queryProcessor.ProcessQuery(query) as SparqlResultSet;
    Assert.NotNull(results);
    Assert.Equal(SparqlResultsType.Boolean, results.ResultsType);
    Assert.True(results.Result);
}
public void SparqlUpdate()
{
    var store = new TripleStore();
    var updateParser = new SparqlUpdateParser();

    // Build the LOAD command text and parse it into a command set
    var commandString = new SparqlParameterizedString();
    commandString.CommandText = "LOAD <http://dbpedia.org/resource/Southampton> INTO <http://example.org/Soton>";
    var commands = updateParser.ParseFromString(commandString);

    // Execute the update against our store
    var updateProcessor = new LeviathanUpdateProcessor(store);
    updateProcessor.ProcessCommandSet(commands);

    // The LOAD should have created a graph in the store as a result of the
    // update, so retrieve it and print its triples to the console
    IGraph loaded = store.Graphs[new Uri("http://example.org/Soton")];
    var formatter = new NTriplesFormatter();
    foreach (Triple t in loaded.Triples)
    {
        Console.WriteLine(t.ToString(formatter));
    }
}
public void SparqlDatasetListGraphs()
{
    var dataset = new InMemoryDataset(new TripleStore());
    // The processor is not used directly: constructing it has the side
    // effect of ensuring the dataset contains a default unnamed graph.
    var processor = new LeviathanUpdateProcessor(dataset);
    Assert.IsTrue(dataset.GraphUris.Count() == 1, "Should be 1 Graph as the Update Processor should ensure a Default unnamed Graph exists");
}
private void TestUpdate(IInMemoryQueryableStore store, IGraph expected, String update)
{
    // Parse and apply the update against the supplied store
    var commands = this._updateParser.ParseFromString(update);
    var processor = new LeviathanUpdateProcessor(store);
    processor.ProcessCommandSet(commands);

    Assert.True(store.HasGraph(null), "Store should have a default unnamed Graph");
    IGraph actual = store[null];

    // Dump both graphs before the comparison so failures are diagnosable
    var formatter = new NTriplesFormatter();
    Console.WriteLine("Result Data");
    foreach (Triple t in actual.Triples)
    {
        Console.WriteLine(t.ToString(formatter));
    }
    Console.WriteLine();
    Console.WriteLine("Expected Data");
    foreach (Triple t in expected.Triples)
    {
        Console.WriteLine(t.ToString(formatter));
    }
    Console.WriteLine();

    Assert.Equal(expected, actual);
}
/// <inheritdoc />
void IUpdateableTripleStore.ExecuteUpdate(SparqlUpdateCommandSet updates)
{
    // Serialise updates: apply the commands in-memory under the lock,
    // then persist the result via Write()
    lock (Locker)
    {
        new LeviathanUpdateProcessor(this).ProcessCommandSet(updates);
        Write();
    }
}
public void Commit(SparqlParameterizedString updateTasks)
{
    // Nothing queued - nothing to commit
    if (updateTasks == null)
    {
        return;
    }

    var processor = new LeviathanUpdateProcessor(_dataset);
    var parser = new SparqlUpdateParser();
    AddAllColidNamespaces(updateTasks);
    var commands = parser.ParseFromString(updateTasks);
    processor.ProcessCommandSet(commands);
}
/// <summary>
/// Applies SPARQL Updates to the Store.
/// </summary>
/// <param name="sparqlUpdate">SPARQL Update.</param>
public void Update(String sparqlUpdate)
{
    // Parser and processor are created lazily on first use and then reused
    _updateParser = _updateParser ?? new SparqlUpdateParser();
    SparqlUpdateCommandSet cmds = _updateParser.ParseFromString(sparqlUpdate);
    _updateProcessor = _updateProcessor ?? new LeviathanUpdateProcessor(_dataset);
    _updateProcessor.ProcessCommandSet(cmds);
}
private void SparqlMathTest()
{
    const string update = @" DELETE { ?s ?p ?o. } INSERT { ?s ?p ?a. } WHERE { ?s ?p ?o. BIND(?o / 2 AS ?a) } ";
    const string query = @"SELECT * WHERE { ?s ?p ?o.}";

    // Seed a graph with one triple whose object is the double literal 0.5
    var graph = new Graph();
    graph.Assert(graph.CreateBlankNode(), graph.CreateBlankNode(), 0.5.ToLiteral(graph));
    Assert.Equal(0.5, graph.Triples.Select(t => t.Object.AsValuedNode().AsDouble()).First());

    var store = new TripleStore();
    store.Add(graph);
    var queryProcessor = new LeviathanQueryProcessor(store);
    var queryParser = new SparqlQueryParser();

    // The stored object must serialise as "0.5"
    var q = queryParser.ParseFromString(query);
    var result = q.Process(queryProcessor) as SparqlResultSet;
    var node = result.First()["o"];
    Assert.Equal("0.5", ((ILiteralNode)node).Value);

    // Dividing inside a BIND should halve the value to 0.25
    q = queryParser.ParseFromString("SELECT ?a WHERE { ?s ?p ?o. BIND(?o / 2 AS ?a) }");
    //q = queryParser.ParseFromString("SELECT ?a WHERE { BIND (0.25 as ?a) }");
    result = q.Process(queryProcessor) as SparqlResultSet;
    node = result.First()["a"];
    var numeric = node.AsValuedNode().AsDouble();
    Assert.Equal(0.25, numeric);
    Assert.Equal("0.25", ((ILiteralNode)node).Value);

    // Apply the DELETE/INSERT update and check the stored value was halved
    var updateProcessor = new LeviathanUpdateProcessor(store);
    var updateParser = new SparqlUpdateParser();
    updateParser.ParseFromString(update).Process(updateProcessor);
    result = queryParser.ParseFromString(query).Process(queryProcessor) as SparqlResultSet;
    node = result.First()["o"];
    //Assert.Equal("0.25", ((ILiteralNode)oNode).Value);
    numeric = node.AsValuedNode().AsDouble();
    Assert.Equal(0.25, numeric);
    //@object is 25 when using German (Germany) regional format
    //@object is 0.25 when using English (United Kingdom) regional format
}
public void UpdateTripleStore(SparqlParameterizedString updateString)
{
    // Inside a transaction the update is deferred until commit
    if (_transaction != null)
    {
        _transaction.AddUpdateString(updateString);
        return;
    }

    // No transaction: apply the update immediately
    var processor = new LeviathanUpdateProcessor(_dataset);
    var parser = new SparqlUpdateParser();
    AddAllColidNamespaces(updateString);
    var commands = parser.ParseFromString(updateString);
    processor.ProcessCommandSet(commands);
}
public void Setup()
{
    SetupNamespaceMap();

    // Store holding the default graph plus the inference and contradiction graphs
    _tripleStore = new TripleStore();
    _defaultGraph = CreateGraph(null);
    _inferences = CreateGraph(new Uri("http://industrialinference.com/inferred/"));
    _contradictions = CreateGraph(new Uri("http://industrialinference.com/contradictions/"));
    _tripleStore.Add(_defaultGraph);
    _tripleStore.Add(_inferences);
    _tripleStore.Add(_contradictions);

    // Register the processors and inference engine with the DI container
    var queryProcessor = new LeviathanQueryProcessor(_tripleStore);
    var updateProcessor = new LeviathanUpdateProcessor(_tripleStore);
    var builder = new ContainerBuilder();
    builder.RegisterType<InferenceEngine>().AsImplementedInterfaces();
    builder.Register<ISparqlQueryProcessor>(_ => queryProcessor);
    builder.Register<ISparqlUpdateProcessor>(_ => updateProcessor);
    _container = builder.Build();
}
public void SparqlDatasetDefaultGraphManagementWithUpdate()
{
    // Store with an unnamed graph (g) plus a named graph (h) which is
    // explicitly made the dataset's default graph
    var store = new TripleStore();
    var g = new Graph();
    store.Add(g);
    var h = new Graph { BaseUri = new Uri("http://example.org/someOtherGraph") };
    store.Add(h);
    var dataset = new InMemoryDataset(store, h.BaseUri);
    var processor = new LeviathanUpdateProcessor(dataset);

    // A LOAD without a target graph goes into the dataset's default graph,
    // which here is h rather than the unnamed graph
    var parser = new SparqlUpdateParser();
    var cmds = parser.ParseFromString("LOAD <http://www.dotnetrdf.org/configuration#>");
    processor.ProcessCommandSet(cmds);

    Assert.True(g.IsEmpty, "Graph with null URI (normally the default Graph) should be empty as the Default Graph for the Dataset should have been a named Graph so this Graph should not have been filled by the LOAD Command");
    Assert.False(h.IsEmpty, "Graph with name should be non-empty as it should have been the Default Graph for the Dataset and so filled by the LOAD Command");
}
public void SparqlDatasetDefaultGraphManagementWithUpdate2()
{
    // Two named graphs; h is the dataset's default graph but the LOAD
    // names g explicitly, so only g should receive the loaded triples
    var store = new TripleStore();
    var g = new Graph { BaseUri = new Uri("http://example.org/graph") };
    store.Add(g);
    var h = new Graph { BaseUri = new Uri("http://example.org/someOtherGraph") };
    store.Add(h);
    var dataset = new InMemoryDataset(store, h.BaseUri);
    var processor = new LeviathanUpdateProcessor(dataset);

    var parser = new SparqlUpdateParser();
    var cmds = parser.ParseFromString("LOAD <http://www.dotnetrdf.org/configuration#> INTO GRAPH <http://example.org/graph>");
    processor.ProcessCommandSet(cmds);

    Assert.False(g.IsEmpty, "First Graph should not be empty as should have been filled by the LOAD command");
    Assert.True(h.IsEmpty, "Second Graph should be empty as should not have been filled by the LOAD command");
}
/// <summary>
/// Removes the http://purl.org/roh/mirror/foaf#primaryTopic marker triples from the RDF
/// to load, and deletes the triples previously stored in the database for the entities
/// those markers identify.
/// </summary>
/// <param name="pDataGraph">Graph with the data to load</param>
/// <returns>Set of affected graph names</returns>
private HashSet<string> RemovePrimaryTopics(ref RohGraph pDataGraph)
{
    HashSet<string> graphs = new HashSet<string>();
    List<string> mainEntities = new List<string>();

    // Find the subjects flagged as primary topics in the incoming RDF
    // (fixed: removed a redundant .ToString() call on the query string)
    string query = @"select distinct * where{?s <http://purl.org/roh/mirror/foaf#primaryTopic> ""true""^^<http://www.w3.org/2001/XMLSchema#boolean>}";
    SparqlResultSet sparqlResultSet = (SparqlResultSet)pDataGraph.ExecuteQuery(query);
    foreach (SparqlResult sparqlResult in sparqlResultSet.Results)
    {
        mainEntities.Add(sparqlResult["s"].ToString());
    }

    // Delete every stored reference to the main entities; iterating an empty
    // list is a no-op, so no Count guard is needed
    foreach (string mainEntity in mainEntities)
    {
        graphs.UnionWith(DeleteUpdatedEntity(mainEntity));
    }

    // Remove the marker triples themselves so they are never inserted into the database
    {
        TripleStore store = new TripleStore();
        store.Add(pDataGraph);
        SparqlUpdateParser parser = new SparqlUpdateParser();
        SparqlUpdateCommandSet updateSubject = parser.ParseFromString(@" DELETE { ?s ?p ?o. } WHERE { ?s ?p ?o. FILTER(?p =<http://purl.org/roh/mirror/foaf#primaryTopic>) }");
        LeviathanUpdateProcessor processor = new LeviathanUpdateProcessor(store);
        processor.ProcessCommandSet(updateSubject);
    }
    return graphs;
}
public void SparqlDatasetDefaultGraphManagementWithUpdate5()
{
    var store = new TripleStore();
    var g = new Graph { BaseUri = new Uri("http://example.org/graph") };
    store.Add(g);
    var h = new Graph { BaseUri = new Uri("http://example.org/someOtherGraph") };
    store.Add(h);
    var dataset = new InMemoryDataset(store, h.BaseUri);
    var processor = new LeviathanUpdateProcessor(dataset);
    var parser = new SparqlUpdateParser();

    // Command sequence: LOAD into the default graph (h), copy the typed
    // subjects into g, then DELETE them from the default graph
    var cmds = parser.ParseFromString("LOAD <http://www.dotnetrdf.org/configuration#>; WITH <http://example.org/graph> INSERT { ?s a ?type } USING <http://example.org/someOtherGraph> WHERE { ?s a ?type }; DELETE WHERE { ?s a ?type }");
    processor.ProcessCommandSet(cmds);

    Assert.False(g.IsEmpty, "First Graph should not be empty as should have been filled by the INSERT command");
    Assert.False(h.IsEmpty, "Second Graph should not be empty as should not have been filled by the LOAD command");
    Assert.False(h.HasSubGraph(g), "First Graph should not be a subgraph of the Second Graph as the DELETE should have eliminated the subgraph relationship");
}
/// <summary>
/// Runs three CONSTRUCT queries and one INSERT DATA update concurrently
/// (via delegate BeginInvoke) against a shared in-memory dataset to check
/// that Leviathan query and update evaluation is thread safe. The update
/// targets graph 3, so the third query may observe either the pre- or
/// post-update state; it is re-run if it raced ahead of the update.
/// </summary>
private void SparqlQueryAndUpdateThreadSafeEvaluationActual()
{
    String query1 = "CONSTRUCT { ?s ?p ?o } WHERE { GRAPH <http://example.org/1> { ?s ?p ?o } }";
    String query2 = "CONSTRUCT { ?s ?p ?o } WHERE { GRAPH <http://example.org/2> { ?s ?p ?o } }";
    String query3 = "CONSTRUCT { ?s ?p ?o } WHERE { GRAPH <http://example.org/3> { ?s ?p ?o } }";
    String update1 = "INSERT DATA { GRAPH <http://example.org/3> { <ex:subj> <ex:pred> <ex:obj> } }";
    SparqlQuery q1 = this._parser.ParseFromString(query1);
    SparqlQuery q2 = this._parser.ParseFromString(query2);
    SparqlQuery q3 = this._parser.ParseFromString(query3);
    // Queries that name explicit graphs must not use the default dataset
    Assert.False(q1.UsesDefaultDataset, "Query 1 should not be thread safe");
    Assert.False(q2.UsesDefaultDataset, "Query 2 should not be thread safe");
    Assert.False(q3.UsesDefaultDataset, "Query 3 should not be thread safe");
    SparqlUpdateParser parser = new SparqlUpdateParser();
    SparqlUpdateCommandSet cmds = parser.ParseFromString(update1);
    // Build a dataset with three named graphs; graph 3 (i) starts empty
    InMemoryDataset dataset = new InMemoryDataset();
    Graph g = new Graph();
    g.LoadFromEmbeddedResource("VDS.RDF.Configuration.configuration.ttl");
    g.BaseUri = new Uri("http://example.org/1");
    Graph h = new Graph();
    h.LoadFromEmbeddedResource("VDS.RDF.Query.Expressions.LeviathanFunctionLibrary.ttl");
    h.BaseUri = new Uri("http://example.org/2");
    Graph i = new Graph();
    i.BaseUri = new Uri("http://example.org/3");
    dataset.AddGraph(g);
    dataset.AddGraph(h);
    dataset.AddGraph(i);
    LeviathanQueryProcessor processor = new LeviathanQueryProcessor(dataset);
    LeviathanUpdateProcessor upProcessor = new LeviathanUpdateProcessor(dataset);
    // Kick off all four operations in parallel, then block until every one completes
    QueryWithGraphDelegate d = new QueryWithGraphDelegate(this.QueryWithGraph);
    RunUpdateDelegate d2 = new RunUpdateDelegate(this.RunUpdate);
    IAsyncResult r1 = d.BeginInvoke(q1, processor, null, null);
    IAsyncResult r2 = d.BeginInvoke(q2, processor, null, null);
    IAsyncResult r3 = d.BeginInvoke(q3, processor, null, null);
    IAsyncResult r4 = d2.BeginInvoke(cmds, upProcessor, null, null);
    WaitHandle.WaitAll(new WaitHandle[] { r1.AsyncWaitHandle, r2.AsyncWaitHandle, r3.AsyncWaitHandle, r4.AsyncWaitHandle });
    // Graphs 1 and 2 are untouched by the update so must match exactly
    IGraph gQuery = d.EndInvoke(r1);
    Assert.Equal(g, gQuery);
    IGraph hQuery = d.EndInvoke(r2);
    Assert.Equal(h, hQuery);
    IGraph iQuery = d.EndInvoke(r3);
    if (iQuery.IsEmpty)
    {
        Console.WriteLine("Query 3 executed before the INSERT DATA command - running again to get the resulting graph");
        iQuery = this.QueryWithGraph(q3, processor);
    }
    else
    {
        Console.WriteLine("Query 3 executed after the INSERT DATA command");
    }
    //Test iQuery against an empty Graph
    Assert.False(iQuery.IsEmpty, "Graph should not be empty as INSERT DATA should have inserted a Triple");
    Assert.NotEqual(new Graph(), iQuery);
    Assert.NotEqual(g, h);
}
/// <summary>
/// Loads szepmuveszeti.n3 into a triple store, then exercises SELECT,
/// DELETE and INSERT operations against it, printing sample results.
/// </summary>
/// <returns>true when all operations succeed; false on any error.</returns>
private bool test1()
{
    Notation3Parser n3parser = new Notation3Parser();
    try
    {
        // Load using Filename
        n3parser.Load(g, "szepmuveszeti.n3");
    }
    catch (RdfParseException parseEx)
    {
        // Parser error e.g. unexpected character, premature end of input, invalid syntax
        Console.WriteLine("Parser Error");
        Console.WriteLine(parseEx.Message);
        return (false);
    }
    catch (RdfException rdfEx)
    {
        // RDF error e.g. illegal triple for the given syntax, undefined namespace
        Console.WriteLine("RDF Error");
        Console.WriteLine(rdfEx.Message);
        return (false);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
        return (false);
    }

    TripleStore ts = new TripleStore();
    ts.Add(g);

    // Get the Query processor
    LeviathanQueryProcessor processor = new LeviathanQueryProcessor(ts);

    // Prints up to the first five rows of a result set.
    // (fixed: previously indexed Results[0..4] blindly, which threw when the
    // result set contained fewer than five rows)
    void PrintFirstFive(Object results)
    {
        if (results is SparqlResultSet rset)
        {
            for (int idx = 0; idx < Math.Min(5, rset.Count); ++idx)
            {
                Console.WriteLine(rset.Results[idx]);
            }
        }
    }

    try
    {
        SparqlQuery q = new SparqlQueryParser().ParseFromString("PREFIX ecrm:<http://erlangen-crm.org/current/> SELECT ?actor {?actor a ecrm:E39_Actor}");
        Object results = processor.ProcessQuery(q);
        Console.WriteLine("First 5 actor:");
        PrintFirstFive(results);

        // Delete one specific actor and show the query results afterwards
        Console.WriteLine("Delete the first actor:");
        SparqlUpdateCommandSet delete = new SparqlUpdateParser().ParseFromString("PREFIX ecrm:<http://erlangen-crm.org/current/> DELETE {<http://data.szepmuveszeti.hu/id/collections/museum/E39_Actor/f5f86cb4-b308-34b9-a73b-d40d474d735d> a ecrm:E39_Actor}WHERE{}");
        var updateProcessor = new LeviathanUpdateProcessor(ts);
        updateProcessor.ProcessCommandSet(delete);
        PrintFirstFive(processor.ProcessQuery(q));

        // Re-insert the same actor and show the query results again
        SparqlUpdateCommandSet insert = new SparqlUpdateParser().ParseFromString("PREFIX ecrm:<http://erlangen-crm.org/current/> INSERT {<http://data.szepmuveszeti.hu/id/collections/museum/E39_Actor/f5f86cb4-b308-34b9-a73b-d40d474d735d> a ecrm:E39_Actor}WHERE{}");
        updateProcessor = new LeviathanUpdateProcessor(ts);
        updateProcessor.ProcessCommandSet(insert);
        Console.WriteLine("Insert the first actor:");
        PrintFirstFive(processor.ProcessQuery(q));
    }
    catch (Exception e)
    {
        // fixed: this failure was previously swallowed silently
        Console.WriteLine(e.Message);
        return (false);
    }
    return (true);
}
/// <summary>
/// Runs three CONSTRUCT queries and one INSERT DATA update concurrently as
/// tasks against a shared in-memory dataset to check that Leviathan query
/// and update evaluation is thread safe. The update targets graph 3, so the
/// third query may observe either the pre- or post-update state; it is
/// re-run if it raced ahead of the update.
/// </summary>
private void SparqlQueryAndUpdateThreadSafeEvaluationActual()
{
    var query1 = "CONSTRUCT { ?s ?p ?o } WHERE { GRAPH <http://example.org/1> { ?s ?p ?o } }";
    var query2 = "CONSTRUCT { ?s ?p ?o } WHERE { GRAPH <http://example.org/2> { ?s ?p ?o } }";
    var query3 = "CONSTRUCT { ?s ?p ?o } WHERE { GRAPH <http://example.org/3> { ?s ?p ?o } }";
    var update1 = "INSERT DATA { GRAPH <http://example.org/3> { <ex:subj> <ex:pred> <ex:obj> } }";
    var q1 = _parser.ParseFromString(query1);
    var q2 = _parser.ParseFromString(query2);
    var q3 = _parser.ParseFromString(query3);
    // Queries that name explicit graphs must not use the default dataset
    Assert.False(q1.UsesDefaultDataset, "Query 1 should not be thread safe");
    Assert.False(q2.UsesDefaultDataset, "Query 2 should not be thread safe");
    Assert.False(q3.UsesDefaultDataset, "Query 3 should not be thread safe");

    var parser = new SparqlUpdateParser();
    var cmds = parser.ParseFromString(update1);

    // Build a dataset with three named graphs; graph 3 (i) starts empty
    var dataset = new InMemoryDataset();
    var g = new Graph();
    g.LoadFromEmbeddedResource("VDS.RDF.Configuration.configuration.ttl");
    g.BaseUri = new Uri("http://example.org/1");
    var h = new Graph();
    h.LoadFromEmbeddedResource("VDS.RDF.Query.Expressions.LeviathanFunctionLibrary.ttl");
    h.BaseUri = new Uri("http://example.org/2");
    var i = new Graph { BaseUri = new Uri("http://example.org/3") };
    dataset.AddGraph(g);
    dataset.AddGraph(h);
    dataset.AddGraph(i);

    var processor = new LeviathanQueryProcessor(dataset);
    var upProcessor = new LeviathanUpdateProcessor(dataset);

    // Run all four operations in parallel and wait for completion.
    // (fixed: removed the unused QueryWithGraphDelegate/RunUpdateDelegate
    // locals left over from the older BeginInvoke-based implementation)
    var t1 = Task.Factory.StartNew(() => QueryWithGraph(q1, processor));
    var t2 = Task.Factory.StartNew(() => QueryWithGraph(q2, processor));
    var t3 = Task.Factory.StartNew(() => QueryWithGraph(q3, processor));
    var t4 = Task.Factory.StartNew(() => RunUpdate(cmds, upProcessor));
    Task.WaitAll(t1, t2, t3, t4);
    var gQuery = t1.Result;
    var hQuery = t2.Result;
    var iQuery = t3.Result;
    // Graphs 1 and 2 are untouched by the update so must match exactly
    Assert.Equal(g, gQuery);
    Assert.Equal(h, hQuery);
    if (iQuery.IsEmpty)
    {
        _output.WriteLine("Query 3 executed before the INSERT DATA command - running again to get the resulting graph");
        iQuery = QueryWithGraph(q3, processor);
    }
    else
    {
        _output.WriteLine("Query 3 executed after the INSERT DATA command");
    }

    //Test iQuery against an empty Graph
    Assert.False(iQuery.IsEmpty, "Graph should not be empty as INSERT DATA should have inserted a Triple");
    Assert.NotEqual(new Graph(), iQuery);
    Assert.NotEqual(g, h);
}
/// <summary>
/// Tries to load a SPARQL Update Processor based on information from the Configuration Graph
/// </summary>
/// <param name="g">Configuration Graph</param>
/// <param name="objNode">Object Node</param>
/// <param name="targetType">Target Type</param>
/// <param name="obj">Output Object</param>
/// <returns>True if a processor was loaded, false if the required configuration node was missing</returns>
public bool TryLoadObject(IGraph g, INode objNode, Type targetType, out object obj)
{
    obj = null;
    ISparqlUpdateProcessor processor = null;
    INode storeObj;
    Object temp;
    INode propStorageProvider = g.CreateUriNode(UriFactory.Create(ConfigurationLoader.PropertyStorageProvider));
    // Dispatch on the requested processor type name; each branch resolves the
    // configuration node it needs and throws a DotNetRdfConfigurationException
    // when the referenced object has the wrong type
    switch (targetType.FullName)
    {
        // Leviathan: prefers dnr:usingDataset, falls back to dnr:usingStore
        case LeviathanUpdateProcessor:
            INode datasetObj = ConfigurationLoader.GetConfigurationNode(g, objNode, g.CreateUriNode(UriFactory.Create(ConfigurationLoader.PropertyUsingDataset)));
            if (datasetObj != null)
            {
                temp = ConfigurationLoader.LoadObject(g, datasetObj);
                if (temp is ISparqlDataset)
                {
                    processor = new LeviathanUpdateProcessor((ISparqlDataset)temp);
                }
                else
                {
                    throw new DotNetRdfConfigurationException("Unable to load the Leviathan Update Processor identified by the Node '" + objNode.ToString() + "' as the value given for the dnr:usingDataset property points to an Object that cannot be loaded as an object which implements the ISparqlDataset interface");
                }
            }
            else
            {
                // No dataset specified - try an in-memory store instead
                storeObj = ConfigurationLoader.GetConfigurationNode(g, objNode, g.CreateUriNode(UriFactory.Create(ConfigurationLoader.PropertyUsingStore)));
                if (storeObj == null)
                {
                    return (false);
                }
                temp = ConfigurationLoader.LoadObject(g, storeObj);
                if (temp is IInMemoryQueryableStore)
                {
                    processor = new LeviathanUpdateProcessor((IInMemoryQueryableStore)temp);
                }
                else
                {
                    throw new DotNetRdfConfigurationException("Unable to load the Leviathan Update Processor identified by the Node '" + objNode.ToString() + "' as the value given for the dnr:usingStore property points to an Object that cannot be loaded as an object which implements the IInMemoryQueryableStore interface");
                }
            }
            break;
        // Simple: requires dnr:usingStore pointing at an IUpdateableTripleStore
        case SimpleUpdateProcessor:
            storeObj = ConfigurationLoader.GetConfigurationNode(g, objNode, g.CreateUriNode(UriFactory.Create(ConfigurationLoader.PropertyUsingStore)));
            if (storeObj == null)
            {
                return (false);
            }
            temp = ConfigurationLoader.LoadObject(g, storeObj);
            if (temp is IUpdateableTripleStore)
            {
                processor = new SimpleUpdateProcessor((IUpdateableTripleStore)temp);
            }
            else
            {
                throw new DotNetRdfConfigurationException("Unable to load the Simple Update Processor identified by the Node '" + objNode.ToString() + "' as the value given for the dnr:usingStore property points to an Object that cannot be loaded as an object which implements the IUpdateableTripleStore interface");
            }
            break;
        // Generic: requires a storage provider (dnr:storageProvider)
        case GenericUpdateProcessor:
            INode managerObj = ConfigurationLoader.GetConfigurationNode(g, objNode, propStorageProvider);
            if (managerObj == null)
            {
                return (false);
            }
            temp = ConfigurationLoader.LoadObject(g, managerObj);
            if (temp is IStorageProvider)
            {
                processor = new GenericUpdateProcessor((IStorageProvider)temp);
            }
            else
            {
                throw new DotNetRdfConfigurationException("Unable to load the Generic Update Processor identified by the Node '" + objNode.ToString() + "' as the value given for the dnr:genericManager property points to an Object that cannot be loaded as an object which implements the IStorageProvider interface");
            }
            break;
    }
    obj = processor;
    return (processor != null);
}
/// <summary>
/// Processes the results of the discovery stage for one item: either publishes the
/// discovered RDF (when there are no disambiguation doubts) or records the problem /
/// report in the database for an administrator to review.
/// </summary>
/// <param name="pDiscoverItem">Item describing how to process the discovery</param>
/// <param name="pDiscoverResult">Result of applying the discovery</param>
/// <param name="pDiscoverItemBDService">Service managing discovery-task persistence operations</param>
/// <param name="pCallCronApiService">Service for calling the apiCron methods</param>
/// <param name="pCallUrisFactoryApiService">Service for calling the Uris Factory methods</param>
/// <param name="pProcessDiscoverStateJobBDService">Service managing the discovery state of the tasks</param>
/// <returns></returns>
public void Process(DiscoverItem pDiscoverItem, DiscoverResult pDiscoverResult, DiscoverItemBDService pDiscoverItemBDService, CallCronApiService pCallCronApiService, CallUrisFactoryApiService pCallUrisFactoryApiService, ProcessDiscoverStateJobBDService pProcessDiscoverStateJobBDService)
{
    #region Cargamos configuraciones
    // SPARQL endpoint settings for both the SGI and Unidata stores
    ConfigSparql ConfigSparql = new ConfigSparql();
    string SGI_SPARQLEndpoint = ConfigSparql.GetEndpoint();
    string SGI_SPARQLGraph = ConfigSparql.GetGraph();
    string SGI_SPARQLQueryParam = ConfigSparql.GetQueryParam();
    string SGI_SPARQLUsername = ConfigSparql.GetUsername();
    string SGI_SPARQLPassword = ConfigSparql.GetPassword();
    string Unidata_SPARQLEndpoint = ConfigSparql.GetUnidataEndpoint();
    string Unidata_SPARQLGraph = ConfigSparql.GetUnidataGraph();
    string Unidata_SPARQLQueryParam = ConfigSparql.GetUnidataQueryParam();
    string Unidata_SPARQLUsername = ConfigSparql.GetUnidataUsername();
    string Unidata_SPARQLPassword = ConfigSparql.GetUnidataPassword();
    ConfigService ConfigService = new ConfigService();
    string UnidataDomain = ConfigService.GetUnidataDomain();
    string UnidataUriTransform = ConfigService.GetUnidataUriTransform();
    #endregion

    /* Depending on the result obtained, one of the following actions is taken:
     *  - If for some entity there is more than one candidate above the maximum threshold,
     *    or some entity exceeds the minimum threshold but not the maximum, the RDF is
     *    stored in the database together with everything the administrator needs to
     *    decide how to proceed.
     *  - Otherwise:
     *      - The main entities of the RDF are obtained and every triple in the database
     *        where they appear as subject or object is deleted.
     *      - Every triple whose subject and predicate are in the RDF to load and are
     *        marked as single-valued is deleted.
     *      - The triples are dumped into the database.
     */
    if (pDiscoverItem.Publish)
    {
        if (pDiscoverResult.discoveredEntitiesProbability.Count > 0)
        {
            // There are disambiguation doubts, so update the item in the database with the corresponding state
            pDiscoverItem.UpdateDissambiguationProblems(
                pDiscoverResult.discoveredEntitiesProbability,
                pDiscoverResult.reconciliationData.reconciliatedEntitiesWithBBDD.Values.Select(x => x.uri).Union(pDiscoverResult.reconciliationData.reconciliatedEntitiesWithIds.Values).Union(pDiscoverResult.reconciliationData.reconciliatedEntitiesWithSubject).Union(pDiscoverResult.reconciliationData.reconciliatedEntitiesWithExternalIntegration.Values.Select(x => x.uri)).ToList(),
                pDiscoverResult.GetDataGraphRDF());
            pDiscoverItemBDService.ModifyDiscoverItem(pDiscoverItem);
        }
        else
        {
            string urlDiscoverAgent = pCallUrisFactoryApiService.GetUri("Agent", "discover");
            // Create the SameAs links towards Unidata for the entities that do NOT have one yet
            // TODO: uncomment when Unidata is enabled
            //pDiscoverResult.dataGraph = AsioPublication.CreateUnidataSameAs(pDiscoverResult.dataGraph, UnidataDomain, UnidataUriTransform);
            // Publish into the SGI
            AsioPublication asioPublication = new AsioPublication(SGI_SPARQLEndpoint, SGI_SPARQLQueryParam, SGI_SPARQLGraph, SGI_SPARQLUsername, SGI_SPARQLPassword);
            asioPublication.PublishRDF(pDiscoverResult.dataGraph, pDiscoverResult.ontologyGraph, new KeyValuePair<string, string>(urlDiscoverAgent, "Algoritmos de descubrimiento"), pDiscoverResult.start, pDiscoverResult.end, pDiscoverResult.discoverLinkData, pCallUrisFactoryApiService);

            // TODO: person-name logic
            // Cache the normalised name of every person present in the published graph
            SparqlResultSet sparqlResultSet = (SparqlResultSet)pDiscoverResult.dataGraph.ExecuteQuery("select ?s ?name where {?s a <http://purl.org/roh/mirror/foaf#Person>. ?s <http://purl.org/roh/mirror/foaf#name> ?name}");
            foreach (SparqlResult sparqlResult in sparqlResultSet.Results)
            {
                string s = sparqlResult["s"].ToString();
                string nombre = sparqlResult["name"].ToString();
                if (sparqlResult["name"] is ILiteralNode)
                {
                    // Use the bare literal value rather than the full node serialisation
                    nombre = ((ILiteralNode)sparqlResult["name"]).Value;
                }
                _discoverCacheGlobal.PersonsNormalizedNames[s] = DiscoverUtility.NormalizeName(nombre);
            }

            // TODO: title logic
            // Cache the normalised titles grouped by rdf:type, then by title, then subject
            SparqlResultSet sparqlResultSetTitles = (SparqlResultSet)pDiscoverResult.dataGraph.ExecuteQuery("select * where {?s a ?rdftype. ?s <http://purl.org/roh#title> ?title}");
            foreach (SparqlResult sparqlResult in sparqlResultSetTitles.Results)
            {
                string s = sparqlResult["s"].ToString();
                string rdftype = sparqlResult["rdftype"].ToString();
                string title = sparqlResult["title"].ToString();
                if (sparqlResult["title"] is ILiteralNode)
                {
                    title = ((ILiteralNode)sparqlResult["title"]).Value;
                }
                title = DiscoverUtility.NormalizeTitle(title);
                if (!_discoverCacheGlobal.EntitiesNormalizedTitles.ContainsKey(rdftype))
                {
                    _discoverCacheGlobal.EntitiesNormalizedTitles.Add(rdftype, new Dictionary<string, HashSet<string>>());
                }
                if (!_discoverCacheGlobal.EntitiesNormalizedTitles[rdftype].ContainsKey(title))
                {
                    _discoverCacheGlobal.EntitiesNormalizedTitles[rdftype].Add(title, new HashSet<string>());
                }
                _discoverCacheGlobal.EntitiesNormalizedTitles[rdftype][title].Add(s);
            }

            // TODO: uncomment when Unidata is enabled (the guard below is intentionally dead code)
            if (false)
            {
                // Publish into UNIDATA
                AsioPublication asioPublicationUnidata = new AsioPublication(Unidata_SPARQLEndpoint, Unidata_SPARQLQueryParam, Unidata_SPARQLGraph, Unidata_SPARQLUsername, Unidata_SPARQLPassword);
                // Prepare the graph for loading into Unidata: take the Unidata URIs from the
                // SameAs links, apply them to the subjects, and add the old subjects to the SameAs
                RohGraph unidataGraph = AsioPublication.TransformUrisToUnidata(pDiscoverResult.dataGraph, UnidataDomain, UnidataUriTransform);
                // Remove the crisIdentifier triples since they must not be dumped into Unidata
                {
                    TripleStore store = new TripleStore();
                    store.Add(unidataGraph);
                    SparqlUpdateParser parser = new SparqlUpdateParser();
                    // Update the subjects
                    SparqlUpdateCommandSet updateSubject = parser.ParseFromString(@" DELETE { ?s ?p ?o. } WHERE { ?s ?p ?o. FILTER(?p =<" + mPropertySGIRohCrisIdentifier + @">) }");
                    LeviathanUpdateProcessor processor = new LeviathanUpdateProcessor(store);
                    processor.ProcessCommandSet(updateSubject);
                }
                asioPublicationUnidata.PublishRDF(unidataGraph, pDiscoverResult.ontologyGraph, new KeyValuePair<string, string>(urlDiscoverAgent, "Algoritmos de descubrimiento"), pDiscoverResult.start, pDiscoverResult.end, pDiscoverResult.discoverLinkData, pCallUrisFactoryApiService);
            }
            // Mark it as processed in the database and delete its metadata
            pDiscoverItem.UpdateProcessed();
            pDiscoverItemBDService.ModifyDiscoverItem(pDiscoverItem);
        }

        // Update the task's discovery state when the queued job has finished (Succeeded or Failed)
        // TODO: replace with a database query
        string statusQueueJob = pCallCronApiService.GetJob(pDiscoverItem.JobID).State;
        if ((statusQueueJob == "Failed" || statusQueueJob == "Succeeded"))
        {
            ProcessDiscoverStateJob processDiscoverStateJob = pProcessDiscoverStateJobBDService.GetProcessDiscoverStateJobByIdJob(pDiscoverItem.JobID);
            string state;
            // 'Error' when items exist in an error state or with disambiguation problems
            if (pDiscoverItemBDService.ExistsDiscoverItemsErrorOrDissambiguatinProblems(pDiscoverItem.JobID))
            {
                state = "Error";
            }
            else if (pDiscoverItemBDService.ExistsDiscoverItemsPending(pDiscoverItem.JobID))
            {
                // 'Pending' while there are still pending items
                state = "Pending";
            }
            else
            {
                // 'Success' when there are no error/disambiguation items and none pending
                state = "Success";
            }
            if (processDiscoverStateJob != null)
            {
                processDiscoverStateJob.State = state;
                pProcessDiscoverStateJobBDService.ModifyProcessDiscoverStateJob(processDiscoverStateJob);
            }
            else
            {
                processDiscoverStateJob = new ProcessDiscoverStateJob() { State = state, JobId = pDiscoverItem.JobID };
                pProcessDiscoverStateJobBDService.AddProcessDiscoverStateJob(processDiscoverStateJob);
            }
        }
    }
    else
    {
        // Not publishing: update the item in the database with a discovery report
        DiscoverItem discoverItemBD = pDiscoverItemBDService.GetDiscoverItemById(pDiscoverItem.ID);
        // Discovery report
        string discoverReport = "Time processed (seconds): " + pDiscoverResult.secondsProcessed + "\n";
        if (pDiscoverResult.reconciliationData.reconciliatedEntitiesWithSubject != null && pDiscoverResult.reconciliationData.reconciliatedEntitiesWithSubject.Count > 0)
        {
            discoverReport += "Entities discover with the same uri: " + pDiscoverResult.reconciliationData.reconciliatedEntitiesWithSubject.Count + "\n";
            foreach (string uri in pDiscoverResult.reconciliationData.reconciliatedEntitiesWithSubject)
            {
                discoverReport += "\t" + uri + "\n";
            }
        }
        if (pDiscoverResult.reconciliationData.reconciliatedEntitiesWithIds != null && pDiscoverResult.reconciliationData.reconciliatedEntitiesWithIds.Count > 0)
        {
            discoverReport += "Entities discover with some common identifier: " + pDiscoverResult.reconciliationData.reconciliatedEntitiesWithIds.Count + "\n";
            foreach (string uri in pDiscoverResult.reconciliationData.reconciliatedEntitiesWithIds.Keys)
            {
                discoverReport += "\t" + uri + " --> " + pDiscoverResult.reconciliationData.reconciliatedEntitiesWithIds[uri] + "\n";
            }
        }
        if (pDiscoverResult.reconciliationData.reconciliatedEntitiesWithBBDD != null && pDiscoverResult.reconciliationData.reconciliatedEntitiesWithBBDD.Count > 0)
        {
            discoverReport += "Entities discover with reconciliation config: " + pDiscoverResult.reconciliationData.reconciliatedEntitiesWithBBDD.Count + "\n";
            foreach (string uri in pDiscoverResult.reconciliationData.reconciliatedEntitiesWithBBDD.Keys)
            {
                discoverReport += "\t" + uri + " --> " + pDiscoverResult.reconciliationData.reconciliatedEntitiesWithBBDD[uri] + "\n";
            }
        }
        if (pDiscoverResult.reconciliationData.reconciliatedEntitiesWithExternalIntegration != null && pDiscoverResult.reconciliationData.reconciliatedEntitiesWithExternalIntegration.Count > 0)
        {
            discoverReport += "Entities discover with external integrations config: " + pDiscoverResult.reconciliationData.reconciliatedEntitiesWithExternalIntegration.Count + "\n";
            foreach (string uri in pDiscoverResult.reconciliationData.reconciliatedEntitiesWithExternalIntegration.Keys)
            {
                discoverReport += "\t" + uri + " --> " + pDiscoverResult.reconciliationData.reconciliatedEntitiesWithExternalIntegration[uri] + "\n";
            }
        }
        if (pDiscoverResult.discoverLinkData != null && pDiscoverResult.discoverLinkData.entitiesProperties != null && pDiscoverResult.discoverLinkData.entitiesProperties.Count > 0)
        {
            discoverReport += "Entities with identifiers obtained with External integration: " + pDiscoverResult.discoverLinkData.entitiesProperties.Count + "\n";
            foreach (string uri in pDiscoverResult.discoverLinkData.entitiesProperties.Keys)
            {
                foreach (DiscoverLinkData.PropertyData property in pDiscoverResult.discoverLinkData.entitiesProperties[uri])
                {
                    foreach (string value in property.valueProvenance.Keys)
                    {
                        foreach (string provenance in property.valueProvenance[value])
                        {
                            discoverReport += "\t" + uri + " - " + property.property + " - " + value + " --> " + provenance + "\n";
                        }
                    }
                }
            }
        }
        discoverItemBD.UpdateReport(pDiscoverResult.discoveredEntitiesProbability, pDiscoverResult.GetDataGraphRDF(), discoverReport);
        pDiscoverItemBDService.ModifyDiscoverItem(discoverItemBD);
    }
}
/// <summary>
/// Applies the discovery process to the entities already loaded in the SGI.
/// For every person it loads their data graph, runs the external integration
/// (Scopus, Crossref, WOS...), removes triples that are already present in the
/// SGI, and publishes the remaining new triples (SGI now, Unidata when enabled).
/// </summary>
/// <param name="pSecondsSleep">Seconds to sleep after processing each entity</param>
/// <param name="pCallUrisFactoryApiService">Service used to call the Uris Factory API methods</param>
public void ApplyDiscoverLoadedEntities(int pSecondsSleep, CallUrisFactoryApiService pCallUrisFactoryApiService)
{
    CallEtlApiService callEtlApiService = _serviceScopeFactory.CreateScope().ServiceProvider.GetRequiredService<CallEtlApiService>();

    #region Load configuration
    ConfigSparql ConfigSparql = new ConfigSparql();
    string SGI_SPARQLEndpoint = ConfigSparql.GetEndpoint();
    string SGI_SPARQLGraph = ConfigSparql.GetGraph();
    string SGI_SPARQLQueryParam = ConfigSparql.GetQueryParam();
    string SGI_SPARQLUsername = ConfigSparql.GetUsername();
    string SGI_SPARQLPassword = ConfigSparql.GetPassword();
    string Unidata_SPARQLEndpoint = ConfigSparql.GetUnidataEndpoint();
    string Unidata_SPARQLGraph = ConfigSparql.GetUnidataGraph();
    string Unidata_SPARQLQueryParam = ConfigSparql.GetUnidataQueryParam();
    string Unidata_SPARQLUsername = ConfigSparql.GetUnidataUsername();
    string Unidata_SPARQLPassword = ConfigSparql.GetUnidataPassword();
    ConfigService ConfigService = new ConfigService();
    string UnidataDomain = ConfigService.GetUnidataDomain();
    string UnidataUriTransform = ConfigService.GetUnidataUriTransform();
    float MaxScore = ConfigService.GetMaxScore();
    float MinScore = ConfigService.GetMinScore();
    ConfigScopus ConfigScopus = new ConfigScopus();
    string ScopusApiKey = ConfigScopus.GetScopusApiKey();
    string ScopusUrl = ConfigScopus.GetScopusUrl();
    ConfigCrossref ConfigCrossref = new ConfigCrossref();
    string CrossrefUserAgent = ConfigCrossref.GetCrossrefUserAgent();
    ConfigWOS ConfigWOS = new ConfigWOS();
    string WOSAuthorization = ConfigWOS.GetWOSAuthorization();
    #endregion

    DiscoverUtility discoverUtility = new DiscoverUtility();

    // Load every person and process them in a random order.
    List<string> personList = discoverUtility.GetPersonList(SGI_SPARQLEndpoint, SGI_SPARQLGraph, SGI_SPARQLQueryParam, SGI_SPARQLUsername, SGI_SPARQLPassword);
    List<string> randomPersonList = GetRandomOrderList(personList);
    RohGraph ontologyGraph = callEtlApiService.CallGetOntology();
    foreach (string person in randomPersonList)
    {
        try
        {
            // Execution start time (used as provenance data when publishing).
            DateTime startTime = DateTime.Now;

            // Load the data graph of a single person.
            RohGraph dataGraph = discoverUtility.GetDataGraphPersonLoadedForDiscover(person, SGI_SPARQLEndpoint, SGI_SPARQLGraph, SGI_SPARQLQueryParam, SGI_SPARQLUsername, SGI_SPARQLPassword);

            // Clone the original graph to check later which triples are really new.
            RohGraph originalDataGraph = dataGraph.Clone();

            RohRdfsReasoner reasoner = new RohRdfsReasoner();
            reasoner.Initialise(ontologyGraph);

            RohGraph dataInferenceGraph = dataGraph.Clone();
            reasoner.Apply(dataInferenceGraph);

            bool hasChanges = false;

            Dictionary<string, Dictionary<string, float>> discoveredEntitiesProbability = new Dictionary<string, Dictionary<string, float>>();
            Dictionary<string, ReconciliationData.ReconciliationScore> entidadesReconciliadasConIntegracionExternaAux;
            Dictionary<string, HashSet<string>> discardDissambiguations = new Dictionary<string, HashSet<string>>();
            DiscoverCache discoverCache = new DiscoverCache();
            DiscoverCacheGlobal discoverCacheGlobal = new DiscoverCacheGlobal();

            // Run the external integration (Scopus, Crossref, WOS...).
            ReconciliationData reconciliationData = new ReconciliationData();
            DiscoverLinkData discoverLinkData = new DiscoverLinkData();
            Dictionary<string, List<DiscoverLinkData.PropertyData>> integration = discoverUtility.ExternalIntegration(ref hasChanges, ref reconciliationData, ref discoverLinkData, ref discoveredEntitiesProbability, ref dataGraph, reasoner, null, ontologyGraph, out entidadesReconciliadasConIntegracionExternaAux, discardDissambiguations, discoverCache, discoverCacheGlobal, ScopusApiKey, ScopusUrl, CrossrefUserAgent, WOSAuthorization, MinScore, MaxScore, SGI_SPARQLEndpoint, SGI_SPARQLGraph, SGI_SPARQLQueryParam, SGI_SPARQLUsername, SGI_SPARQLPassword, pCallUrisFactoryApiService, false);

            // Clean 'integration' so we do not insert triples that are already loaded.
            foreach (string entity in integration.Keys.ToList())
            {
                foreach (DiscoverLinkData.PropertyData propertyData in integration[entity].ToList())
                {
                    string p = propertyData.property;
                    HashSet<string> objetos = new HashSet<string>(propertyData.valueProvenance.Keys.ToList());
                    foreach (string o in objetos)
                    {
                        if (((SparqlResultSet)originalDataGraph.ExecuteQuery($@"ASK WHERE {{ ?s ?p ?o. FILTER(?s=<{entity}>) FILTER(?p=<{p}>) FILTER(str(?o)='{o}') }}")).Result)
                        {
                            // Remove the value because it was already loaded.
                            propertyData.valueProvenance.Remove(o);
                        }
                    }
                    if (propertyData.valueProvenance.Count == 0)
                    {
                        integration[entity].Remove(propertyData);
                    }
                }
                if (integration[entity].Count == 0)
                {
                    integration.Remove(entity);
                }
            }

            // Build a graph with the content of 'integration' + rdf:type + owl:sameAs triples.
            RohGraph dataGraphIntegration = new RohGraph();
            foreach (string sujeto in integration.Keys)
            {
                IUriNode s = dataGraphIntegration.CreateUriNode(UriFactory.Create(sujeto));

                // Copy the rdf:type and owl:sameAs triples of the entity.
                // BUGFIX: the OPTIONAL bound '?sameAS' while the SELECT asked for '?sameas';
                // SPARQL variables are case-sensitive, so the sameAs values were never returned.
                SparqlResultSet sparqlResultSet = (SparqlResultSet)dataGraph.ExecuteQuery("select ?rdftype ?sameas where {?s a ?rdftype. OPTIONAL{?s <http://www.w3.org/2002/07/owl#sameAs> ?sameas} FILTER(?s=<" + sujeto + ">)}");
                foreach (SparqlResult sparqlResult in sparqlResultSet.Results)
                {
                    string rdfType = sparqlResult["rdftype"].ToString();
                    IUriNode pRdfType = dataGraphIntegration.CreateUriNode(UriFactory.Create("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"));
                    IUriNode oRdfType = dataGraphIntegration.CreateUriNode(UriFactory.Create(rdfType));
                    dataGraphIntegration.Assert(new Triple(s, pRdfType, oRdfType));
                    if (sparqlResult.Variables.Contains("sameas"))
                    {
                        string sameas = sparqlResult["sameas"].ToString();
                        IUriNode pSameAs = dataGraphIntegration.CreateUriNode(UriFactory.Create("http://www.w3.org/2002/07/owl#sameAs"));
                        IUriNode oSameAs = dataGraphIntegration.CreateUriNode(UriFactory.Create(sameas));
                        dataGraphIntegration.Assert(new Triple(s, pSameAs, oSameAs));
                    }
                }

                foreach (DiscoverLinkData.PropertyData propertyData in integration[sujeto])
                {
                    foreach (string valor in propertyData.valueProvenance.Keys)
                    {
                        IUriNode p = dataGraphIntegration.CreateUriNode(UriFactory.Create(propertyData.property));
                        if (Uri.IsWellFormedUriString(valor, UriKind.Absolute))
                        {
                            // BUGFIX: the object node was built from the property URI instead of
                            // the value, which asserted '<s> <p> <p>' and dropped the value.
                            IUriNode uriNode = dataGraphIntegration.CreateUriNode(UriFactory.Create(valor));
                            dataGraphIntegration.Assert(new Triple(s, p, uriNode));
                        }
                        else
                        {
                            ILiteralNode literalNode = dataGraphIntegration.CreateLiteralNode(valor, new Uri("http://www.w3.org/2001/XMLSchema#string"));
                            dataGraphIntegration.Assert(new Triple(s, p, literalNode));
                        }
                        foreach (string org in propertyData.valueProvenance[valor])
                        {
                            // Add the data of the provenance organizations and their graphs.
                            SparqlResultSet sparqlResultSetOrgs = (SparqlResultSet)dataGraph.ExecuteQuery("select ?s ?p ?o where {?s ?p ?o. FILTER(?s in(<" + pCallUrisFactoryApiService.GetUri("http://purl.org/roh/mirror/foaf#Organization", org) + ">,<" + pCallUrisFactoryApiService.GetUri("Graph", org) + "> ))}");
                            foreach (SparqlResult sparqlResult in sparqlResultSetOrgs.Results)
                            {
                                INode sOrg = dataGraphIntegration.CreateUriNode(UriFactory.Create(sparqlResult["s"].ToString()));
                                INode pOrg = dataGraphIntegration.CreateUriNode(UriFactory.Create(sparqlResult["p"].ToString()));
                                if (sparqlResult["o"] is UriNode)
                                {
                                    INode oOrg = dataGraphIntegration.CreateUriNode(UriFactory.Create(sparqlResult["o"].ToString()));
                                    dataGraphIntegration.Assert(new Triple(sOrg, pOrg, oOrg));
                                }
                                else if (sparqlResult["o"] is LiteralNode)
                                {
                                    INode oOrg = dataGraphIntegration.CreateLiteralNode(((LiteralNode)sparqlResult["o"]).Value, ((LiteralNode)sparqlResult["o"]).DataType);
                                    dataGraphIntegration.Assert(new Triple(sOrg, pOrg, oOrg));
                                }
                            }
                        }
                    }
                }
            }

            // Execution end time.
            DateTime endTime = DateTime.Now;

            if (integration.Count > 0)
            {
                // There is new data: load it.
                string urlDiscoverAgent = pCallUrisFactoryApiService.GetUri("Agent", "discover");

                // Publish in the SGI.
                AsioPublication asioPublication = new AsioPublication(SGI_SPARQLEndpoint, SGI_SPARQLQueryParam, SGI_SPARQLGraph, SGI_SPARQLUsername, SGI_SPARQLPassword);
                asioPublication.PublishRDF(dataGraphIntegration, null, new KeyValuePair<string, string>(urlDiscoverAgent, "Algoritmos de descubrimiento"), startTime, endTime, discoverLinkData, pCallUrisFactoryApiService);

                // Prepare the data to load it into Unidata.
                RohGraph unidataGraph = dataGraphIntegration.Clone();

                #region Drop every entity without a sameAs pointing to Unidata: it must not be loaded there
                // BUGFIX: same '?sameAS' / '?sameas' case mismatch as above — the Unidata
                // sameAs links were never detected, so every entity was deleted.
                SparqlResultSet sparqlResultSet = (SparqlResultSet)unidataGraph.ExecuteQuery("select ?s ?rdftype ?sameas where {?s a ?rdftype. OPTIONAL{?s <http://www.w3.org/2002/07/owl#sameAs> ?sameas} }");
                Dictionary<string, bool> entidadesConSameAsUnidata = new Dictionary<string, bool>();
                foreach (SparqlResult sparqlResult in sparqlResultSet.Results)
                {
                    string s = sparqlResult["s"].ToString();
                    if (!entidadesConSameAsUnidata.ContainsKey(s))
                    {
                        entidadesConSameAsUnidata.Add(s, false);
                    }
                    if (sparqlResult.Variables.Contains("sameas"))
                    {
                        if (sparqlResult["sameas"].ToString().StartsWith(UnidataDomain))
                        {
                            entidadesConSameAsUnidata[s] = true;
                        }
                    }
                }
                TripleStore store = new TripleStore();
                store.Add(unidataGraph);
                foreach (string entity in entidadesConSameAsUnidata.Keys)
                {
                    if (!entidadesConSameAsUnidata[entity])
                    {
                        // Delete every triple of the entity from the Unidata graph.
                        SparqlUpdateParser parser = new SparqlUpdateParser();
                        SparqlUpdateCommandSet delete = parser.ParseFromString(@"DELETE { ?s ?p ?o. } WHERE { ?s ?p ?o. FILTER(?s = <" + entity + @">) }");
                        LeviathanUpdateProcessor processor = new LeviathanUpdateProcessor(store);
                        processor.ProcessCommandSet(delete);
                    }
                }
                #endregion

                //TODO uncomment when Unidata is enabled
                ////If there are triples to load in Unidata, proceed
                //if (unidataGraph.Triples.ToList().Count > 0)
                //{
                //    //Publish in UNIDATA
                //    AsioPublication asioPublicationUnidata = new AsioPublication(Unidata_SPARQLEndpoint, Unidata_SPARQLQueryParam, Unidata_SPARQLGraph, Unidata_SPARQLUsername, Unidata_SPARQLPassword);
                //    //Prepare the graph for its load into Unidata: take the Unidata URIs from the sameAs, apply them to the subjects and add the old subjects to the sameAs
                //    unidataGraph = AsioPublication.TransformUrisToUnidata(unidataGraph, UnidataDomain, UnidataUriTransform);
                //    asioPublicationUnidata.PublishRDF(unidataGraph, null, new KeyValuePair<string, string>(urlDiscoverAgent, "Algoritmos de descubrimiento"), startTime, endTime, discoverLinkData, pCallUrisFactoryApiService);
                //}
            }
        }
        catch (Exception exception)
        {
            Logging.Error(exception);
        }
        // Sleep between entities to avoid hammering the endpoints.
        Thread.Sleep(pSecondsSleep * 1000);
    }
}
/// <summary>
/// Applies the user's resolution of the disambiguation problems of a discover
/// item, rewrites the item's RDF accordingly and re-queues it for publication.
/// </summary>
/// <param name="idJob">Identifier of the job the item belongs to</param>
/// <param name="IdDiscoverItem">Identifier of the discover item (GUID string)</param>
/// <param name="DissambiguationProblemsResolve">Map from the original entity URI to the URI chosen by the user (null/empty when no candidate was chosen)</param>
/// <returns>Redirect to the job detail page</returns>
public IActionResult ResolveDiscover(string idJob, string IdDiscoverItem, Dictionary<string, string> DissambiguationProblemsResolve)
{
    DiscoverItem item = _discoverItemService.GetDiscoverItemById(new Guid(IdDiscoverItem));

    // Load the item's RDF.
    RohGraph dataGraph = new RohGraph();
    dataGraph.LoadFromString(item.DiscoverRdf, new RdfXmlParser());

    // Put the graph in a triple store so SPARQL Updates can be applied to it.
    TripleStore store = new TripleStore();
    store.Add(dataGraph);

    // Replace each candidate URI with the entity ID chosen by the user.
    foreach (string uriOriginal in DissambiguationProblemsResolve.Keys)
    {
        string uriResuelta = DissambiguationProblemsResolve[uriOriginal];
        if (string.IsNullOrEmpty(uriResuelta))
        {
            // No candidate was selected for this entity.
            continue;
        }
        if (!string.IsNullOrEmpty(_unidataPrefix.GetUnidataDomain()) && uriResuelta.StartsWith(_unidataPrefix.GetUnidataDomain()))
        {
            // The resolution is a Unidata URI: just add an owl:sameAs link.
            IUriNode t_subject = dataGraph.CreateUriNode(UriFactory.Create(uriOriginal));
            IUriNode t_predicate = dataGraph.CreateUriNode(UriFactory.Create("http://www.w3.org/2002/07/owl#sameAs"));
            IUriNode t_object = dataGraph.CreateUriNode(UriFactory.Create(uriResuelta));
            dataGraph.Assert(new Triple(t_subject, t_predicate, t_object));
        }
        else
        {
            // The resolution is NOT a Unidata URI: rewrite the URIs in the graph.
            SparqlUpdateParser parser = new SparqlUpdateParser();
            // Rewrite triples where the original URI is the subject.
            SparqlUpdateCommandSet updateSubject = parser.ParseFromString(@"DELETE { ?s ?p ?o. } INSERT{<" + uriResuelta + @"> ?p ?o.} WHERE { ?s ?p ?o. FILTER(?s = <" + uriOriginal + @">) }");
            // Rewrite triples where the original URI is the object.
            SparqlUpdateCommandSet updateObject = parser.ParseFromString(@"DELETE { ?s ?p ?o. } INSERT{?s ?p <" + uriResuelta + @">.} WHERE { ?s ?p ?o. FILTER(?o = <" + uriOriginal + @">) }");
            LeviathanUpdateProcessor processor = new LeviathanUpdateProcessor(store);
            processor.ProcessCommandSet(updateSubject);
            processor.ProcessCommandSet(updateObject);
        }
    }

    // Serialize the modified graph back to RDF/XML.
    using System.IO.StringWriter sw = new System.IO.StringWriter();
    RdfXmlWriter rdfXmlWriter = new RdfXmlWriter();
    rdfXmlWriter.Save(dataGraph, sw);
    string rdfXml = sw.ToString();

    // The stream is handed to the FormFile and consumed downstream, so it is not disposed here.
    Stream stream = new MemoryStream(Encoding.UTF8.GetBytes(rdfXml));
    FormFile file = new FormFile(stream, 0, stream.Length, "rdfFile", "rdf.xml");

    // Collect the discards: a problem resolved with an explicit null means the
    // user rejected every candidate for that entity.
    // NOTE(review): only null (not empty string) marks a discard here, while the
    // loop above treats null and empty string the same — confirm the form always
    // posts null for discards.
    Dictionary<string, List<string>> discards = new Dictionary<string, List<string>>();
    foreach (DiscoverItem.DiscoverDissambiguation dissambiguation in item.DissambiguationProblems)
    {
        if (DissambiguationProblemsResolve.ContainsKey(dissambiguation.IDOrigin) && DissambiguationProblemsResolve[dissambiguation.IDOrigin] == null)
        {
            discards.Add(dissambiguation.IDOrigin, dissambiguation.DissambiguationCandiates.Select(x => x.IDCandidate).ToList());
        }
    }

    // Update the item and mark it as pending again.
    item.UpdateDissambiguationDiscards(discards, rdfXml);
    item.DiscoverRdf = rdfXml;
    item.Status = "Pending";
    _discoverItemService.ModifyDiscoverItem(item);

    // Re-queue the corrected item along with its identifier.
    _callEDtlPublishService.CallDataPublish(file, idJob, false, IdDiscoverItem);

    return RedirectToAction("DetailsJob", "Job", new { id = idJob });
}