void StatementSource.Select(StatementSink sink) { foreach (IStatement statement in Graph.Select(null, null, null)) { sink.Add(Mapper.fromKE(statement)); } }
void SelectableSource.Select(Statement template, StatementSink sink) { foreach (IStatement statement in Graph.Select(Mapper.toKE(template.Subject), Mapper.toKE(template.Predicate), Mapper.toKE(template.Object))) { sink.Add(Mapper.fromKE(statement)); } }
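The two Select methods above stream statements into a StatementSink. A minimal sketch of the consumer side of that contract, assuming only the StatementSink interface and Statement type these snippets rely on; returning false from Add asks the source to stop streaming, which many of the Select implementations below honor via "if (!sink.Add(stmt)) return;".

using SemWeb;

// Hypothetical example sink; not part of SemWeb itself.
class CountingSink : StatementSink
{
    readonly int limit;
    int count;

    public CountingSink(int limit) { this.limit = limit; }

    public int Count { get { return count; } }

    public bool Add(Statement s)
    {
        count++;
        System.Console.WriteLine(s);
        return count < limit; // false once the limit is reached
    }
}

Note that the two Select methods above ignore the return value of sink.Add, so a cancelling sink like this one only stops sources that actually check it.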
public bool Add(Statement s) { if (s.AnyNull) { throw new ArgumentNullException(); } if (domran == 1 && !(s.Object is Entity)) { return(true); } if (!map.ContainsKey(s.Predicate)) { return(true); // shouldn't really happen } foreach (Entity e in (ResSet)map[s.Predicate]) { Statement s1 = new Statement( domran == 0 ? s.Subject : (Entity)s.Object, type, e, s.Meta); if (!sink.Add(s1)) { return(false); } } return(true); }
public void Select(Statement template, StatementSink sink) { // extract the fields for easy access Entity subj = template.Subject; Entity pred = template.Predicate; Resource obj = template.Object; // convert the SemWeb fields to the RDFQuery fields Uri s; string p, o; rdf_to_beagle_hook(subj, pred, obj, out s, out p, out o); RDFQuery query = new RDFQuery(s, p, o); RDFQueryResult result = (RDFQueryResult)query.Send(); foreach (Hit hit in result.Hits) { Entity subject = new Entity(hit.Uri.ToString()); //FIXME: Do we have to use strings here? foreach (Property prop in hit.Properties) { Entity predicate = BeaglePropertyToEntity(prop.Type, prop.Key); Resource _object; property_to_rdf_hook(prop, out _object); // now create the statement and add it to the result Statement st = new Statement(subject, predicate, _object); sink.Add(st); } } }
public static void AddLiteral(StatementSink sink, string predicate, string type, SemWeb.Literal value) { Entity empty = new BNode(); Statement top = new Statement(FSpotXMPBase, (Entity)MetadataStore.Namespaces.Resolve(predicate), empty); Statement desc = new Statement(empty, (Entity)MetadataStore.Namespaces.Resolve("rdf:type"), (Entity)MetadataStore.Namespaces.Resolve(type)); sink.Add(desc); Statement literal = new Statement(empty, (Entity)MetadataStore.Namespaces.Resolve("rdf:li"), value); sink.Add(literal); sink.Add(top); }
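A hypothetical call site for the AddLiteral helper above; "sink" and the string value are placeholders, while the dc:description / rdf:Alt pairing mirrors the ImageDescription handling in the SelectDirectory snippet further down.

// Hypothetical caller; the sink and caption are placeholders.
static void DescribePhoto(StatementSink sink)
{
    MetadataStore.AddLiteral(sink, "dc:description", "rdf:Alt",
                             new SemWeb.Literal("A sample caption", "x-default", null));
}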
public bool Add(Statement s) { if (domran == 1 && !(s.Object is Entity)) { return(true); } ResSet rs = (ResSet)table[s.Predicate]; if (rs == null) { return(true); } foreach (Entity e in RDFS.GetClosure(rs, superclasses, true)) { Statement s1 = new Statement( domran == 0 ? s.Subject : (Entity)s.Object, type, e, s.Meta); if (!sink.Add(s1)) { return(false); } } return(true); }
/// <summary> /// Selects all statements from this source and streams them into the given Sink /// </summary> /// <param name="sink">Statement Sink</param> public void Select(StatementSink sink) { foreach (IGraph g in this._store.Graphs) { //Get the Hash Code of the Graphs URI and create a new empty mapping if necessary Entity graphUri; int hash; if (g.BaseUri == null) { graphUri = new Entity(GraphCollection.DefaultGraphUri); hash = new Uri(GraphCollection.DefaultGraphUri).GetEnhancedHashCode(); } else { graphUri = new Entity(g.BaseUri.ToString()); hash = g.BaseUri.GetEnhancedHashCode(); } SemWebMapping mapping = this.GetMapping(hash, g); foreach (Triple t in g.Triples) { Statement stmt = SemWebConverter.ToSemWeb(t, mapping); stmt.Meta = graphUri; if (!sink.Add(stmt)) { return; } } } }
void WriteGraph(RdfGraph graph, RdfSourceWrapper sourcewrapper, StatementSink sink) { java.util.Iterator iter = graph.iterator(); while (iter.hasNext()) { GraphStatement stmt = (GraphStatement)iter.next(); Statement s; if (stmt is GraphStatementWrapper) { s = ((GraphStatementWrapper)stmt).s; } else { s = new Statement( sourcewrapper.ToEntity(stmt.getSubject()), sourcewrapper.ToEntity(stmt.getPredicate()), sourcewrapper.ToResource(stmt.getObject()), stmt.getGraphName() == null ? Statement.DefaultMeta : sourcewrapper.ToEntity(stmt.getGraphName())); } if (s.AnyNull) { continue; // unbound variable, or literal in bad position } sink.Add(s); } }
/// <summary> /// Selects Statements that match a given Template /// </summary> /// <param name="template">Statement Template</param> /// <param name="sink">Sink</param> public void Select(Statement template, StatementSink sink) { //Implement as a SPARQL SELECT List <ITriplePattern> patterns = this.TemplateToTriplePatterns(template); StringBuilder query = new StringBuilder(); query.AppendLine("SELECT * WHERE {"); foreach (ITriplePattern pattern in patterns) { query.AppendLine(pattern.ToString() + "."); } query.AppendLine("}"); //Get the Results Object results = this._store.ExecuteQuery(query.ToString()); if (results is SparqlResultSet) { SparqlResultSet rset = (SparqlResultSet)results; foreach (SparqlResult r in rset) { Entity s = (template.Subject != null) ? template.Subject : SemWebConverter.ToSemWebEntity(r["s"], this._mapping); Entity p = (template.Predicate != null) ? template.Predicate : SemWebConverter.ToSemWebEntity(r["p"], this._mapping); Resource o = (template.Object != null) ? template.Object : SemWebConverter.ToSemWeb(r["o"], this._mapping); Statement stmt = new Statement(s, p, o); //Keep returning stuff until the sink tells us to stop if (!sink.Add(stmt)) { return; } } } }
public void Select(Statement template, StatementSink result) { StatementList source = statements; // The first time select is called, turn indexing on for the store. // TODO: Perform this index in a background thread if there are a lot // of statements. if (!isIndexed && allowIndexing) { isIndexed = true; for (int i = 0; i < StatementCount; i++) { Statement statement = this[i]; GetIndexArray(statementsAboutSubject, statement.Subject).Add(statement); GetIndexArray(statementsAboutObject, statement.Object).Add(statement); } } if (template.Subject != null) { ShorterList(ref source, GetIndexArray(statementsAboutSubject, template.Subject)); } else if (template.Object != null) { ShorterList(ref source, GetIndexArray(statementsAboutObject, template.Object)); } if (source == null) { return; } bool isRdfsMemberPredicate = (template.Predicate != null && template.Predicate.Uri != null && template.Predicate.Uri == NS.RDFS + "member"); if (isRdfsMemberPredicate) { template.Predicate = null; } for (int i = 0; i < source.Count; i++) { Statement statement = source[i]; if (!template.Matches(statement)) { continue; } if (isRdfsMemberPredicate && (statement.Predicate.Uri == null || !statement.Predicate.Uri.StartsWith(rdfli))) { continue; } if (!result.Add(statement)) { return; } } }
public static void AddLiteral(StatementSink sink, string predicate, string value) { Statement stmt = new Statement(FSpotXMPBase, (Entity)MetadataStore.Namespaces.Resolve(predicate), new SemWeb.Literal(value)); sink.Add(stmt); }
private void Add(StatementSink store, Statement statement, Location position) { try { store.Add(statement); } catch (Exception e) { OnError("Add failed on statement { " + statement + " }: " + e.Message, position, e); } }
public bool Add(Statement s) { if (b.Contains(s)) { return(true); } return(c.Add(s)); }
public bool Add(Statement s) { if (filters != null && filters.Length > 0 && !LiteralFilter.MatchesFilters(s.Object, filters, model)) { return(true); } return(sink.Add(s)); }
protected void SelectBySubject(SelectFilter filter, StatementSink sink) { var selectIdsSourceHash = new Dictionary <SourceDescriptor, IList <object> >(); for (int i = 0; i < filter.Subjects.Length; i++) { var subj = filter.Subjects[i]; foreach (var sourceDescr in FindSourceByItemSubject(subj.Uri)) { var itemId = ExtractSourceId(sourceDescr, subj.Uri); AddToHashList(selectIdsSourceHash, sourceDescr, itemId); } } foreach (var sourceEntry in selectIdsSourceHash) { if (filter.Predicates != null) { // case 1.1: predicate is defined var sourceFlds = new List <FieldDescriptor>(); for (int i = 0; i < filter.Predicates.Length; i++) { var pred = filter.Predicates[i]; // check for "type" predicate if (pred == NS.Rdf.typeEntity) { for (int j = 0; j < sourceEntry.Value.Count; j++) { if (!sink.Add(new Statement( GetSourceItemEntity(sourceEntry.Key, sourceEntry.Value[j]), pred, EntitySourceHash[sourceEntry.Key]))) { return; } } continue; } if (FieldNsSourceHash[sourceEntry.Key].ContainsKey(pred.Uri)) { sourceFlds.Add(FieldNsSourceHash[sourceEntry.Key][pred.Uri]); } } if (sourceFlds.Count > 0) { LoadToSink(sourceEntry.Key, sourceEntry.Value, sourceFlds, filter.Objects, sink, filter); } } else { // case 1.2: predicate is undefined LoadToSink(sourceEntry.Key, sourceEntry.Value, null, filter.Objects, sink, filter); } } }
public bool Add(Statement s) { if (table[s.Predicate] == null) { return(sink.Add(s)); } else { foreach (Entity e in (ResSet)table[s.Predicate]) { if (!sink.Add(new Statement(s.Subject, e, s.Object, s.Meta))) { return(false); } } } return(true); }
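The Add implementations above are sink decorators: each one filters or rewrites a statement and forwards it to an inner sink, propagating the inner sink's cancellation result. A hypothetical decorator in the same style; the real classes behind these snippets also take extra state (a predicate map, literal filters, a comparison store) in their constructors.

using SemWeb;

// Hypothetical wrapper sink, shown only to illustrate the decorator pattern.
class RenameMetaSink : StatementSink
{
    readonly StatementSink sink;
    readonly Entity meta;

    public RenameMetaSink(StatementSink sink, Entity meta)
    {
        this.sink = sink;
        this.meta = meta;
    }

    public bool Add(Statement s)
    {
        // Rewrite the meta (graph) component and pass the statement along.
        return sink.Add(new Statement(s.Subject, s.Predicate, s.Object, meta));
    }
}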
/// <summary> /// Selects all statements from this source and streams them into the given Sink /// </summary> /// <param name="sink">Statement Sink</param> /// <remarks> /// This is essentially the same code as the <see cref="SemWebConverter.ToSemWeb">ToSemWeb(IGraph g, StatementSink sink)</see> function but we need to maintain a consistent mapping of BNodes for the source /// </remarks> public void Select(StatementSink sink) { foreach (Triple t in this._g.Triples) { Statement stmt = SemWebConverter.ToSemWeb(t, this._mapping); if (!sink.Add(stmt)) { return; } } }
/// <summary> /// Takes the contents of a dotNetRDF Graph and inputs it into a SemWeb StatementSink /// </summary> /// <param name="g">Graph</param> /// <param name="mapping">Blank Node Mapping</param> /// <param name="sink">Statement Sink</param> public static void ToSemWeb(IGraph g, SemWebMapping mapping, StatementSink sink) { Statement stmt; foreach (Triple t in g.Triples) { stmt = ToSemWeb(t, mapping); //Stop adding statements if the sink tells us to stop if (!sink.Add(stmt)) return; } }
public bool Add(Statement s) { foreach (Entity e in RDFS.GetClosure(new Resource[] { s.Object }, table, true)) { if (!sink.Add(new Statement(s.Subject, s.Predicate, e, s.Meta))) { return(false); } } return(true); }
void ReadLoop(StatementSink sink) { while (!finished) { int nr = nextRead; // Check that we can advance (i.e. not cross the write pointer). while (nr == nextWrite && !finished) { if (SLEEP_DURATION > 0) { Thread.Sleep(SLEEP_DURATION); } else { hasData.WaitOne(); } } if (finished) { return; } int nw = nextWrite; int addctr = 0; while (nr != nw) { Statement s = buffer[nr]; nr = (nr == len - 1) ? 0 : nr + 1; if ((addctr++ & 0xFF) == 0) { nextRead = nr; hasSpace.Set(); } canceled = !sink.Add(s); if (canceled) { break; } } nextRead = nr; hasSpace.Set(); if (canceled) { break; } } }
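ReadLoop above is the consumer half of a single-producer/single-consumer ring buffer: a writer thread (not shown in this snippet) advances nextWrite and signals hasData, while the reader publishes nextRead and signals hasSpace. A rough sketch of what that writer side might look like, reusing the field names from the snippet and assuming hasData/hasSpace are auto-reset events; this is an illustration of the hand-off, not the library's actual code.

// Hypothetical producer counterpart to ReadLoop; buffer, len, nextRead, nextWrite,
// canceled, hasData and hasSpace are the fields used by the consumer above.
void WriteOne(Statement s)
{
    int nw = nextWrite;
    int next = (nw == len - 1) ? 0 : nw + 1;
    // The buffer is full when advancing the write pointer would hit the read pointer.
    while (next == nextRead && !canceled)
    {
        hasData.Set();      // make sure the reader is awake so it can drain the buffer
        hasSpace.WaitOne(); // wait for ReadLoop to publish a new nextRead
    }
    if (canceled) return;
    buffer[nw] = s;
    nextWrite = next;       // publish the filled slot to the reader
    hasData.Set();
}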
public bool Add(Statement s) { ranges.ToString(); // avoid warning about not using variable if (s.Object is Literal && ((Literal)s.Object).DataType == null) { // TODO: Look at the superproperty closure of the predicate // and apply the first range found to the literal. While // more than one range may apply, we can only assign one. // It would be best to assign the most specific, but we // don't have that info. And, don't ever assign the rdfs:Literal // or rdfs:Resource classes as the data type -- and there may be // others -- that are consistent but just not data types. // Also, assign the most specific data type if we have // the class relations among them. return(sink.Add(s)); } else { return(sink.Add(s)); } }
void SelectableSource.Select(SelectFilter filter, StatementSink sink) { foreach (ISPARQLResult result in Graph.Query("SELECT DISTINCT * WHERE { ?s ?p ?o }").Results) { Resource s = Mapper.fromKE(result["s"]); Resource p = Mapper.fromKE(result["p"]); Resource o = Mapper.fromKE(result["o"]); if (passesFilter(filter, s, p, o)) { sink.Add(new Statement(s as Entity, p as Entity, o)); } } }
public bool Add(Statement s) { if (resetMeta) { s.Meta = Statement.DefaultMeta; } if (hash.ContainsKey(s)) { return(true); } hash[s] = hash; return(sink.Add(s)); }
public void Select(Statement template, StatementSink sink) { if (template.Meta != null && template.Meta != Statement.DefaultMeta) { return; } if (template.Predicate != null && template.Predicate.Uri == null) { return; } if (template.Subject == null && template.Predicate == null && template.Object == null) { Select(sink); } else if (template.Subject != null && template.Predicate != null && template.Object != null) { template.Meta = Statement.DefaultMeta; if (Contains(template)) { sink.Add(template); } } else if (template.Predicate == null) { if (template.Subject == null) { SelectAllObject(template.Object, sink); } else if (template.Object == null) { SelectAllSubject(template.Subject, sink); } else { SelectRelationsBetween(template.Subject, template.Object, sink); } } else if (template.Subject != null && template.Object == null) { SelectObjects(template.Subject, template.Predicate, sink); } else if (template.Subject == null && template.Object != null) { SelectSubjects(template.Predicate, template.Object, sink); } else if (template.Subject == null && template.Object == null) { SelectAllPairs(template.Predicate, sink); } }
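In the dispatch above, a null Subject, Predicate, or Object acts as a wildcard, so callers choose the specialised path simply by which template fields they fill in. A hedged usage sketch; "source" stands for whatever class defines this Select method, and the URIs are placeholders.

using SemWeb;

// Hypothetical calls against the dispatch above.
static void TemplateExamples(SelectableSource source, StatementSink sink)
{
    Entity alice = new Entity("http://example.org/alice");
    Entity knows = new Entity("http://example.org/knows");

    // Every component null: the "select everything" branch.
    source.Select(new Statement(null, null, null), sink);

    // Subject fixed, predicate and object wildcards: the SelectAllSubject branch.
    source.Select(new Statement(alice, null, null), sink);

    // Subject and predicate fixed, object wildcard: the SelectObjects branch.
    source.Select(new Statement(alice, knows, null), sink);
}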
/// <summary> /// Takes the contents of a dotNetRDF Graph and inputs it into a SemWeb StatementSink /// </summary> /// <param name="g">Graph</param> /// <param name="mapping">Blank Node Mapping</param> /// <param name="sink">Statement Sink</param> public static void ToSemWeb(IGraph g, SemWebMapping mapping, StatementSink sink) { Statement stmt; foreach (Triple t in g.Triples) { stmt = ToSemWeb(t, mapping); //Stop adding statements if the sink tells us to stop if (!sink.Add(stmt)) { return; } } }
public bool Add(Statement statement) { if (statement.Predicate == Predicate) { if (PushedPairs.ContainsKey(statement.Subject)) { return(true); } else { PushedPairs[statement.Subject] = statement.Object; } } return(Sink.Add(statement)); }
public static void Add(StatementSink sink, Entity subject, string predicate, string type, string [] values) { if (values == null) { System.Console.WriteLine("{0} has no values; skipping", predicate); return; } Entity empty = new SemWeb.BNode(); Statement top = new Statement(subject, (Entity)MetadataStore.Namespaces.Resolve(predicate), empty); Statement desc = new Statement(empty, (Entity)MetadataStore.Namespaces.Resolve("rdf:type"), (Entity)MetadataStore.Namespaces.Resolve(type)); sink.Add(desc); foreach (string value in values) { Statement literal = new Statement(empty, (Entity)MetadataStore.Namespaces.Resolve("rdf:li"), new SemWeb.Literal(value, null, null)); sink.Add(literal); } sink.Add(top); }
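A hypothetical call site for the Add helper above; the values array is a placeholder. The dc:creator / rdf:Seq pairing comes from the Artist handling in the SelectDirectory snippet further down (which uses the shorter overload without an explicit subject); the explicit-subject overload shown here is the one defined above.

// Hypothetical caller; the author names are placeholders.
static void DescribeCreators(StatementSink sink)
{
    MetadataStore.Add(sink, MetadataStore.FSpotXMPBase, "dc:creator", "rdf:Seq",
                      new string[] { "First Author", "Second Author" });
}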
/// <summary> /// Selects Statements from the Source based on a Template /// </summary> /// <param name="template">Statement Template</param> /// <param name="sink">Sink to stream results to</param> public void Select(Statement template, StatementSink sink) { //Convert Template to an Enumerable IEnumerable <Triple> ts = this.TemplateToEnumerable(template); foreach (Triple t in ts) { //Keep streaming Triples until the sink tells us to stop Statement stmt = SemWebConverter.ToSemWeb(t, this._mapping); if (!sink.Add(stmt)) { return; } } }
private void Add(StatementSink store, Statement statement, Location position) { try { store.Add(statement); } catch (Exception e) { if (!addFailuresAsWarnings) { OnError("Add failed on statement { " + statement + " }: " + e.Message, position, e); } else { OnWarning("Add failed on statement { " + statement + " }: " + e.Message, position, e); } } }
public override bool Add(VariableBindings result) { Resource subj = si == -1 ? d.Subject : result.Values[si]; Resource pred = pi == -1 ? d.Predicate : result.Values[pi]; Resource obj = oi == -1 ? d.Object : result.Values[oi]; if (!(subj is Entity) || !(pred is Entity)) { return(true); } if (litFilters != null && !LiteralFilter.MatchesFilters(obj, litFilters, null)) { return(true); } return(sink.Add(new Statement((Entity)subj, (Entity)pred, obj))); }
public override void Select(SelectFilter filter, SelectableSource targetModel, StatementSink sink) { if (filter.Subjects == null) { filter.Subjects = new Entity[] { new Variable("subject") }; } if (filter.Predicates == null) { filter.Predicates = new Entity[] { new Variable("predicate") }; } if (filter.Objects == null) { filter.Objects = new Entity[] { new Variable("object") }; } if (filter.Metas == null) { filter.Metas = new Entity[] { Statement.DefaultMeta }; } foreach (Statement s in filter) { // until we can operate on filter directly ArrayList evidence = prove(rules, targetModel, new Statement[] { s }, -1); if (evidence == null) { continue; // not provable (in max number of steps, if that were given) } foreach (EvidenceItem ei in evidence) { foreach (Statement h in ei.head) { // better be just one statement if (filter.LiteralFilters != null && !LiteralFilter.MatchesFilters(h.Object, filter.LiteralFilters, targetModel)) { continue; } sink.Add(h); } } } }
public override bool Distinct { get { return false; } } // not sure... public override void Select(SelectFilter filter, SelectableSource targetModel, StatementSink sink) { if (filter.Subjects == null) filter.Subjects = new Entity[] { new Variable("subject") }; if (filter.Predicates == null) filter.Predicates = new Entity[] { new Variable("predicate") }; if (filter.Objects == null) filter.Objects = new Entity[] { new Variable("object") }; if (filter.Metas == null) filter.Metas = new Entity[] { Statement.DefaultMeta }; foreach (Statement s in filter) { // until we can operate on filter directly ArrayList evidence = prove(rules, targetModel, new Statement[] { s }, -1); if (evidence == null) continue; // not provable (in max number of steps, if that were given) foreach (EvidenceItem ei in evidence) { foreach (Statement h in ei.head) { // better be just one statement if (filter.LiteralFilters != null && !LiteralFilter.MatchesFilters(h.Object, filter.LiteralFilters, targetModel)) continue; sink.Add(h); } } } }
/// <summary> /// Selects all Statements from the underlying Store and adds them to a SemWeb Sink /// </summary> /// <param name="sink">Statement Sink</param> public void Select(StatementSink sink) { //Use a CONSTRUCT to get the Statements String query = "CONSTRUCT {?s ?p ?o} WHERE {?s ?p ?o}"; Object results = this._store.ExecuteQuery(query); if (results is Graph) { Graph g = (Graph)results; foreach (Triple t in g.Triples) { Statement stmt = SemWebConverter.ToSemWeb(t, this._mapping); //Keep returning stuff until it tells us to stop if (!sink.Add(stmt)) return; } } }
bool Select(Entity[] subjects, Entity[] predicates, Resource[] objects, Entity[] metas, LiteralFilter[] litFilters, int limit, StatementSink sink, bool ask) { // TODO: Change meta into named graphs. Anything but a null or DefaultMeta // meta returns no statements immediately. if (metas != null && (metas.Length != 1 || metas[0] != Statement.DefaultMeta)) return false; string query; bool nonull = false; if (subjects != null && subjects.Length == 1 && predicates != null && predicates.Length == 1 && objects != null && objects.Length == 1) { query = "ASK WHERE { " + S(subjects[0], null) + " " + S(predicates[0], null) + " " + S(objects[0], null) + "}"; nonull = true; } else { if (ask) query = "ASK"; else query = "SELECT *"; query += " WHERE { "; query += S(subjects, "subject"); query += " "; query += S(predicates, "predicate"); query += " "; query += S(objects, "object"); query += " . "; query += SL(subjects, "subject"); query += SL(predicates, "predicate"); query += SL(objects, "object"); query += " }"; // TODO: Pass literal filters to server. } if (limit >= 1) query += " LIMIT " + limit; XmlDocument result = Load(query); if (ask || nonull) { foreach (XmlElement boolean in result.DocumentElement) { if (boolean.Name != "boolean") continue; bool ret = boolean.InnerText == "true"; if (ask) return ret; else if (ret) sink.Add(new Statement(subjects[0], predicates[0], objects[0])); return false; } throw new ApplicationException("Invalid server response: No boolean node."); } XmlElement bindings = null; foreach (XmlElement e in result.DocumentElement) if (e.Name == "results") bindings = e; if (bindings == null) throw new ApplicationException("Invalid server response: No result node."); MemoryStore distinctCheck = null; if (bindings.GetAttribute("distinct") != "true") distinctCheck = new MemoryStore(); Hashtable bnodes = new Hashtable(); foreach (XmlNode bindingnode in bindings) { if (!(bindingnode is XmlElement)) continue; XmlElement binding = (XmlElement)bindingnode; Resource subj = GetBinding(binding, "subject", subjects, bnodes); Resource pred = GetBinding(binding, "predicate", predicates, bnodes); Resource obj = GetBinding(binding, "object", objects, bnodes); if (!(subj is Entity) || !(pred is Entity)) continue; Statement s = new Statement((Entity)subj, (Entity)pred, obj); if (distinctCheck != null && distinctCheck.Contains(s)) continue; if (litFilters != null && !LiteralFilter.MatchesFilters(s.Object, litFilters, this)) continue; if (!sink.Add(s)) return true; if (distinctCheck != null) distinctCheck.Add(s); } return true; }
public static void Add (StatementSink sink, Entity subject, string predicate, string type, string [] values) { if (values == null) { System.Console.WriteLine ("{0} has no values; skipping", predicate); return; } Entity empty = new SemWeb.BNode(); Statement top = new Statement (subject, (Entity)MetadataStore.Namespaces.Resolve (predicate), empty); Statement desc = new Statement (empty, (Entity)MetadataStore.Namespaces.Resolve ("rdf:type"), (Entity)MetadataStore.Namespaces.Resolve (type)); sink.Add (desc); foreach (string value in values) { Statement literal = new Statement (empty, (Entity)MetadataStore.Namespaces.Resolve ("rdf:li"), new SemWeb.Literal (value, null, null)); sink.Add (literal); } sink.Add (top); }
public static void AddLiteral (StatementSink sink, string predicate, string type, SemWeb.Literal value) { Entity empty = new BNode (); Statement top = new Statement (FSpotXMPBase, (Entity)MetadataStore.Namespaces.Resolve (predicate), empty); Statement desc = new Statement (empty, (Entity)MetadataStore.Namespaces.Resolve ("rdf:type"), (Entity)MetadataStore.Namespaces.Resolve (type)); sink.Add (desc); Statement literal = new Statement (empty, (Entity)MetadataStore.Namespaces.Resolve ("rdf:li"), value); sink.Add (literal); sink.Add (top); }
private static bool MakeLeanMSG3(Store msg, ResSet predicates, StatementSink removed, ResSet nodesremoved, SyncPath path) { // The variable path has to be expanded by including the statements // connected to the variables on the frontier. Statements // mentioning a variable node have already been considered. // The target of each such statement can be considered fixed // or variable. If a variable is considered fixed, the edge // must exist in the MSG substituting the variables for their // values. If it's variable, it has to have at least one // match in the MSG but not as any of the variable nodes. // If all targets are considered fixed (and have matches), // then the variables so far (and their edges) can all be // removed and no more processing needs to be done. // There are (2^N)-1 other considerations. For each of those, // the targets considered variables all become the new // frontier, and this is repeated. // First, get a list of edges from the frontier that we // haven't considered yet. ArrayList alledges = new ArrayList(); foreach (BNode b in path.FrontierVariables) { // Make sure all edges are kept because even the ones // to literals have to be removed when duplication is found. foreach (Statement s in msg.Select(new Statement(b, null, null))) alledges.Add(new Edge(true, b, s.Predicate, s.Object)); foreach (Statement s in msg.Select(new Statement(null, null, b))) alledges.Add(new Edge(false, b, s.Predicate, s.Subject)); } ArrayList newedges = new ArrayList(); ResSet alltargets = new ResSet(); ResSet fixabletargetsset = new ResSet(); // can be fixed ResSet variabletargetsset = new ResSet(); // must be variable foreach (Edge e in alledges) { if (path.Path.ContainsKey(e)) continue; path.Path[e] = e; // This checks if we can keep the target of this edge // fixed, given the variable mappings we have so far. bool isTargetFixable = msg.Contains(e.AsStatement().Replace(path.Mapping)); // If the target of e is any of the following, we // can check immediately if the edge is supported // by the MSG under the variable mapping we have so far: // a named node, literal, fixed node, or predicate // a variable we've seen already // If it's not supported, this path fails. If it is // supported, we're done with this edge. if (!(e.End is BNode) || path.FixedNodes.Contains(e.End) || predicates.Contains(e.End) || path.VariableNodes.Contains(e.End)) { if (!isTargetFixable) return false; continue; // this edge is supported, so we can continue } // The target of e is a new BNode. // If this target is not fixable via this edge, it's // not fixable at all. if (!isTargetFixable) { fixabletargetsset.Remove(e.End); variabletargetsset.Add(e.End); } if (!alltargets.Contains(e.End)) { alltargets.Add(e.End); fixabletargetsset.Add(e.End); } newedges.Add(e); } // If all of the targets were fixable (trivially true also // if there simple were no new edges/targets), then we've reached // the end of this path. We can immediately remove // the edges we've seen so far, under the variable mapping // we've chosen. if (variabletargetsset.Count == 0) { foreach (Edge e in path.Path.Keys) { Statement s = e.AsStatement(); msg.Remove(s); if (removed != null) removed.Add(s); } foreach (Entity e in path.Mapping.Keys) nodesremoved.Add(e); return true; } // At this point, at least one target must be a variable // and we'll have to expand the path in that direction. 
// We might want to permute through the ways we can // take fixable nodes as either fixed or variable, but // we'll be greedy and assume everything fixable is // fixed and everything else is a variable. path.FixedNodes.AddRange(fixabletargetsset); path.VariableNodes.AddRange(variabletargetsset); // But we need to look at all the ways each variable target // can be mapped to a new value, which means intersecting // the possible matches for each relevant edge. Entity[] variables = variabletargetsset.ToEntityArray(); ResSet[] values = new ResSet[variables.Length]; Entity[][] values_array = new Entity[variables.Length][]; int[] choices = new int[variables.Length]; for (int i = 0; i < variables.Length; i++) { foreach (Edge e in newedges) { if (e.End != variables[i]) continue; // Get the possible values this edge allows Resource[] vr; if (e.Direction) vr = msg.SelectObjects((Entity)path.Mapping[e.Start], e.Predicate); else vr = msg.SelectSubjects(e.Predicate, (Entity)path.Mapping[e.Start]); // Filter out literals and any variables // on the path! The two paths can't intersect // except at fixed nodes. ResSet v = new ResSet(); foreach (Resource r in vr) { if (r is Literal) continue; if (path.Mapping.ContainsKey(r)) continue; v.Add(r); } // Intersect these with the values we have already. if (values[i] == null) values[i] = v; else values[i].RetainAll(v); // If no values are available for this variable, // we're totally done. if (values[i].Count == 0) return false; } choices[i] = values[i].Count; values_array[i] = values[i].ToEntityArray(); } // Now we have to permute through the choice of values. // Make an array of the number of choices for each variable. Permutation p = new Permutation(choices); int[] pstate; while ((pstate = p.Next()) != null) { SyncPath newpath = new SyncPath(); newpath.FixedNodes.AddRange(path.FixedNodes); newpath.VariableNodes.AddRange(path.VariableNodes); newpath.Mapping = (Hashtable)path.Mapping.Clone(); newpath.Path = (Hashtable)path.Path.Clone(); newpath.FrontierVariables = variabletargetsset; for (int i = 0; i < variables.Length; i++) { Entity value = values_array[i][pstate[i]]; newpath.Mapping[variables[i]] = value; newpath.FixedNodes.Add(value); } if (MakeLeanMSG3(msg, predicates, removed, nodesremoved, newpath)) return true; } return false; }
public void SelectDirectory (ImageDirectory dir, StatementSink sink) { foreach (DirectoryEntry e in dir.Entries) { #if DEBUG_LOADER System.Console.WriteLine ("{0}", e.Id); #endif switch (e.Id) { case TagId.IPTCNAA: System.IO.Stream iptcstream = new System.IO.MemoryStream (e.RawData); FSpot.Iptc.IptcFile iptc = new FSpot.Iptc.IptcFile (iptcstream); iptc.Select (sink); break; case TagId.PhotoshopPrivate: System.IO.Stream bimstream = new System.IO.MemoryStream (e.RawData); FSpot.Bim.BimFile bim = new FSpot.Bim.BimFile (bimstream); bim.Select (sink); break; case TagId.XMP: System.IO.Stream xmpstream = new System.IO.MemoryStream (e.RawData); FSpot.Xmp.XmpFile xmp = new FSpot.Xmp.XmpFile (xmpstream); xmp.Select (sink); break; case TagId.ImageDescription: MetadataStore.AddLiteral (sink, "dc:description", "rdf:Alt", new Literal (e.ValueAsString [0], "x-default", null)); break; case TagId.UserComment: MetadataStore.AddLiteral (sink, "exif:UserComment", "rdf:Alt", new Literal (e.ValueAsString [0], "x-default", null)); break; case TagId.Copyright: MetadataStore.AddLiteral (sink, "dc:rights", "rdf:Alt", new Literal (e.ValueAsString [0], "x-default", null)); break; case TagId.Artist: MetadataStore.Add (sink, "dc:creator", "rdf:Seq", e.ValueAsString); break; case TagId.ExifIfdPointer: try { ImageDirectory sub = ((SubdirectoryEntry)e).Directory [0]; SelectDirectory (sub, sink); } catch (System.Exception exc) { System.Console.WriteLine (exc); } break; case TagId.Software: MetadataStore.AddLiteral (sink, "xmp:CreatorTool", e.ValueAsString [0]); break; case TagId.DateTime: try { MetadataStore.AddLiteral (sink, "xmp:ModifyDate", e.ValueAsDate.ToString ("yyyy-MM-ddThh:mm:ss")); } catch (System.Exception ex) { System.Console.WriteLine (String.Format ("error parsing {0}{2}{1}", e.ValueAsString[0], ex, Environment.NewLine)); } break; case TagId.DateTimeOriginal: case TagId.DateTimeDigitized: // FIXME subsectime needs to be included in these values // FIXME shouldn't DateTimeOriginal be xmp:CreateDate? the spec says no but wtf? 
try { MetadataStore.AddLiteral (sink, "exif:" + e.Id.ToString (), e.ValueAsDate.ToString ("yyyy-MM-ddThh:mm:ss")); } catch (System.Exception ex) { System.Console.WriteLine (String.Format ("error parsing {0}{2}{1}", e.ValueAsString[0], ex, Environment.NewLine)); } break; //case TagId.SpatialFrequencyResponse case TagId.ExifCFAPattern: CFAPattern pattern = new CFAPattern (e.RawData, e.IsLittle); Entity empty = new BNode (); Statement top = new Statement (MetadataStore.FSpotXMPBase, (Entity)MetadataStore.Namespaces.Resolve ("exif:" + e.Id.ToString ()), empty); Statement cols = new Statement (empty, (Entity) MetadataStore.Namespaces.Resolve ("exif:Columns"), new Literal (pattern.Columns.ToString (), null, null)); sink.Add (cols); Statement rows = new Statement (empty, (Entity) MetadataStore.Namespaces.Resolve ("exif:Rows"), new Literal (pattern.Rows.ToString (), null, null)); sink.Add (rows); string [] vals = e.ArrayToString (pattern.Values); MetadataStore.Add (sink, empty, "exif:Values", "rdf:Seq", vals); sink.Add (top); break; case TagId.ExifVersion: case TagId.FlashPixVersion: case TagId.ColorSpace: case TagId.CompressedBitsPerPixel: case TagId.PixelYDimension: case TagId.PixelXDimension: case TagId.RelatedSoundFile: case TagId.ExposureTime: case TagId.FNumber: case TagId.ExposureProgram: case TagId.SpectralSensitivity: case TagId.ShutterSpeedValue: case TagId.ApertureValue: case TagId.BrightnessValue: case TagId.ExposureBiasValue: case TagId.MaxApertureValue: case TagId.SubjectDistance: case TagId.MeteringMode: case TagId.LightSource: case TagId.FocalLength: case TagId.FlashEnergy: case TagId.FocalPlaneXResolution: case TagId.FocalPlaneYResolution: case TagId.FocalPlaneResolutionUnit: case TagId.ExposureIndex: case TagId.SensingMethod: case TagId.FileSource: case TagId.SceneType: case TagId.CustomRendered: case TagId.ExposureMode: case TagId.WhiteBalance: case TagId.DigitalZoomRatio: case TagId.FocalLengthIn35mmFilm: case TagId.SceneCaptureType: case TagId.GainControl: case TagId.Contrast: case TagId.Saturation: case TagId.Sharpness: MetadataStore.AddLiteral (sink, "exif:" + e.Id.ToString (), e.ValueAsString [0]); break; case TagId.ComponentsConfiguration: case TagId.ISOSpeedRatings: case TagId.SubjectArea: case TagId.SubjectLocation: MetadataStore.Add (sink, "exif:" + e.Id.ToString (), "rdf:Seq", e.ValueAsString); break; case TagId.TransferFunction: case TagId.YCbCrSubSampling: case TagId.WhitePoint: case TagId.PrimaryChromaticities: case TagId.YCbCrCoefficients: case TagId.ReferenceBlackWhite: case TagId.BitsPerSample: MetadataStore.Add (sink, "tiff:" + e.Id.ToString (), "rdf:Seq", e.ValueAsString); break; case TagId.Orientation: case TagId.Compression: case TagId.PhotometricInterpretation: case TagId.SamplesPerPixel: case TagId.PlanarConfiguration: case TagId.YCbCrPositioning: case TagId.ResolutionUnit: case TagId.ImageWidth: case TagId.ImageLength: case TagId.Model: case TagId.Make: MetadataStore.AddLiteral (sink, "tiff:" + e.Id.ToString (), e.ValueAsString [0]); break; } } }
void SelectSome(Statement template, StatementSink sink) { // Get a cursor over the first non-null component of template. int[] stmts; if (template.Subject != null) stmts = GetStatements(template.Subject, 0); else if (template.Predicate != null) stmts = GetStatements(template.Predicate, 1); else if (template.Object != null) stmts = GetStatements(template.Object, 2); else if (template.Meta != null) stmts = GetStatements(template.Meta, 3); else throw new InvalidOperationException(); if (stmts == null) return; Hashtable createdResources = new Hashtable(); for (int i = 0; i < stmts[0]; i++) { Quad q = Quad.Deserialize(stmts, 1 + i*4); Statement s = QuadToStatement(q, createdResources); if (template.Matches(s)) { if (!sink.Add(s)) return; } } }
public void Select(Statement template, StatementSink sink) { if (template.Meta != null && template.Meta != Statement.DefaultMeta) return; if (template.Predicate != null && template.Predicate.Uri == null) return; if (template.Subject == null && template.Predicate == null && template.Object == null) { Select(sink); } else if (template.Subject != null && template.Predicate != null && template.Object != null) { template.Meta = Statement.DefaultMeta; if (Contains(template)) sink.Add(template); } else if (template.Predicate == null) { if (template.Subject == null) SelectAllObject(template.Object, sink); else if (template.Object == null) SelectAllSubject(template.Subject, sink); else SelectRelationsBetween(template.Subject, template.Object, sink); } else if (template.Subject != null && template.Object == null) { SelectObjects(template.Subject, template.Predicate, sink); } else if (template.Subject == null && template.Object != null) { SelectSubjects(template.Predicate, template.Object, sink); } else if (template.Subject == null && template.Object == null) { SelectAllPairs(template.Predicate, sink); } }
public void Select(SelectFilter filter, StatementSink sink) { if (filter.Predicates == null || filter.LiteralFilters != null) { data.Select(filter, sink); return; } ResSet remainingPredicates = new ResSet(); Entity[] subjects = filter.Subjects; Entity[] predicates = filter.Predicates; Resource[] objects = filter.Objects; Entity[] metas = filter.Metas; foreach (Entity p in predicates) { if (p == type) { if (objects != null) { // Do the subjects have any of the types listed in the objects, // or what things have those types? // Expand objects by the subclass closure of the objects data.Select(new SelectFilter(subjects, new Entity[] { p }, GetClosure(objects, subclasses), metas), sink); // Process domains and ranges. ResSet dom = new ResSet(), ran = new ResSet(); Hashtable domPropToType = new Hashtable(); Hashtable ranPropToType = new Hashtable(); foreach (Entity e in objects) { Entity[] dc = GetClosure((ResSet)domainof[e], subprops); if (dc != null) foreach (Entity c in dc) { dom.Add(c); AddRelation(c, e, domPropToType, false); } dc = GetClosure((ResSet)rangeof[e], subprops); if (dc != null) foreach (Entity c in dc) { ran.Add(c); AddRelation(c, e, ranPropToType, false); } } // If it's in the domain of any of these properties, // we know its type. if (subjects != null) { if (dom.Count > 0) data.Select(new SelectFilter(subjects, dom.ToEntityArray(), null, metas), new ExpandDomRan(0, domPropToType, sink)); if (ran.Count > 0) data.Select(new SelectFilter(null, ran.ToEntityArray(), subjects, metas), new ExpandDomRan(1, ranPropToType, sink)); } } else if (subjects != null) { // What types do these subjects have? // Expand the resulting types by the closure of their superclasses data.Select(new SelectFilter(subjects, new Entity[] { p }, objects, metas), new Expand(superclasses, sink)); // Use domains and ranges to get type info data.Select(new SelectFilter(subjects, null, null, metas), new Expand3(0, domains, superclasses, sink)); data.Select(new SelectFilter(null, null, subjects, metas), new Expand3(1, ranges, superclasses, sink)); } else { // What has type what? We won't answer that question. data.Select(filter, sink); } } else if ((p == subClassOf || p == subPropertyOf) && (metas == null || metas[0] == Statement.DefaultMeta)) { Hashtable supers = (p == subClassOf) ? superclasses : superprops; Hashtable subs = (p == subClassOf) ? subclasses : subprops; if (subjects != null && objects != null) { // Expand objects by the subs closure of the objects. data.Select(new SelectFilter(subjects, new Entity[] { p }, GetClosure(objects, subs), metas), sink); } else if (subjects != null) { // get all of the supers of all of the subjects foreach (Entity s in subjects) foreach (Entity o in GetClosure(new Entity[] { s }, supers)) sink.Add(new Statement(s, p, o)); } else if (objects != null) { // get all of the subs of all of the objects foreach (Resource o in objects) { if (o is Literal) continue; foreach (Entity s in GetClosure(new Entity[] { (Entity)o }, subs)) sink.Add(new Statement(s, p, (Entity)o)); } } else { // What is a subclass/property of what? We won't answer that. data.Select(filter, sink); } } else { remainingPredicates.Add(p); } } if (remainingPredicates.Count > 0) { // Also query the subproperties of any property // being queried, but remember which subproperties // came from which superproperties so we can map them // back to the properties actually queried. The closures // contain the queried properties themselves too. 
ResSet qprops = new ResSet(); Hashtable propfrom = new Hashtable(); foreach (Entity p in remainingPredicates) { foreach (Entity sp in GetClosure(new Entity[] { p }, subprops)) { AddRelation(sp, p, propfrom, false); qprops.Add(sp); } } //data.Select(subjects, qprops.ToEntityArray(), objects, metas, new LiteralDTMap(ranges, new PredMap(propfrom, sink))); SelectFilter sf = new SelectFilter(subjects, qprops.ToEntityArray(), objects, metas); sf.LiteralFilters = filter.LiteralFilters; sf.Limit = filter.Limit; data.Select(sf, new PredMap(propfrom, sink)); } }
/// <summary> /// Selects Statements that match a given Template /// </summary> /// <param name="template">Statement Template</param> /// <param name="sink">Sink</param> public void Select(Statement template, StatementSink sink) { //Implement as a SPARQL SELECT List<ITriplePattern> patterns = this.TemplateToTriplePatterns(template); StringBuilder query = new StringBuilder(); query.AppendLine("SELECT * WHERE {"); foreach (ITriplePattern pattern in patterns) { query.AppendLine(pattern.ToString() + "."); } query.AppendLine("}"); //Get the Results Object results = this._store.ExecuteQuery(query.ToString()); if (results is SparqlResultSet) { SparqlResultSet rset = (SparqlResultSet)results; foreach (SparqlResult r in rset) { Entity s = (template.Subject != null) ? template.Subject : SemWebConverter.ToSemWebEntity(r["s"], this._mapping); Entity p = (template.Predicate != null) ? template.Predicate : SemWebConverter.ToSemWebEntity(r["p"], this._mapping); Resource o = (template.Object != null) ? template.Object : SemWebConverter.ToSemWeb(r["o"], this._mapping); Statement stmt = new Statement(s, p, o); //Keep returning stuff until the sink tells us to stop if (!sink.Add(stmt)) return; } } }
public void Select (Statement template, StatementSink sink) { // extract the fields for easy access Entity subj = template.Subject; Entity pred = template.Predicate; Resource obj = template.Object; // convert the SemWeb fields to the RDFQuery fields Uri s; string p, o; rdf_to_beagle_hook (subj, pred, obj, out s, out p, out o); RDFQuery query = new RDFQuery (s, p, o); RDFQueryResult result = (RDFQueryResult) query.Send (); foreach (Hit hit in result.Hits) { Entity subject = new Entity (hit.Uri.ToString ()); //FIXME: Do we have to use strings here? foreach (Property prop in hit.Properties) { Entity predicate = BeaglePropertyToEntity (prop.Type, prop.Key); Resource _object; property_to_rdf_hook (prop, out _object); // now create the statement and add it to the result Statement st = new Statement (subject, predicate, _object); sink.Add (st); } } }
public override void Select(SelectFilter filter, StatementSink result) { ResSet s = filter.Subjects == null ? null : new ResSet(filter.Subjects), p = filter.Predicates == null ? null : new ResSet(filter.Predicates), o = filter.Objects == null ? null : new ResSet(filter.Objects), m = filter.Metas == null ? null : new ResSet(filter.Metas); foreach (Statement st in statements) { if (s != null && !s.Contains(st.Subject)) continue; if (p != null && !p.Contains(st.Predicate)) continue; if (o != null && !o.Contains(st.Object)) continue; if (m != null && !m.Contains(st.Meta)) continue; if (filter.LiteralFilters != null && !LiteralFilter.MatchesFilters(st.Object, filter.LiteralFilters, this)) continue; if (!result.Add(st)) return; } }
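SelectFilter generalises the single-statement template to sets of acceptable values per position, with null meaning "unconstrained". A hedged sketch of building one against the Select(SelectFilter, ...) overloads shown in these snippets; "source" and the URIs are placeholders.

using SemWeb;

// Hypothetical filter construction and use.
static void FilterExample(SelectableSource source, StatementSink sink)
{
    Entity alice = new Entity("http://example.org/alice");
    Entity bob = new Entity("http://example.org/bob");
    Entity knows = new Entity("http://example.org/knows");

    // Subject is alice or bob, predicate is knows, any object, any graph.
    SelectFilter filter = new SelectFilter(
        new Entity[] { alice, bob },
        new Entity[] { knows },
        null,
        null);
    filter.Limit = 100; // several of the sources here honor Limit and LiteralFilters

    source.Select(filter, sink);
}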
private void Add(StatementSink store, Statement statement, Location position) { try { store.Add(statement); } catch (Exception e) { if (!addFailuresAsWarnings) OnError("Add failed on statement { " + statement + " }: " + e.Message, position, e); else OnWarning("Add failed on statement { " + statement + " }: " + e.Message, position, e); } }
/// <summary> /// Selects Statements from the Source based on a Filter /// </summary> /// <param name="filter">Statement Filter</param> /// <param name="sink">Sink to stream results to</param> public void Select(SelectFilter filter, StatementSink sink) { //Don't support filters on Metas for the Graph Source if (filter.Metas != null) { throw new RdfException("The dotNetRDF GraphSource does not support SemWeb filters which use Meta filters"); } //Want to build an IEnumerable based on the Filter IEnumerable<Triple> ts = Enumerable.Empty<Triple>(); INode s, p, o; if (filter.Subjects != null) { if (filter.Predicates != null) { //Subject-Predicate filter foreach (Entity subj in filter.Subjects) { s = SemWebConverter.FromSemWeb(subj, this._mapping); foreach (Entity pred in filter.Predicates) { p = SemWebConverter.FromSemWeb(pred, this._mapping); ts = ts.Concat(this._g.GetTriplesWithSubjectPredicate(s, p)); } } } else if (filter.Objects != null) { //Subject-Object filter foreach (Entity subj in filter.Subjects) { s = SemWebConverter.FromSemWeb(subj, this._mapping); foreach (Resource obj in filter.Objects) { o = SemWebConverter.FromSemWeb(obj, this._mapping); ts = ts.Concat(this._g.GetTriplesWithSubjectObject(s, o)); } } } else { //Subjects filter foreach (Entity subj in filter.Subjects) { s = SemWebConverter.FromSemWeb(subj, this._mapping); ts = ts.Concat(this._g.GetTriplesWithSubject(s)); } } } else if (filter.Predicates != null) { if (filter.Objects != null) { //Predicate-Object Filter foreach (Entity pred in filter.Predicates) { p = SemWebConverter.FromSemWeb(pred, this._mapping); foreach (Resource obj in filter.Objects) { o = SemWebConverter.FromSemWeb(obj, this._mapping); ts = ts.Concat(this._g.GetTriplesWithPredicateObject(p,o)); } } } else { //Predicate Filter foreach (Entity pred in filter.Predicates) { p = SemWebConverter.FromSemWeb(pred, this._mapping); ts = ts.Concat(this._g.GetTriplesWithPredicate(p)); } } } else if (filter.Objects != null) { //Object Filter foreach (Resource obj in filter.Objects) { o = SemWebConverter.FromSemWeb(obj, this._mapping); ts = ts.Concat(this._g.GetTriplesWithObject(o)); } } else { //Everything is null so this is a Select All ts = this._g.Triples; } int count = 0; foreach (Triple t in ts) { //Apply limit if applicable if (filter.Limit > 0 && count >= filter.Limit) return; //Convert to a Statement and apply applicable Literal Filters Statement stmt = SemWebConverter.ToSemWeb(t, this._mapping); if (filter.LiteralFilters != null) { if (LiteralFilter.MatchesFilters(stmt.Object, filter.LiteralFilters, this)) { //If the Object matched the filters then we return the Triple and stop //streaming if the sink tells us to if (!sink.Add(stmt)) return; count++; } //If it doesn't match the filter it is ignored } else { //Just add the statement and stop if the sink tells us to stop streaming if (!sink.Add(stmt)) return; count++; } } }
public override void Select(Statement template, StatementSink result) { StatementList source = statements; // The first time select is called, turn indexing on for the store. // TODO: Perform this index in a background thread if there are a lot // of statements. if (!isIndexed && allowIndexing) { isIndexed = true; for (int i = 0; i < StatementCount; i++) { Statement statement = this[i]; GetIndexArray(statementsAboutSubject, statement.Subject).Add(statement); GetIndexArray(statementsAboutObject, statement.Object).Add(statement); } } if (template.Subject != null) ShorterList(ref source, GetIndexArray(statementsAboutSubject, template.Subject)); else if (template.Object != null) ShorterList(ref source, GetIndexArray(statementsAboutObject, template.Object)); if (source == null) return; for (int i = 0; i < source.Count; i++) { Statement statement = source[i]; if (!template.Matches(statement)) continue; if (!result.Add(statement)) return; } }
/// <summary> /// Selects Statements from the Source based on a Filter /// </summary> /// <param name="filter">Statement Filter</param> /// <param name="sink">Sink to stream results to</param> public void Select(SelectFilter filter, StatementSink sink) { IEnumerable<Triple> ts = Enumerable.Empty<Triple>(); if (filter.Metas != null) { //This applies over some Graphs foreach (Entity meta in filter.Metas) { if (meta.Uri != null) { Uri graphUri = new Uri(meta.Uri); if (this._store.HasGraph(graphUri)) { ts = ts.Concat(this.FilterToEnumerable(filter, this._store.Graph(graphUri))); } } } } else { //This applies over all Graphs foreach (IGraph g in this._store.Graphs) { ts = ts.Concat(this.FilterToEnumerable(filter, g)); } } int count = 0; foreach (Triple t in ts) { //Apply limit if applicable if (filter.Limit > 0 && count >= filter.Limit) return; Statement stmt = SemWebConverter.ToSemWeb(t, this.GetMapping(t.Graph)); stmt.Meta = new Entity(t.GraphUri.ToString()); if (filter.LiteralFilters != null) { if (LiteralFilter.MatchesFilters(stmt.Object, filter.LiteralFilters, this)) { //If the Object matched the filters then we return the Triple and stop //streaming if the sink tells us to if (!sink.Add(stmt)) return; count++; } } else { //Just add the statement and stop if the sink tells us to stop streaming if (!sink.Add(stmt)) return; count++; } } }
bool Select(Entity[] subjects, Entity[] predicates, Resource[] objects, Entity[] metas, LiteralFilter[] litFilters, int limit, StatementSink sink, bool ask) { // TODO: Change meta into named graphs. Anything but a null or DefaultMeta // meta returns no statements immediately. if (metas != null && (metas.Length != 1 || metas[0] != Statement.DefaultMeta)) return false; string query; bool nonull = false; if (subjects != null && subjects.Length == 1 && predicates != null && predicates.Length == 1 && objects != null && objects.Length == 1) { query = "ASK WHERE { " + S(subjects[0], null) + " " + S(predicates[0], null) + " " + S(objects[0], null) + "}"; nonull = true; } else { if (ask) query = "ASK { "; else query = "SELECT * WHERE { "; query += S(subjects, "?subject"); query += " "; query += S(predicates, "?predicate"); query += " "; query += S(objects, "?object"); query += " . "; query += SL(subjects, "?subject", false); query += SL(predicates, "?predicate", false); query += SL(objects, "?object", false); query += " }"; // TODO: Pass literal filters to server. } if (limit >= 1) query += " LIMIT " + limit; Statement d = new Statement( (subjects != null && subjects.Length == 1) ? subjects[0] : null, (predicates != null && predicates.Length == 1) ? predicates[0] : null, (objects != null && objects.Length == 1) ? objects[0] : null); if (ask || nonull) { BooleanWrap bw = new BooleanWrap(); Load(query, bw); if (ask) return bw.value; else if (bw.value) sink.Add(new Statement(subjects[0], predicates[0], objects[0])); return false; } else { Load(query, new QueryToStatements(sink, litFilters, d)); return true; } }
/// <summary> /// Selects all statements from this source and streams them into the given Sink /// </summary> /// <param name="sink">Statement Sink</param> public void Select(StatementSink sink) { foreach (IGraph g in this._store.Graphs) { //Get the Hash Code of the Graphs URI and create a new empty mapping if necessary Entity graphUri; int hash; if (g.BaseUri == null) { graphUri = new Entity(GraphCollection.DefaultGraphUri); hash = new Uri(GraphCollection.DefaultGraphUri).GetEnhancedHashCode(); } else { graphUri = new Entity(g.BaseUri.ToString()); hash = g.BaseUri.GetEnhancedHashCode(); } SemWebMapping mapping = this.GetMapping(hash, g); foreach (Triple t in g.Triples) { Statement stmt = SemWebConverter.ToSemWeb(t, mapping); stmt.Meta = graphUri; if (!sink.Add(stmt)) return; } } }
public static void AddLiteral (StatementSink sink, string predicate, string value) { Statement stmt = new Statement (FSpotXMPBase, (Entity)MetadataStore.Namespaces.Resolve (predicate), new SemWeb.Literal (value)); sink.Add (stmt); }
/// <summary> /// Selects all statements from this source and streams them into the given Sink /// </summary> /// <param name="sink">Statement Sink</param> /// <remarks> /// This is essentially the same code as the <see cref="SemWebConverter.ToSemWeb">ToSemWeb(IGraph g, StatementSink sink)</see> function but we need to maintain a consistent mapping of BNodes for the source /// </remarks> public void Select(StatementSink sink) { foreach (Triple t in this._g.Triples) { Statement stmt = SemWebConverter.ToSemWeb(t, this._mapping); if (!sink.Add(stmt)) return; } }
private void Select(Resource templateSubject, Resource templatePredicate, Resource templateObject, Resource templateMeta, LiteralFilter[] litFilters, StatementSink result, int limit) { if (result == null) throw new ArgumentNullException(); lock (syncroot) { Init(); RunAddBuffer(); // Don't select on columns that we already know from the template. // But grab the URIs and literal values for MultiRes selection. SelectColumnFilter columns = new SelectColumnFilter(); columns.SubjectId = (templateSubject == null) || templateSubject is MultiRes; columns.PredicateId = (templatePredicate == null) || templatePredicate is MultiRes; columns.ObjectId = (templateObject == null) || templateObject is MultiRes; columns.MetaId = (templateMeta == null) || templateMeta is MultiRes; columns.SubjectUri = templateSubject == null; columns.PredicateUri = templatePredicate == null; columns.ObjectData = templateObject == null || (templateObject is MultiRes && ((MultiRes)templateObject).ContainsLiterals()); columns.MetaUri = templateMeta == null; if (isOrContains(templatePredicate, rdfs_member)) { columns.PredicateId = true; columns.PredicateUri = true; } // Meta URIs tend to be repeated a lot, so we don't // want to ever select them from the database. // This preloads them, although it makes the first // select quite slow. /*if (templateMeta == null && SupportsSubquery) { LoadMetaEntities(); columns.MetaUri = false; }*/ // Have to select something bool fakeSubjectIdSelect = false; if (!columns.SubjectId && !columns.PredicateId && !columns.ObjectId && !columns.MetaId) { columns.SubjectId = true; fakeSubjectIdSelect = true; } // Pre-cache the IDs of resources in a MultiRes. TODO: Pool these into one array. foreach (Resource r in new Resource[] { templateSubject, templatePredicate, templateObject, templateMeta }) { MultiRes mr = r as MultiRes; if (mr == null) continue; PrefetchResourceIds(mr.items); CleanMultiRes(mr); if (mr.items.Length == 0) // no possible values return; } // SQLite has a problem with LEFT JOIN: When a condition is made on the // first table in the ON clause (q.objecttype=0/1), when it fails, // it excludes the row from the first table, whereas it should only // exclude the results of the join. System.Text.StringBuilder cmd = new System.Text.StringBuilder("SELECT "); if (!SupportsLimitClause && limit >= 1) { cmd.Append("TOP "); cmd.Append(limit); cmd.Append(' '); } if (!HasUniqueStatementsConstraint) cmd.Append("DISTINCT "); SelectFilterColumns(columns, cmd); cmd.Append(" FROM "); cmd.Append(table); cmd.Append("_statements AS q"); SelectFilterTables(columns, cmd); cmd.Append(' '); bool wroteWhere; if (!WhereClause(templateSubject, templatePredicate, templateObject, templateMeta, cmd, out wroteWhere)) return; // Transform literal filters into SQL. if (litFilters != null) { foreach (LiteralFilter f in litFilters) { string s = FilterToSQL(f, "lit.value"); if (s != null) { if (!wroteWhere) { cmd.Append(" WHERE "); wroteWhere = true; } else { cmd.Append(" AND "); } cmd.Append(' '); cmd.Append(s); } } } if (SupportsLimitClause && limit >= 1) { cmd.Append(" LIMIT "); cmd.Append(limit); } cmd.Append(';'); if (Debug) { string cmd2 = cmd.ToString(); //if (cmd2.Length > 80) cmd2 = cmd2.Substring(0, 80); Console.Error.WriteLine(cmd2); } Hashtable entMap = new Hashtable(); // Be sure if a MultiRes is involved we hash the // ids of the entities so we can return them // without creating new ones. 
CacheMultiObjects(entMap, templateSubject); CacheMultiObjects(entMap, templatePredicate); CacheMultiObjects(entMap, templateObject); CacheMultiObjects(entMap, templateMeta); using (IDataReader reader = RunReader(cmd.ToString())) { while (reader.Read()) { int col = 0; Int64 sid = -1, pid = -1, ot = -1, oid = -1, mid = -1; string suri = null, puri = null, ouri = null, muri = null; string lv = null, ll = null, ld = null; if (columns.SubjectId) { sid = reader.GetInt64(col++); } if (columns.PredicateId) { pid = reader.GetInt64(col++); } if (columns.ObjectId) { oid = reader.GetInt64(col++); } if (columns.MetaId) { mid = reader.GetInt64(col++); } if (columns.SubjectUri) { suri = AsString(reader[col++]); } if (columns.PredicateUri) { puri = AsString(reader[col++]); } if (columns.ObjectData) { ot = reader.GetInt64(col++); ouri = AsString(reader[col++]); lv = AsString(reader[col++]); ll = AsString(reader[col++]); ld = AsString(reader[col++]);} if (columns.MetaUri) { muri = AsString(reader[col++]); } Entity subject = GetSelectedEntity(sid, suri, templateSubject, columns.SubjectId && !fakeSubjectIdSelect, columns.SubjectUri, entMap); Entity predicate = GetSelectedEntity(pid, puri, templatePredicate, columns.PredicateId, columns.PredicateUri, entMap); Resource objec = GetSelectedResource(oid, ot, ouri, lv, ll, ld, templateObject, columns.ObjectId, columns.ObjectData, entMap); Entity meta = GetSelectedEntity(mid, muri, templateMeta, columns.MetaId, columns.MetaUri, templateMeta != null ? entMap : null); if (litFilters != null && !LiteralFilter.MatchesFilters(objec, litFilters, this)) continue; bool ret = result.Add(new Statement(subject, predicate, objec, meta)); if (!ret) break; } } } // lock }
void ReadLoop(StatementSink sink) { while (!finished) { int nr = nextRead; // Check that we can advance (i.e. not cross the write pointer). while (nr == nextWrite && !finished) { if (SLEEP_DURATION > 0) Thread.Sleep(SLEEP_DURATION); else hasData.WaitOne(); } if (finished) return; int nw = nextWrite; int addctr = 0; while (nr != nw) { Statement s = buffer[nr]; nr = (nr == len-1) ? 0 : nr+1; if ((addctr++ & 0xFF) == 0) { nextRead = nr; hasSpace.Set(); } canceled = !sink.Add(s); if (canceled) break; } nextRead = nr; hasSpace.Set(); if (canceled) break; } }
/// <summary> /// Selects Statements from the Source based on a Template /// </summary> /// <param name="template">Statement Template</param> /// <param name="sink">Sink to stream results to</param> public void Select(Statement template, StatementSink sink) { //Convert Template to an Enumerable IEnumerable<Triple> ts = Enumerable.Empty<Triple>(); int hash; if (template.Meta != Statement.DefaultMeta && template.Meta != null) { //Select from the specific Graph if it exists Uri graphUri; if (template.Meta.Uri == null) { hash = new Uri(GraphCollection.DefaultGraphUri).GetEnhancedHashCode(); graphUri = null; } else { graphUri = new Uri(template.Meta.Uri); hash = graphUri.GetEnhancedHashCode(); } if (this._store.HasGraph(graphUri)) { ts = this.TemplateToEnumerable(template, this._store.Graph(graphUri)); SemWebMapping mapping = this.GetMapping(hash, this._store.Graph(graphUri)); foreach (Triple t in ts) { //Keep streaming Triples until the sink tells us to stop Statement stmt = SemWebConverter.ToSemWeb(t, mapping); if (!sink.Add(stmt)) return; } } } else { //Output the results from each Graph in turn foreach (IGraph g in this._store.Graphs) { Entity graphUri; if (g.BaseUri == null) { hash = new Uri(GraphCollection.DefaultGraphUri).GetEnhancedHashCode(); graphUri = new Entity(GraphCollection.DefaultGraphUri); } else { hash = g.BaseUri.GetEnhancedHashCode(); graphUri = new Entity(g.BaseUri.ToString()); } SemWebMapping mapping = this.GetMapping(hash, g); foreach (Triple t in this.TemplateToEnumerable(template, g)) { Statement stmt = SemWebConverter.ToSemWeb(t, mapping); stmt.Meta = graphUri; if (!sink.Add(stmt)) return; } } } }
void WriteGraph(RdfGraph graph, RdfSourceWrapper sourcewrapper, StatementSink sink) { if (sink is RdfWriter) ((RdfWriter)sink).Namespaces.AddFrom(GetQueryPrefixes()); java.util.Iterator iter = graph.iterator(); while (iter.hasNext()) { GraphStatement stmt = (GraphStatement)iter.next(); Statement s; if (stmt is GraphStatementWrapper) { s = ((GraphStatementWrapper)stmt).s; } else { s = new Statement( sourcewrapper.ToEntity(stmt.getSubject()), sourcewrapper.ToEntity(stmt.getPredicate()), sourcewrapper.ToResource(stmt.getObject()), stmt.getGraphName() == null ? Statement.DefaultMeta : sourcewrapper.ToEntity(stmt.getGraphName())); } if (s.AnyNull) continue; // unbound variable, or literal in bad position sink.Add(s); } }
private void Select2(SelectFilter filter, StatementSink result) { // Don't select on columns that we already know from the template. SelectColumnFilter columns = new SelectColumnFilter(); columns.Subject = (filter.Subjects == null) || (filter.Subjects.Length > 1); columns.Predicate = (filter.Predicates == null) || (filter.Predicates.Length > 1); columns.Object = (filter.Objects == null) || (filter.Objects.Length > 1); columns.Meta = (filter.Metas == null) || (filter.Metas.Length > 1); if (filter.Predicates != null && Array.IndexOf(filter.Predicates, rdfs_member) != -1) columns.Predicate = true; // Have to select something if (!columns.Subject && !columns.Predicate && !columns.Object && !columns.Meta) columns.Subject = true; System.Text.StringBuilder cmd = new System.Text.StringBuilder("SELECT "); if (!connection.AreStatementsUnique) cmd.Append("DISTINCT "); ArrayList cols = new ArrayList(); if (columns.Subject) { cols.Add("sinfo.type"); cols.Add("sinfo.value"); } if (columns.Predicate) { cols.Add("pinfo.type"); cols.Add("pinfo.value"); } if (columns.Object) { cols.Add("oinfo.type"); cols.Add("oinfo.value"); cols.Add("oinfo.language"); cols.Add("oinfo.datatype"); } if (columns.Meta) { cols.Add("minfo.type"); cols.Add("minfo.value"); } cmd.Append(String.Join(", ", (String[])cols.ToArray(typeof(String)))); cmd.Append(" FROM "); cmd.Append(prefix); cmd.Append("_statements AS q"); if (columns.Subject) { cmd.Append(" LEFT JOIN "); cmd.Append(prefix); cmd.Append("_values AS sinfo ON q.subject = sinfo.id"); } if (columns.Predicate) { cmd.Append(" LEFT JOIN "); cmd.Append(prefix); cmd.Append("_values AS pinfo ON q.predicate = pinfo.id"); } if (columns.Object) { cmd.Append(" LEFT JOIN "); cmd.Append(prefix); cmd.Append("_values AS oinfo ON q.object = oinfo.id"); } if (columns.Meta) { cmd.Append(" LEFT JOIN "); cmd.Append(prefix); cmd.Append("_values AS minfo ON q.meta = minfo.id"); } cmd.Append(' '); bool wroteWhere = WhereClause(filter, cmd); // Transform literal filters into SQL. if (filter.LiteralFilters != null) { foreach (LiteralFilter f in filter.LiteralFilters) { string s = FilterToSQL(f, "oinfo.value"); if (s != null) { if (!wroteWhere) { cmd.Append(" WHERE "); wroteWhere = true; } else { cmd.Append(" AND "); } cmd.Append(' '); cmd.Append(s); } } } if (filter.Limit >= 1) { cmd.Append(" LIMIT "); cmd.Append(filter.Limit); } cmd.Append(';'); if (Debug) { string cmd2 = cmd.ToString(); //if (cmd2.Length > 80) cmd2 = cmd2.Substring(0, 80); Console.Error.WriteLine(cmd2); } using (IDataReader reader = connection.RunReader(cmd.ToString())) { while (reader.Read()) { Entity s = columns.Subject ? null : filter.Subjects[0]; Entity p = columns.Predicate ? null : filter.Predicates[0]; Resource o = columns.Object ? null : filter.Objects[0]; Entity m = columns.Meta ? null : filter.Metas[0]; int col = 0; if (columns.Subject) { s = SelectEntity(reader.GetInt32(col++), reader.GetString(col++)); } if (columns.Predicate) { p = SelectEntity(reader.GetInt32(col++), reader.GetString(col++)); } if (columns.Object) { o = SelectResource(reader.GetInt32(col++), reader.GetString(col++), reader.GetString(col++), reader.GetString(col++)); } if (columns.Meta) { m = SelectEntity(reader.GetInt32(col++), reader.GetString(col++)); } if (filter.LiteralFilters != null && !LiteralFilter.MatchesFilters(o, filter.LiteralFilters, this)) continue; bool ret = result.Add(new Statement(s, p, o, m)); if (!ret) break; } } }
/// <summary> /// Selects Statements from the Source based on a Template /// </summary> /// <param name="template">Statement Template</param> /// <param name="sink">Sink to stream results to</param> public void Select(Statement template, StatementSink sink) { //Convert Template to an Enumerable IEnumerable<Triple> ts = this.TemplateToEnumerable(template); foreach (Triple t in ts) { //Keep streaming Triples until the sink tells us to stop Statement stmt = SemWebConverter.ToSemWeb(t, this._mapping); if (!sink.Add(stmt)) return; } }