static public bool CheckQuery (Query query, FileSystemObject root)
{
	// Find the set of objects that we expect to match the query,
	// based on our knowledge of the current state of the tree.
	ICollection matching_fsos;
	matching_fsos = root.RecursiveQuery (query);

	// Query the daemon and get the actual list of hits.
	Hashtable matching_hits;
	matching_hits = QueryFu.GetHits (query);

	bool success;
	success = true;

	foreach (FileSystemObject fso in matching_fsos) {
		string uri = UriFu.UriToEscapedString (fso.Uri);
		if (matching_hits.Contains (uri))
			matching_hits.Remove (uri);
		else {
			Log.Failure ("Hit missing from beagled query results: {0}", uri);
			success = false;
		}
	}

	foreach (Hit hit in matching_hits.Values) {
		Log.Failure ("Unexpected extra hit in beagled query results: {0}", hit.Uri);
		Log.Failure (" Properties:");
		foreach (Property prop in hit.Properties)
			Log.Failure (" {0} = {1}", prop.Key, prop.Value);
		success = false;
	}

	return success;
}
public bool AcceptQuery (Query query)
{
	return query != null
		&& (query.IsIndexListener || ! query.IsEmpty)
		&& query.AllowsDomain (Domain)
		&& iqueryable.AcceptQuery (query);
}
public Tile (Hit hit, Query query) : base ()
{
	base.AboveChild = true;
	base.AppPaintable = true;
	base.CanFocus = true;

	this.hit = hit;
	this.query = query;
	this.timestamp = hit.Timestamp;
	this.score = hit.Score;

	Gtk.Drag.SourceSet (this, Gdk.ModifierType.Button1Mask,
			    targets, Gdk.DragAction.Copy | Gdk.DragAction.Move);

	int pad = (int)StyleGetProperty ("focus-line-width")
		+ (int)StyleGetProperty ("focus-padding") + 1;

	hbox = new Gtk.HBox (false, 5);
	hbox.BorderWidth = (uint)(pad + Style.Xthickness);
	hbox.Show ();

	icon = new Gtk.Image ();
	icon.Show ();
	hbox.PackStart (icon, false, false, 0);

	Add (hbox);
}
public Hit (uint id, Beagle.Hit hit, string[] fields, Query query)
{
	this.id = id;
	bHit = hit;
	hitValue = new object[fields.Length];
	int i = 0;

	uri = hit.Uri;

	foreach (string field in fields) {
		// We add String.Empty to attributes because they may be null and we cannot
		// return null objects over DBus
		string[] bfields = Ontologies.XesamToBeagleField (field);

		switch (bfields [0]) {
		case "uri":
			hitValue [i++] = hit.Uri.ToString ();
			break;

		case "mimetype":
			hitValue [i++] = hit.MimeType + String.Empty;
			break;

		case "date":
			hitValue [i++] = hit.Timestamp.ToString ("s");
			break;

		case "snippet":
			SnippetRequest sreq = new SnippetRequest (query, hit);
			SnippetResponse sresp = (SnippetResponse) sreq.Send ();
			hitValue [i++] = sresp.Snippet != null ? sresp.Snippet : String.Empty;
			break;

		default:
			// FIXME: This *will* break since we don't know what the expected
			// type here is, and we're always using strings
			List<string> values = new List<string> ();

			foreach (string bfield in bfields) {
				string[] prop = hit.GetProperties (bfield);

				if (prop != null)
					values.AddRange (prop);
			}

			if (values.Count == 0)
				// No values found
				hitValue [i++] = String.Empty;
			else if (values.Count == 1)
				// Only one value -- return as string
				hitValue [i++] = values [0];
			else
				// Multiple values -- return as string[]
				hitValue [i++] = values.ToArray ();
			break;
		}
	}
}
public void DoQuery (Query query, IQueryResult result, IQueryableChangeData change_data)
{
	try {
		iqueryable.DoQuery (query, result, change_data);
	} catch (Exception ex) {
		Logger.Log.Warn (ex, "Caught exception calling DoQuery on '{0}'", Name);
	}
}
public void DoQuery (Query query, IQueryResult result, IQueryableChangeData data)
{
	// Get rid of the standard UnixTransport so that we can
	// forward our local query to remote hosts.
	query.Transports.Clear ();

	List<string[]> network_services = Conf.Networking.GetListOptionValues (Conf.Names.NetworkServices);
	if (network_services != null) {
		foreach (string[] service in network_services)
			query.RegisterTransport (new HttpTransport (service [1]));
	}

	// Anonymous delegates cannot be un-registered ... hence
	Query.HitsAdded hits_added_handler;
	hits_added_handler = delegate (HitsAddedResponse response) {
		//Console.WriteLine ("Adding hits added response");
		TransformResponse (response);
		result.Add (response.Hits, response.NumMatches);
	};

	Query.HitsSubtracted hits_subtracted_handler;
	hits_subtracted_handler = delegate (HitsSubtractedResponse response) {
		// Console.WriteLine ("Adding hits subtracted response");
		result.Subtract (response.Uris);
	};

	Query.Finished finished_handler;
	finished_handler = delegate (FinishedResponse response) {
		//Console.WriteLine ("Adding finished response");
		// NO-OP
	};

	// FIXME: ClosedEvent ? Should be handled by HttpTransport but should we do something more

	query.HitsAddedEvent += hits_added_handler;
	query.HitsSubtractedEvent += hits_subtracted_handler;
	query.FinishedEvent += finished_handler;

	Exception throw_me = null;

	try {
		query.SendAsyncBlocking ();
	} catch (Exception ex) {
		throw_me = ex;
	}

	// FIXME FIXME FIXME: Live query does not work!
	query.HitsAddedEvent -= hits_added_handler;
	query.HitsSubtractedEvent -= hits_subtracted_handler;
	query.FinishedEvent -= finished_handler;

	query.Transports.Clear ();

	if (throw_me != null)
		throw throw_me;

	return;
}
public SnippetRequest (Query query, Hit hit) : base (false)
{
	this.QueryTerms = new string [query.StemmedText.Count];

	int i = 0;
	foreach (string term in query.StemmedText) {
		this.QueryTerms [i] = term;
		++i;
	}

	this.Hit = hit;
}
static public Query NewTokenQuery (string token)
{
	Query query;
	query = new Query ();

	QueryPart_Text part;
	part = new QueryPart_Text ();
	part.Text = token;

	query.AddPart (part);

	return query;
}
public BeaglePlugin()
{
	server = Daemon.Server;
	db = Daemon.DefaultDatabase;
	log = Daemon.Log;

	query = new Query ();
	query.AddDomain (QueryDomain.Neighborhood);
	query.MaxHits = 10000;

	QueryPart_Property filePart = new QueryPart_Property ();
	filePart.Type = PropertyType.Keyword;
	filePart.Key = "beagle:HitType";
	filePart.Value = "File";
	query.AddPart (filePart);

	QueryPart_Or queryUnion = new QueryPart_Or ();
	foreach (string mt in supportedMimeTypes) {
		QueryPart_Property part = new QueryPart_Property ();
		part.Type = PropertyType.Keyword;
		part.Key = "beagle:MimeType";
		part.Value = mt;
		queryUnion.Add (part);
	}
	query.AddPart (queryUnion);

	query.HitsAddedEvent += OnHitsAdded;
	query.HitsSubtractedEvent += OnHitsSubtracted;
	query.FinishedEvent += OnFinished;

	int attempts = 0;
	while (true) {
		try {
			query.SendAsync ();
			break;
		} catch (Exception e) {
			if (attempts++ >= 5) {
				log.Warn ("Failed to initialize beagle plugin");
				query = null;
				break;
			}

			// something bad happened, wait a sec and try again
			log.Debug ("Sending query failed: " + e.Message);
			log.Debug ("Waiting 3 seconds...");
			Thread.Sleep (3000);
		}
	}
}
public IAsyncResult DoQuery (Query query, IQueryResult result, IQueryableChangeData changeData)
{
	ICollection l;

	SearchRequest sreq = new SearchRequest();

	l = query.Text;
	if ((l != null) && (l.Count > 0))
		sreq.text = ICollection2StringList(query.Text);

	l = query.MimeTypes;
	if ((l != null) && (l.Count > 0))
		sreq.mimeType = ICollection2StringList(query.MimeTypes);

	l = query.Sources;
	if ((l != null) && (l.Count > 0))
		sreq.searchSources = ICollection2StringList(query.Sources);

	sreq.qdomain = QueryDomain.Global;	//Caution: This Enables Cascaded NetBeagle searching !
	//sreq.qdomain = QueryDomain.System;

	//Cache the query request, get a unique searchId and include in network searchRequest:
	sreq.searchId = NetworkedBeagle.AddRequest(query);

	int hc = NetworkedBeagle.HopCount(query);
	sreq.hopCount = (hc > 0) ? hc : 1;

	log.Info("NetBeagleHandler: Starting WebService Query for " + Hostname + ":" + Port);

	ReqContext rc = new ReqContext(wsp, result, netBeagleQueryable);
	IAsyncResult ar = wsp.BeginBeagleQuery(sreq, DoQueryResponseHandler, rc);

	// Return w/o waiting for Async query to complete.
	// Return IAsyncResult handle, to allow caller to control it, if required.
	return ar;
}
public ICollection DoRDFQuery (Query query)
{
	return iqueryable.DoRDFQuery (query);
}
public bool AcceptQuery (Query query)
{
	List<string[]> services = Conf.Networking.GetListOptionValues (Conf.Names.NetworkServices);

	return (services != null && services.Count > 0);
}
public int DoCountMatchQuery (Query query)
{
	return 0;
}
public string doQuery(webArgs wargs)
{
	if (wargs.sessId == null || wargs.searchString == null || wargs.searchString == "")
		return NO_RESULTS;

	log.Debug("WebBackEnd: Got Search String: " + wargs.searchString);

	Query query = new Query();
	query.AddText (wargs.searchString);
	if (wargs.searchSource != null && wargs.searchSource != "") {
		query.AddSource(wargs.searchSource);
		query.AddDomain(QueryDomain.System);
	} else
		query.AddDomain (wargs.globalSearch ? QueryDomain.Global : QueryDomain.System);

	QueryResult qres = new QueryResult ();

	//Note: QueryDriver.DoQuery() local invocation is used.
	//The root tile is used only for adding hits and generating html.
	BT.SimpleRootTile root = new BT.SimpleRootTile ();
	root.Query = query;
	//root.SetSource (searchSource); Do not SetSource on root!

	ResultPair rp = new ResultPair(root);
	bufferRenderContext bctx = new bufferRenderContext(rp);
	Resp resp = new Resp(rp, bctx, wargs.isLocalReq);

	AttachQueryResult (qres, resp);

	//Add sessionId-Resp mapping
	if (sessionResp.Contains(wargs.sessId))
		sessionResp[wargs.sessId] = resp;
	else
		sessionResp.Add(wargs.sessId, resp);

	log.Info("WebBackEnd: Starting Query for string \"{0}\"", wargs.searchString);
	QueryDriver.DoQueryLocal (query, qres);

	//Wait only till we have enough results to display
	while ((result.Contains(qres)) && (root.HitCollection.NumResults < 10))
		Thread.Sleep(100);

	if (root.HitCollection.IsEmpty)
		return NO_RESULTS;

	lock (root) {
		root.Render(bctx);
		return (getResultsLabel(root) + (wargs.isLocalReq ? bctx.buffer : bctx.bufferForExternalQuery));
	}
}
public ICollection RecursiveQuery (Query query)
{
	ArrayList matches;
	matches = new ArrayList ();

	DoRecursiveQuery (query, matches);

	return matches;
}
public ICollection DoRDFQuery (Query query)
{
	return null;
}
static private Query NewRandomQuery (int length, bool allow_inexpensive, bool inside_an_or)
{
	Query query;
	query = new Query ();

	// One in four queries will contain some OR terms.
	if (! inside_an_or && random.Next (4) == 0) {
		int N = random.Next (3) + 1;
		for (int i = 0; i < N; ++i) {
			QueryPart_Or part;
			part = new QueryPart_Or ();

			int sub_length;
			sub_length = random.Next (length) + 1;
			if (sub_length < 2)
				sub_length = 2;

			// We generate a new query at random, and stuff its QueryParts
			// into our Or QueryPart.
			Query or_query;
			or_query = NewRandomQuery (sub_length, allow_inexpensive, true);
			foreach (QueryPart sub_part in or_query.Parts)
				part.Add (sub_part);

			query.AddPart (part);
		}
	}

	if (allow_inexpensive && ! inside_an_or) {
		int mime_type;
		mime_type = random.Next (3);

		QueryPart_Or mime_type_part = new QueryPart_Or ();

		QueryPart_Property part;
		part = new QueryPart_Property ();
		part.Type = PropertyType.Keyword;
		part.Key = "beagle:MimeType";

		if (mime_type == 0) {
			part.Value = "inode/directory";
			mime_type_part.Add (part);
			query.AddPart (mime_type_part);
		} else if (mime_type == 1) {
			part.Value = "text/plain";
			mime_type_part.Add (part);
			query.AddPart (mime_type_part);
		}
	}

	// Every query must contain at least
	// one required part.
	bool contains_required;
	contains_required = false;

	for (int i = 0; i < length; ++i) {
		QueryPart_Text part;
		part = new QueryPart_Text ();
		part.Text = Token.GetRandom ();

		// Prohibited parts are not allowed inside an or
		if (contains_required && ! inside_an_or) {
			if (random.Next (2) == 0)
				part.Logic = QueryPartLogic.Prohibited;
		} else {
			// This part will be required.
			contains_required = true;
		}

		if (random.Next (2) == 0)
			part.SearchTextProperties = false;
		else if (allow_inexpensive && random.Next (2) == 0)
			part.SearchFullText = false;

		query.AddPart (part);
	}

	// Note the ! inside_an_or; date range queries don't
	// work right inside OR queries when being searched
	// within the resolution of one day. See the FIXME
	// about hit filters in LuceneCommon.cs
	if (allow_inexpensive && ! inside_an_or && random.Next (3) == 0) {
		DateTime a, b;
		FileSystemObject.PickTimestampRange (out a, out b);

		QueryPart_DateRange part;
		part = new QueryPart_DateRange ();
		part.StartDate = a;
		part.EndDate = b;
		query.AddPart (part);
	}

	return query;
}
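// A hedged sketch (not from the original sources) of how NewRandomQuery and the
// CheckQuery helper earlier in this file could be driven together, assuming they
// live in the same test harness; the method name and loop count are made up for
// illustration.
static private bool StressQueries (FileSystemObject root, int count)
{
	bool all_ok = true;

	for (int i = 0; i < count; ++i) {
		// Random-length query; allow inexpensive parts, not nested inside an OR.
		Query query = NewRandomQuery (random.Next (4) + 1, true, false);

		if (! CheckQuery (query, root))
			all_ok = false;
	}

	return all_ok;
}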
public bool AcceptQuery (Query query)
{
	// Always return false; there is nothing backing this backend.
	return false;
}
private void RecursiveQueryCheck (ArrayList match_list, Query query)
{
	if (MatchesQuery (query))
		match_list.Add (this);

	if (children != null)
		foreach (FileModel file in children.Values)
			file.RecursiveQueryCheck (match_list, query);
}
private void Query(bool grab_focus)
{
	if (timeout_id != 0) {
		GLib.Source.Remove(timeout_id);
		timeout_id = 0;
	}

	string query = query_text = entry.Text;
	if (String.IsNullOrEmpty(query)) {
		return;
	}

	SetWindowTitle(query);
	ShowInformation(null);

	if (QueryEvent != null) {
		QueryEvent(query);
	}

	view.Clear();
	view.Scope = ScopeType.Everything;
	view.SortType = sort;
	pages.CurrentPage = pages.PageNum(panes);

	this.grab_focus = grab_focus;

	try {
		// Clean up our previous query, if any exists.
		DetachQuery();

		TotalMatches = 0;

		current_query = new Query();
		current_query.QueryDomain = domain;
		current_query.AddText(query);

		current_query.HitsAddedEvent += OnHitsAdded;
		current_query.HitsSubtractedEvent += OnHitsSubtracted;
		current_query.FinishedEvent += OnFinished;

		// Don't search documentation by default
		if (!search.DocsEnabled) {
			QueryPart_Property part = new QueryPart_Property();
			part.Logic = QueryPartLogic.Prohibited;
			part.Type = PropertyType.Keyword;
			part.Key = "beagle:Source";
			part.Value = "documentation";
			current_query.AddPart(part);
		}

		// set scope from scope list
		ScopeMapping mapping = scope_mappings [scope_list.Active];
		if (!String.IsNullOrEmpty(mapping.query_mapping)) {
			current_query.AddText(mapping.query_mapping);
		}

		current_query.SendAsync();

		spinner.Start();
	} catch (Beagle.ResponseMessageException) {
		pages.CurrentPage = pages.PageNum(startdaemon);
	} catch (Exception e) {
		Console.WriteLine("Querying the Beagle daemon failed: {0}", e.Message);
	}
}
public void Dispose()
{
	if (query != null) {
		query.HitsAddedEvent -= OnHitsAdded;
		query.FinishedEvent -= OnFinished;
		query.Close ();
		query = null;
	}

	foreach (Track track in trackHash.Values) {
		db.RemoveTrack (track);
	}
}
static public void SpewQuery (Query query)
{
	int i = 0;
	foreach (QueryPart abstract_part in query.Parts) {
		++i;
		if (abstract_part is QueryPart_Or) {
			QueryPart_Or part = abstract_part as QueryPart_Or;
			int j = 0;
			Log.Spew ("{0}: OR", i);
			foreach (QueryPart sub_part in part.SubParts) {
				++j;
				Log.Spew (" {0}.{1}: {2}", i, j, QueryPartToString (sub_part));
			}
		} else {
			Log.Spew ("{0}: {1}", i, QueryPartToString (abstract_part));
		}
	}
}
virtual public bool MatchesQuery (Query query)
{
	foreach (QueryPart abstract_part in query.Parts) {

		// is_match is tri-state: 1 = this part matches, -1 = it does not,
		// 0 = not yet decided.
		int is_match = 0;

		// Note that this works because we don't
		// allow nested or queries.
		if (abstract_part is QueryPart_Or) {
			QueryPart_Or part;
			part = (QueryPart_Or) abstract_part;

			is_match = -1;
			foreach (QueryPart sub_part in part.SubParts) {
				if (MatchesMetadata (sub_part) == 1 || MatchesQueryPart (sub_part)) {
					is_match = 1;
					break;
				}
			}
		} else {
			// Handle certain query parts related to file system metadata.
			is_match = MatchesMetadata (abstract_part);

			if (is_match == 0)
				is_match = MatchesQueryPart (abstract_part) ? 1 : -1;
		}

		if (abstract_part.Logic == QueryPartLogic.Prohibited)
			is_match = - is_match;

		if (is_match < 0)
			return false;
		else if (is_match == 0)
			throw new Exception ("This will never happen");
	}

	return true;
}
public int DoCountMatchQuery (Query query)
{
	try {
		return iqueryable.DoCountMatchQuery (query);
	} catch (Exception ex) {
		Logger.Log.Warn (ex, "Caught exception calling DoCountMatchQuery on '{0}'", Name);
		return 0;
	}
}
public void DoQuery (Query query, IQueryResult result, IQueryableChangeData data)
{
}
public QueryClosure (Query query)
{
	this.Hits = UriFu.NewHashtable ();
	this.query = query;
}
private void DoRecursiveQuery (Query query, ArrayList matches)
{
	if (this.MatchesQuery (query))
		matches.Add (this);

	if (IsArchive && !SearchInArchives)
		return;

	if (this.HasChildren)
		foreach (FileSystemObject child in this.Children)
			child.DoRecursiveQuery (query, matches);
}
public bool MatchesQuery (Query query)
{
	// We assume the root node never matches any query.
	if (IsRoot)
		return false;

	foreach (QueryPart part in query.Parts) {
		if (! MatchesQueryPart (part))
			return false;
	}

	return true;
}
static public Hashtable GetHits (Query q)
{
	QueryClosure qc;
	qc = new QueryClosure (q);

	q.HitsAddedEvent += qc.OnHitsAdded;
	q.FinishedEvent += qc.OnFinished;

	q.SendAsyncBlocking ();

	return qc.Hits;
}
public ArrayList GetMatchingDescendants (Query query)
{
	ArrayList match_list;
	match_list = new ArrayList ();
	RecursiveQueryCheck (match_list, query);
	return match_list;
}
static public ICollection GetUris (Query q)
{
	return GetHits (q).Keys;
}
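// A minimal usage sketch (not from the original sources): it assumes that
// NewTokenQuery, GetHits and GetUris above are static helpers on the same
// QueryFu class that CheckQuery calls into, and that GetHits keys its
// Hashtable by the escaped URI string of each hit, as CheckQuery does.
// The method name SpewUrisForToken is hypothetical.
static public void SpewUrisForToken (string token)
{
	// Build a one-term query and run it synchronously against the daemon.
	Query query = QueryFu.NewTokenQuery (token);

	// GetUris returns the keys of the Hashtable built by GetHits,
	// i.e. the escaped URI of every matching hit.
	foreach (string uri in QueryFu.GetUris (query))
		Log.Spew ("{0} matched {1}", token, uri);
}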