// Forwards the local query to every configured remote beagled over
// HTTP and streams the remote hits/subtractions into 'result'.
// NOTE(review): the capture-and-"throw throw_me" pattern below loses
// the original stack trace; a try/finally would preserve it — confirm
// before changing.
public void DoQuery (Query query, IQueryResult result, IQueryableChangeData data)
{
	// Get rid of the standard UnixTransport so that we can
	// forward our local query to remote hosts.
	query.Transports.Clear ();

	// Register one HttpTransport per configured network service.
	// NOTE(review): service [1] is presumably the service URL — confirm
	// against the Conf.Networking option layout.
	List<string[]> network_services = Conf.Networking.GetListOptionValues (Conf.Names.NetworkServices);
	if (network_services != null) {
		foreach (string[] service in network_services)
			query.RegisterTransport (new HttpTransport (service [1]));
	}

	// Anonymous delegates cannot be un-registered ... hence
	// the named handler variables below.
	Query.HitsAdded hits_added_handler;
	hits_added_handler = delegate (HitsAddedResponse response) {
		//Console.WriteLine ("Adding hits added response");
		TransformResponse(response);
		result.Add (response.Hits, response.NumMatches);
	};

	Query.HitsSubtracted hits_subtracted_handler;
	hits_subtracted_handler = delegate (HitsSubtractedResponse response) {
		// Console.WriteLine ("Adding hits subtracted response");
		result.Subtract (response.Uris);
	};

	Query.Finished finished_handler;
	finished_handler = delegate (FinishedResponse response) {
		//Console.WriteLine ("Adding finished response");
		// NO-OP
	};

	// FIXME: ClosedEvent ? Should be handled by HttpTransport but should we do something more

	query.HitsAddedEvent += hits_added_handler;
	query.HitsSubtractedEvent += hits_subtracted_handler;
	query.FinishedEvent += finished_handler;

	// Run the forwarded query synchronously; any failure is captured
	// so the handlers can be detached before it is rethrown.
	Exception throw_me = null;

	try {
		query.SendAsyncBlocking ();
	} catch (Exception ex) {
		throw_me = ex;
	}

	// FIXME FIXME FIXME: Live query does not work!
	query.HitsAddedEvent -= hits_added_handler;
	query.HitsSubtractedEvent -= hits_subtracted_handler;
	query.FinishedEvent -= finished_handler;

	query.Transports.Clear ();

	if (throw_me != null)
		throw throw_me;

	return;
}
// Runs the query text against bugzilla: fetch the bug XML for the
// quoted query text and, when it maps to a hit, report that single hit.
public void DoQuery (Query query, IQueryResult result, IQueryableChangeData changeData)
{
	Logger.Log.Debug ("Kicking off a bugzilla query");

	// FIXME - hard coding the url here
	XmlDocument bug_xml = GetBugzillaXml (query.QuotedText);
	if (bug_xml == null)
		return;

	Hit bug_hit = XmlBugToHit (bug_xml, query.QuotedText);
	if (bug_hit == null)
		return;

	// At most one hit is ever produced per query.
	result.Add (bug_hit);
}
// Runs the query text against bugzilla: fetches the bug XML for the
// quoted query text and, if it maps to a hit, reports it.
public void DoQuery(Query query, IQueryResult result, IQueryableChangeData changeData)
{
    Logger.Log.Debug("Kicking off a bugzilla query");

    // FIXME - hard coding the url here
    XmlDocument xml = GetBugzillaXml(query.QuotedText);

    if (xml != null) {
        // At most one hit is produced per query.
        Hit hit = XmlBugToHit(xml, query.QuotedText);
        if (hit != null) {
            result.Add(hit);
        }
    }
}
// Sends the textual parts of the query to the Google SOAP search API
// and reports the returned result elements as hits.
// NOTE(review): parts are appended without separators — confirm
// multi-part queries aren't accidentally glued together.
public void DoQuery(Query query, IQueryResult result, IQueryableChangeData changeData)
{
    // Collect the searchable text from the human/text query parts.
    StringBuilder query_text = new StringBuilder();

    foreach (QueryPart part in query.Parts) {
        QueryPart_Human human_part = part as QueryPart_Human;
        if (human_part != null) {
            query_text.Append(human_part.QueryString);
            continue;
        }

        QueryPart_Text text_part = part as QueryPart_Text;
        if (text_part != null)
            query_text.Append(text_part.Text);

        // All other query part types are ignored.
    }

    Logger.Log.Debug("Querying google for '" + query_text.ToString() + "'");

    GoogleSearchResult search_result = gss.doGoogleSearch(googleKey, query_text.ToString(), 0, maxResults, false, "", false, "", "", "");

    // Rank mirrors google's own ordering, starting at zero.
    ArrayList google_hits = new ArrayList();
    int rank = 0;

    foreach (ResultElement element in search_result.resultElements) {
        google_hits.Add(FromGoogleResultElement(element, rank));
        ++rank;
    }

    // Report hits along with google's estimate of total matches.
    result.Add(google_hits, search_result.estimatedTotalResultsCount);
}
// Queries the system 'locate' database for the first text term of the
// query and reports matching paths (converted by PathToHit) as hits.
public void DoQuery(Query query, IQueryResult result, IQueryableChangeData data)
{
    // Use the first text part of the query as the locate search term.
    string search = null;
    foreach (QueryPart qp in query.Parts) {
        if (qp is QueryPart_Text) {
            search = ((QueryPart_Text)qp).Text;
            break;
        }
    }

    if (String.IsNullOrEmpty(search)) {
        return;
    }

    SafeProcess pc = new SafeProcess();

    // Double the max-hits since it is hard to tell locate to ignore
    // hidden files and directories; so we prune them later.
    // So if hidden files are returned first, you are doomed
    pc.Arguments = new string[] { "locate", "-P", "-e", "-l", (2 * query.MaxHits).ToString(), search };
    pc.RedirectStandardOutput = true;
    pc.RedirectStandardError = false;
    pc.UseLangC = true;

    try {
        pc.Start();
    } catch (Beagle.Util.SafeProcessException e) {
        Log.Error(e, "Error while running 'locate -P -e -l {0} {1}'", (2 * query.MaxHits), search);
        return;
    }

    ArrayList result_batch = new ArrayList();
    const int MAX_QUEUED_HITS = 25;
    int count = 0;

    // FIX: close the child process even if reading or hit conversion
    // throws; previously an exception here leaked the process.
    try {
        using (StreamReader pout = new StreamReader(pc.StandardOutput)) {
            while (count < query.MaxHits && !pout.EndOfStream) {
                string match = pout.ReadLine();
                Hit hit = PathToHit(match);
                if (hit == null) {
                    // Pruned (e.g. hidden) path -- skip it.
                    continue;
                }

                result_batch.Add(hit);

                // Send hits in small batches to keep responses snappy.
                if (result_batch.Count >= MAX_QUEUED_HITS) {
                    result.Add(result_batch);
                    result_batch.Clear();
                }
                count++;
            }
        }

        // Final flush; 'count' is the number of hits actually produced.
        result.Add(result_batch, count);
    } finally {
        pc.Close();
    }
}
// Applies an index change (or runs a fresh query) against this
// queryable.  'i_change_data' is null for a brand-new query; otherwise
// it carries the uris added/removed by the latest index change.
public void DoQuery(Query query, IQueryResult query_result, IQueryableChangeData i_change_data)
{
    // Hard cast: anything other than our own ChangeData here is a
    // programming error and should throw.
    ChangeData change_data = (ChangeData)i_change_data;

    ICollection added_uris = null;

    // Index listeners never return any initial matches.
    if (change_data == null && query.IsIndexListener) {
        return;
    }

    if (change_data != null) {
        if (change_data.RemovedUris != null) {
            query_result.Subtract(change_data.RemovedUris);
        }

        // If nothing was added, we can safely return now: this change
        // cannot have any further effect on an outstanding live query.
        if (change_data.AddedUris == null || change_data.AddedUris.Count == 0) {
            return;
        }

        added_uris = change_data.AddedUris;

        // If this is an index listener, we don't need to do a query:
        // we just build up synthethic hits and add them unconditionally.
        if (query.IsIndexListener) {
            ArrayList synthetic_hits = new ArrayList();

            foreach (Uri uri in added_uris) {
                Hit hit = new Hit();
                hit.Uri = uri;

                // A hit-filter exception counts as a rejection
                // ('accept' stays false).
                if (our_hit_filter != null) {
                    bool accept = false;

                    try {
                        accept = our_hit_filter(hit);
                    } catch (Exception e) {
                        Log.Warn(e, "Caught an exception in HitFilter for {0}", hit.Uri);
                    }

                    if (!accept) {
                        continue;
                    }
                }

                synthetic_hits.Add(hit);
            }

            if (synthetic_hits.Count > 0) {
                query_result.Add(synthetic_hits);
            }

            return;
        }
    }

    // Normal path: run the real query, restricted to added_uris when
    // this call was triggered by an index change.
    Driver.DoQuery(query, query_result, added_uris, backend_query_part_hook, our_hit_filter);
}
///<summary>
/// GetData
/// Gets the data object associated with the specified key.  Matching
/// entries are merged into <paramref name="resultKeys"/> according to
/// <paramref name="mergeType"/>; the scan honors
/// <paramref name="token"/> and runs under the instance reader lock.
///</summary>
public void GetData(T key, COMPARE compareType, IQueryResult resultKeys, CollectionOperation mergeType, CancellationToken token)
{
    int result;
    RedBlackNode<T> treeNode = rbTree;   // begin at root
    IDictionaryEnumerator en = null;
    string pattern;
    WildcardEnabledRegex regex;

    // String keys are compared case-insensitively (via ToLower), so
    // remember whether the incoming key is a string.
    bool isStringValue = key is string;

    try {
        rwLock.AcquireReaderLock(Timeout.Infinite);

        switch (compareType) {
        case COMPARE.EQ:
            // Binary-search down the tree until the key is found (or
            // we fall off at the sentinel).
            while (treeNode != _sentinelNode) {
                ThrowIfCancelled(token);

                if (isStringValue && treeNode.Key is string) {
                    result = treeNode.Key.ToString().ToLower().CompareTo(key.ToString().ToLower());
                } else {
                    result = treeNode.Key.CompareTo(key);
                }

                if (result == 0) {
                    lastNodeFound = treeNode;
                    resultKeys.Add(treeNode.Data, mergeType);
                    break;
                }

                // Node key greater than search key -> go left,
                // otherwise go right.
                treeNode = (result > 0) ? treeNode.Left : treeNode.Right;
            }
            break;

        case COMPARE.NE:
            resultKeys = GetWrappedResult(resultKeys, key, true);
            en = GetEnumerator();
            while (en.MoveNext()) {
                ThrowIfCancelled(token);
                if (CompareEntryKey(en.Key, key, isStringValue) != 0) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                }
            }
            break;

        case COMPARE.GT:
            resultKeys = GetWrappedResult(resultKeys, key, false);
            en = GetEnumerator(false);
            while (en.MoveNext()) {
                ThrowIfCancelled(token);
                if (CompareEntryKey(en.Key, key, isStringValue) > 0) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                } else {
                    // Early exit: NOTE(review): assumes GetEnumerator(false)
                    // yields all matching keys before the first non-match —
                    // confirm the enumerator's ordering.
                    break;
                }
            }
            break;

        case COMPARE.LT:
            resultKeys = GetWrappedResult(resultKeys, key, false);
            en = GetEnumerator(true);
            while (en.MoveNext()) {
                ThrowIfCancelled(token);
                if (CompareEntryKey(en.Key, key, isStringValue) < 0) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                } else {
                    break;
                }
            }
            break;

        case COMPARE.GTEQ:
            resultKeys = GetWrappedResult(resultKeys, key, false);
            en = this.GetEnumerator(false);
            while (en.MoveNext()) {
                ThrowIfCancelled(token);
                if (CompareEntryKey(en.Key, key, isStringValue) >= 0) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                } else {
                    break;
                }
            }
            break;

        case COMPARE.LTEQ:
            resultKeys = GetWrappedResult(resultKeys, key, false);
            en = this.GetEnumerator();
            while (en.MoveNext()) {
                ThrowIfCancelled(token);
                if (CompareEntryKey(en.Key, key, isStringValue) <= 0) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                } else {
                    break;
                }
            }
            break;

        case COMPARE.REGEX:
            resultKeys = GetWrappedResult(resultKeys, key, false);
            en = this.GetEnumerator();
            pattern = key as string;
            regex = new WildcardEnabledRegex(pattern);
            while (en.MoveNext()) {
                ThrowIfCancelled(token);
                // Only string keys can match a pattern.
                if (en.Key is string && regex.IsMatch(en.Key.ToString().ToLower())) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                }
            }
            break;

        case COMPARE.IREGEX:
            resultKeys = GetWrappedResult(resultKeys, key, true);
            en = this.GetEnumerator();
            pattern = key as string;
            regex = new WildcardEnabledRegex(pattern);
            while (en.MoveNext()) {
                ThrowIfCancelled(token);
                if (en.Key is string && !regex.IsMatch(en.Key.ToString().ToLower())) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                }
            }
            break;
        }

        resultKeys.Mark(mergeType);
    } finally {
        rwLock.ReleaseReaderLock();
    }
}

// Cancellation guard.  FIX: CancellationToken is a value type, so the
// old "token != null" test was always true and has been dropped.
private static void ThrowIfCancelled(CancellationToken token)
{
    if (token.IsCancellationRequested) {
        throw new OperationCanceledException(ExceptionsResource.OperationFailed);
    }
}

// Compares an entry key against the query key, case-insensitively when
// both are strings (mirrors the tree's own comparison rule).
private static int CompareEntryKey(object entryKey, object key, bool isStringValue)
{
    if (isStringValue && entryKey is string) {
        return entryKey.ToString().ToLower().CompareTo(key.ToString().ToLower());
    }

    return ((IComparable)entryKey).CompareTo(key);
}
// Assembles, scores, and streams out the hits for a completed Lucene
// query.  'primary_matches' is the bitmask of matching primary-index
// docs; at most 'max_results' hits are materialized, but the total
// match count is still reported to 'result'.
private static void GenerateQueryResults(IndexReader primary_reader, IndexReader secondary_reader, BetterBitArray primary_matches, IQueryResult result, ICollection query_term_list, int max_results, HitFilter hit_filter, string index_name)
{
    int num_hits;

    if (Debug) {
        Logger.Log.Debug(">>> {0}: Initially handed {1} matches", index_name, primary_matches.TrueCount);
    }

    if (primary_matches.TrueCount <= max_results) {
        if (Debug) {
            Logger.Log.Debug(">>> {0}: Initial count is within our limit of {1}", index_name, max_results);
        }
        num_hits = primary_matches.TrueCount;
    } else {
        if (Debug) {
            Logger.Log.Debug(">>> {0}: Number of hits is capped at {1}", index_name, max_results);
        }
        num_hits = max_results;
    }

    Stopwatch total, d, e;
    total = new Stopwatch();
    d = new Stopwatch();
    e = new Stopwatch();

    total.Start();

    ArrayList final_list_of_hits = null;

    // This is used only for scoring
    Dictionary<int, Hit> hits_by_id = new Dictionary<int, Hit>(num_hits);

    int total_number_of_matches = primary_matches.TrueCount;

    // When there are more matches than we can return, try the
    // recent-docs scan first; fall back to the generic path otherwise.
    if (primary_matches.TrueCount > max_results) {
        final_list_of_hits = ScanRecentDocs(primary_reader, secondary_reader, primary_matches, hits_by_id, max_results, ref total_number_of_matches, hit_filter, index_name);
    }

    if (final_list_of_hits == null) {
        final_list_of_hits = FindRecentResults(primary_reader, secondary_reader, primary_matches, hits_by_id, max_results, ref total_number_of_matches, hit_filter, index_name);
    }

    d.Start();
    ScoreHits(hits_by_id, primary_reader, query_term_list);
    hits_by_id = null;
    d.Stop();

    if (Debug) {
        Log.Debug(">>> {0}: Scored hits in {1}", index_name, d);
    }

    e.Start();

    // 25 hits seems to be the sweet spot: anything lower
    // and serialization overhead gets us, higher takes
    // longer to send out.
    const int MAX_QUEUED_HITS = 25;

    // Break up the hits into reasonably sized chunks for sending over
    // the wire.  (FIX: replaces the old index-juggling for-loop with a
    // plain while; also drops the unused 'sent_index' local.)
    while (final_list_of_hits.Count > MAX_QUEUED_HITS) {
        result.Add(final_list_of_hits.GetRange(0, MAX_QUEUED_HITS));
        final_list_of_hits.RemoveRange(0, MAX_QUEUED_HITS);
    }

    // Flush the remaining hits, along with the total match count.
    result.Add(final_list_of_hits, total_number_of_matches);
    final_list_of_hits = null;

    e.Stop();

    if (Debug) {
        Log.Debug(">>> {0}: Hit filters executed and results sent in {1}", index_name, e);
    }

    total.Stop();

    if (Debug) {
        Logger.Log.Debug(">>> {0}: GenerateQueryResults time statistics:", index_name);
        Logger.Log.Debug(">>> {0}: Scored hits {1,6} ({2:0.0}%)", index_name, d, 100 * d.ElapsedTime / total.ElapsedTime);
        Logger.Log.Debug(">>> {0}: Results sent {1,6} ({2:0.0}%)", index_name, e, 100 * e.ElapsedTime / total.ElapsedTime);
        Logger.Log.Debug(">>> {0}: TOTAL {1,6}", index_name, total);
    }
}
// Async completion callback for a remote beagle web-service query.
// Converts each remote HitResult into a NetworkHit, pushes the batch
// into the query result, and — if the remote index has more results —
// chains another async GetResults request with itself as the callback.
// Always marks the request context as processed before returning
// (except on the continuation path, which returns early on purpose).
public static void DoQueryResponseHandler(IAsyncResult ar)
{
    ReqContext rc = (ReqContext)ar.AsyncState;

    IQueryable iq = rc.GetQueryable;
    BeagleWebService wsp = rc.GetProxy;
    IQueryResult result = rc.GetResult;

    int count = 0;
    //bool hitRejectsLogged = false;

    try {
        SearchResult resp = wsp.EndBeagleQuery(ar);

        if ((resp != null) && (resp.numResults > 0)) {
            // Remember the remote search token from the first response.
            if (rc.SearchToken == null) {
                rc.SearchToken = resp.searchToken;
            }

            //NetContext nc = new NetContext(wsp, resp.searchToken);

            HitResult[] hres = resp.hitResults;
            ArrayList nwhits = new ArrayList();

            // A failure converting one hit must not abort the rest,
            // hence the per-hit try/catch.
            for (int i = 0; i < hres.Length; i++) {
                try {
                    HitResult hr = hres[i];
                    Hit hit = new NetworkHit();

                    //[Uri Format] netbeagle://164.99.153.134:8888/searchToken?http:///....
                    if (hr.uri.StartsWith(NetworkedBeagle.BeagleNetPrefix)) {
                        hit.UriAsString = hr.uri;
                    } else {
                        // Rewrite the remote uri into the netbeagle scheme;
                        // fragments[2] is the host:port part of the uri.
                        string[] fragments = hr.uri.Split('/');
                        string hostNamePort = fragments[2];
                        hit.UriAsString = NetworkedBeagle.BeagleNetPrefix + hostNamePort + "/" + resp.searchToken + "?" + hr.uri;
                    }

                    hit.Type = hr.resourceType;
                    hit.MimeType = hr.mimeType;
                    hit.Source = "Network";
                    //hit.Source = hr.source;
                    hit.Score = hr.score;

                    // Copy over the remote hit's properties.
                    if (hr.properties.Length > 0) {
                        foreach (HitProperty hp in hr.properties) {
                            Property p = Property.New(hp.PKey, hp.PVal);
                            p.IsMutable = hp.IsMutable;
                            p.IsSearched = hp.IsSearched;
                            hit.AddProperty(p);
                        }
                    }

                    //Add Snippet
                    ((NetworkHit)hit).snippet = hr.snippet;
                    //if (hr.snippet != null)
                    //log.Debug("\nNBH: URI" + i + "=" + hr.uri + "\n Snippet=" + hr.snippet);

                    ((NetworkHit)hit).context = new NetContext(hr.hashCode);

                    //Add NetBeagleQueryable instance
                    hit.SourceObject = iq;
                    hit.SourceObjectName = ((NetworkedBeagle)iq).Name;

                    nwhits.Add(hit);
                    count++;
                } catch (Exception ex2) {
                    log.Warn("Exception in NetBeagleHandler: DoQueryResponseHandler() while processing NetworkHit: {0} from {1}\n Reason: {2} ", hres[i].uri, wsp.Hostname + ":" + wsp.Port, ex2.Message);
                    //log.Error ("Exception StackTrace: " + ex.StackTrace);
                }
            } //end for

            if (nwhits.Count > 0) {
                result.Add(nwhits);
            }

            /*
             * if ((! result.Add (nwhits)) && (! hitRejectsLogged))
             * {
             * hitRejectsLogged = true;
             * log.Info("NetBeagleHandler: Network Hits rejected by HitRegulator. Too many Hits!");
             * }
             */

            log.Info("NetBeagleHandler: DoQueryResponseHandler() Got {0} result(s) from Index {1} from Networked Beagle at {2}", count, resp.firstResultIndex, wsp.Hostname + ":" + wsp.Port);

            // More results pending on the remote side?  Chain another
            // async fetch and return WITHOUT marking the request done.
            int index = resp.firstResultIndex + resp.numResults;

            if (index < resp.totalResults) {
                log.Debug("NetBeagleHandler: DoQueryResponseHandler() invoking GetResults with index: " + index);

                string searchToken = resp.searchToken;

                GetResultsRequest req = new GetResultsRequest();
                req.startIndex = index;
                req.searchToken = searchToken;

                IAsyncResult ar2;
                ar2 = wsp.BeginGetResults(req, NetBeagleHandler.DoQueryResponseHandler, rc);

                return;
            }
        } //end if
        else {
            if (resp == null) {
                log.Warn("NetBeagleHandler: DoQueryResponseHandler() got Null response from EndBeagleQuery() !");
            }
        }
    } catch (Exception ex) {
        log.Error("Exception in NetBeagleHandler: DoQueryResponseHandler() - {0} - for {1} ", ex.Message, wsp.Hostname + ":" + wsp.Port);
    }

    //Signal completion of request handling
    rc.RequestProcessed = true;
}
///<summary>
/// GetData
/// Gets the data object associated with the specified key.  Matching
/// entries are merged into <paramref name="resultKeys"/> according to
/// <paramref name="mergeType"/>; the whole scan runs under _mutex.
///</summary>
public void GetData(T key, COMPARE compareType, IQueryResult resultKeys, CollectionOperation mergeType)
{
    lock (_mutex) {
        int result;
        RedBlackNode<T> treeNode = rbTree;                 // begin at root
        IDictionaryEnumerator en = this.GetEnumerator();
        string pattern;
        WildcardEnabledRegex regex;

        // String keys are compared case-insensitively (via ToLower).
        bool isStringValue = key is string;

        // FIX: removed the dead 'finalTable' stores (assigned in every
        // case but never read) and the unused 'keyList'/'skippedKeys'
        // locals.  All other behavior is unchanged, including which
        // cases break out of the enumeration early (only LTEQ does).
        switch (compareType) {
        case COMPARE.EQ:
            // traverse tree until node is found
            while (treeNode != _sentinelNode) {
                if (isStringValue && treeNode.Key is string) {
                    result = treeNode.Key.ToString().ToLower().CompareTo(key.ToString().ToLower());
                } else {
                    result = treeNode.Key.CompareTo(key);
                }

                if (result == 0) {
                    lastNodeFound = treeNode;
                    resultKeys.Add(treeNode.Data, mergeType);
                    //return treeNode.Data;
                }

                if (result > 0) {
                    // treenode is greater than the one we are looking
                    // for: move to the left branch.
                    treeNode = treeNode.Left;
                } else {
                    // Equal or less: move to the right branch.
                    // NOTE(review): on an exact match the walk keeps
                    // going right instead of breaking (unlike the
                    // cancellable overload) — confirm this is intended
                    // for duplicate-key handling.
                    treeNode = treeNode.Right;
                }
            }
            break;

        case COMPARE.NE:
            while (en.MoveNext()) {
                if (isStringValue && en.Key is string) {
                    result = en.Key.ToString().ToLower().CompareTo(key.ToString().ToLower());
                } else {
                    result = ((IComparable)en.Key).CompareTo(key);
                }

                if (result != 0) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                }
            }
            break;

        case COMPARE.GT:
            while (en.MoveNext()) {
                if (isStringValue && en.Key is string) {
                    result = en.Key.ToString().ToLower().CompareTo(key.ToString().ToLower());
                } else {
                    result = ((IComparable)en.Key).CompareTo(key);
                }

                if (result > 0) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                }
            }
            break;

        case COMPARE.LT:
            while (en.MoveNext()) {
                if (isStringValue && en.Key is string) {
                    result = en.Key.ToString().ToLower().CompareTo(key.ToString().ToLower());
                } else {
                    result = ((IComparable)en.Key).CompareTo(key);
                }

                if (result < 0) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                }
            }
            break;

        case COMPARE.GTEQ:
            while (en.MoveNext()) {
                if (isStringValue && en.Key is string) {
                    result = en.Key.ToString().ToLower().CompareTo(key.ToString().ToLower());
                } else {
                    result = ((IComparable)en.Key).CompareTo(key);
                }

                if (result >= 0) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                }
            }
            break;

        case COMPARE.LTEQ:
            while (en.MoveNext()) {
                if (isStringValue && en.Key is string) {
                    result = en.Key.ToString().ToLower().CompareTo(key.ToString().ToLower());
                } else {
                    result = ((IComparable)en.Key).CompareTo(key);
                }

                if (result <= 0) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                } else {
                    break;
                }
            }
            break;

        case COMPARE.REGEX:
            en = this.GetEnumerator();
            pattern = key as string;
            regex = new WildcardEnabledRegex(pattern);

            while (en.MoveNext()) {
                // Only string keys can match a pattern.
                if (en.Key is string && regex.IsMatch(en.Key.ToString().ToLower())) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                }
            }
            break;

        case COMPARE.IREGEX:
            en = this.GetEnumerator();
            pattern = key as string;
            regex = new WildcardEnabledRegex(pattern);

            while (en.MoveNext()) {
                if (en.Key is string && !regex.IsMatch(en.Key.ToString().ToLower())) {
                    resultKeys.Add(en.Value as HashVector, mergeType);
                }
            }
            break;
        }

        resultKeys.Mark(mergeType);
    }
}
// Applies an index change (or runs a fresh query) against this
// queryable; a null i_change_data means a brand-new query.
public void DoQuery (Query query, IQueryResult query_result, IQueryableChangeData i_change_data)
{
	// Hard cast on purpose: anything else here is a programming error.
	ChangeData change_data = (ChangeData) i_change_data;

	ICollection added_uris = null;

	if (change_data == null) {
		// Index listeners never return any initial matches.
		if (query.IsIndexListener)
			return;
	} else {
		if (change_data.RemovedUris != null)
			query_result.Subtract (change_data.RemovedUris);

		added_uris = change_data.AddedUris;

		// Nothing added: this change cannot affect a live query.
		if (added_uris == null || added_uris.Count == 0)
			return;

		if (query.IsIndexListener) {
			// Index listeners skip the real query: synthesize one hit
			// per added uri, still honoring the hit filter.
			ArrayList listener_hits = new ArrayList ();

			foreach (Uri added_uri in added_uris) {
				Hit synthetic = new Hit ();
				synthetic.Uri = added_uri;

				if (our_hit_filter != null) {
					bool keep;

					try {
						keep = our_hit_filter (synthetic);
					} catch (Exception e) {
						// A filter exception counts as a rejection.
						Log.Warn (e, "Caught an exception in HitFilter for {0}", synthetic.Uri);
						keep = false;
					}

					if (! keep)
						continue;
				}

				listener_hits.Add (synthetic);
			}

			if (listener_hits.Count != 0)
				query_result.Add (listener_hits);

			return;
		}
	}

	Driver.DoQuery (query, query_result, added_uris, backend_query_part_hook, our_hit_filter);
}
// Sends the textual parts of 'query' to the Google SOAP search API
// and reports the returned result elements as hits.
public void DoQuery (Query query, IQueryResult result, IQueryableChangeData changeData)
{
	// Concatenate all human/text query parts into one search string.
	// NOTE(review): parts are appended without separators — confirm
	// multi-part queries aren't accidentally glued together.
	StringBuilder sb = new StringBuilder ();

	foreach (QueryPart part in query.Parts) {
		if (part is QueryPart_Human) {
			sb.Append (((QueryPart_Human) part).QueryString);
		} else if (part is QueryPart_Text) {
			sb.Append (((QueryPart_Text) part).Text);
		} else
			continue; // ignore other query parts
	}

	Logger.Log.Debug ("Querying google for '" + sb.ToString () + "'");

	GoogleSearchResult gsr = gss.doGoogleSearch (googleKey, sb.ToString (), 0, maxResults, false, "", false, "", "", "");

	// Rank mirrors google's own result ordering, starting at 0.
	ArrayList hits = new ArrayList ();
	int rank = 0;

	foreach (ResultElement elt in gsr.resultElements) {
		Hit hit = FromGoogleResultElement (elt, rank);
		hits.Add (hit);
		++rank;
	}

	// Report hits along with google's estimate of total matches.
	result.Add (hits, gsr.estimatedTotalResultsCount);
}
// Runs an IMAP search against GMail and converts up to 'maxhits' of
// the matching messages (newest first) into hits on 'result'.
private void DoGMailQuery (string query, int maxhits, IQueryResult result)
{
	Log.Debug ("GMailSearchDriver: Searching for [{0}]", query);

	MessageSet results = imap_client.Search (query, false);
	if (results == null)
		return;

	Log.Debug ("Recvd {0} messages", results.Messages.Count);

	// Walk the matches newest-first.
	ArrayList matched_ids = new ArrayList (results.Messages);
	matched_ids.Reverse ();

	// Hits are sent in small batches to keep responses snappy.
	const int MAX_QUEUED_HITS = 25;
	int remaining = Math.Min (maxhits, matched_ids.Count);
	ArrayList batch = new ArrayList (MAX_QUEUED_HITS);

	foreach (string id in matched_ids) {
		if (remaining == 0)
			break;
		remaining --;

		Log.Debug ("Fetching headers for message id {0}", id);

		MailCollection emails = imap_client.FetchMessages (id, id, false, true, false);
		if (emails == null || emails.Count == 0) {
			Log.Error ("IMAP error: {0}", imap_client.LastError);
			continue;
		}

		foreach (Mail m in emails) {
			Hit hit;

			// Parse the raw header block into a GMime message and
			// convert it to a hit; all GMime objects are disposed.
			using (GMime.StreamMem stream = new GMime.StreamMem (m.Header))
			using (GMime.Parser parser = new GMime.Parser (stream))
			using (GMime.Message message = parser.ConstructMessage ())
				hit = MessageToHit (message);

			if (hit == null) {
				Log.Error ("Bad IMAP email {0}: no msg-id", id);
				continue;
			}

			batch.Add (hit);
		}

		if (batch.Count >= MAX_QUEUED_HITS) {
			result.Add (batch);
			batch.Clear ();
		}
	}

	// Final flush; the total match count is the full id list size.
	result.Add (batch, matched_ids.Count);
}
// Forwards the local query to each configured remote beagled over
// HTTP; remote hits and subtractions are streamed into 'result'.
// FIX: handler detach + transport cleanup now live in a finally
// block, which also preserves the original stack trace of anything
// thrown by SendAsyncBlocking (the old code caught the exception and
// rethrew it with "throw throw_me", resetting the trace).
public void DoQuery(Query query, IQueryResult result, IQueryableChangeData data)
{
    // Get rid of the standard UnixTransport so that we can
    // forward our local query to remote hosts.
    query.Transports.Clear();

    List<string[]> network_services = Conf.Networking.GetListOptionValues(Conf.Names.NetworkServices);
    if (network_services != null) {
        // NOTE(review): service[1] is presumably the service URL —
        // confirm against the Conf.Networking option layout.
        foreach (string[] service in network_services) {
            query.RegisterTransport(new HttpTransport(service[1]));
        }
    }

    // Anonymous delegates cannot be un-registered ... hence the
    // named handler variables below.
    Query.HitsAdded hits_added_handler = delegate(HitsAddedResponse response) {
        TransformResponse(response);
        result.Add(response.Hits, response.NumMatches);
    };

    Query.HitsSubtracted hits_subtracted_handler = delegate(HitsSubtractedResponse response) {
        result.Subtract(response.Uris);
    };

    Query.Finished finished_handler = delegate(FinishedResponse response) {
        // NO-OP
    };

    // FIXME: ClosedEvent ? Should be handled by HttpTransport but should we do something more

    query.HitsAddedEvent += hits_added_handler;
    query.HitsSubtractedEvent += hits_subtracted_handler;
    query.FinishedEvent += finished_handler;

    try {
        query.SendAsyncBlocking();
    } finally {
        // FIXME FIXME FIXME: Live query does not work!
        query.HitsAddedEvent -= hits_added_handler;
        query.HitsSubtractedEvent -= hits_subtracted_handler;
        query.FinishedEvent -= finished_handler;

        query.Transports.Clear();
    }
}
// Assembles, scores, and streams out the hits for a completed Lucene
// query.  'primary_matches' is the bitmask of matching primary-index
// docs; at most 'max_results' hits are materialized, but the total
// match count is still reported to 'result'.
private static void GenerateQueryResults (IndexReader primary_reader, IndexReader secondary_reader, BetterBitArray primary_matches, IQueryResult result, ICollection query_term_list, int max_results, HitFilter hit_filter, string index_name)
{
	int num_hits;

	if (Debug)
		Logger.Log.Debug (">>> {0}: Initially handed {1} matches", index_name, primary_matches.TrueCount);

	if (primary_matches.TrueCount <= max_results) {
		if (Debug)
			Logger.Log.Debug (">>> {0}: Initial count is within our limit of {1}", index_name, max_results);
		num_hits = primary_matches.TrueCount;
	} else {
		if (Debug)
			Logger.Log.Debug (">>> {0}: Number of hits is capped at {1}", index_name, max_results);
		num_hits = max_results;
	}

	Stopwatch total, d, e;
	total = new Stopwatch ();
	d = new Stopwatch ();
	e = new Stopwatch ();

	total.Start ();

	ArrayList final_list_of_hits = null;

	// This is used only for scoring
	Dictionary<int, Hit> hits_by_id = new Dictionary<int, Hit> (num_hits);

	int total_number_of_matches = primary_matches.TrueCount;

	// When there are more matches than we can return, try the
	// recent-docs scan first; fall back to the generic path otherwise.
	if (primary_matches.TrueCount > max_results)
		final_list_of_hits = ScanRecentDocs (primary_reader, secondary_reader, primary_matches, hits_by_id, max_results, ref total_number_of_matches, hit_filter, index_name);

	if (final_list_of_hits == null)
		final_list_of_hits = FindRecentResults (primary_reader, secondary_reader, primary_matches, hits_by_id, max_results, ref total_number_of_matches, hit_filter, index_name);

	d.Start ();
	ScoreHits (hits_by_id, primary_reader, query_term_list);
	hits_by_id = null;
	d.Stop ();

	if (Debug)
		Log.Debug (">>> {0}: Scored hits in {1}", index_name, d);

	e.Start ();

	// 25 hits seems to be the sweet spot: anything lower
	// and serialization overhead gets us, higher takes
	// longer to send out.
	const int MAX_QUEUED_HITS = 25;
	int sent_index = 0; // NOTE(review): never read — candidate for removal

	// Break up the hits into reasonably sized chunks for
	// sending over the wire.
	// The loop body only fires on every MAX_QUEUED_HITS-th index:
	// it flushes the first chunk, removes it from the list, and
	// rewinds 'i' so the counting stays aligned with the shrunk list.
	for (int i = 0; i < final_list_of_hits.Count; ++i) {
		// Flush our hits
		if (i > 0 && i % MAX_QUEUED_HITS == 0) {
			result.Add (final_list_of_hits.GetRange (0, MAX_QUEUED_HITS));
			final_list_of_hits.RemoveRange (0, MAX_QUEUED_HITS);
			i -= MAX_QUEUED_HITS;
		}
	}

	// Flush the remaining hits
	result.Add (final_list_of_hits, total_number_of_matches);
	final_list_of_hits = null;

	e.Stop ();

	if (Debug)
		Log.Debug (">>> {0}: Hit filters executed and results sent in {1}", index_name, e);

	total.Stop ();

	if (Debug) {
		Logger.Log.Debug (">>> {0}: GenerateQueryResults time statistics:", index_name);
		//Logger.Log.Debug (">>> {0}: Short circuit {1,6} ({2:0.0}%)", index_name, a == null ? "N/A" : a.ToString (), a == null ? 0.0 : 100 * a.ElapsedTime / total.ElapsedTime);
		//Logger.Log.Debug (">>> {0}: Create docs {1,6} ({2:0.0}%)", index_name, b, 100 * b.ElapsedTime / total.ElapsedTime);
		//Logger.Log.Debug (">>> {0}: Hit assembly {1,6} ({2:0.0}%)", index_name, c, 100 * c.ElapsedTime / total.ElapsedTime);
		Logger.Log.Debug (">>> {0}: Scored hits {1,6} ({2:0.0}%)", index_name, d, 100 * d.ElapsedTime / total.ElapsedTime);
		Logger.Log.Debug (">>> {0}: Results sent {1,6} ({2:0.0}%)", index_name, e, 100 * e.ElapsedTime / total.ElapsedTime);
		Logger.Log.Debug (">>> {0}: TOTAL {1,6}", index_name, total);
	}
}
// Queries the system 'locate' database for the first text term of the
// query and reports matching paths (converted by PathToHit) as hits.
public void DoQuery (Query query, IQueryResult result, IQueryableChangeData data)
{
	// Use the first text part of the query as the locate search term.
	string search = null;

	foreach (QueryPart qp in query.Parts) {
		if (qp is QueryPart_Text) {
			search = ((QueryPart_Text) qp).Text;
			break;
		}
	}

	if (String.IsNullOrEmpty (search))
		return;

	SafeProcess pc = new SafeProcess ();

	// Double the max-hits since it is hard to tell locate to ignore
	// hidden files and directories; so we prune them later.
	// So if hidden files are returned first, you are doomed
	pc.Arguments = new string[] { "locate", "-P", "-e", "-l", (2 * query.MaxHits).ToString (), search };
	pc.RedirectStandardOutput = true;
	pc.RedirectStandardError = false;
	pc.UseLangC = true;

	try {
		pc.Start ();
	} catch (Beagle.Util.SafeProcessException e) {
		Log.Error (e, "Error while running 'locate -P -e -l {0} {1}'", (2 * query.MaxHits), search);
		return;
	}

	ArrayList result_batch = new ArrayList ();
	const int MAX_QUEUED_HITS = 25;
	int count = 0;

	// FIX: close the child process even if reading or hit conversion
	// throws; previously an exception here leaked the process.
	try {
		using (StreamReader pout = new StreamReader (pc.StandardOutput)) {
			while (count < query.MaxHits && ! pout.EndOfStream) {
				string match = pout.ReadLine ();
				Hit hit = PathToHit (match);
				if (hit == null)
					continue; // pruned (e.g. hidden) path

				result_batch.Add (hit);

				// Send hits in small batches to keep responses snappy.
				if (result_batch.Count >= MAX_QUEUED_HITS) {
					result.Add (result_batch);
					result_batch.Clear ();
				}
				count ++;
			}
		}

		// Final flush; 'count' is the number of hits actually produced.
		result.Add (result_batch, count);
	} finally {
		pc.Close ();
	}
}
// Runs an IMAP search against GMail and converts up to 'maxhits' of
// the matching messages (newest first) into hits on 'result'.
private void DoGMailQuery(string query, int maxhits, IQueryResult result)
{
    Log.Debug("GMailSearchDriver: Searching for [{0}]", query);

    MessageSet results = imap_client.Search(query, false);
    if (results == null) {
        return;
    }

    Log.Debug("Recvd {0} messages", results.Messages.Count);

    // Get the messages in reverse order; latest first
    ArrayList matched_ids = new ArrayList(results.Messages);
    matched_ids.Reverse();

    // Hits are sent in small batches to keep responses snappy.
    const int MAX_QUEUED_HITS = 25;
    int left = Math.Min(maxhits, matched_ids.Count);
    ArrayList result_batch = new ArrayList(MAX_QUEUED_HITS);
    MailCollection emails;
    GMime.StreamMem stream;
    GMime.Parser parser;
    GMime.Message message;
    Hit hit;

    foreach (string id in matched_ids) {
        // Stop once 'maxhits' message ids have been fetched.
        if (left-- == 0) {
            break;
        }

        Log.Debug("Fetching headers for message id {0}", id);

        // NOTE(review): the boolean flags to FetchMessages are
        // undocumented here; only m.Header is consumed below — confirm
        // this fetch is headers-only.
        emails = imap_client.FetchMessages(id, id, false, true, false);
        if (emails == null || emails.Count == 0) {
            Log.Error("IMAP error: {0}", imap_client.LastError);
            continue;
        }

        foreach (Mail m in emails) {
            hit = null;

            // Parse the raw header block into a GMime message and
            // convert it to a hit; all GMime objects are disposed.
            using (stream = new GMime.StreamMem(m.Header))
            using (parser = new GMime.Parser(stream))
            using (message = parser.ConstructMessage())
                hit = MessageToHit(message);

            if (hit == null) {
                Log.Error("Bad IMAP email {0}: no msg-id", id);
                continue;
            } else {
                result_batch.Add(hit);
            }
        }

        if (result_batch.Count >= MAX_QUEUED_HITS) {
            result.Add(result_batch);
            result_batch.Clear();
        }
    }

    // Final flush; the total match count reported is the full id list size.
    result.Add(result_batch, matched_ids.Count);
}