Beispiel #1
0
        /// <summary>
        /// Starts a new search: echoes the term into the entry widget, records it
        /// in the search history, replaces any previous query with a fresh one
        /// restricted to the Neighborhood domain, and kicks it off via the root tile.
        /// </summary>
        /// <param name="searchString">The text to search for.</param>
        private void Search(String searchString)
        {
            entry.Text = searchString;
            StoreSearch(searchString);

            if (query != null)
            {
                // Best-effort detach: the previous query's transport may already
                // have been disposed, in which case there is nothing left to detach.
                // (Fixed CS0168: the caught exception was named but never used.)
                try { DetachQuery(); } catch (ObjectDisposedException) {}
            }

            query = new Query();
            query.AddDomain(QueryDomain.Neighborhood);

            // FIXME: Disable non-local searching for now.
            //query.AddDomain (QueryDomain.Global);

            query.AddText(searchString);
            root.SetSource(hit_type);

            AttachQuery();

            root.Query = query;
            root.Start();

            StartQuery();

            UpdatePage();
        }
        /// <summary>
        /// Background import worker: builds a Beagle query for all indexed "File"
        /// hits whose mime type we support, wires up the result/finish handlers and
        /// a cancellable user event, sends the query, and finally activates the
        /// library source if it is the active one. On send failure the query is
        /// disposed and the error is pushed to the log UI.
        /// </summary>
        private void ThreadedImport()
        {
            query = new Query();
            query.AddDomain(QueryDomain.Neighborhood);
            query.MaxHits = 10000; // ugh?

            // Restrict hits to plain files.
            QueryPart_Property hit_type_part = new QueryPart_Property();
            hit_type_part.Type = PropertyType.Keyword;
            hit_type_part.Key = "beagle:HitType";
            hit_type_part.Value = "File";
            query.AddPart(hit_type_part);

            // ...and to mime types we know how to import (OR of all supported types).
            QueryPart_Or mime_union = new QueryPart_Or();

            foreach (string mimetype in supported_mime_types)
            {
                QueryPart_Property mime_part = new QueryPart_Property();
                mime_part.Type = PropertyType.Keyword;
                mime_part.Key = "beagle:MimeType";
                mime_part.Value = mimetype;
                mime_union.Add(mime_part);
            }

            query.AddPart(mime_union);

            query.HitsAddedEvent += OnHitsAdded;
            query.FinishedEvent += OnFinished;

            // Surface a cancellable progress event to the user while the query runs.
            user_event = new ActiveUserEvent(Catalog.GetString("Import from Beagle"));
            user_event.Header = Catalog.GetString("Importing from Beagle");
            user_event.Message = Catalog.GetString("Running query...");
            user_event.Icon = Icon;
            user_event.CancelRequested += OnCancelRequested;

            try
            {
                query.SendAsyncBlocking();
            }
            catch (Exception e)
            {
                // Daemon unreachable: clean up the query and report the failure.
                DisposeQuery();
                LogCore.Instance.PushError(Catalog.GetString("Could not query Beagle Daemon"),
                                           e.Message, true);
                return;
            }

            if (SourceManager.ActiveSource is LibrarySource)
            {
                LibrarySource.Instance.Activate();
            }
        }
        //Full beagledQuery
        /// <summary>
        /// Executes a complete beagled query for the web service and packages the
        /// hits into a serializable <c>SearchResult</c>. External (non-local)
        /// requests are checked against the request cache to break forwarding loops
        /// and are demoted to local queries after 5 hops.
        /// </summary>
        /// <param name="sreq">Search request: terms, mime types, sources, query domain, hop count and cache id.</param>
        /// <param name="isLocalReq">True when the request originated on this node; false for cascaded network requests.</param>
        /// <returns>A SearchResult with statusCode SC_QUERY_SUCCESS, SC_INVALID_QUERY, or SC_DUPLICATE_QUERY.</returns>
        public SearchResult doQuery(SearchRequest sreq, bool isLocalReq)
        {
            SearchResult sr = null;

            // Reject empty queries up front.
            if (sreq.text == null || sreq.text.Length == 0 ||
                (sreq.text.Length == 1 && sreq.text[0].Trim() == ""))
            {
                sr            = new SearchResult();
                sr.statusCode = SC_INVALID_QUERY;
                sr.statusMsg  = "Error: No search terms specified";
                return(sr);
            }

            Query query = new Query();

            string searchString = "";

            foreach (string text in sreq.text)
            {
                query.AddText(text);
                searchString += (searchString.Length == 0) ? text:" " + text;
            }

            Logger.Log.Info("WebServiceBackEnd: Received {0} WebService Query with search term: {1}", isLocalReq ? "Local":"External", searchString.Trim());

            // FIX: guard with Length > 0 before indexing [0]; a non-null but empty
            // array previously threw IndexOutOfRangeException.
            if (sreq.mimeType != null && sreq.mimeType.Length > 0 && sreq.mimeType[0] != null)
            {
                foreach (string mtype in sreq.mimeType)
                {
                    query.AddMimeType(mtype);
                }
            }

            // FIX: same empty-array guard as for mimeType above.
            if (sreq.searchSources != null && sreq.searchSources.Length > 0 && sreq.searchSources[0] != null)
            {
                foreach (string src in sreq.searchSources)
                {
                    query.AddSource(src);
                }
            }

            //If needed, check to restrict queries to System or Neighborhood domain, can be added here
            if (sreq.qdomain > 0)
            {
                query.AddDomain(sreq.qdomain);
            }

            if (!isLocalReq)                    //External Request, check if this Node is already processing it
            {
                // NOTE(review): lock(this) is discouraged (external code can lock the
                // same object); a private lock field would be safer, but adding a
                // field is outside the scope of this method.
                lock (this) {
                    // Duplicate query loopback: another node forwarded us a request
                    // we have already seen.
                    if ((sreq.searchId != 0) && NetworkedBeagle.IsCachedRequest(sreq.searchId))
                    {
                        sr             = new SearchResult();
                        sr.numResults  = sr.totalResults = sr.firstResultIndex = 0;
                        sr.hitResults  = new HitResult[sr.numResults];
                        sr.searchToken = "";

                        sr.statusCode = SC_DUPLICATE_QUERY;
                        sr.statusMsg  = "Error: Duplicate Query loopback";
                        Logger.Log.Warn("WebServiceBackEnd: Received duplicate Query for a query already in process!");
                        Logger.Log.Warn("WebServiceBackEnd: Check NetBeagle configuration on all nodes to remove possible loops");
                    }

                    if (sreq.hopCount >= 5)
                    {
                        //If request has traversed 5 nodes in reaching here, stop cascading.
                        //Make it a Local Query.
                        query.RemoveDomain(sreq.qdomain);
                        query.AddDomain(QueryDomain.System);
                    }

                    if ((sr == null) && (sreq.searchId != 0))
                    {
                        NetworkedBeagle.CacheRequest(query, sreq.searchId, sreq.hopCount + 1);
                    }
                }

                if (sr != null)
                {
                    return(sr);
                }

                //Logger.Log.Info("New external Query: searchId = {0}", sreq.searchId);
            }

            ArrayList results = ArrayList.Synchronized(new ArrayList());

            QueryResult qres = new QueryResult();

            string searchToken = TokenGenerator();

            SessionData sdata = new SessionData(query, results, isLocalReq);

            AttachQueryResult(qres, sdata);

/* Include this code, if sessionID passed from front-end:
 *                      if (sessionTable.Contains(searchToken))
 *                              sessionTable[searchToken] = sdata;
 *                      else
 */
            sessionTable.Add(searchToken, sdata);

            QueryDriver.DoQueryLocal(query, qres);

            // Poll until the query finishes (qres removed from resultTable) or we
            // have collected a full page of results.
            while (resultTable.Contains(qres) && (results.Count < MAX_RESULTS_PER_CALL))
            {
                Thread.Sleep(100);
            }

            //Console.WriteLine("WebServiceBackEnd: Got {0} results from beagled", results.Count);
            sr = new SearchResult();

            if (results.Count > 0)
            {
                lock (results.SyncRoot) {           //Lock results ArrayList to prevent more Hits added till we've processed doQuery
                    sr.numResults = results.Count < MAX_RESULTS_PER_CALL ? results.Count: MAX_RESULTS_PER_CALL;
                    sr.hitResults = new HitResult[sr.numResults];

                    string hitUri;
                    for (int i = 0; i < sr.numResults; i++)
                    {
                        Hit h = (Hit)results[i];

                        string snippet;

                        //Queryable queryable = h.SourceObject as Queryable;
                        Queryable queryable = QueryDriver.GetQueryable(h.SourceObjectName);

                        if (queryable == null)
                        {
                            snippet = "ERROR: hit.SourceObject is null, uri=" + h.Uri;
                        }
                        else
                        {
                            snippet = queryable.GetSnippet(ICollection2StringList(query.StemmedText), h);
                        }

                        sr.hitResults[i] = new HitResult();

                        // Local requests and beagle-net URIs pass through verbatim;
                        // anything else is translated for external access.
                        hitUri = h.UriAsString;
                        if (isLocalReq || hitUri.StartsWith(NetworkedBeagle.BeagleNetPrefix))
                        {
                            sr.hitResults[i].uri = hitUri;
                        }
                        else
                        {
                            sr.hitResults[i].uri = AccessFilter.TranslateHit(h);
                        }

                        sr.hitResults[i].resourceType = h.Type;
                        sr.hitResults[i].mimeType     = h.MimeType;
                        sr.hitResults[i].source       = h.Source;
                        sr.hitResults[i].score        = h.Score;

                        // Copy all hit properties into the serializable form.
                        int plen = h.Properties.Count;
                        sr.hitResults[i].properties = new HitProperty[plen];
                        for (int j = 0; j < plen; j++)
                        {
                            Property p = (Property)h.Properties[j];
                            sr.hitResults[i].properties[j]            = new HitProperty();
                            sr.hitResults[i].properties[j].PKey       = p.Key;
                            sr.hitResults[i].properties[j].PVal       = p.Value;
                            sr.hitResults[i].properties[j].IsMutable  = p.IsMutable;
                            sr.hitResults[i].properties[j].IsSearched = p.IsSearched;
                        }

                        sr.hitResults[i].hashCode = h.GetHashCode();

                        if (snippet != null)
                        {
                            sr.hitResults[i].snippet = snippet.Trim();
                        }
                    }
                }         //end lock
            }             // end if
            else
            {
                sr.numResults = 0;
                sr.hitResults = new HitResult[sr.numResults];
            }

            sr.totalResults = results.Count;

            sr.firstResultIndex = 0;
            sr.searchToken      = "";

            // Only hand back a session token when there is something to page through.
            if (sr.totalResults > 0)
            {
                sr.searchToken = searchToken;
            }

            sr.statusCode = SC_QUERY_SUCCESS;
            sr.statusMsg  = "Success";
            Logger.Log.Info("WebServiceBackEnd: Total Results = " + sr.totalResults);
            return(sr);
        }
Beispiel #4
0
        /// <summary>
        /// Runs a query on behalf of the web front-end: builds a Query from the
        /// request arguments, registers a per-session response object, starts a
        /// local query, waits until enough hits have arrived to fill a page, and
        /// returns the rendered HTML (or NO_RESULTS for empty input / no hits).
        /// </summary>
        /// <param name="wargs">Web request arguments: session id, search string, source, domain flags.</param>
        /// <returns>Rendered results markup, or NO_RESULTS.</returns>
        public string doQuery(webArgs wargs)
        {
            // Bail out on missing session or empty search text.
            if (wargs.sessId == null || wargs.searchString == null || wargs.searchString == "")
            {
                return(NO_RESULTS);
            }

            log.Debug("WebBackEnd: Got Search String: " + wargs.searchString);

            Query query = new Query();
            query.AddText(wargs.searchString);

            if (wargs.searchSource != null && wargs.searchSource != "")
            {
                // A specific source implies a local (System) search on that source.
                query.AddSource(wargs.searchSource);
                query.AddDomain(QueryDomain.System);
            }
            else
            {
                query.AddDomain(wargs.globalSearch ? QueryDomain.Global:QueryDomain.System);
            }

            QueryResult qr = new QueryResult();

            //Note: QueryDriver.DoQuery() local invocation is used.
            //The root tile is used only for adding hits and generating html.
            BT.SimpleRootTile rootTile = new BT.SimpleRootTile();
            rootTile.Query = query;
            //root.SetSource (searchSource); Do not SetSource on root!

            ResultPair          pair      = new ResultPair(rootTile);
            bufferRenderContext renderCtx = new bufferRenderContext(pair);
            Resp response = new Resp(pair, renderCtx, wargs.isLocalReq);

            AttachQueryResult(qr, response);

            //Add sessionId-Resp mapping
            if (sessionResp.Contains(wargs.sessId))
            {
                sessionResp[wargs.sessId] = response;
            }
            else
            {
                sessionResp.Add(wargs.sessId, response);
            }

            log.Info("WebBackEnd: Starting Query for string \"{0}\"", wargs.searchString);

            QueryDriver.DoQueryLocal(query, qr);

            //Wait only till we have enough results to display
            while ((result.Contains(qr)) &&
                   (rootTile.HitCollection.NumResults < 10))
            {
                Thread.Sleep(100);
            }

            if (rootTile.HitCollection.IsEmpty)
            {
                return(NO_RESULTS);
            }

            // Render under the tile's lock so hits cannot change mid-render.
            lock (rootTile) {
                rootTile.Render(renderCtx);
                return(getResultsLabel(rootTile) + (wargs.isLocalReq ? renderCtx.buffer:renderCtx.bufferForExternalQuery));
            }
        }
		//Full beagledQuery
		/// <summary>
		/// Executes a complete beagled query for the web service and packages the
		/// hits into a serializable <c>SearchResult</c>. External (non-local)
		/// requests are checked against the request cache to break forwarding loops
		/// and are demoted to local queries after 5 hops.
		/// </summary>
		/// <param name="sreq">Search request: terms, mime types, sources, query domain, hop count and cache id.</param>
		/// <param name="isLocalReq">True when the request originated on this node; false for cascaded network requests.</param>
		/// <returns>A SearchResult with statusCode SC_QUERY_SUCCESS, SC_INVALID_QUERY, or SC_DUPLICATE_QUERY.</returns>
		public SearchResult doQuery(SearchRequest sreq, bool isLocalReq)
		{
			SearchResult sr = null;

			// Reject empty queries up front.
			if (sreq.text == null || sreq.text.Length == 0 ||
				(sreq.text.Length == 1 && sreq.text[0].Trim() == "") ) {

			    sr = new SearchResult();
			    sr.statusCode = SC_INVALID_QUERY;
			    sr.statusMsg = "Error: No search terms specified";
				return sr;
			}

			Query query = new Query();

			string searchString = "";
			foreach (string text in sreq.text)  {
				query.AddText(text);
				searchString += (searchString.Length == 0) ? text:" " + text;
			}

			Logger.Log.Info("WebServiceBackEnd: Received {0} WebService Query with search term: {1}", isLocalReq ? "Local":"External", searchString.Trim());

			// FIX: guard with Length > 0 before indexing [0]; a non-null but empty
			// array previously threw IndexOutOfRangeException.
			if (sreq.mimeType != null && sreq.mimeType.Length > 0 && sreq.mimeType[0] != null)
				foreach (string mtype in sreq.mimeType)
					query.AddMimeType(mtype);

			// FIX: same empty-array guard as for mimeType above.
			if (sreq.searchSources != null && sreq.searchSources.Length > 0 && sreq.searchSources[0] != null)
				foreach (string src in sreq.searchSources)
					query.AddSource(src);

			//If needed, check to restrict queries to System or Neighborhood domain, can be added here
			if (sreq.qdomain > 0)
				query.AddDomain(sreq.qdomain);

			if (!isLocalReq) {	//External Request, check if this Node is already processing it

				// NOTE(review): lock(this) is discouraged (external code can lock the
				// same object); a private lock field would be safer, but adding a
				// field is outside the scope of this method.
			 	lock (this) {
					// Duplicate query loopback: another node forwarded us a request
					// we have already seen.
					if ((sreq.searchId != 0) && NetworkedBeagle.IsCachedRequest(sreq.searchId)) {

						sr = new SearchResult();
				    	sr.numResults = sr.totalResults = sr.firstResultIndex = 0;
						sr.hitResults = new HitResult[sr.numResults];
				 		sr.searchToken = "";

				 		sr.statusCode = SC_DUPLICATE_QUERY;
				 		sr.statusMsg = "Error: Duplicate Query loopback";
				 		Logger.Log.Warn("WebServiceBackEnd: Received duplicate Query for a query already in process!");
				 		Logger.Log.Warn("WebServiceBackEnd: Check NetBeagle configuration on all nodes to remove possible loops");
				 	}

					if (sreq.hopCount >= 5)  {
						//If request has traversed 5 nodes in reaching here, stop cascading.
						//Make it a Local Query.
						query.RemoveDomain(sreq.qdomain);
						query.AddDomain(QueryDomain.System);
				 	}

					if ((sr == null) && (sreq.searchId != 0) )
						NetworkedBeagle.CacheRequest(query, sreq.searchId, sreq.hopCount + 1);
				 }

				 if (sr != null)
				 	return sr;

				 //Logger.Log.Info("New external Query: searchId = {0}", sreq.searchId);
			}

			ArrayList results = ArrayList.Synchronized(new ArrayList());

			QueryResult qres = new QueryResult ();

			string searchToken = TokenGenerator();

			SessionData sdata = new SessionData(query, results, isLocalReq);

			AttachQueryResult (qres, sdata);

/* Include this code, if sessionID passed from front-end:
			if (sessionTable.Contains(searchToken))
				sessionTable[searchToken] = sdata;
			else
*/
			sessionTable.Add(searchToken, sdata);

			QueryDriver.DoQueryLocal (query, qres);

			// Poll until the query finishes (qres removed from resultTable) or we
			// have collected a full page of results.
			while (resultTable.Contains(qres) && (results.Count < MAX_RESULTS_PER_CALL) )
				Thread.Sleep(100);

			//Console.WriteLine("WebServiceBackEnd: Got {0} results from beagled", results.Count);
			sr = new SearchResult();

			if (results.Count > 0)
			{
			  lock (results.SyncRoot) { //Lock results ArrayList to prevent more Hits added till we've processed doQuery

				sr.numResults = results.Count < MAX_RESULTS_PER_CALL ? results.Count: MAX_RESULTS_PER_CALL;
				sr.hitResults = new HitResult[sr.numResults];

			    string hitUri;
				for (int i = 0; i < sr.numResults; i++) {

					Hit h = (Hit) results[i];

					string snippet;

					//Queryable queryable = h.SourceObject as Queryable;
					Queryable queryable = QueryDriver.GetQueryable (h.SourceObjectName);

					if (queryable == null)
						snippet = "ERROR: hit.SourceObject is null, uri=" + h.Uri;
					else
						snippet = queryable.GetSnippet (ICollection2StringList(query.StemmedText), h);

					sr.hitResults[i] = new HitResult();

					// Local requests and beagle-net URIs pass through verbatim;
					// anything else is translated for external access.
					hitUri = h.UriAsString;
					if (isLocalReq || hitUri.StartsWith(NetworkedBeagle.BeagleNetPrefix))
							sr.hitResults[i].uri = hitUri;
					else
							sr.hitResults[i].uri = AccessFilter.TranslateHit(h);

	        	    sr.hitResults[i].resourceType = h.Type;
					sr.hitResults[i].mimeType = h.MimeType;
					sr.hitResults[i].source = h.Source;
					sr.hitResults[i].score = h.Score;

					// Copy all hit properties into the serializable form.
					int plen = h.Properties.Count;
					sr.hitResults[i].properties = new HitProperty[plen];
					for (int j = 0; j < plen; j++) {
						Property p = (Property) h.Properties[j];
						sr.hitResults[i].properties[j] = new HitProperty();
						sr.hitResults[i].properties[j].PKey = p.Key;
						sr.hitResults[i].properties[j].PVal = p.Value;
						sr.hitResults[i].properties[j].IsMutable = p.IsMutable;
						sr.hitResults[i].properties[j].IsSearched = p.IsSearched;
					}

					sr.hitResults[i].hashCode = h.GetHashCode ();

					if (snippet != null)
						sr.hitResults[i].snippet = snippet.Trim();
				}
			   } //end lock
			 }// end if
			 else {

			    sr.numResults = 0;
				sr.hitResults = new HitResult[sr.numResults];
			 }

			 sr.totalResults = results.Count;

			 sr.firstResultIndex = 0;
			 sr.searchToken = "";

			 // Only hand back a session token when there is something to page through.
			 if (sr.totalResults > 0)
				sr.searchToken = searchToken;

			 sr.statusCode = SC_QUERY_SUCCESS;
			 sr.statusMsg = "Success";
			 Logger.Log.Info("WebServiceBackEnd: Total Results = "  + sr.totalResults);
			 return sr;
		}