public IAsyncResult DoQuery (Query query, IQueryResult result,
			     IQueryableChangeData changeData)
		{
      		ICollection l;
     		
			SearchRequest sreq = new SearchRequest();

      		l = query.Text;
      		if ((l != null) && (l.Count > 0))
				sreq.text = ICollection2StringList(query.Text);
					
      		l = query.MimeTypes;
      		if ((l != null) && (l.Count > 0))					
				sreq.mimeType = ICollection2StringList(query.MimeTypes);

			l = query.Sources;
      		if ((l != null) && (l.Count > 0))						
				sreq.searchSources = ICollection2StringList(query.Sources);
				
			sreq.qdomain = QueryDomain.Global; // Caution: this enables cascaded NetBeagle searching!
			//sreq.qdomain = QueryDomain.System;
			
			// Cache the query request, get a unique searchId and include it in the network SearchRequest:
			sreq.searchId = NetworkedBeagle.AddRequest(query);

			
			int hc = NetworkedBeagle.HopCount(query);
			sreq.hopCount = (hc > 0) ? hc:1;

			log.Info("NetBeagleHandler: Starting WebService Query for " + Hostname + ":" + Port);
			
			ReqContext rc = new ReqContext(wsp, result, netBeagleQueryable);
				
			IAsyncResult ar = wsp.BeginBeagleQuery(sreq, DoQueryResponseHandler, rc);

			// Return without waiting for the async query to complete.
			// Return the IAsyncResult handle so the caller can control it, if required.
					   				
			return ar;
		}
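
The handler above converts each ICollection on the Query into the string[] fields of SearchRequest through an ICollection2StringList helper that is not part of this listing. A minimal sketch, assuming the helper only needs to copy the collection's string elements into an array:

		// Sketch of the ICollection2StringList helper referenced above.
		// The body is an assumption: it simply copies the collection's string
		// elements into a plain string[] suitable for the XML-serialized SearchRequest.
		private string[] ICollection2StringList (ICollection col)
		{
			if (col == null)
				return new string[0];

			string[] list = new string[col.Count];
			col.CopyTo (list, 0);

			return list;
		}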
Example #2
 public System.IAsyncResult BeginBeagleQuery(SearchRequest req, System.AsyncCallback callback, object asyncState)
 {
     return(this.BeginInvoke("BeagleQuery", new object[] {
         req
     }, callback, asyncState));
 }
 public SearchResult BeagleQuery(SearchRequest req) {
     object[] results = this.Invoke("BeagleQuery", new object[] {
         req});
     return ((SearchResult)(results[0]));
 }
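
The DoQuery handler above registers DoQueryResponseHandler as the AsyncCallback for BeginBeagleQuery, but neither that callback nor the proxy's matching End method appears in this listing. The sketch below shows what the completion side could look like; the EndBeagleQuery signature and the ReqContext accessors (Proxy, Result) are assumptions, not taken from the original source.

 // Assumed generated proxy method pairing BeginBeagleQuery; wsdl.exe-style proxies
 // normally emit such an End method alongside the Begin method shown above.
 public SearchResult EndBeagleQuery(System.IAsyncResult asyncResult) {
     object[] results = this.EndInvoke(asyncResult);
     return ((SearchResult)(results[0]));
 }

		// Sketch of the completion callback wired up in DoQuery. The ReqContext member
		// names (Proxy, Result) are assumptions; the real class may differ.
		private void DoQueryResponseHandler (IAsyncResult ar)
		{
			ReqContext rc = (ReqContext) ar.AsyncState;

			try {
				// Complete the asynchronous web-service call and fetch the SearchResult.
				SearchResult sres = rc.Proxy.EndBeagleQuery (ar);

				if (sres != null)
					log.Info("NetBeagleHandler: Got " + sres.numResults + " results from " + Hostname + ":" + Port);

				// ... translate sres.hitResults into Hits and add them to rc.Result ...
			}
			catch (Exception e) {
				log.Warn("NetBeagleHandler: Exception in DoQueryResponseHandler: " + e.Message);
			}
		}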
		// Full beagled query
		public SearchResult doQuery(SearchRequest sreq, bool isLocalReq)
		{	
			SearchResult sr = null;

			if (sreq.text == null || sreq.text.Length == 0 ||
				(sreq.text.Length == 1 && sreq.text[0].Trim() == "") ) {
				
			    sr = new SearchResult();
			    sr.statusCode = SC_INVALID_QUERY;
			    sr.statusMsg = "Error: No search terms specified";
				return sr;
			}

			Query query = new Query();

			string searchString = "";						
			foreach (string text in sreq.text)  {
				query.AddText(text);
				searchString += (searchString.Length == 0) ? text:" " + text;
			}			
			
			Logger.Log.Info("WebServiceBackEnd: Received {0} WebService Query with search term: {1}", isLocalReq ? "Local":"External", searchString.Trim());

			if (sreq.mimeType != null && sreq.mimeType.Length > 0 && sreq.mimeType[0] != null)
				foreach (string mtype in sreq.mimeType)
					query.AddMimeType(mtype);

			if (sreq.searchSources != null && sreq.searchSources.Length > 0 && sreq.searchSources[0] != null)
				foreach (string src in sreq.searchSources)
					query.AddSource(src);

			// If needed, a check restricting queries to the System or Neighborhood domain can be added here
			if (sreq.qdomain > 0)
				query.AddDomain(sreq.qdomain);
						
			if (!isLocalReq) {	// External request: check whether this node is already processing it

			 	lock (this) {					
					if ((sreq.searchId != 0) && NetworkedBeagle.IsCachedRequest(sreq.searchId)) {

						sr = new SearchResult();
				    	sr.numResults = sr.totalResults = sr.firstResultIndex = 0;
						sr.hitResults = new HitResult[sr.numResults];	
				 		sr.searchToken = "";

				 		sr.statusCode = SC_DUPLICATE_QUERY;
				 		sr.statusMsg = "Error: Duplicate Query loopback";
				 		Logger.Log.Warn("WebServiceBackEnd: Received duplicate Query for a query already in process!");
				 		Logger.Log.Warn("WebServiceBackEnd: Check NetBeagle configuration on all nodes to remove possible loops");
				 	}
	
					if (sreq.hopCount >= 5)  {
						//If request has traversed 5 nodes in reaching here, stop cascading. 
						//Make it a Local Query.
						query.RemoveDomain(sreq.qdomain);
						query.AddDomain(QueryDomain.System);
				 	}
				 					 	
					if ((sr == null) && (sreq.searchId != 0) )
						NetworkedBeagle.CacheRequest(query, sreq.searchId, sreq.hopCount + 1);				 	
				 }
				 
				 if (sr != null)
				 	return sr;	
				 	
				 //Logger.Log.Info("New external Query: searchId = {0}", sreq.searchId); 	
			}

			ArrayList results = ArrayList.Synchronized(new ArrayList());
			
			QueryResult qres = new QueryResult ();

			string searchToken = TokenGenerator();
						
			SessionData sdata = new SessionData(query, results, isLocalReq);
				
			AttachQueryResult (qres, sdata);
			
/* Include this code if a sessionID is passed from the front-end:
			if (sessionTable.Contains(searchToken))
				sessionTable[searchToken] = sdata;
			else
*/
			sessionTable.Add(searchToken, sdata);
			
			QueryDriver.DoQueryLocal (query, qres);
					
			while (resultTable.Contains(qres) && (results.Count < MAX_RESULTS_PER_CALL) )
				Thread.Sleep(100);

			//Console.WriteLine("WebServiceBackEnd: Got {0} results from beagled", results.Count);
			sr = new SearchResult();

			if (results.Count > 0)
			{ 
			  lock (results.SyncRoot) { // Lock the results ArrayList so no more Hits are added while we copy them
			
				sr.numResults = results.Count < MAX_RESULTS_PER_CALL ? results.Count: MAX_RESULTS_PER_CALL;	
				sr.hitResults = new HitResult[sr.numResults];
			    
			    string hitUri;			
				for (int i = 0; i < sr.numResults; i++) {
				
					Hit h = (Hit) results[i];

					string snippet; 
						
					//Queryable queryable = h.SourceObject as Queryable;
					Queryable queryable = QueryDriver.GetQueryable (h.SourceObjectName);
					
					if (queryable == null)
						snippet = "ERROR: hit.SourceObject is null, uri=" + h.Uri;
					else
						snippet = queryable.GetSnippet (ICollection2StringList(query.StemmedText), h);											
								
					sr.hitResults[i] = new HitResult();
					
					hitUri = h.UriAsString;
					if (isLocalReq || hitUri.StartsWith(NetworkedBeagle.BeagleNetPrefix))
							sr.hitResults[i].uri = hitUri;
					else
							sr.hitResults[i].uri = AccessFilter.TranslateHit(h);
					
					sr.hitResults[i].resourceType = h.Type;
					sr.hitResults[i].mimeType = h.MimeType;
					sr.hitResults[i].source = h.Source;
					sr.hitResults[i].score = h.Score;				
				
					int plen = h.Properties.Count;
					sr.hitResults[i].properties = new HitProperty[plen];
					for (int j = 0; j < plen; j++) {
						Property p = (Property) h.Properties[j];
						sr.hitResults[i].properties[j] = new HitProperty();
						sr.hitResults[i].properties[j].PKey = p.Key;
						sr.hitResults[i].properties[j].PVal = p.Value;				
						sr.hitResults[i].properties[j].IsMutable = p.IsMutable;				
						sr.hitResults[i].properties[j].IsSearched = p.IsSearched;							
					}

					sr.hitResults[i].hashCode = h.GetHashCode ();
					
					if (snippet != null)
						sr.hitResults[i].snippet = snippet.Trim();
				}					
			   } //end lock
			 }// end if
			 else {

			    sr.numResults = 0;
				sr.hitResults = new HitResult[sr.numResults];	
			 }

			 sr.totalResults = results.Count;
			 
			 sr.firstResultIndex = 0;			 
			 sr.searchToken = "";
				
			 if (sr.totalResults > 0)
				sr.searchToken = searchToken;
					
			 sr.statusCode = SC_QUERY_SUCCESS;
			 sr.statusMsg = "Success";
			 Logger.Log.Info("WebServiceBackEnd: Total Results = "  + sr.totalResults);			
			 return sr;
		}
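
doQuery keys each cached session on the string returned by TokenGenerator(), which is also not shown in this listing. Any value that is unique per query session would do; a minimal sketch, assuming a GUID-derived token is acceptable:

		// Sketch of the TokenGenerator helper used above; the GUID-based body is an
		// assumption -- any per-session unique string would work.
		private string TokenGenerator ()
		{
			return Guid.NewGuid().ToString("N");	// 32 hex chars, no dashes
		}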