Example #1
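A SnippetExecutor helper: it resolves the hit's source to its backend with QueryDriver.GetQueryable and collects the snippet lines for the request, falling back to an empty SnippetReader (and disabling full-text mode) when no backend matches.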
    private static List <SnippetLine> GetSnippet(SnippetRequest request)
    {
        Queryable      queryable = QueryDriver.GetQueryable(request.Hit.Source);
        ISnippetReader snippet_reader;
        bool           full_text  = request.FullText;
        int            ctx_length = request.ContextLength;
        int            snp_length = request.SnippetLength;

        if (queryable == null)
        {
            Console.WriteLine("SnippetExecutor: No queryable object matches '{0}'", request.Hit.Source);
            snippet_reader = new SnippetReader(null, null, false, -1, -1);
            full_text      = false;
        }
        else
        {
            snippet_reader = queryable.GetSnippet(request.QueryTerms, request.Hit, full_text, ctx_length, snp_length);
        }

        List <SnippetLine> snippetlines = new List <SnippetLine> ();

        if (snippet_reader == null)
        {
            return(snippetlines);
        }

        if (!full_text)
        {
            foreach (SnippetLine snippet_line in snippet_reader.GetSnippet())
            {
                snippetlines.Add(snippet_line);
            }
        }
        else
        {
            SnippetLine snippet_line = new SnippetLine();
            snippet_line.Line = 1;

            Fragment fragment = new Fragment();
            fragment.QueryTermIndex = -1;
            StringBuilder sb = new StringBuilder();

            string line;
            // Read the full text from snippet_reader, cleaning up invalid XML characters as we go
            while ((line = snippet_reader.ReadLine()) != null)
            {
                sb.Append(StringFu.CleanupInvalidXmlCharacters(line));
                sb.Append("\n");
            }

            fragment.Text          = sb.ToString();
            snippet_line.Fragments = new ArrayList();
            snippet_line.Fragments.Add(fragment);
            snippetlines.Add(snippet_line);
        }

        snippet_reader.Close();

        return(snippetlines);
    }
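Example #2
The IndexingService message handler of a LuceneQueryable: when the request names a source, QueryDriver.GetQueryable locates the matching indexed backend, and the indexables to add or remove are then scheduled on that backend.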
        private ResponseMessage HandleMessage(RequestMessage msg)
        {
            IndexingServiceRequest isr = (IndexingServiceRequest)msg;

            LuceneQueryable backend = this;

            if (isr.Source != null)
            {
                Queryable target = QueryDriver.GetQueryable(isr.Source);

                if (target == null)
                {
                    string err = String.Format("Unable to find backend matching '{0}'", isr.Source);

                    Log.Error(err);
                    return(new ErrorResponse(err));
                }

                if (!(target.IQueryable is LuceneQueryable))
                {
                    string err = String.Format("Backend '{0}' is not an indexed backend", isr.Source);

                    Log.Error(err);
                    return(new ErrorResponse(err));
                }

                backend = (LuceneQueryable)target.IQueryable;
                Log.Debug("Found backend for IndexingServiceRequest: {0}", backend.IndexName);
            }

            // FIXME: There should be a way for the request to control the
            // scheduler priority of the task.

            if (isr.ToAdd.Count > 0 || isr.ToRemove.Count > 0)
            {
                Log.Debug("IndexingService: Adding {0} indexables, removing {1} indexables.", isr.ToAdd.Count, isr.ToRemove.Count);

                IndexableGenerator ind_gen = new IndexableGenerator(isr.ToAdd, isr.ToRemove, this);
                Scheduler.Task task = backend.NewAddTask(ind_gen);
                task.Priority = Scheduler.Priority.Immediate;
                ThisScheduler.Add(task);
            }

            // FIXME: There should be an asynchronous response  (fired by a Scheduler.Hook)
            // that fires when all of the items have been added to the index.

            // No response
            return(new EmptyResponse());
        }
Example #3
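Start() of a backend layered on top of the file-system index: it fetches the "Files" backend via QueryDriver.GetQueryable, reuses its index fingerprint for the "NautilusMetadata" FileAttributesStore, and then either polls for the nautilus directory or starts the worker thread.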
        public override void Start()
        {
            base.Start();

            // The FileSystemQueryable (FSQ) backend
            Queryable queryable = QueryDriver.GetQueryable("Files");

            this.target_queryable = (FileSystemQueryable.FileSystemQueryable)queryable.IQueryable;

            string fsq_fingerprint = target_queryable.IndexFingerprint;

            InitFileAttributesStore("NautilusMetadata", fsq_fingerprint);

            if (!Directory.Exists(nautilus_dir))
            {
                GLib.Timeout.Add(60000, new GLib.TimeoutHandler(CheckForExistence));
            }
            else
            {
                ExceptionHandlingThread.Start(new ThreadStart(StartWorker));
            }
        }
Example #4
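The web-service getSnippets handler: for every requested hit hash code found in the cached results, the hit's source backend is resolved with QueryDriver.GetQueryable and asked for a snippet; hash codes that cannot be matched are reported with an error snippet.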
        public HitSnippet[] getSnippets(GetSnippetsRequest req)
        {
            HitSnippet[] response;
            string       searchToken = req.searchToken;

            int[] hitHashCodes = req.hitHashCodes;

            if (!sessionTable.ContainsKey(searchToken))
            {
                response = new HitSnippet[0];
                Logger.Log.Warn("GetSnippets: Invalid Search Token received ");
                return(response);
            }

            ArrayList results = ((SessionData)sessionTable[searchToken]).results;

            if ((results == null) || (results.Count == 0))
            {
                response = new HitSnippet[0];
                Logger.Log.Warn("GetSnippets: Invalid Search Token received ");
                return(response);
            }

            int       i            = 0;
            ArrayList hashCodeList = new ArrayList();

            hashCodeList.AddRange(hitHashCodes);

            response = new HitSnippet[hitHashCodes.Length];
            Logger.Log.Debug("GetSnippets invoked with {0} hitHashCodes", hitHashCodes.Length);

            Query query = ((SessionData)sessionTable[searchToken]).query;

            lock (results.SyncRoot)  {
                string snippet = null;
                foreach (Hit h in results)
                {
                    int hashCode = h.GetHashCode();
                    if (hashCodeList.Contains(hashCode))
                    {
                        hashCodeList.Remove(hashCode);

                        //Queryable queryable = h.SourceObject as Queryable;
                        Queryable queryable = QueryDriver.GetQueryable(h.SourceObjectName);

                        if (queryable == null)
                        {
                            snippet = "ERROR: hit.SourceObject is null, uri=" + h.Uri;
                        }
                        else
                        {
                            snippet = queryable.GetSnippet(ICollection2StringList(query.StemmedText), h);
                        }

                        // GetSnippets is always invoked on the target Beagle node where the hits originate:
                        if (snippet == null)
                        {
                            snippet = "";
                        }

                        HitSnippet hs = new HitSnippet();
                        hs.hashCode   = hashCode;
                        hs.snippet    = snippet.Trim();
                        response[i++] = hs;

                        if ((hashCodeList.Count == 0) || (i == hitHashCodes.Length))
                        {
                            return(response);
                        }
                    }
                }         //end foreach
            }             //end lock

            foreach (int hashCode in hashCodeList)
            {
                HitSnippet hs = new HitSnippet();
                hs.hashCode   = hashCode;
                hs.snippet    = InvalidHitSnippetError;
                response[i++] = hs;

                if (i == hitHashCodes.Length)
                {
                    break;
                }
            }
            Logger.Log.Warn("GetSnippets invoked some invalid hitIds");

            return(response);
        }
Example #5
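The full web-service query handler: it builds a Query from the request, guards against duplicate cascaded queries from other nodes, runs the query through QueryDriver.DoQueryLocal, and converts the collected Hits into HitResults, again using QueryDriver.GetQueryable to obtain a snippet for each hit.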
        //Full beagledQuery
        public SearchResult doQuery(SearchRequest sreq, bool isLocalReq)
        {
            SearchResult sr = null;

            if (sreq.text == null || sreq.text.Length == 0 ||
                (sreq.text.Length == 1 && sreq.text[0].Trim() == ""))
            {
                sr            = new SearchResult();
                sr.statusCode = SC_INVALID_QUERY;
                sr.statusMsg  = "Error: No search terms specified";
                return(sr);
            }

            Query query = new Query();

            string searchString = "";

            foreach (string text in sreq.text)
            {
                query.AddText(text);
                searchString += (searchString.Length == 0) ? text:" " + text;
            }

            Logger.Log.Info("WebServiceBackEnd: Received {0} WebService Query with search term: {1}", isLocalReq ? "Local":"External", searchString.Trim());

            if (sreq.mimeType != null && sreq.mimeType.Length > 0 && sreq.mimeType[0] != null)
            {
                foreach (string mtype in sreq.mimeType)
                {
                    query.AddMimeType(mtype);
                }
            }

            if (sreq.searchSources != null && sreq.searchSources.Length > 0 && sreq.searchSources[0] != null)
            {
                foreach (string src in sreq.searchSources)
                {
                    query.AddSource(src);
                }
            }

            // If needed, a check restricting queries to the System or Neighborhood domain can be added here
            if (sreq.qdomain > 0)
            {
                query.AddDomain(sreq.qdomain);
            }

            if (!isLocalReq)                    // External request: check whether this node is already processing it
            {
                lock (this) {
                    if ((sreq.searchId != 0) && NetworkedBeagle.IsCachedRequest(sreq.searchId))
                    {
                        sr             = new SearchResult();
                        sr.numResults  = sr.totalResults = sr.firstResultIndex = 0;
                        sr.hitResults  = new HitResult[sr.numResults];
                        sr.searchToken = "";

                        sr.statusCode = SC_DUPLICATE_QUERY;
                        sr.statusMsg  = "Error: Duplicate Query loopback";
                        Logger.Log.Warn("WebServiceBackEnd: Received duplicate Query for a query already in process!");
                        Logger.Log.Warn("WebServiceBackEnd: Check NetBeagle configuration on all nodes to remove possible loops");
                    }

                    if (sreq.hopCount >= 5)
                    {
                        // If the request has already traversed 5 nodes to reach here, stop cascading
                        // and make it a local query.
                        query.RemoveDomain(sreq.qdomain);
                        query.AddDomain(QueryDomain.System);
                    }

                    if ((sr == null) && (sreq.searchId != 0))
                    {
                        NetworkedBeagle.CacheRequest(query, sreq.searchId, sreq.hopCount + 1);
                    }
                }

                if (sr != null)
                {
                    return(sr);
                }

                //Logger.Log.Info("New external Query: searchId = {0}", sreq.searchId);
            }

            ArrayList results = ArrayList.Synchronized(new ArrayList());

            QueryResult qres = new QueryResult();

            string searchToken = TokenGenerator();

            SessionData sdata = new SessionData(query, results, isLocalReq);

            AttachQueryResult(qres, sdata);

/* Include this code if a sessionID is passed from the front-end:
 *                      if (sessionTable.Contains(searchToken))
 *                              sessionTable[searchToken] = sdata;
 *                      else
 */
            sessionTable.Add(searchToken, sdata);

            QueryDriver.DoQueryLocal(query, qres);

            while (resultTable.Contains(qres) && (results.Count < MAX_RESULTS_PER_CALL))
            {
                Thread.Sleep(100);
            }

            //Console.WriteLine("WebServiceBackEnd: Got {0} results from beagled", results.Count);
            sr = new SearchResult();

            if (results.Count > 0)
            {
                lock (results.SyncRoot) {           // Lock the results ArrayList so no more Hits are added while we process them
                    sr.numResults = results.Count < MAX_RESULTS_PER_CALL ? results.Count: MAX_RESULTS_PER_CALL;
                    sr.hitResults = new HitResult[sr.numResults];

                    string hitUri;
                    for (int i = 0; i < sr.numResults; i++)
                    {
                        Hit h = (Hit)results[i];

                        string snippet;

                        //Queryable queryable = h.SourceObject as Queryable;
                        Queryable queryable = QueryDriver.GetQueryable(h.SourceObjectName);

                        if (queryable == null)
                        {
                            snippet = "ERROR: hit.SourceObject is null, uri=" + h.Uri;
                        }
                        else
                        {
                            snippet = queryable.GetSnippet(ICollection2StringList(query.StemmedText), h);
                        }

                        sr.hitResults[i] = new HitResult();

                        hitUri = h.UriAsString;
                        if (isLocalReq || hitUri.StartsWith(NetworkedBeagle.BeagleNetPrefix))
                        {
                            sr.hitResults[i].uri = hitUri;
                        }
                        else
                        {
                            sr.hitResults[i].uri = AccessFilter.TranslateHit(h);
                        }

                        sr.hitResults[i].resourceType = h.Type;
                        sr.hitResults[i].mimeType     = h.MimeType;
                        sr.hitResults[i].source       = h.Source;
                        sr.hitResults[i].score        = h.Score;

                        int plen = h.Properties.Count;
                        sr.hitResults[i].properties = new HitProperty[plen];
                        for (int j = 0; j < plen; j++)
                        {
                            Property p = (Property)h.Properties[j];
                            sr.hitResults[i].properties[j]            = new HitProperty();
                            sr.hitResults[i].properties[j].PKey       = p.Key;
                            sr.hitResults[i].properties[j].PVal       = p.Value;
                            sr.hitResults[i].properties[j].IsMutable  = p.IsMutable;
                            sr.hitResults[i].properties[j].IsSearched = p.IsSearched;
                        }

                        sr.hitResults[i].hashCode = h.GetHashCode();

                        if (snippet != null)
                        {
                            sr.hitResults[i].snippet = snippet.Trim();
                        }
                    }
                }         //end lock
            }             // end if
            else
            {
                sr.numResults = 0;
                sr.hitResults = new HitResult[sr.numResults];
            }

            sr.totalResults = results.Count;

            sr.firstResultIndex = 0;
            sr.searchToken      = "";

            if (sr.totalResults > 0)
            {
                sr.searchToken = searchToken;
            }

            sr.statusCode = SC_QUERY_SUCCESS;
            sr.statusMsg  = "Success";
            Logger.Log.Info("WebServiceBackEnd: Total Results = " + sr.totalResults);
            return(sr);
        }
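All five examples follow the same basic pattern: resolve a backend by name with QueryDriver.GetQueryable, check the result for null, and only then call into it. Below is a minimal sketch of that pattern, assuming the usual Beagle, Beagle.Daemon and Beagle.Util namespaces and the two-argument GetSnippet(string[], Hit) overload implied by the ICollection2StringList(...) call sites in examples #4 and #5; the class and method names are illustrative, not taken from the examples.

using Beagle;
using Beagle.Daemon;
using Beagle.Util;

// Sketch only: resolve the hit's source backend, guard against null,
// then ask the backend for a snippet, mirroring examples #4 and #5.
static class QueryableLookupSketch
{
    public static string SnippetFor(Hit hit, string[] queryTerms)
    {
        Queryable queryable = QueryDriver.GetQueryable(hit.SourceObjectName);

        if (queryable == null) {
            // Same fallback as the examples: log the problem and return an empty snippet.
            Logger.Log.Warn("No backend matches '" + hit.SourceObjectName + "'");
            return "";
        }

        // GetSnippet(string[], Hit) is assumed from the call sites above.
        string snippet = queryable.GetSnippet(queryTerms, hit);
        return (snippet == null) ? "" : snippet.Trim();
    }
}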