Example 1
        /// <summary>
        /// Run the quality benchmark.
        /// </summary>
        /// <param name="judge">
        /// The judge that determines whether a given result document is relevant for a given quality query.
        /// If null, no judgments are made. Usually null for a submission run.
        /// </param>
        /// <param name="submitRep">If not null, a submission report is created.</param>
        /// <param name="qualityLog">If not null, quality run data is printed for each query.</param>
        /// <returns><see cref="QualityStats"/> of each quality query that was executed.</returns>
        /// <exception cref="Exception">If quality benchmark failed to run.</exception>
        public virtual QualityStats[] Execute(IJudge judge, SubmissionReport submitRep,
                                              TextWriter qualityLog)
        {
            int nQueries = Math.Min(maxQueries, m_qualityQueries.Length);

            QualityStats[] stats = new QualityStats[nQueries];
            for (int i = 0; i < nQueries; i++)
            {
                QualityQuery qq = m_qualityQueries[i];
                // generate query
                Query q = m_qqParser.Parse(qq);
                // search with this query
                long    t1         = J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond;        // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
                TopDocs td         = m_searcher.Search(q, null, maxResults);
                long    searchTime = (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond) - t1; // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
                // Most likely we either submit or judge, but check both.
                if (judge != null)
                {
                    stats[i] = AnalyzeQueryResults(qq, q, td, judge, qualityLog, searchTime);
                }
                if (submitRep != null)
                {
                    submitRep.Report(qq, td, m_docNameField, m_searcher);
                }
            }
            if (submitRep != null)
            {
                submitRep.Flush();
            }
            return stats;
        }
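For context, a minimal sketch of how Execute is typically driven, assuming the Lucene.NET benchmark quality API mirrors the Java original (TrecTopicsReader, TrecJudge, SimpleQQParser, QualityBenchmark, QualityStats.Average). The file paths and the "title"/"body"/"docname" field names are illustrative assumptions, not taken from the examples on this page.

        // Hedged sketch: wire up a quality benchmark run and print per-query plus averaged stats.
        // Assumed namespaces: Lucene.Net.Benchmarks.Quality(.Trec/.Utils), Lucene.Net.Index,
        // Lucene.Net.Search, Lucene.Net.Store, System, System.IO, System.Text.
        public static void RunQualityBenchmarkSketch() // hypothetical driver method
        {
            using (TextReader topics = new StreamReader("topics.txt", Encoding.UTF8))   // TREC topics (placeholder path)
            using (TextReader qrels = new StreamReader("qrels.txt", Encoding.UTF8))     // TREC qrels (placeholder path)
            using (IndexReader reader = DirectoryReader.Open(FSDirectory.Open("index-dir")))
            {
                IndexSearcher searcher = new IndexSearcher(reader);

                QualityQuery[] qqs = new TrecTopicsReader().ReadQueries(topics); // parse quality queries from topics
                IJudge judge = new TrecJudge(qrels);                             // relevance judgments from qrels
                judge.ValidateData(qqs, Console.Out);                            // warn about topics without judgments

                IQualityQueryParser qqParser = new SimpleQQParser("title", "body");
                QualityBenchmark benchmark = new QualityBenchmark(qqs, qqParser, searcher, "docname");

                QualityStats[] stats = benchmark.Execute(judge, null, Console.Out); // judge only, no submission report
                QualityStats avg = QualityStats.Average(stats);                     // aggregate over all queries
                avg.Log("SUMMARY", 2, Console.Out, "  ");
            }
        }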
Example 2
        public void TestTrecTopicsReader()
        {
            // prepare topics
            Stream           topicsFile = GetType().getResourceAsStream("trecTopics.txt");
            TrecTopicsReader qReader    = new TrecTopicsReader();

            QualityQuery[] qqs = qReader.ReadQueries(
                new StreamReader(topicsFile, Encoding.UTF8));

            assertEquals(20, qqs.Length);

            QualityQuery qq = qqs[0];

            assertEquals("statement months  total 1987", qq.GetValue("title"));
            assertEquals("Topic 0 Description Line 1 Topic 0 Description Line 2",
                         qq.GetValue("description"));
            assertEquals("Topic 0 Narrative Line 1 Topic 0 Narrative Line 2",
                         qq.GetValue("narrative"));

            qq = qqs[1];
            assertEquals("agreed 15  against five", qq.GetValue("title"));
            assertEquals("Topic 1 Description Line 1 Topic 1 Description Line 2",
                         qq.GetValue("description"));
            assertEquals("Topic 1 Narrative Line 1 Topic 1 Narrative Line 2",
                         qq.GetValue("narrative"));

            qq = qqs[19];
            assertEquals("20 while  common week", qq.GetValue("title"));
            assertEquals("Topic 19 Description Line 1 Topic 19 Description Line 2",
                         qq.GetValue("description"));
            assertEquals("Topic 19 Narrative Line 1 Topic 19 Narrative Line 2",
                         qq.GetValue("narrative"));
        }
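The trecTopics.txt fixture read above follows the standard TREC topic markup that TrecTopicsReader parses. A sketch of one entry, with field contents mirroring the assertions in the test (the exact layout and whitespace of the real fixture may differ):

            <top>
            <num> Number: 0
            <title> statement months  total 1987
            <desc> Description:
            Topic 0 Description Line 1
            Topic 0 Description Line 2
            <narr> Narrative:
            Topic 0 Narrative Line 1
            Topic 0 Narrative Line 2
            </top>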
Example 3
        /// <summary>Analyze/judge results for a single quality query; optionally log them.</summary>
        private QualityStats AnalyzeQueryResults(QualityQuery qq, Query q, TopDocs td, IJudge judge, TextWriter logger, long searchTime)
        {
            QualityStats stts = new QualityStats(judge.MaxRecall(qq), searchTime);

            ScoreDoc[]       sd = td.ScoreDocs;
            long             t1 = Support.Time.CurrentTimeMilliseconds(); // for the first doc name extraction we also measure construction of the doc name extractor, just in case.
            DocNameExtractor xt = new DocNameExtractor(m_docNameField);

            for (int i = 0; i < sd.Length; i++)
            {
                string docName            = xt.DocName(m_searcher, sd[i].Doc);
                long   docNameExtractTime = Support.Time.CurrentTimeMilliseconds() - t1;
                t1 = Support.Time.CurrentTimeMilliseconds();
                bool isRelevant = judge.IsRelevant(docName, qq);
                stts.AddResult(i + 1, isRelevant, docNameExtractTime);
            }
            if (logger != null)
            {
                logger.WriteLine(qq.QueryID + "  -  " + q);
                stts.Log(qq.QueryID + " Stats:", 1, logger, "  ");
            }
            return stts;
        }
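Conceptually, each AddResult(rank, isRelevant, time) call above advances the running precision/recall bookkeeping inside QualityStats. A standalone sketch of that arithmetic in plain C# (a hypothetical helper, independent of QualityStats internals, which additionally track MAP, MRR and timing):

        // Hypothetical helper: recompute precision@N and recall@N from a relevance vector,
        // i.e. the same quantities the AddResult(rank, isRelevant, ...) calls accumulate.
        // maxRecall is the total number of relevant docs for the query, as in judge.MaxRecall(qq).
        private static (double precision, double recall) PrecisionRecallAt(
            bool[] isRelevant, int n, int maxRecall)
        {
            int good = 0;
            for (int rank = 1; rank <= n && rank <= isRelevant.Length; rank++)
            {
                if (isRelevant[rank - 1]) good++;            // relevant doc found at this rank
            }
            double precision = good / (double)n;             // fraction of top-n results that are relevant
            double recall = maxRecall == 0
                ? 0
                : good / (double)maxRecall;                  // fraction of all relevant docs retrieved by rank n
            return (precision, recall);
        }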