Example #1
        /// <summary>
        /// Run the quality benchmark.
        /// </summary>
        /// <param name="judge">
        /// The judge that can tell whether a given result document is relevant for a given quality query.
        /// If null, no judgments are made; this is usually the case for a submission run.
        /// </param>
        /// <param name="submitRep">If not null, a submission report is created.</param>
        /// <param name="qualityLog">If not null, quality run data is printed for each query.</param>
        /// <returns><see cref="QualityStats"/> for each quality query that was executed.</returns>
        /// <exception cref="Exception">If the quality benchmark failed to run.</exception>
        public virtual QualityStats[] Execute(IJudge judge, SubmissionReport submitRep,
                                              TextWriter qualityLog)
        {
            int nQueries = Math.Min(maxQueries, m_qualityQueries.Length);

            QualityStats[] stats = new QualityStats[nQueries];
            for (int i = 0; i < nQueries; i++)
            {
                QualityQuery qq = m_qualityQueries[i];
                // generate query
                Query q = m_qqParser.Parse(qq);
                // search with this query
                long    t1         = J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond;        // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
                TopDocs td         = m_searcher.Search(q, null, maxResults);
                long    searchTime = (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond) - t1; // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
                // most likely we either submit or judge, but check both
                if (judge != null)
                {
                    stats[i] = AnalyzeQueryResults(qq, q, td, judge, qualityLog, searchTime);
                }
                if (submitRep != null)
                {
                    submitRep.Report(qq, td, m_docNameField, m_searcher);
                }
            }
            if (submitRep != null)
            {
                submitRep.Flush();
            }
            return stats;
        }
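A minimal sketch of driving Execute end to end, assuming the same benchmark quality types used in the driver example further below (TrecTopicsReader, TrecJudge, SimpleQQParser); the file paths and the searcher variable are illustrative:

        // Hedged usage sketch; paths and `searcher` are illustrative, types as in the driver example below.
        QualityQuery[] queries = new TrecTopicsReader()
            .ReadQueries(IOUtils.GetDecodingReader(new FileInfo("topics.txt"), Encoding.UTF8));
        IJudge judge = new TrecJudge(IOUtils.GetDecodingReader(new FileInfo("qrels.txt"), Encoding.UTF8));
        IQualityQueryParser parser = new SimpleQQParser(new[] { "title" }, "body");

        QualityBenchmark benchmark = new QualityBenchmark(queries, parser, searcher, "docname");
        benchmark.MaxResults = 1000;
        QualityStats[] stats = benchmark.Execute(judge, submitRep: null, Console.Out);
        QualityStats.Average(stats).Log("SUMMARY", 2, Console.Out, "  ");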
Example #2
        public SubmissionReport Submit(int experimentID, string experimentSpecification, string userGroup, int priorityHint)
        {
            const string STRLOG_MethodName = "Submit";

            Logfile.WriteCalled(STRLOG_ClassName, STRLOG_MethodName);

            SubmissionReport submissionReport = null;

            // Get the identity of the caller
            string sbName = GetCallerName(authHeader);

            // Check caller access is authorised
            if (sbName != null)
            {
                // Pass on to experiment manager
                submissionReport = Global.experimentManager.Submit(experimentID, sbName,
                                                                   experimentSpecification, userGroup, priorityHint);
            }
            else
            {
                submissionReport = new SubmissionReport(experimentID);
                submissionReport.vReport.errorMessage = STRLOG_AccessDenied;
            }

            Logfile.WriteCompleted(STRLOG_ClassName, STRLOG_MethodName);

            return submissionReport;
        }
        public SubmissionReport Submit(string labServerID, string experimentSpecification,
                                       int priorityHint, bool emailNotification)
        {
            LabServerWebService labServer        = GetLabServer(sbHeader);
            SubmissionReport    submissionReport = labServer.Submit(nextExperimentId, experimentSpecification, userGroup, priorityHint);

            if (submissionReport.vReport.accepted)
            {
                // Go to next experiment number
                nextExperimentId = GetNextExperimentID();
            }

            return submissionReport;
        }
        //-------------------------------------------------------------------------------------------------//
        private SubmissionReport ConvertType(Proxy.SubmissionReport proxySubmissionReport)
        {
            SubmissionReport submissionReport = null;

            if (proxySubmissionReport != null)
            {
                submissionReport = new SubmissionReport();
                submissionReport.ExperimentId = proxySubmissionReport.experimentID;
                submissionReport.MinTimeToLive = proxySubmissionReport.minTimeToLive;
                submissionReport.ValidationReport = this.ConvertType(proxySubmissionReport.vReport);
                submissionReport.WaitEstimate = this.ConvertType(proxySubmissionReport.wait);
            }

            return submissionReport;
        }
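ConvertType deliberately propagates null, so it can be chained straight onto a proxy call without a separate null check; a hedged sketch, where proxyService and its Submit signature are hypothetical stand-ins:

            // Hedged sketch; proxyService and its Submit signature are hypothetical stand-ins.
            Proxy.SubmissionReport proxyReport = proxyService.Submit(experimentId, experimentSpecification, userGroup, priorityHint);
            SubmissionReport report = this.ConvertType(proxyReport); // null in, null out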
Example #5
        public static void Main(string[] args)
        {
            if (args.Length < 4 || args.Length > 5)
            {
                // LUCENENET specific - our wrapper console shows correct usage
                throw new ArgumentException();
                //Console.Error.WriteLine("Usage: QueryDriver <topicsFile> <qrelsFile> <submissionFile> <indexDir> [querySpec]");
                //Console.Error.WriteLine("topicsFile: input file containing queries");
                //Console.Error.WriteLine("qrelsFile: input file containing relevance judgements");
                //Console.Error.WriteLine("submissionFile: output submission file for trec_eval");
                //Console.Error.WriteLine("indexDir: index directory");
                //Console.Error.WriteLine("querySpec: string composed of fields to use in query consisting of T=title,D=description,N=narrative:");
                //Console.Error.WriteLine("\texample: TD (query on Title + Description). The default is T (title only)");
                //Environment.Exit(1);
            }

            FileInfo         topicsFile = new FileInfo(args[0]);
            FileInfo         qrelsFile  = new FileInfo(args[1]);
            SubmissionReport submitLog  = new SubmissionReport(new StreamWriter(new FileStream(args[2], FileMode.Create, FileAccess.Write), Encoding.UTF8), "lucene");

            using Store.FSDirectory dir = Store.FSDirectory.Open(new DirectoryInfo(args[3]));
            using IndexReader reader    = DirectoryReader.Open(dir);
            string        fieldSpec = args.Length == 5 ? args[4] : "T"; // default to Title-only if not specified.
            IndexSearcher searcher  = new IndexSearcher(reader);

            int    maxResults   = 1000;
            string docNameField = "docname";

            TextWriter logger = Console.Out;

            // use trec utilities to read trec topics into quality queries
            TrecTopicsReader qReader = new TrecTopicsReader();

            QualityQuery[] qqs = qReader.ReadQueries(IOUtils.GetDecodingReader(topicsFile, Encoding.UTF8));

            // prepare judge, with trec utilities that read from a QRels file
            IJudge judge = new TrecJudge(IOUtils.GetDecodingReader(qrelsFile, Encoding.UTF8));

            // validate topics & judgments match each other
            judge.ValidateData(qqs, logger);

            ISet<string> fieldSet = new JCG.HashSet<string>();

            if (fieldSpec.IndexOf('T') >= 0)
            {
                fieldSet.Add("title");
            }
            if (fieldSpec.IndexOf('D') >= 0)
            {
                fieldSet.Add("description");
            }
            if (fieldSpec.IndexOf('N') >= 0)
            {
                fieldSet.Add("narrative");
            }

            // set up the parser that turns quality queries into Lucene queries.
            IQualityQueryParser qqParser = new SimpleQQParser(fieldSet.ToArray(), "body");

            // run the benchmark
            QualityBenchmark qrun = new QualityBenchmark(qqs, qqParser, searcher, docNameField);

            qrun.MaxResults = maxResults;
            QualityStats[] stats = qrun.Execute(judge, submitLog, logger);

            // compute and print the average of the results
            QualityStats avg = QualityStats.Average(stats);

            avg.Log("SUMMARY", 2, logger, "  ");
        }
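Per the commented-out usage text above, a run of this driver might look like the following; the paths are illustrative:

        QueryDriver topics.txt qrels.txt submission.txt ./trec-index TD

The optional fifth argument selects the topic fields that feed the query: TD queries on Title + Description, and the default is T (title only).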
        public SubmissionReport Submit(int experimentID, string experimentSpecification, string userGroup, int priorityHint)
        {
            const string STRLOG_MethodName = "Submit";

            Logfile.WriteCalled(STRLOG_ClassName, STRLOG_MethodName);

            SubmissionReport submissionReport = null;

            // Get the identity of the caller
            string sbName = GetCallerName(authHeader);

            // Check caller access is authorised
            if (sbName != null)
            {
                // Pass on to experiment manager
                submissionReport = Global.experimentManager.Submit(experimentID, sbName,
                    experimentSpecification, userGroup, priorityHint);
            }
            else
            {
                submissionReport = new SubmissionReport(experimentID);
                submissionReport.vReport.errorMessage = STRLOG_AccessDenied;
            }

            Logfile.WriteCompleted(STRLOG_ClassName, STRLOG_MethodName);

            return submissionReport;
        }
        //-------------------------------------------------------------------------------------------------//
        public SubmissionReport Submit(int experimentID, string sbName, string experimentSpecification,
            string userGroup, int priorityHint)
        {
            SubmissionReport submissionReport = null;

            //
            // Create a SubmissionReport object ready to fill in and return
            //
            submissionReport = new SubmissionReport(experimentID);

            //
            // Validate the experiment specification before submitting
            //
            ValidationReport validationReport = Validate(experimentSpecification, userGroup);
            if (!validationReport.accepted)
            {
                // Experiment specification is invalid, cannot submit
                submissionReport.vReport = validationReport;
                return submissionReport;
            }

            //
            // Create an instance of the experiment
            //
            ExperimentInfo experimentInfo = new ExperimentInfo(experimentID, sbName,
                userGroup, priorityHint, experimentSpecification, (int)validationReport.estRuntime);

            //
            // Add the experiment to the queue
            //
            QueuedExperimentInfo queuedExperimentInfo = this.experimentQueue.Enqueue(experimentInfo);
            if (queuedExperimentInfo != null)
            {
                //
                // Update submission report
                //
                submissionReport.vReport.accepted = true;
                submissionReport.vReport.estRuntime = queuedExperimentInfo.estExecutionTime;
                submissionReport.wait = new WaitEstimate(queuedExperimentInfo.queueLength, queuedExperimentInfo.waitTime);

                //
                // Get minimum remaining runtime of any currently running experiments and add into the wait estimate
                //
                int minRemainingRuntime = GetMinRemainingRuntime();
                submissionReport.wait.estWait += minRemainingRuntime;

                //
                // Update the statistics with revised wait estimate
                //
                queuedExperimentInfo.waitTime = (int)submissionReport.wait.estWait;
                this.experimentStatistics.Submitted(queuedExperimentInfo, DateTime.Now);

                // Tell lab experiment manager thread that an experiment has been submitted
                this.SignalSubmitted();
            }
            else
            {
                //
                // Failed to add experiment to the queue
                //
                submissionReport.vReport.accepted = false;
                submissionReport.vReport.errorMessage = STRERR_FailedToQueueExperiment;
            }

            return submissionReport;
        }
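A caller would typically check the validation report before trusting the wait estimate; a minimal sketch, assuming only the report fields already used above (the arguments are illustrative):

            // Minimal sketch, assuming the report fields used above; arguments are illustrative.
            SubmissionReport report = Submit(experimentID, sbName, experimentSpecification, userGroup, priorityHint);
            if (report.vReport.accepted)
            {
                Console.WriteLine("Queued; estimated runtime: {0}, estimated wait: {1}",
                    report.vReport.estRuntime, report.wait.estWait);
            }
            else
            {
                Console.WriteLine("Rejected: " + report.vReport.errorMessage);
            }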
Example #8
        public void TestTrecQuality()
        {
            // first create the partial reuters index
            createReutersIndex();


            int    maxResults   = 1000;
            string docNameField = "doctitle"; // original docID is in the linedoc format title

            TextWriter logger = VERBOSE ? Console.Out : null;

            // prepare topics
            Stream           topics  = GetType().getResourceAsStream("trecTopics.txt");
            TrecTopicsReader qReader = new TrecTopicsReader();

            QualityQuery[] qqs = qReader.ReadQueries(new StreamReader(topics, Encoding.UTF8));

            // prepare judge
            Stream qrels = GetType().getResourceAsStream("trecQRels.txt");
            IJudge judge = new TrecJudge(new StreamReader(qrels, Encoding.UTF8));

            // validate topics & judgments match each other
            judge.ValidateData(qqs, logger);

            Store.Directory dir      = NewFSDirectory(new DirectoryInfo(System.IO.Path.Combine(getWorkDir().FullName, "index")));
            IndexReader     reader   = DirectoryReader.Open(dir);
            IndexSearcher   searcher = new IndexSearcher(reader);

            IQualityQueryParser qqParser = new SimpleQQParser("title", "body");
            QualityBenchmark    qrun     = new QualityBenchmark(qqs, qqParser, searcher, docNameField);

            SubmissionReport submitLog = VERBOSE ? new SubmissionReport(logger, "TestRun") : null;

            qrun.MaxResults = maxResults;
            QualityStats[] stats = qrun.Execute(judge, submitLog, logger);

            // --------- verify by the way judgments were altered for this test:
            // for some queries, depending on m = qnum % 8:
            // m==0: avg_precision and recall are hurt, by marking fake docs as relevant
            // m==1: precision_at_n and avg_precision are hurt, by unmarking relevant docs
            // m==2: avg_precision, precision_at_n, and recall are all hurt
            // m>=3: these queries remain perfect
            for (int i = 0; i < stats.Length; i++)
            {
                QualityStats s = stats[i];
                switch (i % 8)
                {
                case 0:
                    assertTrue("avg-p should be hurt: " + s.GetAvp(), 1.0 > s.GetAvp());
                    assertTrue("recall should be hurt: " + s.Recall, 1.0 > s.Recall);
                    for (int j = 1; j <= QualityStats.MAX_POINTS; j++)
                    {
                        assertEquals("p_at_" + j + " should be perfect: " + s.GetPrecisionAt(j), 1.0, s.GetPrecisionAt(j), 1E-2);
                    }
                    break;

                case 1:
                    assertTrue("avg-p should be hurt", 1.0 > s.GetAvp());
                    assertEquals("recall should be perfect: " + s.Recall, 1.0, s.Recall, 1E-2);
                    for (int j = 1; j <= QualityStats.MAX_POINTS; j++)
                    {
                        assertTrue("p_at_" + j + " should be hurt: " + s.GetPrecisionAt(j), 1.0 > s.GetPrecisionAt(j));
                    }
                    break;

                case 2:
                    assertTrue("avg-p should be hurt: " + s.GetAvp(), 1.0 > s.GetAvp());
                    assertTrue("recall should be hurt: " + s.Recall, 1.0 > s.Recall);
                    for (int j = 1; j <= QualityStats.MAX_POINTS; j++)
                    {
                        assertTrue("p_at_" + j + " should be hurt: " + s.GetPrecisionAt(j), 1.0 > s.GetPrecisionAt(j));
                    }
                    break;

                default:
                {
                    assertEquals("avg-p should be perfect: " + s.GetAvp(), 1.0, s.GetAvp(), 1E-2);
                    assertEquals("recall should be perfect: " + s.Recall, 1.0, s.Recall, 1E-2);
                    for (int j = 1; j <= QualityStats.MAX_POINTS; j++)
                    {
                        assertEquals("p_at_" + j + " should be perfect: " + s.GetPrecisionAt(j), 1.0, s.GetPrecisionAt(j), 1E-2);
                    }
                    break;
                }
                }
            }

            QualityStats avg = QualityStats.Average(stats);

            if (logger != null)
            {
                avg.Log("Average statistis:", 1, logger, "  ");
            }


            assertTrue("mean avg-p should be hurt: " + avg.GetAvp(), 1.0 > avg.GetAvp());
            assertTrue("avg recall should be hurt: " + avg.Recall, 1.0 > avg.Recall);
            for (int j = 1; j <= QualityStats.MAX_POINTS; j++)
            {
                assertTrue("avg p_at_" + j + " should be hurt: " + avg.GetPrecisionAt(j), 1.0 > avg.GetPrecisionAt(j));
            }

            reader.Dispose();
            dir.Dispose();
        }
        //-------------------------------------------------------------------------------------------------//

        protected void btnSubmit_Click1(object sender, EventArgs e)
        {
            //
            // Check if an experiment has already been submitted. The Submit button's
            // enabled state is set when the page is loaded.
            //
            if (!btnSubmit.Enabled)
            {
                return;
            }

            try
            {
                // Build the XML specification string
                string xmlSpecification = this.BuildSpecification();

                //
                // Submit the experiment specification
                //
                SubmissionReport submissionReport = Master.ServiceBroker.Submit(xmlSpecification);
                if (submissionReport.vReport.accepted)
                {
                    //
                    // Submission was accepted
                    //
                    ShowMessageNormal(STR_SubmissionSuccessful +
                                      FormatSubmission(submissionReport.experimentID, (int)submissionReport.vReport.estRuntime));

                    //
                    // Update session with submitted experiment ID
                    //
                    if (Master.MultiSubmit)
                    {
                        // Add experiment ID to the list in the session
                        if (Session[Consts.STRSSN_SubmittedIDs] != null)
                        {
                            // Get the list of submitted experiment IDs
                            int[] submittedIDs = (int[])Session[Consts.STRSSN_SubmittedIDs];

                            // Create a bigger array and copy submitted experiment IDs
                            int[] newSubmittedIDs = new int[submittedIDs.Length + 1];
                            submittedIDs.CopyTo(newSubmittedIDs, 0);

                            // Add the experiment ID to the bigger array
                            newSubmittedIDs[submittedIDs.Length] = submissionReport.experimentID;

                            // Save experiment IDs in the session
                            Session[Consts.STRSSN_SubmittedIDs] = newSubmittedIDs;
                        }
                        else
                        {
                            // Create an array and add the experiment ID
                            int[] submittedIDs = new int[1];
                            submittedIDs[0] = submissionReport.experimentID;

                            // Save experiment IDs in the session
                            Session[Consts.STRSSN_SubmittedIDs] = submittedIDs;
                        }
                    }
                    else
                    {
                        // Save experiment ID in the session
                        Session[Consts.STRSSN_SubmittedID] = submissionReport.experimentID;

                        // Update buttons
                        btnValidate.Enabled = false;
                        btnSubmit.Enabled   = false;
                    }
                }
                else
                {
                    // Submission was rejected
                    this.ShowMessageError(submissionReport.vReport.errorMessage);
                }
            }
            catch (Exception ex)
            {
                // LabServer error
                Logfile.WriteError(ex.Message);
                this.ShowMessageFailure(STRERR_SubmissionFailed);
            }
        }
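The array-growing block in the MultiSubmit branch is an append in disguise; a hedged alternative sketch that stores a List<int> in the session instead (this changes the stored type, so every reader of Consts.STRSSN_SubmittedIDs would need the same change, and it assumes System.Collections.Generic is imported):

            // Hedged alternative: keep a List<int> in the session rather than growing an int[].
            // Changes the stored type, so all readers of STRSSN_SubmittedIDs must agree.
            List<int> submittedIds = Session[Consts.STRSSN_SubmittedIDs] as List<int> ?? new List<int>();
            submittedIds.Add(submissionReport.experimentID);
            Session[Consts.STRSSN_SubmittedIDs] = submittedIds;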