/// <summary>
/// Evaluates a dummy name finder whose predictions are taken from sample B
/// against the reference annotations of sample A, and checks that the
/// resulting F-measure is 0.8.
/// </summary>
public void TestNegative()
{
    // TODO: Add the listener

    var pred = CreateSimpleNameSampleB().Names;
    var eval = new TokenNameFinderEvaluator(new DummyNameFinder(pred));

    eval.EvaluateSample(CreateSimpleNameSampleA());

    // FIX: the F-measure is computed in floating point, so an exact equality
    // check against 0.8 (not exactly representable in binary) is fragile;
    // compare with a tight tolerance instead.
    Assert.AreEqual(0.8d, eval.FMeasure.Value, 1E-9d);
}
/// <summary>
/// Evaluates a dictionary-based name finder against a reference sample stream
/// and expects perfect F-measure and recall (1.0).
/// </summary>
public void TestEvaluator()
{
    var nameFinder = new DictionaryNameFinder(CreateDictionary());

    // TODO: Add the evaluation listener.

    var evaluator = new TokenNameFinderEvaluator(nameFinder);

    // FIX: the original disposed the sample stream only on the success path;
    // a using statement guarantees disposal even if Evaluate throws.
    using (var sample = CreateSample())
    {
        evaluator.Evaluate(sample);
    }

    Assert.AreEqual(1d, evaluator.FMeasure.Value);
    Assert.AreEqual(1d, evaluator.FMeasure.RecallScore);
}
/// <summary>
/// Loads the token name finder model named by the tool parameters, evaluates
/// it over the measured sample stream while reporting sentence throughput,
/// and prints either the overall F-measure or a detailed per-type breakdown
/// when the detailed listener was requested.
/// </summary>
/// <param name="format">Input data format, passed through to the base implementation.</param>
/// <param name="args">Remaining command-line arguments, passed through to the base implementation.</param>
public override void run(string format, string[] args)
{
    base.run(format, args);

    TokenNameFinderModel model = (new TokenNameFinderModelLoader()).load(@params.Model);

    // FIX: in .NET, LinkedList<T> implements ICollection<T> but NOT IList<T>,
    // so the original Java-style "IList<...> listeners = new LinkedList<...>()"
    // does not compile. List<T> provides Add and ToArray directly.
    var listeners = new List<EvaluationMonitor<NameSample>>();
    if (@params.Misclassified.Value)
    {
        listeners.Add(new NameEvaluationErrorListener());
    }

    // Kept non-null only when detailed F-measure reporting was requested;
    // also used below to decide which summary to print.
    TokenNameFinderDetailedFMeasureListener detailedFListener = null;
    if (@params.DetailedF.Value)
    {
        detailedFListener = new TokenNameFinderDetailedFMeasureListener();
        listeners.Add(detailedFListener);
    }

    TokenNameFinderEvaluator evaluator =
        new TokenNameFinderEvaluator(new NameFinderME(model), listeners.ToArray());

    PerformanceMonitor monitor = new PerformanceMonitor("sent");

    ObjectStream<NameSample> measuredSampleStream =
        new ObjectStreamAnonymousInnerClassHelper(this, monitor);

    monitor.startAndPrintThroughput();

    try
    {
        evaluator.evaluate(measuredSampleStream);
    }
    catch (IOException e)
    {
        Console.Error.WriteLine("failed");
        throw new TerminateToolException(-1, "IO error while reading test data: " + e.Message, e);
    }
    finally
    {
        try
        {
            measuredSampleStream.close();
        }
        catch (IOException)
        {
            // sorry that this can fail
        }
    }

    monitor.stopAndPrintFinalResult();

    Console.WriteLine();

    if (detailedFListener == null)
    {
        Console.WriteLine(evaluator.FMeasure);
    }
    else
    {
        Console.WriteLine(detailedFListener.ToString());
    }
}