Example #1
        /// <summary>
        /// Evaluates the specified chunk samples.
        /// </summary>
        /// <param name="samples">The chunk samples to be evaluated.</param>
        /// <param name="partitions">The partitions (folds).</param>
        public void Evaluate(IObjectStream<ChunkSample> samples, int partitions) {
            var partitioner = new CrossValidationPartitioner<ChunkSample>(samples, partitions);

            while (partitioner.HasNext) {

                var trainingSampleStream = partitioner.Next();

                var model = ChunkerME.Train(languageCode, trainingSampleStream, parameters, chunkerFactory);

                var evaluator = new ChunkerEvaluator(new ChunkerME(model), listeners);

                evaluator.Evaluate(trainingSampleStream.GetTestSampleStream());

                FMeasure.MergeInto(evaluator.FMeasure);
            }
        }
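The Evaluate method above comes from the chunker cross-validator: it splits the sample stream into the requested number of folds, trains a ChunkerME model on each training partition, scores it on the matching held-out partition, and merges every fold's result into FMeasure. A minimal driver for it might look like the sketch below; the ChunkerCrossValidator constructor shape, the ChunkSampleStream/ChunkerFactory/TrainingParameters types, the namespaces, and the training-file path are assumptions modelled on the OpenNLP API rather than taken from this page.

        // Hypothetical driver for the cross-validation above (assumed types and file path).
        // using SharpNL.Chunker; using SharpNL.Utility;   <-- assumed namespaces
        var parameters = new TrainingParameters();          // assumed no-arg constructor with default settings
        using (var lines = new PlainTextByLineStream(Tests.OpenFile("opennlp/tools/chunker/training.txt")))  // hypothetical file
        using (var samples = new ChunkSampleStream(lines)) {
            var crossValidator = new ChunkerCrossValidator("en", parameters, new ChunkerFactory());
            crossValidator.Evaluate(samples, 10);            // 10-fold cross-validation
            Console.WriteLine(crossValidator.FMeasure);      // merged precision/recall/F1 over all folds
        }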
Example #2
        public void TestEvaluatorNoError() {
            using (
                var predictedSample =
                    new DummyChunkSampleStream(
                        new PlainTextByLineStream(Tests.OpenFile("opennlp/tools/chunker/output.txt")), true))
            using (
                var expectedSample =
                    new DummyChunkSampleStream(
                        new PlainTextByLineStream(Tests.OpenFile("opennlp/tools/chunker/output.txt")), true)) {
                var dummyChunker = new DummyChunker(predictedSample);

                var evaluator = new ChunkerEvaluator(dummyChunker);

                evaluator.Evaluate(expectedSample);

                Assert.AreEqual(1d, evaluator.FMeasure.PrecisionScore, DELTA);
                Assert.AreEqual(1d, evaluator.FMeasure.RecallScore, DELTA);
            }
        }
Example #3
        public void TestEvaluatorNoError()
        {
            using (
                var predictedSample =
                    new DummyChunkSampleStream(
                        new PlainTextByLineStream(Tests.OpenFile("opennlp/tools/chunker/output.txt")), true))
                using (
                    var expectedSample =
                        new DummyChunkSampleStream(
                            new PlainTextByLineStream(Tests.OpenFile("opennlp/tools/chunker/output.txt")), true)) {
                    var dummyChunker = new DummyChunker(predictedSample);

                    var evaluator = new ChunkerEvaluator(dummyChunker);

                    evaluator.Evaluate(expectedSample);

                    Assert.AreEqual(1d, evaluator.FMeasure.PrecisionScore, DELTA);
                    Assert.AreEqual(1d, evaluator.FMeasure.RecallScore, DELTA);
                }
        }
Example #4
        public void TestEvaluator() {
            using (
                var predictedSample =
                    new DummyChunkSampleStream(
                        new PlainTextByLineStream(Tests.OpenFile("opennlp/tools/chunker/output.txt")), true))
            using (
                var expectedSample =
                    new DummyChunkSampleStream(
                        new PlainTextByLineStream(Tests.OpenFile("opennlp/tools/chunker/output.txt")), false)) {
                var dummyChunker = new DummyChunker(predictedSample);

                //var stream = new ByteArrayOutputStream();
                //ChunkerEvaluationMonitor listener = new ChunkEvaluationErrorListener(stream);
                var evaluator = new ChunkerEvaluator(dummyChunker);

                evaluator.Evaluate(expectedSample);

                Assert.AreEqual(0.8d, evaluator.FMeasure.PrecisionScore, DELTA);
                Assert.AreEqual(0.875d, evaluator.FMeasure.RecallScore, DELTA);

                //assertNotSame(stream.toString().length(), 0);
            }
        }
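The commented-out lines in this test are left over from the Java original, which attached a ChunkEvaluationErrorListener backed by a ByteArrayOutputStream and asserted that it produced output. A rough equivalent in this port is sketched below; it assumes ChunkerEvaluator accepts evaluation monitors as extra constructor arguments (as in the other snippets on this page) and that the parameterless ChunkEvaluationErrorListener used by the evaluator tool further down reports misclassified samples to the console.

        // Sketch: report misclassified samples while evaluating (assumed listener behaviour).
        var errorListener = new ChunkEvaluationErrorListener();            // assumed: writes errors to stderr
        var evaluator = new ChunkerEvaluator(dummyChunker, errorListener);

        evaluator.Evaluate(expectedSample);

        // The precision/recall assertions stay the same; the listener only adds
        // diagnostic output for samples where predicted and expected chunks differ.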
Example #5
        public void TestEvaluator()
        {
            using (
                var predictedSample =
                    new DummyChunkSampleStream(
                        new PlainTextByLineStream(Tests.OpenFile("opennlp/tools/chunker/output.txt")), true))
                using (
                    var expectedSample =
                        new DummyChunkSampleStream(
                            new PlainTextByLineStream(Tests.OpenFile("opennlp/tools/chunker/output.txt")), false)) {
                    var dummyChunker = new DummyChunker(predictedSample);

                    //var stream = new ByteArrayOutputStream();
                    //ChunkerEvaluationMonitor listener = new ChunkEvaluationErrorListener(stream);
                    var evaluator = new ChunkerEvaluator(dummyChunker);

                    evaluator.Evaluate(expectedSample);

                    Assert.AreEqual(0.8d, evaluator.FMeasure.PrecisionScore, DELTA);
                    Assert.AreEqual(0.875d, evaluator.FMeasure.RecallScore, DELTA);

                    //assertNotSame(stream.toString().length(), 0);
                }
        }
Example #6
        public override void run(string format, string[] args)
        {
            base.run(format, args);

            ChunkerModel model = (new ChunkerModelLoader()).load(@params.Model);

            IList<EvaluationMonitor<ChunkSample>> listeners = new List<EvaluationMonitor<ChunkSample>>();
            ChunkerDetailedFMeasureListener detailedFMeasureListener = null;

            if (@params.Misclassified.Value)
            {
                listeners.Add(new ChunkEvaluationErrorListener());
            }
            if (@params.DetailedF.Value)
            {
                detailedFMeasureListener = new ChunkerDetailedFMeasureListener();
                listeners.Add(detailedFMeasureListener);
            }

            ChunkerEvaluator evaluator = new ChunkerEvaluator(new ChunkerME(model, ChunkerME.DEFAULT_BEAM_SIZE), listeners.ToArray());

            PerformanceMonitor monitor = new PerformanceMonitor("sent");

            ObjectStream<ChunkSample> measuredSampleStream = new ObjectStreamAnonymousInnerClassHelper(this, monitor);

            monitor.startAndPrintThroughput();

            try
            {
                evaluator.evaluate(measuredSampleStream);
            }
            catch (IOException e)
            {
                Console.Error.WriteLine("failed");
                throw new TerminateToolException(-1, "IO error while reading test data: " + e.Message, e);
            }
            finally
            {
                try
                {
                    measuredSampleStream.close();
                }
                catch (IOException)
                {
                    // sorry that this can fail
                }
            }

            monitor.stopAndPrintFinalResult();

            Console.WriteLine();

            if (detailedFMeasureListener == null)
            {
                Console.WriteLine(evaluator.FMeasure);
            }
            else
            {
                Console.WriteLine(detailedFMeasureListener.ToString());
            }
        }
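The measuredSampleStream above is built through ObjectStreamAnonymousInnerClassHelper, the converter's stand-in for the Java anonymous class that wraps the test data and ticks the PerformanceMonitor once per sample read. A rough sketch of what such a wrapper does is below; it assumes ObjectStream<T> is an interface exposing read/reset/close as in the Java original and that PerformanceMonitor has an incrementCounter() method, neither of which is shown on this page.

        // Sketch of a measured stream equivalent to ObjectStreamAnonymousInnerClassHelper:
        // it delegates to the underlying ChunkSample stream and counts each sample so the
        // PerformanceMonitor can report sentences per second. (Member names assumed.)
        internal class MeasuredChunkSampleStream : ObjectStream<ChunkSample>
        {
            private readonly ObjectStream<ChunkSample> samples;
            private readonly PerformanceMonitor monitor;

            public MeasuredChunkSampleStream(ObjectStream<ChunkSample> samples, PerformanceMonitor monitor)
            {
                this.samples = samples;
                this.monitor = monitor;
            }

            public ChunkSample read()
            {
                ChunkSample sample = samples.read();
                if (sample != null)
                {
                    monitor.incrementCounter();   // one tick per evaluated sentence
                }
                return sample;
            }

            public void reset()
            {
                samples.reset();
            }

            public void close()
            {
                samples.close();
            }
        }

The tool then hands this wrapper to evaluator.evaluate(...), so throughput is measured against exactly the stream the evaluator consumes.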