Example No. 1
0
        /// <summary>
        /// Analyzes a single sentence through tokenizer -> POS filter -> lemmatizer,
        /// where the lemmatizer is configured with a dictionary only (no statistical
        /// "lemmatizerModel"), and verifies the lemmatized tokens and POS tags.
        /// </summary>
        public void Test1SentenceDictionaryOnly()
        {
            Analyzer analyzer = Analyzer.NewAnonymous(createComponents: (fieldname, reader) =>
            {
                ClasspathResourceLoader resourceLoader = new ClasspathResourceLoader(GetType());

                // Sentence detection + tokenization from the OpenNLP model files.
                OpenNLPTokenizerFactory tokenizerFactory = new OpenNLPTokenizerFactory(new Dictionary<string, string>
                {
                    { "tokenizerModel", tokenizerModelFile },
                    { "sentenceModel", sentenceModelFile }
                });
                tokenizerFactory.Inform(resourceLoader);
                var tokenizer = tokenizerFactory.Create(reader);

                // POS tags feed the downstream lemmatizer.
                OpenNLPPOSFilterFactory posFactory = new OpenNLPPOSFilterFactory(new Dictionary<string, string>
                {
                    { "posTaggerModel", posTaggerModelFile }
                });
                posFactory.Inform(resourceLoader);
                var posFilter = posFactory.Create(tokenizer);

                // Dictionary-only lemmatizer: note no "lemmatizerModel" entry.
                OpenNLPLemmatizerFilterFactory lemmatizerFactory = new OpenNLPLemmatizerFilterFactory(new Dictionary<string, string>
                {
                    { "dictionary", lemmatizerDictFile }
                });
                lemmatizerFactory.Inform(resourceLoader);
                var lemmatizerFilter = lemmatizerFactory.Create(posFilter);

                return new TokenStreamComponents(tokenizer, lemmatizerFilter);
            });

            // Upstream Java Lucene construction kept for reference:
            //CustomAnalyzer analyzer = CustomAnalyzer.builder(new ClasspathResourceLoader(getClass()))
            //.withTokenizer("opennlp", "tokenizerModel", tokenizerModelFile, "sentenceModel", sentenceModelFile)
            //.addTokenFilter("opennlpPOS", "posTaggerModel", "en-test-pos-maxent.bin")
            //.addTokenFilter("opennlplemmatizer", "dictionary", "en-test-lemmas.dict")
            //.build();
            AssertAnalyzesTo(analyzer, SENTENCE, SENTENCE_dict_punc, null, null,
                             SENTENCE_posTags, null, null, true);
        }
        /// <summary>
        /// Smoke test for the tokenizer + POS filter pipeline: checks token text
        /// plus start/end offsets over the test sentences.
        /// </summary>
        public void TestBasic()
        {
            // Loader is shared by the factories and captured by the lambda below.
            ClasspathResourceLoader resourceLoader = new ClasspathResourceLoader(GetType());

            Analyzer analyzer = Analyzer.NewAnonymous(createComponents: (fieldname, reader) =>
            {
                // Sentence detection + tokenization from the OpenNLP model files.
                OpenNLPTokenizerFactory tokenizerFactory = new OpenNLPTokenizerFactory(new Dictionary<string, string>
                {
                    { "tokenizerModel", tokenizerModelFile },
                    { "sentenceModel", sentenceModelFile }
                });
                tokenizerFactory.Inform(resourceLoader);
                var tokenizer = tokenizerFactory.Create(reader);

                // Single downstream filter: POS tagging.
                OpenNLPPOSFilterFactory posFactory = new OpenNLPPOSFilterFactory(new Dictionary<string, string>
                {
                    { "posTaggerModel", posTaggerModelFile }
                });
                posFactory.Inform(resourceLoader);
                var posFilter = posFactory.Create(tokenizer);

                return new TokenStreamComponents(tokenizer, posFilter);
            });

            // Upstream Java Lucene construction kept for reference:
            //    CustomAnalyzer analyzer = CustomAnalyzer.builder(new ClasspathResourceLoader(GetType()))
            //.withTokenizer("opennlp", "tokenizerModel", tokenizerModelFile, "sentenceModel", sentenceModelFile)
            //.addTokenFilter("opennlpPOS", "posTaggerModel", posTaggerModelFile)
            //.build();
            AssertAnalyzesTo(analyzer, SENTENCES, SENTENCES_punc, SENTENCES_startOffsets, SENTENCES_endOffsets);
        }
Example No. 3
0
        /// <summary>
        /// Runs tokenizer -> POS -> KeywordRepeat -> lemmatizer (dictionary AND
        /// maxent model) -> RemoveDuplicates, verifying that the lemmatizer keeps
        /// keyword-marked originals alongside their lemmas.
        /// </summary>
        public void TestKeywordAttributeAwarenessDictionaryAndMaxEnt()
        {
            Analyzer analyzer = Analyzer.NewAnonymous(createComponents: (fieldname, reader) =>
            {
                ClasspathResourceLoader resourceLoader = new ClasspathResourceLoader(GetType());

                // Sentence detection + tokenization from the OpenNLP model files.
                OpenNLPTokenizerFactory tokenizerFactory = new OpenNLPTokenizerFactory(new Dictionary<string, string>
                {
                    { "tokenizerModel", tokenizerModelFile },
                    { "sentenceModel", sentenceModelFile }
                });
                tokenizerFactory.Inform(resourceLoader);
                var tokenizer = tokenizerFactory.Create(reader);

                // POS tags feed the downstream lemmatizer.
                OpenNLPPOSFilterFactory posFactory = new OpenNLPPOSFilterFactory(new Dictionary<string, string>
                {
                    { "posTaggerModel", posTaggerModelFile }
                });
                posFactory.Inform(resourceLoader);
                var posFilter = posFactory.Create(tokenizer);

                // Emits each token twice, once flagged as a keyword, so the
                // lemmatizer leaves the original token intact.
                KeywordRepeatFilterFactory keywordRepeatFactory = new KeywordRepeatFilterFactory(new Dictionary<string, string>());
                var keywordRepeatFilter = keywordRepeatFactory.Create(posFilter);

                // Lemmatizer configured with both the dictionary and the maxent model.
                OpenNLPLemmatizerFilterFactory lemmatizerFactory = new OpenNLPLemmatizerFilterFactory(new Dictionary<string, string>
                {
                    { "dictionary", lemmatizerDictFile },
                    { "lemmatizerModel", lemmatizerModelFile }
                });
                lemmatizerFactory.Inform(resourceLoader);
                var lemmatizerFilter = lemmatizerFactory.Create(keywordRepeatFilter);

                // Collapses duplicates where the lemma equals the original token.
                RemoveDuplicatesTokenFilterFactory dedupFactory = new RemoveDuplicatesTokenFilterFactory(new Dictionary<string, string>());
                var dedupFilter = dedupFactory.Create(lemmatizerFilter);

                return new TokenStreamComponents(tokenizer, dedupFilter);
            });

            // Upstream Java Lucene construction kept for reference:
            //CustomAnalyzer analyzer = CustomAnalyzer.builder(new ClasspathResourceLoader(getClass()))
            //    .withTokenizer("opennlp", "tokenizerModel", tokenizerModelFile, "sentenceModel", sentenceModelFile)
            //    .addTokenFilter("opennlpPOS", "posTaggerModel", posTaggerModelFile)
            //    .addTokenFilter(KeywordRepeatFilterFactory.class)
            //    .addTokenFilter("opennlplemmatizer", "dictionary", lemmatizerDictFile, "lemmatizerModel", lemmatizerModelFile)
            //    .addTokenFilter(RemoveDuplicatesTokenFilterFactory.class)
            //    .build();
            AssertAnalyzesTo(analyzer, SENTENCES_both, SENTENCES_both_keep_orig_punc, null, null,
                             SENTENCES_both_keep_orig_posTags, null, null, true);
        }
        /// <summary>
        /// Runs tokenizer -> POS -> chunker -> TypeAsPayload, verifying that each
        /// token's chunk type ends up stored as its payload.
        /// </summary>
        public void TestPayloads()
        {
            // Upstream Java Lucene construction kept for reference:
            //CustomAnalyzer analyzer = CustomAnalyzer.builder(new ClasspathResourceLoader(getClass()))
            //.withTokenizer("opennlp", "tokenizerModel", tokenizerModelFile, "sentenceModel", sentenceModelFile)
            //.addTokenFilter("opennlpPOS", "posTaggerModel", posTaggerModelFile)
            //.addTokenFilter("opennlpChunker", "chunkerModel", chunkerModelFile)
            //.addTokenFilter(TypeAsPayloadTokenFilterFactory.class)
            //.build();

            Analyzer analyzer = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
            {
                ClasspathResourceLoader resourceLoader = new ClasspathResourceLoader(GetType());

                // Sentence detection + tokenization from the OpenNLP model files.
                OpenNLPTokenizerFactory tokenizerFactory = new OpenNLPTokenizerFactory(new Dictionary<string, string>
                {
                    { "tokenizerModel", tokenizerModelFile },
                    { "sentenceModel", sentenceModelFile }
                });
                tokenizerFactory.Inform(resourceLoader);
                // Unlike the other tests, this one passes an explicit attribute factory.
                var tokenizer = tokenizerFactory.Create(NewAttributeFactory(), reader); //new OpenNLPTokenizer(reader, new Tools.NLPSentenceDetectorOp(sentenceModelFile), new Tools.NLPTokenizerOp(tokenizerModelFile));

                // POS tags feed the downstream chunker.
                OpenNLPPOSFilterFactory posFactory = new OpenNLPPOSFilterFactory(new Dictionary<string, string>
                {
                    { "posTaggerModel", posTaggerModelFile }
                });
                posFactory.Inform(resourceLoader);
                var posFilter = posFactory.Create(tokenizer); //new OpenNLPPOSFilter(opennlp, new Tools.NLPPOSTaggerOp(posTaggerModelFile));

                // Chunker writes the chunk label into each token's type attribute.
                OpenNLPChunkerFilterFactory chunkerFactory = new OpenNLPChunkerFilterFactory(new Dictionary<string, string>
                {
                    { "chunkerModel", chunkerModelFile }
                });
                chunkerFactory.Inform(resourceLoader);
                var chunkerFilter = chunkerFactory.Create(posFilter); //new OpenNLPChunkerFilter(filter1, new Tools.NLPChunkerOp(chunkerModelFile));

                // Copies the type attribute into the payload for assertion below.
                TypeAsPayloadTokenFilterFactory payloadFactory = new TypeAsPayloadTokenFilterFactory(new Dictionary<string, string>());
                var payloadFilter = payloadFactory.Create(chunkerFilter);

                return new TokenStreamComponents(tokenizer, payloadFilter);
            });

            AssertAnalyzesTo(analyzer, SENTENCES, SENTENCES_punc, SENTENCES_startOffsets, SENTENCES_endOffsets,
                             null, null, null, true, ToPayloads(SENTENCES_chunks));
        }