Example #1
 private static opennlp.tools.parser.ParserModel OpenJavaModel(string fileName)
 {
     java.io.FileInputStream inputStream = null;
     try {
         inputStream = OpenNLP.OpenInputStream(fileName);
         return new opennlp.tools.parser.ParserModel(inputStream);
     } finally {
         inputStream?.close();
     }
 }
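The ParserModel loaded above can be turned into a parser with opennlp.tools.parser.ParserFactory. The sketch below is not part of the original tests: the model path and the sample sentence are invented, and it assumes the opennlp.tools.cmdline helper classes were included in the IKVM-converted assembly.

 private static void ParseSampleSentence()
 {
     // hypothetical path, following the "opennlp/models/..." pattern used elsewhere in these examples
     opennlp.tools.parser.ParserModel model = OpenJavaModel("opennlp/models/en-parser-chunking.bin");
     opennlp.tools.parser.Parser parser = opennlp.tools.parser.ParserFactory.create(model);
     // ParserTool.parseLine splits the line on whitespace and returns the top-n parses
     opennlp.tools.parser.Parse[] parses =
         opennlp.tools.cmdline.parser.ParserTool.parseLine("The dog jumped over the fence .", parser, 1);
     parses[0].show();    // prints the bracketed parse tree to standard output
 }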
Example #2
 private static opennlp.tools.chunker.ChunkSampleStream JavaSampleStream()
 {
     // suppress the "obsolete" warning (612) raised by the Java API used below
#pragma warning disable 612
     return new opennlp.tools.chunker.ChunkSampleStream(
                new opennlp.tools.util.PlainTextByLineStream(
                    OpenNLP.OpenInputStream("opennlp/tools/chunker/test.txt"), "utf-8"));
#pragma warning restore 612
 }
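A ChunkSampleStream yields one ChunkSample per sentence until read() returns null. The loop below is only a usage sketch built on the JavaSampleStream helper above; it is not taken from the original tests.

 private static void PrintChunkSamples()
 {
     opennlp.tools.chunker.ChunkSampleStream samples = JavaSampleStream();
     try {
         opennlp.tools.chunker.ChunkSample sample;
         while ((sample = samples.read()) != null)
         {
             // each sample covers one sentence; toString() gives a readable
             // representation of the tokens with their POS and chunk tags
             System.Console.WriteLine(sample.toString());
         }
     } finally {
         samples.close();
     }
 }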
Example #3
 //
 // DO NOT USE THESE TESTS AS SAMPLES TO BUILD YOUR STUFF!
 //
 //  I use some things here that are not needed in a "real" implementation
 //
 private static opennlp.tools.tokenize.TokenizerModel OpenJavaModel(string fileName)
 {
     java.io.FileInputStream inputStream = null;
     try {
         inputStream = OpenNLP.OpenInputStream(fileName);
         return new opennlp.tools.tokenize.TokenizerModel(inputStream);
     } finally {
         if (inputStream != null)
         {
             inputStream.close();
         }
     }
 }
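For completeness, here is a hedged sketch of how the TokenizerModel loaded by the method above could be used; the model path and the input text are invented for illustration and do not come from the original tests.

 private static string[] TokenizeSample()
 {
     // hypothetical path, following the "opennlp/models/..." pattern used elsewhere in these examples
     opennlp.tools.tokenize.TokenizerModel model = OpenJavaModel("opennlp/models/en-token.bin");
     opennlp.tools.tokenize.TokenizerME tokenizer = new opennlp.tools.tokenize.TokenizerME(model);
     return tokenizer.tokenize("Mr. Smith bought cheapsite.com for 1.5 million dollars.");  // one string per token
 }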
 private static JavaModel OpenJavaModel()
 {
     return new JavaModel(OpenNLP.OpenInputStream("opennlp/models/en-sent.bin"));
 }
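JavaModel is an alias defined elsewhere in the test file; the en-sent.bin model suggests it stands for opennlp.tools.sentdetect.SentenceModel, but the excerpt does not show the alias. Under that assumption, a minimal sentence-detection sketch would look like this:

 private static string[] DetectSentences(string text)
 {
     // assumes JavaModel aliases opennlp.tools.sentdetect.SentenceModel; not confirmed by the excerpt
     opennlp.tools.sentdetect.SentenceModel model =
         new opennlp.tools.sentdetect.SentenceModel(OpenNLP.OpenInputStream("opennlp/models/en-sent.bin"));
     opennlp.tools.sentdetect.SentenceDetectorME detector =
         new opennlp.tools.sentdetect.SentenceDetectorME(model);
     return detector.sentDetect(text);    // one string per detected sentence
 }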
Example #5
 private static opennlp.tools.chunker.ChunkSampleStream JavaSampleStream()
 {
     return new opennlp.tools.chunker.ChunkSampleStream(
                new opennlp.tools.util.PlainTextByLineStream(
                    OpenNLP.OpenInputStream("opennlp/tools/chunker/test.txt"), "utf-8"));
 }
Example #6
 private static JavaModel OpenJavaModel()
 {
     return new JavaModel(OpenNLP.OpenInputStream(ModeFile));
 }
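Every example above goes through the OpenNLP.OpenInputStream helper, which is not part of this excerpt. A minimal sketch of what such a helper might look like, assuming it simply resolves the relative path against a test-data directory and wraps it in a java.io.FileInputStream:

 internal static class OpenNLP
 {
     // Hypothetical reconstruction; the real helper and its base directory are not
     // shown in these examples.
     internal static java.io.FileInputStream OpenInputStream(string fileName)
     {
         string path = System.IO.Path.Combine("testData", fileName);   // assumed base directory
         return new java.io.FileInputStream(path);
     }
 }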