// [Explicit-Dispose]
public void Dispose()
{
    if (_wn != null)
    {
        _wn.Dispose();
    }

    _tokenizer = null;
    _sentenceDetector = null;
    _posTagger = null;
    _chunker = null;

    // Dispose CLI/C++ DLL
    ap = null;

    // Dispose all KB plug-ins
    if (PlugInsNumber > 0)
    {
        for (int i = 0; i < PlugInsNumber; i++)
        {
            KBDrivers[i] = null;
            KBDriversQueryPointers[i] = null;
        }
    }
}
private void initComponents()
{
    sentenceDetector = new EnglishMaximumEntropySentenceDetector(Path.Combine(ModelDir, "EnglishSD.nbin"));
    tokenizer = new EnglishMaximumEntropyTokenizer(Path.Combine(ModelDir, "EnglishTok.nbin"));
    posTagger = new EnglishMaximumEntropyPosTagger(Path.Combine(ModelDir, "EnglishPOS.nbin"));
    chunker = new EnglishTreebankChunker(Path.Combine(ModelDir, "EnglishChunk.nbin"));
    parser = new EnglishTreebankParser(FileUtils.WithSeparator(ModelDir), true, false);
}
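// Usage sketch (not part of the original snippet): once initComponents() has run, the
// components above are typically chained per sentence roughly like this. The helper name
// ProcessText and the SentenceDetect/Tokenize/Tag method names follow the SharpNLP/OpenNLP
// port and are assumptions here, not code from the original project.
private void ProcessText(string text)
{
    foreach (var sentence in sentenceDetector.SentenceDetect(text))
    {
        string[] tokens = tokenizer.Tokenize(sentence);   // split sentence into tokens
        string[] tags = posTagger.Tag(tokens);            // POS-tag the tokens
        var chunks = chunker.GetChunks(tokens, tags);     // group tokens into phrase chunks
        Console.WriteLine(string.Join(" ", chunks));
    }
}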
private string ChunkSentence(string[] tokens, string[] tags)
{
    if (_chunker == null)
    {
        _chunker = new EnglishTreebankChunker(_modelPath + "EnglishChunk.nbin");
    }

    return string.Join(" ", _chunker.GetChunks(tokens, tags));
}
public static string ChunkSentence(string[] tokens, string[] tags)
{
    if (mChunker == null)
    {
        mChunker = new OpenNLP.Tools.Chunker.EnglishTreebankChunker(mModelPath + "EnglishChunk.nbin");
    }

    return mChunker.GetChunks(tokens, tags);
}
internal string ChunkSentence(string[] tokens, string[] tags)
{
    if (_mChunker == null)
    {
        _mChunker = new EnglishTreebankChunker(_modelPath + "EnglishChunk.nbin");
    }

    return _mChunker.GetChunks(tokens, tags);
}
public void Setup()
{
    var path = Path.Combine(TestContext.CurrentContext.TestDirectory, @"..\..\..\Resources\Models\");
    sentenceDetector = new EnglishMaximumEntropySentenceDetector(Path.Combine(path, "EnglishSD.nbin"));
    postTagger = new EnglishMaximumEntropyPosTagger(
        Path.Combine(path, "EnglishPOS.nbin"),
        Path.Combine(path, @"Parser\tagdict"));
    tokenizer = new EnglishMaximumEntropyTokenizer(Path.Combine(path, "EnglishTok.nbin"));
    chunker = new EnglishTreebankChunker(Path.Combine(path, "EnglishChunk.nbin"));
}
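// Test sketch (not part of the original snippet): exercises the components wired up in
// Setup(). NUnit is implied by TestContext above, but the [Test] attribute, the sample
// sentence, and the Tokenize/Tag method names are assumptions added for illustration.
[Test]
public void ChunksSimpleSentence()
{
    var tokens = tokenizer.Tokenize("The quick brown fox jumps over the lazy dog.");
    var tags = postTagger.Tag(tokens);
    var chunks = chunker.GetChunks(tokens, tags);

    Assert.IsNotEmpty(chunks); // expect at least one phrase chunk for a simple sentence
}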
private List<SentenceChunk> MyChunker(string str)
{
    var modelPath = Path.GetDirectoryName(Process.GetCurrentProcess().MainModule.FileName) + @"\Models\EnglishChunk.nbin";
    var chunker = new EnglishTreebankChunker(modelPath);
    var tokens = MyTokenizer(str);
    var pos = MyPosTagger(str);
    var chunks = chunker.GetChunks(tokens, pos);
    return chunks;
}
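// Usage sketch (not part of the original snippet): consuming the SentenceChunk list returned
// by MyChunker. The helper name PrintChunks is an assumption; Console.WriteLine simply prints
// each chunk's default string representation.
private void PrintChunks(string str)
{
    List<SentenceChunk> chunks = MyChunker(str);
    foreach (var chunk in chunks)
    {
        Console.WriteLine(chunk); // one phrase chunk per line
    }
}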
// Default constructor
public SemCluster(string DataFolder)
{
    try
    {
        Console.WriteLine("\tSemCluster Text Analytics Tool");
        Console.WriteLine("\t------------------------------");
        Console.WriteLine("\t-Wikipedia local server couldn't be found!");
        Console.WriteLine("\t-Seeds SemAve is in manual mode!");
        Console.WriteLine();
        Console.WriteLine();
        Console.WriteLine("-> Resources loading ...");
        Console.WriteLine();

        #region Loading External Resources
        _wn = new WordNetEngine(DataFolder + "WordNet", InMemoryWordNet);
        _tokenizer = new EnglishRuleBasedTokenizer(TokenizeHyphen);
        _sentenceDetector = new EnglishMaximumEntropySentenceDetector(DataFolder + "EnglishSD.nbin");
        _posTagger = new EnglishMaximumEntropyPosTagger(DataFolder + "EnglishPOS.nbin", DataFolder + "\\Build\\tagdict");
        _chunker = new EnglishTreebankChunker(DataFolder + "EnglishChunk.nbin");
        #endregion

        PlugInsManager(DataFolder);

        Console.WriteLine("\tResources loaded successfully");
        Console.WriteLine("\t" + PlugInsNumber + " KB plug-ins found in the repository");
        Console.WriteLine("\tPress any key to continue ...");
        Console.ReadKey();
        Console.WriteLine();

        RootVirtualNode = _wn.GetSynSet("Noun:1740");

        ap = new AffinityPropagationClustering();

        SynSetRelationTypes = new WordNetApi.Core.WordNetEngine.SynSetRelation[2];
        SynSetRelationTypes[0] = WordNetApi.Core.WordNetEngine.SynSetRelation.Hypernym;
        SynSetRelationTypes[1] = WordNetApi.Core.WordNetEngine.SynSetRelation.InstanceHypernym;
    }
    catch (Exception ex)
    {
        Dispose();
        throw new Exception(ex.Message);
    }
}