Example No. 1
0
        /// <summary>
        /// Parses a single sentence with the English Treebank parser.
        /// The parser is created lazily on the first call and cached in
        /// <c>_parser</c> for all subsequent calls.
        /// </summary>
        private Parse ParseSentence(string sentence)
        {
            // Lazily construct the (expensive) parser once, then reuse it.
            _parser = _parser ?? new OpenNLP.Tools.Parser.EnglishTreebankParser(_modelPath, true, false);

            return _parser.DoParse(sentence);
        }
Example No. 2
0
        /// <summary>
        /// Entry point: tokenizes a hard-coded sample sentence, builds its
        /// constituency parse with the English Treebank parser, converts the
        /// parse into a grammatical structure and prints the typed
        /// dependencies to the console.
        /// </summary>
        private static void Main(string[] args)
        {
            // Sample sentence to analyze.
            var sentence = "The kids actually get involved in mapping out where the community center should be";

            // Tokenize the sentence with the maximum-entropy English tokenizer.
            // NOTE(review): `currentDirectory` is presumably a field/constant declared
            // elsewhere in this class — confirm it ends with a path separator.
            var tokenizer = new EnglishMaximumEntropyTokenizer(currentDirectory + "../Resources/Models/EnglishTok.nbin");
            var tokens = tokenizer.Tokenize(sentence);

            // Build the constituency parse of the token sequence.
            var modelPath = currentDirectory + "../Resources/Models/";
            var parser = new EnglishTreebankParser(modelPath, true, false);
            var parse = parser.DoParse(tokens);

            // Extract typed dependencies from the lexical (constituency) tree.
            var tlp = new PennTreebankLanguagePack();
            var gsf = tlp.GrammaticalStructureFactory();
            var tree = new ParseTree(parse);
            Console.WriteLine(tree);
            var gs = gsf.NewGrammaticalStructure(tree);
            var dependencies = gs.TypedDependencies();

            foreach (var dep in dependencies)
            {
                Console.WriteLine(dep);
            }

            Console.WriteLine("===========");
            Console.WriteLine("OK");
            // Keep the console window open until a key is pressed.
            Console.ReadKey();
        }
Example No. 3
0
        /// <summary>
        /// Parses the text currently in <c>txtInput</c> with the (lazily
        /// created) English Treebank parser and renders the resulting parse
        /// tree in the Lithium diagram control. Input controls are disabled
        /// for the duration of the parse and restored afterwards.
        /// </summary>
        private void ShowParse()
        {
            // Nothing to parse — leave the UI untouched.
            if (txtInput.Text.Length == 0)
            {
                return;
            }

            // Prepare the UI: block further input and show a wait cursor
            // while the (potentially slow) parse runs.
            txtInput.Enabled = false;
            btnParse.Enabled = false;
            this.Cursor = Cursors.WaitCursor;

            // Discard any previously displayed tree.
            lithiumControl.NewDiagram();

            // Do the parsing; the parser is created once and cached in mParser.
            if (mParser == null)
            {
                mParser = new EnglishTreebankParser(mModelPath, true, false);
            }
            mParse = mParser.DoParse(txtInput.Text);

            // Skip the synthetic top node so the displayed root is the
            // first real constituent.
            if (mParse.Type == MaximumEntropyParser.TopNode)
            {
                mParse = mParse.GetChildren()[0];
            }

            // Display the parse result: label the diagram root with the root
            // constituent type, then add the children recursively.
            ShapeBase root = this.lithiumControl.Root;
            root.Text = mParse.Type;
            root.Visible = true;

            AddChildNodes(root, mParse.GetChildren());
            root.Expand();

            this.lithiumControl.DrawTree();

            // Restore the UI to its interactive state.
            this.Cursor = Cursors.Default;
            txtInput.Enabled = true;
            btnParse.Enabled = true;
        }
Example No. 4
0
 /// <summary>
 /// Creates the five OpenNLP pipeline components — sentence detector,
 /// tokenizer, POS tagger, chunker and treebank parser — loading each
 /// model file from <c>ModelDir</c>.
 /// </summary>
 private void initComponents()
 {
     sentenceDetector = new EnglishMaximumEntropySentenceDetector(Path.Combine(ModelDir, "EnglishSD.nbin"));
     tokenizer = new EnglishMaximumEntropyTokenizer(Path.Combine(ModelDir, "EnglishTok.nbin"));
     posTagger = new EnglishMaximumEntropyPosTagger(Path.Combine(ModelDir, "EnglishPOS.nbin"));
     chunker = new EnglishTreebankChunker(Path.Combine(ModelDir, "EnglishChunk.nbin"));
     // The parser takes the model directory itself, not a single model file;
     // FileUtils.WithSeparator presumably appends a trailing separator — verify.
     parser = new EnglishTreebankParser(FileUtils.WithSeparator(ModelDir), true, false);
 }
Example No. 5
0
		/// <summary>
		/// Parses one sentence, constructing the English Treebank parser on
		/// first use and caching it in <c>_parser</c> thereafter.
		/// </summary>
		private Parse ParseSentence(string sentence)
		{
			if (_parser != null)
			{
				return _parser.DoParse(sentence);
			}

			// First call: build the parser, then parse.
			_parser = new OpenNLP.Tools.Parser.EnglishTreebankParser(_modelPath, true, false);
			return _parser.DoParse(sentence);
		}