public CoreDocument ToDocument(IItemSource itemSource, ServiceController t)
        {
            // Index a Windows service; the service name doubles as its stable id.
            var serviceDoc = new CoreDocument(itemSource, this, ToId(t), ToName(t), ToType(t));
            serviceDoc.Store("id", t.ServiceName.ToString());

            return serviceDoc;
        }
Ejemplo n.º 2
0
        public CoreDocument ToDocument(IItemSource itemSource, T t)
        {
            // Commands are keyed by their CLR type's full name so they can be
            // re-resolved from the index later.
            var commandDoc = new CoreDocument(itemSource, this, ToId(t), ToName(t), ToType(t));
            commandDoc.Store("fullname", t.GetType().FullName);

            return commandDoc;
        }
Ejemplo n.º 3
0
        public CoreDocument ToDocument(IItemSource itemSource, TextItem t)
        {
            // The raw text itself is the identifying payload for a text item.
            var textDoc = new CoreDocument(itemSource, this, ToId(t), ToName(t), ToType(t));
            textDoc.Store("id", t.Text);

            return textDoc;
        }
Ejemplo n.º 4
0
        public CoreDocument ToDocument(IItemSource itemSource, Task t)
        {
            // Persist the task's backing file name so the item can be reloaded.
            var taskDoc = new CoreDocument(itemSource, this, ToId(t), ToName(t), ToType(t));
            taskDoc.Store("filename", t.FileName);

            return taskDoc;
        }
        /// <summary>
        /// Extracts noun-phrase key phrases from every sentence of an annotated
        /// document and records them in <c>NounPhrases</c> under <paramref name="id"/>.
        /// Does nothing when the document yields no sentences.
        /// </summary>
        /// <param name="coredoc">CoreNLP-annotated document (may be null; GetSents handles that).</param>
        /// <param name="id">Key the extracted phrase list is stored under.</param>
        private static void ExtractKeyPhrases(CoreDocument coredoc, int id)
        {
            ArrayList sents = _analyzer.GetSents(coredoc);
            if (sents == null)
            {
                return;
            }

            List<string> phrases = new List<string>();
            for (int i = 0; i < sents.size(); i++)
            {
                CoreMap sentence = (CoreMap)sents.get(i);
                // Constituency parse of this sentence, produced by the pipeline.
                Tree parse = (Tree)sentence.get(typeof(TreeCoreAnnotations.TreeAnnotation));
                foreach (var tree in NPExtractor.getKeyPhrases(parse).ToList())
                {
                    // Join the phrase subtree's leaf tokens back into one string.
                    var leaves = tree.getLeaves().toArray();
                    phrases.Add(string.Join(" ", leaves));
                }
            }
            NounPhrases.Add(id, phrases);
        }
        /// <summary>
        /// Annotates <paramref name="data"/> with the quote pipeline and returns
        /// every extracted quote.
        /// </summary>
        /// <param name="data">Raw text to annotate.</param>
        /// <returns>All quotes found; quotes that fail to convert are skipped.</returns>
        /// <exception cref="SpanishCoreNLPQuoteException">Wraps any pipeline failure.</exception>
        public async Task<List<MCQuote>> GetQuotes(string data)
        {
            List<MCQuote> quotes = new List<MCQuote>();

            try
            {
                // CoreNLP resolves its model files relative to the current directory,
                // so switch to the model path — and ALWAYS restore the original
                // directory, even when annotation throws (the old code leaked the
                // directory change on failure).
                var curDir = Environment.CurrentDirectory;
                Directory.SetCurrentDirectory(_modelPath);
                try
                {
                    CoreDocument doc = new CoreDocument(data);
                    _pipeline.annotate(doc);

                    for (int i = 0; i < doc.quotes().size(); i++)
                    {
                        try
                        {
                            CoreQuote q = (CoreQuote)doc.quotes().get(i);
                            quotes.Add(new MCQuote(q));
                        }
                        catch (Exception)
                        {
                            // Best effort: skip any single quote that fails to convert.
                        }
                    }
                }
                finally
                {
                    Directory.SetCurrentDirectory(curDir);
                }
            }
            catch (Exception e)
            {
                throw new SpanishCoreNLPQuoteException(e.Message, e);
            }
            return quotes;
        }
Ejemplo n.º 7
0
        public CoreDocument ToDocument(IItemSource itemSource, Win32_Process t)
        {
            // Store the process id and use the display name as the learning key.
            var processDoc = new CoreDocument(itemSource, this, ToId(t), ToName(t), ToType(t))
                             .Store("id", t.ProcessId.ToString())
                             .SetItemIdForLearning(ToName(t));

            return processDoc;
        }
Ejemplo n.º 8
0
        public CoreDocument ToDocument(IItemSource itemSource, FileInfo fileInfo)
        {
            // Index the bare name (for matching) and the full path (for retrieval).
            var fileDoc = new CoreDocument(itemSource, this, ToId(fileInfo), ToName(fileInfo), ToType(fileInfo));

            fileDoc.Store("filename", Path.GetFileNameWithoutExtension(fileInfo.Name))
                   .Store("filepath", fileInfo.FullName);

            return fileDoc;
        }
Ejemplo n.º 9
0
        public IItem FromDocumentToItem(CoreDocument document)
        {
            // Rehydrate a Book from its stored index fields.
            var book = new Book
            {
                Title   = document.GetString("title"),
                Authors = document.GetString("authors"),
                Id      = int.Parse(document.GetString("id"))
            };
            book.Formats.AddRange(document.GetStringList("format"));

            return book;
        }
        public CoreDocument ToDocument(IItemSource itemSource, OneNotePage t)
        {
            // Display name is "<section> - <page>"; every field needed to rebuild
            // the page item is persisted individually.
            var pageDoc = new CoreDocument(itemSource, this, t.Id, t.SectionName + " - " + t.Name, "onenotepage");

            pageDoc.Store("id", t.Id);
            pageDoc.Store("name", t.Name);
            pageDoc.Store("sectionnodepath", t.SectionNodePath);
            pageDoc.Store("sectionname", t.SectionName);

            return pageDoc;
        }
        public IItem FromDocumentToItem(CoreDocument document)
        {
            // Rebuild the OneNote page item from the fields written by ToDocument.
            var page = new OneNotePage();

            page.Id              = document.GetString("id");
            page.Name            = document.GetString("name");
            page.SectionNodePath = document.GetString("sectionnodepath");
            page.SectionName     = document.GetString("sectionname");

            return page;
        }
Ejemplo n.º 12
0
        public IItem FromDocumentToItem(CoreDocument document)
        {
            var typeName = document.GetString("fullname");

            // Resolve the exported command whose CLR type matches the stored name.
            var match = Items.SingleOrDefault(item => item.GetType().FullName == typeName);
            if (match == null)
            {
                throw new InvalidOperationException(string.Format("Missing Command {0}", typeName));
            }

            return DocumentFromClass(match);
        }
Ejemplo n.º 13
0
        public CoreDocument ToDocument(IItemSource itemSource, Book t)
        {
            // Persist every field FromDocumentToItem needs, and key learning on
            // the display name.
            var bookDoc = new CoreDocument(itemSource, this, ToId(t), ToName(t), ToType(t));

            bookDoc.Store("title", t.Title);
            bookDoc.Store("authors", t.Authors);
            bookDoc.Store("id", t.Id.ToString());
            bookDoc.Store("format", t.Formats.ToArray());
            bookDoc.SetItemIdForLearning(ToName(t));

            return bookDoc;
        }
Ejemplo n.º 14
0
        /// <summary>
        /// Annotates <paramref name="source"/> and folds every parsed sentence
        /// into a single <see cref="ProcessedEntity"/>.
        /// </summary>
        public static ProcessedEntity Classify(this StanfordCoreNLP nlp, string source)
        {
            CoreDocument document = new CoreDocument(source);
            nlp.annotate(document);

            var sentences = document.sentences()
                                    .toArray()
                                    .OfType<CoreSentence>();

            return sentences
                   .Select(sentence => new ParsedSentence(sentence))
                   .Aggregate(new ProcessedEntity(),
                              (merged, parsed) => ProcessedEntity.Union(merged, parsed.ToProcessedEntity()));
        }
 /// <summary>
 /// Runs the shared CoreNLP pipeline over <paramref name="text"/>.
 /// </summary>
 /// <param name="text">Raw text to annotate.</param>
 /// <returns>The annotated document, or null for null/empty/whitespace-only input.</returns>
 public static CoreDocument GetAnnotatedDocument(string text)
 {
     // IsNullOrWhiteSpace already returns true for null and empty strings,
     // so the old extra IsNullOrEmpty check was redundant.
     if (String.IsNullOrWhiteSpace(text))
     {
         return null;
     }

     CoreDocument coredoc = new CoreDocument(text);
     pipe.annotate(coredoc);
     return coredoc;
 }
Ejemplo n.º 16
0
        /// <summary>
        /// Builds the CoreNLP pipeline (model files are resolved relative to
        /// <paramref name="jarRootPath"/>) and prepares parsers for <paramref name="text"/>.
        /// </summary>
        /// <param name="properties">Pipeline configuration.</param>
        /// <param name="jarRootPath">Directory containing the CoreNLP model files.</param>
        /// <param name="text">Text this analyzer will operate on.</param>
        public Analyzer(Properties properties, string jarRootPath, string text)
        {
            var curDir = Environment.CurrentDirectory;

            // StanfordCoreNLP loads its models from the current directory; restore
            // the original directory even if pipeline construction throws (the old
            // code leaked the directory change on failure).
            Directory.SetCurrentDirectory(jarRootPath);
            try
            {
                pipeline = new StanfordCoreNLP(properties.GetUnderlyingModel());
            }
            finally
            {
                Directory.SetCurrentDirectory(curDir);
            }

            coreDocument = new CoreDocument(text);
            this.text    = text;

            treeParser          = new TreeParser();
            semanticGraphParser = new SemanticGraphParser();
        }
        /// <summary>
        /// Returns the sentence annotations of <paramref name="coredoc"/>,
        /// or null when the document itself is null.
        /// </summary>
        public ArrayList GetSents(CoreDocument coredoc)
        {
            if (coredoc == null)
            {
                return null;
            }

            // The old version allocated an ArrayList that was always overwritten
            // before use; that dead allocation is gone.
            return (ArrayList)coredoc.annotation().get(typeof(CoreAnnotations.SentencesAnnotation));
        }
        // Parses each sentence of the document and stores the labeled constituents
        // of its parse tree in this.treeArray.
        // NOTE(review): constituencyParse and treeArray are reassigned on every loop
        // iteration, so after this method returns only the LAST sentence's
        // constituents remain — confirm that is intended for multi-sentence input.
        public void CreateParseTree(CoreDocument coredoc)
        {
            if (coredoc != null)
            {
                // Sentence-level annotations produced by the CoreNLP pipeline.
                ArrayList sents = (ArrayList)coredoc.annotation().get(typeof(CoreAnnotations.SentencesAnnotation));
                for (int i = 0; i < sents.size(); i++)
                {
                    CoreMap sentence = (CoreMap)sents.get(i);

                    this.constituencyParse = (Tree)sentence.get(typeof(TreeCoreAnnotations.TreeAnnotation));

                    // Flatten the tree into labeled constituents for later scanning
                    // (e.g. DetectQuestion looks for SQ/SBARQ labels).
                    Set treeConstituents = (Set)constituencyParse.constituents(new LabeledScoredConstituentFactory());
                    treeArray = treeConstituents.toArray();
                }
            }
        }
        /// <summary>
        /// True when the document's parse tree contains a direct (SQ) or
        /// embedded (SBARQ) question clause.
        /// </summary>
        public static bool DetectQuestion(CoreDocument coredoc)
        {
            _analyzer.CreateParseTree(coredoc);

            foreach (object node in _analyzer.treeArray)
            {
                var constituent = (Constituent)node;
                var label = constituent.label();
                if (label != null && (label.toString().Equals("SQ") || label.toString().Equals("SBARQ")))
                {
                    return true;
                }
            }
            return false;
        }
 /// <summary>
 /// Runs NER tagging, question detection and key-phrase extraction over every
 /// document in the Lucene index, filling the module-level result collections.
 /// </summary>
 public static void Extract()
 {
     if (LuceneService.DirReader != null)
     {
         // Iterate every Lucene document by internal doc id, annotating one at a time.
         for (int i = 0; i < LuceneService.DirReader.MaxDoc; i++)
         {
             Lucene.Net.Documents.Document document = LuceneService.DirReader.Document(i);
             // NOTE(review): GetAnnotatedDocument returns null for blank text;
             // ExtractNERTags guards against null but DetectQuestion passes it
             // straight into CreateParseTree — confirm treeArray is never stale here.
             CoreDocument coredoc = GetAnnotatedDocument(document.GetField(ProjectInfo.TextFieldKey).GetStringValue());
             ExtractNERTags(coredoc, document);
             if (DetectQuestion(coredoc))
             {
                 IsQuestionList.Add(document.GetField("id").GetInt32Value().Value);
             }
             ExtractKeyPhrases(coredoc, document.GetField("id").GetInt32Value().Value);
             // Progress indicator for the long-running extraction loop.
             System.Console.WriteLine(i);
         }
     }
 }
Ejemplo n.º 21
0
        /// <summary>
        /// Associates the typed <paramref name="input"/> with the completed command
        /// and writes the refreshed document back to the index.
        /// </summary>
        private void LearnCommandForInput(IndexWriter writer, IndexReader reader, DocumentId completionId, string input)
        {
            // Fickle commands carry no id and cannot be learned.
            if (completionId == null)
            {
                return;
            }

            var rawDocument = PopDocument(writer, reader, completionId.GetId());
            var document = CoreDocument.Rehydrate(rawDocument);
            if (document == null)
            {
                throw new InvalidOperationException(string.Format("Didn't find command {0}", completionId));
            }

            // Record the learning, then re-add the document (PopDocument removed it).
            var learnings = _learningRepository.LearnFor(input, completionId.GetLearningId());
            document.SetLearnings(learnings);
            writer.AddDocument(document);
        }
        // Collects every noun token of the annotated document and stores the list
        // in NounPhrases under the Lucene document's integer "id" field.
        private static void ExtractNouns(CoreDocument coredoc, Lucene.Net.Documents.Document document)
        {
            List <string> nouns = new List <string>();

            for (int i = 0; i < coredoc.sentences().size(); i++)
            {
                CoreSentence sent = (CoreSentence)coredoc.sentences().get(i);
                for (int j = 0; j < sent.tokens().size(); j++)
                {
                    // Keep tokens whose POS tag contains "NN" — note Contains also
                    // matches NNS/NNP/NNPS, i.e. all noun variants.
                    if (sent.posTags() != null && sent.posTags().get(j) != null)
                    {
                        string posTags = sent.posTags().get(j).ToString();
                        if (posTags.Contains("NN"))
                        {
                            var noun = sent.tokens().get(j).ToString();
                            // NOTE(review): assumes CoreLabel.ToString() appends a
                            // 2-character suffix (e.g. "-3" token index); a token
                            // index >= 10 would make the suffix longer and this
                            // would truncate the word — TODO confirm.
                            noun = noun.Remove(noun.Length - 2);
                            nouns.Add(noun);
                        }
                    }
                }
            }
            NounPhrases.Add(document.GetField("id").GetInt32Value().Value, nouns);
        }
        /// <summary>
        /// Searches every configured Lucene directory for completions of
        /// <paramref name="text"/> and returns the top 10 results in score order.
        /// </summary>
        /// <param name="text">User input to complete; blank input yields no result.</param>
        /// <param name="includeExplanation">When true, attaches Lucene's score explanation to each result.</param>
        /// <returns>An ordered result set, or a no-result marker on blank input or parse failure.</returns>
        public AutoCompletionResult Autocomplete(string text, bool includeExplanation = false)
        {
            if (string.IsNullOrWhiteSpace(text))
            {
                return(AutoCompletionResult.NoResult(text));
            }

            // Open a read-only searcher per directory; directories that fail to
            // open are logged and skipped rather than failing the whole search.
            var searchers = _directoryFactory.GetAllDirectories().Select(d =>
            {
                try
                {
                    return(new IndexSearcher(d, true));
                }
                catch (Exception e)
                {
                    _log.Error(e, "While searching directory {0}", d);
                    return(null);
                }
            })
                            .Where(s => s != null)
                            .ToArray();

            using (var searcher = new MultiSearcher(searchers))
            {
                try
                {
                    BooleanQuery query = GetQueryForText(text);

                    var results  = searcher.Search(query, 10);
                    // Convert each hit back into a command; a document that fails
                    // conversion is logged and dropped, not fatal.
                    var commands = results.ScoreDocs
                                   .Select(d =>
                    {
                        var document = searcher.Doc(d.Doc);
                        try
                        {
                            Explanation explanation = null;
                            if (includeExplanation)
                            {
                                explanation = searcher.Explain(query, d.Doc);
                            }
                            var coreDoc = CoreDocument.Rehydrate(document);
                            var command = _converterRepository.FromDocumentToItem(coreDoc);

                            return(new AutoCompletionResult.CommandResult(command, coreDoc.GetDocumentId(), explanation));
                        }
                        catch (Exception e)
                        {
                            _log.Error(e, "Error getting command result for document {0}:{1}",
                                       document.GetField(SpecialFields.ConverterId).StringValue,
                                       document.GetField(SpecialFields.Id).StringValue);
                            return(null);
                        }
                    })
                                   .Where(r => r != null);
                    return(AutoCompletionResult.OrderedResult(text, commands));
                }
                catch (ParseException e)
                {
                    _log.Error(e, "Error parsing '{0}'", text);
                    return(AutoCompletionResult.NoResult(text));
                }
            }
        }
Ejemplo n.º 24
0
 public IItem FromDocumentToItem(CoreDocument coreDoc)
 {
     // Delegate to the converter that originally produced this document.
     var converter = GetConverterForId(coreDoc.ConverterId);
     return converter.FromDocumentToItem(coreDoc);
 }
Ejemplo n.º 25
0
        /// <summary>
        /// Walk-through of the CoreNLP C# API: builds a full annotation pipeline,
        /// annotates a document, and prints one example of each annotation type
        /// (tokens, POS/NER tags, parses, relations, mentions, coref, quotes).
        /// Mirrors the official Stanford CoreNLP "CoreDocument" usage example.
        /// </summary>
        public static void Main(string[] args)
        {
            // set up pipeline properties
            Properties props = new Properties();

            // set the list of annotators to run
            props.SetProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse,depparse,coref,kbp,quote");
            // set a property for an annotator, in this case the coref annotator is being set to use the neural algorithm
            props.SetProperty("coref.algorithm", "neural");
            // build pipeline
            StanfordCoreNLP pipeline = new StanfordCoreNLP(props);
            // create a document object
            // NOTE(review): `text` is a field declared outside this view — confirm it is initialized before Main runs.
            CoreDocument document = new CoreDocument(text);

            // annnotate the document
            pipeline.Annotate(document);
            // examples
            // 10th token of the document
            CoreLabel token = document.Tokens()[10];

            System.Console.Out.WriteLine("Example: token");
            System.Console.Out.WriteLine(token);
            System.Console.Out.WriteLine();
            // text of the first sentence
            string sentenceText = document.Sentences()[0].Text();

            System.Console.Out.WriteLine("Example: sentence");
            System.Console.Out.WriteLine(sentenceText);
            System.Console.Out.WriteLine();
            // second sentence
            CoreSentence sentence = document.Sentences()[1];
            // list of the part-of-speech tags for the second sentence
            IList <string> posTags = sentence.PosTags();

            System.Console.Out.WriteLine("Example: pos tags");
            System.Console.Out.WriteLine(posTags);
            System.Console.Out.WriteLine();
            // list of the ner tags for the second sentence
            IList <string> nerTags = sentence.NerTags();

            System.Console.Out.WriteLine("Example: ner tags");
            System.Console.Out.WriteLine(nerTags);
            System.Console.Out.WriteLine();
            // constituency parse for the second sentence
            Tree constituencyParse = sentence.ConstituencyParse();

            System.Console.Out.WriteLine("Example: constituency parse");
            System.Console.Out.WriteLine(constituencyParse);
            System.Console.Out.WriteLine();
            // dependency parse for the second sentence
            SemanticGraph dependencyParse = sentence.DependencyParse();

            System.Console.Out.WriteLine("Example: dependency parse");
            System.Console.Out.WriteLine(dependencyParse);
            System.Console.Out.WriteLine();
            // kbp relations found in fifth sentence
            IList <RelationTriple> relations = document.Sentences()[4].Relations();

            System.Console.Out.WriteLine("Example: relation");
            System.Console.Out.WriteLine(relations[0]);
            System.Console.Out.WriteLine();
            // entity mentions in the second sentence
            IList <CoreEntityMention> entityMentions = sentence.EntityMentions();

            System.Console.Out.WriteLine("Example: entity mentions");
            System.Console.Out.WriteLine(entityMentions);
            System.Console.Out.WriteLine();
            // coreference between entity mentions
            CoreEntityMention originalEntityMention = document.Sentences()[3].EntityMentions()[1];

            System.Console.Out.WriteLine("Example: original entity mention");
            System.Console.Out.WriteLine(originalEntityMention);
            System.Console.Out.WriteLine("Example: canonical entity mention");
            System.Console.Out.WriteLine(originalEntityMention.CanonicalEntityMention().Get());
            System.Console.Out.WriteLine();
            // get document wide coref info
            IDictionary <int, CorefChain> corefChains = document.CorefChains();

            System.Console.Out.WriteLine("Example: coref chains for document");
            System.Console.Out.WriteLine(corefChains);
            System.Console.Out.WriteLine();
            // get quotes in document
            IList <CoreQuote> quotes = document.Quotes();
            CoreQuote         quote  = quotes[0];

            System.Console.Out.WriteLine("Example: quote");
            System.Console.Out.WriteLine(quote);
            System.Console.Out.WriteLine();
            // original speaker of quote
            // note that quote.speaker() returns an Optional
            System.Console.Out.WriteLine("Example: original speaker of quote");
            System.Console.Out.WriteLine(quote.Speaker().Get());
            System.Console.Out.WriteLine();
            // canonical speaker of quote
            System.Console.Out.WriteLine("Example: canonical speaker of quote");
            System.Console.Out.WriteLine(quote.CanonicalSpeaker().Get());
            System.Console.Out.WriteLine();
        }
Ejemplo n.º 26
0
 public IItem FromDocumentToItem(CoreDocument document)
 {
     // Rebuild the file item from the stored absolute path.
     var path = document.GetString("filepath");
     return new FileInfoItem(new FileInfo(path));
 }
        /// <summary>
        /// Collects DATE/TIME/LOCATION/ORGANIZATION/URL entity mentions of the
        /// annotated document into the corresponding per-document lookup tables,
        /// appending with ", " when the document already has an entry.
        /// </summary>
        /// <param name="coredoc">Annotated document; ignored when null.</param>
        /// <param name="document">Lucene document providing the integer "id" key.</param>
        private static void ExtractNERTags(CoreDocument coredoc, Lucene.Net.Documents.Document document)
        {
            if (coredoc == null)
            {
                return;
            }

            List nerList = coredoc.entityMentions();
            if (nerList.size() == 0)
            {
                return;
            }

            // One document has one id; hoisted out of the loop (the old code
            // re-read the field for every mention).
            int key = document.GetField("id").GetInt32Value().Value;

            for (int j = 0; j < nerList.size(); j++)
            {
                CoreEntityMention em = (CoreEntityMention)nerList.get(j);

                switch (em.entityType())
                {
                    case "DATE":
                        if (!DateList.ContainsKey(key))
                        {
                            DateList.Add(key, em.text());
                        }
                        else
                        {
                            DateList.TryUpdate(key, DateList[key] + ", " + em.text());
                        }
                        break;

                    case "TIME":
                        if (!TimeList.ContainsKey(key))
                        {
                            TimeList.Add(key, em.text());
                        }
                        else
                        {
                            TimeList.TryUpdate(key, TimeList[key] + ", " + em.text());
                        }
                        break;

                    case "LOCATION":
                        if (!LocList.ContainsKey(key))
                        {
                            LocList.Add(key, em.text());
                        }
                        else
                        {
                            LocList.TryUpdate(key, LocList[key] + ", " + em.text());
                        }
                        break;

                    case "ORGANIZATION":
                        if (!OrgList.ContainsKey(key))
                        {
                            OrgList.Add(key, em.text());
                        }
                        else
                        {
                            OrgList.TryUpdate(key, OrgList[key] + ", " + em.text());
                        }
                        break;

                    case "URL":
                        if (!URLList.ContainsKey(key))
                        {
                            URLList.Add(key, em.text());
                        }
                        else
                        {
                            // BUGFIX: the old code appended OrgList[key] here
                            // instead of the existing URL entry.
                            URLList.TryUpdate(key, URLList[key] + ", " + em.text());
                        }
                        break;
                }
            }
        }
        public IItem FromDocumentToItem(CoreDocument document)
        {
            // The stored "id" is the Windows service name; resolve the live service.
            var serviceName = document.GetString("id");
            var service = ServiceController.GetServices().Single(s => s.ServiceName == serviceName);

            return new ServiceTypedItem(service);
        }
Ejemplo n.º 29
0
        /// <summary>
        /// Demo: configures a Spanish CoreNLP pipeline from the bundled properties
        /// file, annotates two Portuguese text samples, and pretty-prints both
        /// annotation results. Fixes the mislabeled output for the first document
        /// (it previously printed "Stream testDoc3" for testDoc) and drops the
        /// large blocks of commented-out configuration code.
        /// </summary>
        static void Main(string[] args)
        {
            var jarRoot = @"..\..\..\es\";

            // Text for processing (kept for reference alongside the annotated samples).
            var text = "\"O pior erro que poderíamos cometer era, à boleia do crescimento económico, termos a ilusão de que os problemas estruturais da zona euro ficaram resolvidos\", defende António Costa, que deu uma entrevista ao Público que será publicada este domingo e onde o primeiro-ministro diz que a moeda única foi um bonus para a economia alemã.";


            string sb = "António Costa defende que, na criação da moeda única, houve um \"excesso de voluntarismo político\" e nem todos terão percebido que \"o euro foi o maior bónus à competitividade da economia alemã que a Europa lhe poderia ter oferecido\".";

            var propsFile = Path.Combine(jarRoot, "StanfordCoreNLP-spanish.properties");
            // Annotation pipeline configuration: start from the Spanish defaults,
            // then override a few options.
            var props = new Properties();

            props.load(new FileReader(propsFile));
            props.put("ner.useSUTime", "0");
            props.put("tokenize.verbose", "true");
            props.setProperty("annotators", "tokenize, ssplit, pos, lemma, ner, depparse, kbp, coref,entitymentions, quote");

            // We should change current directory, so StanfordCoreNLP could find all the model files automatically
            var curDir = Environment.CurrentDirectory;

            Directory.SetCurrentDirectory(jarRoot);
            var pipeline = new StanfordCoreNLP(props);


            // Annotate the short single-sentence sample.

            CoreDocument testDoc = new CoreDocument(sb);

            pipeline.annotate(testDoc);

            string sxc = "\"O pior erro que poderíamos cometer era, à boleia do crescimento económico, termos a ilusão de que os problemas estruturais da zona euro ficaram resolvidos\", defende António Costa, que deu uma entrevista ao Público que será publicada este domingo e onde o primeiro-ministro diz que a moeda única foi um \"bónus\" para a economia alemã. Numa entrevista em que, a julgar pelo excerto publicado para já, se focou essencialmente em questões europeias, e não na política interna, o primeiro-ministro mostrou-se, também, favorável à introdução de impostos europeus." +
                         "António Costa defende que, na criação da moeda única, houve um \"excesso de voluntarismo político\" e nem todos terão percebido que \"o euro foi o maior bónus à competitividade da economia alemã que a Europa lhe poderia ter oferecido\". Agora, a menos que se tomem medidas de correção das assimetrias, \"a zona euro será mais uma vez confrontada com uma crise como a que vivemos agora\"." +
                         "O objetivo de todos os líderes europeus deve ser evitar que se volte a cometer \"o erro que nos acompanhou desde 2000 até 2011\", isto é, marcar passo nas reformas. E uma das principais ferramentas de que a zona euro necessita é de um orçamento da zona euro destinado a financiar reformas para acelerar a convergência das economias." +
                         "Com a saída do Reino Unido e com a necessidade de investir na segurança, na defesa e na ciência, António Costa defende que \"ou estamos disponíveis para sacrificar a parte do Orçamento afeta às políticas de coesão e à PAC, ou temos de encontrar outras fontes de receita\". Onde? Mais contribuições dos Estados, isto é, \"mais impostos dos portugueses\", ou receitas próprias criadas pela União, nomeadamente através de impostos europeus.";

            // Annotate the longer multi-paragraph sample.
            CoreDocument testDoc3 = new CoreDocument(sxc);

            pipeline.annotate(testDoc3);


            // Result - Pretty Print
            using (var stream = new ByteArrayOutputStream())
            {
                pipeline.prettyPrint(testDoc.annotation(), new PrintWriter(stream));
                // BUGFIX: this block prints testDoc; the old label said "testDoc3".
                Console.WriteLine("Stream testDoc " + stream.toString());
                stream.close();
            }

            using (var stream = new ByteArrayOutputStream())
            {
                pipeline.prettyPrint(testDoc3.annotation(), new PrintWriter(stream));
                Console.WriteLine("Stream testDoc3 " + stream.toString());
                stream.close();
            }



            Directory.SetCurrentDirectory(curDir);
        }
Ejemplo n.º 30
0
        public IItem FromDocumentToItem(CoreDocument document)
        {
            // The stored "id" is the OS process id.
            var pid = int.Parse(document.GetString("id"));

            return new ProcessItem(Process.GetProcessById(pid));
        }