Example #1
public Document AnalyzeFile(FileStats file)
{
    Log.Logger.Debug($"Analyzing the file: {file.FilePath}");

    string[] content;
    if (IOTools.ReadAllLines(file.FilePath, out content))
    {
        // Build the composite tree: Document -> Paragraph -> Token
        TokenizerWithStats tknz = new TokenizerWithStats();
        Document root = new Document { Name = file.FileName };

        foreach (string paragraph in content)
        {
            Item para = new Paragraph();
            root.AddItem(para);
            foreach (Token token in tknz.Enumerate(paragraph))
            {
                para.AddItem(token);
            }
        }

        // 'lexer' is a class-level lexical analyzer field (not shown in this excerpt)
        return lexer.AnalyzeText(root);
    }
    else
    {
        // The file could not be read
        return null;
    }
}
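
The IOTools.ReadAllLines call above follows the try-pattern: it returns false instead of throwing when the file cannot be read, which is why the method falls back to returning null. IOTools itself is not part of this excerpt; the sketch below is one plausible implementation, assuming it simply wraps File.ReadAllLines. The class body and its error handling are illustrative assumptions, not the project's actual code.

using System;
using System.IO;

public static class IOTools
{
    // Try-style wrapper: returns false instead of throwing when the read fails.
    public static bool ReadAllLines(string path, out string[] lines)
    {
        try
        {
            lines = File.ReadAllLines(path);
            return true;
        }
        catch (Exception ex) when (ex is IOException || ex is UnauthorizedAccessException)
        {
            lines = Array.Empty<string>();
            return false;
        }
    }
}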
Example #2
public IItem Compose(string[] content)
{
    // Build composite tree
    TokenizerWithStats tknz = new TokenizerWithStats();
    Document root = new Document();

    foreach (string paragraph in content)
    {
        Item para = new Paragraph();
        root.AddItem(para);
        foreach (Token token in tknz.Enumerate(paragraph))
        {
            para.AddItem(token);
        }
    }

    return root;
}
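
Both examples build the same composite tree: a Document containing Paragraph nodes, which in turn hold Token leaves. The element types themselves are not shown in the source, so the sketch below is one plausible shape for that hierarchy, assuming a single IItem interface with an AddItem method. All names and members here are assumptions for illustration (and TokenizerWithStats is not covered), not the project's actual definitions.

using System.Collections.Generic;

// Assumed composite interface and base class (illustrative only).
public interface IItem
{
    void AddItem(IItem child);
}

public abstract class Item : IItem
{
    private readonly List<IItem> children = new List<IItem>();

    public void AddItem(IItem child)
    {
        children.Add(child);
    }

    public IReadOnlyList<IItem> Children
    {
        get { return children; }
    }
}

public class Document : Item
{
    public string Name { get; set; }
}

public class Paragraph : Item
{
}

public class Token : Item
{
    public string Text { get; set; }
}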