/// <summary>
        /// this test compares search results when using and not using compound
        ///  files.
        /// 
        ///  TODO: There is rudimentary search result validation as well, but it is
        ///        simply based on asserting the output observed in the old test case,
        ///        without really knowing if the output is correct. Someone needs to
        ///        validate this output and make any changes to the checkHits method.
        /// </summary>
        public virtual void TestRun()
        {
            StringWriter sw = new StringWriter();
              PrintWriter pw = new PrintWriter(sw, true);
              int MAX_DOCS = atLeast(225);
              // First pass: run against a multi-file (non-compound) index format.
              DoTest(random(), pw, false, MAX_DOCS);
              pw.close();
              sw.close();
              string multiFileOutput = sw.ToString();
              //System.out.println(multiFileOutput);

              // Second pass: same document count, but using compound-file segments.
              sw = new StringWriter();
              pw = new PrintWriter(sw, true);
              DoTest(random(), pw, true, MAX_DOCS);
              pw.close();
              sw.close();
              string singleFileOutput = sw.ToString();

              // Both index formats must produce identical search output.
              Assert.AreEqual(multiFileOutput, singleFileOutput);
        }
        /// <summary>
        /// Builds a human-readable report of any monitor-deadlocked threads,
        /// including their lock information.
        /// </summary>
        /// <returns>the formatted report, or null when no deadlock is detected</returns>
        internal static string BuildDeadlockInfo()
        {
            ThreadMXBean bean = ManagementFactory.GetThreadMXBean();

            long[] deadlocked = bean.FindMonitorDeadlockedThreads();
            // Guard clause: nothing to report when no threads are deadlocked.
            if (deadlocked == null || deadlocked.Length == 0)
            {
                return(null);
            }

            StringWriter buffer = new StringWriter();
            PrintWriter  writer = new PrintWriter(buffer);
            // One section per deadlocked thread: thread info, then held synchronizers.
            foreach (ThreadInfo info in bean.GetThreadInfo(deadlocked, true, true))
            {
                PrintThreadInfo(info, writer);
                PrintLockInfo(info.GetLockedSynchronizers(), writer);
                writer.WriteLine();
            }
            writer.Close();
            return(buffer.ToString());
        }
Example #3
0
        /// <summary>
        /// Computes variance diagnostics over a fixed series of batch sizes, writing
        /// the formatted results to "var.out" and echoing them to the log.
        /// NOTE(review): assumes GetVariance returns at least four values per batch
        /// size — confirm against its implementation.
        /// </summary>
        /// <param name="x">the data vector to compute batch variances over</param>
        public virtual void TestVariance(double[] x)
        {
            int[]        batchSizes = new int[] { 10, 20, 35, 50, 75, 150, 300, 500, 750, 1000, 5000, 10000 };
            double[]     varResult;
            PrintWriter  file = null;
            NumberFormat nf   = new DecimalFormat("0.000E0");

            try
            {
                file = new PrintWriter(new FileOutputStream("var.out"), true);
            }
            catch (IOException e)
            {
                // The process exits on failure, so `file` is never dereferenced while null.
                log.Info("Caught IOException outputing List to file: " + e.Message);
                System.Environment.Exit(1);
            }
            foreach (int bSize in batchSizes)
            {
                varResult = GetVariance(x, bSize);
                file.Println(bSize + "," + nf.Format(varResult[0]) + "," + nf.Format(varResult[1]) + "," + nf.Format(varResult[2]) + "," + nf.Format(varResult[3]));
                // NOTE(review): the log line prints varResult[0] unformatted while the
                // file line formats it — possibly an unintentional asymmetry.
                log.Info("Batch size of: " + bSize + "   " + varResult[0] + "," + nf.Format(varResult[1]) + "," + nf.Format(varResult[2]) + "," + nf.Format(varResult[3]));
            }
            file.Close();
        }
        /// <summary>
        /// Create the input file with all of the possible combinations of the
        /// given depth.
        /// </summary>
        /// <param name="fs">the filesystem to write into</param>
        /// <param name="dir">the directory to write the input file into</param>
        /// <param name="pent">the puzzle</param>
        /// <param name="depth">the depth to explore when generating prefixes</param>
        /// <returns>the length in bytes of the generated input file</returns>
        /// <exception cref="System.IO.IOException"/>
        private static long CreateInputDirectory(FileSystem fs, Path dir, Pentomino pent,
                                                 int depth)
        {
            fs.Mkdirs(dir);
            IList <int[]> prefixes = pent.GetSplits(depth);
            Path          partFile = new Path(dir, "part1");
            PrintWriter   writer   = new PrintWriter(new OutputStreamWriter(new BufferedOutputStream
                                                                                (fs.Create(partFile), 64 * 1024), Charsets.Utf8));

            // Emit one comma-separated prefix per line.
            foreach (int[] prefix in prefixes)
            {
                bool first = true;
                foreach (int piece in prefix)
                {
                    if (!first)
                    {
                        writer.Write(',');
                    }
                    writer.Write(piece);
                    first = false;
                }
                writer.Write('\n');
            }
            writer.Close();
            return(fs.GetFileStatus(partFile).GetLen());
        }
Example #5
0
 /// <summary>
 /// Writes the trailing transition-matrix section of an HTK HMM definition
 /// (the &lt;TRANSP&gt; block followed by &lt;ENDHMM&gt;) to the given writer.
 /// </summary>
 /// <param name="nstates">total number of HMM states, including entry/exit states</param>
 /// <param name="fout">destination writer (not closed by this method)</param>
 public virtual void saveHTKtailer(int nstates, PrintWriter fout)
 {
     fout.println(new StringBuilder().append("<TRANSP> ").append(nstates).toString());
     // Row for the entry state: all-zero transitions.
     for (int i = 0; i < nstates; i++)
     {
         fout.print("0 ");
     }
     fout.println();
     // One row per emitting state: zeros up to the diagonal, then "0.5 0.5"
     // (self-loop / forward), then zeros to the end of the row.
     // NOTE(review): all these rows are emitted on a single line (no println in
     // the outer loop), "0.5 0.5" lacks a trailing space, and the trailing
     // zero-fill starts at j = i + 3 — verify this matches the HTK TRANSP layout.
     for (int i = 1; i < nstates - 1; i++)
     {
         for (int j = 0; j < i; j++)
         {
             fout.print("0 ");
         }
         fout.print("0.5 0.5");
         for (int j = i + 3; j < nstates; j++)
         {
             fout.print("0 ");
         }
     }
     fout.println();
     // Row for the exit state, then the HMM terminator.
     fout.println("0 0 0");
     fout.println("<ENDHMM>");
 }
Example #6
0
        /*
         * public double getNormVariance(List<double[]> thisList){
         * double[] ratio = new double[thisList.size()];
         * double[] mean = new double[thisList.get(0).length];
         * double sizeInv = 1/( (double) thisList.size() );
         *
         * for(double[] arr:thisList){
         * for(int i=0;i<arr.length;i++){
         * mean[i] += arr[i]*sizeInv;
         * }
         * }
         *
         * double meanNorm = ArrayMath.norm(mean);
         *
         * for(int i=0;i<thisList.size();i++){
         * ratio[i] = (ArrayMath.norm(thisList.get(i))/ meanNorm);
         * }
         *
         * arrayToFile(ratio,"ratio.out");
         *
         * return ArrayMath.variance(ratio);
         *
         * }
         *
         * public double getSimVariance(List<double[]> thisList){
         *
         * double[] ang = new double[thisList.size()];
         * double[] mean = new double[thisList.get(0).length];
         * double sizeInv = 1/( (double) thisList.size() );
         *
         * for(double[] arr:thisList){
         * for(int i=0;i<arr.length;i++){
         * mean[i] += arr[i]*sizeInv;
         * }
         * }
         *
         * double meanNorm = ArrayMath.norm(mean);
         *
         * for(int i=0;i<thisList.size();i++){
         * ang[i] = ArrayMath.innerProduct(thisList.get(i),mean);
         * ang[i] = ang[i]/ ( meanNorm * ArrayMath.norm(thisList.get(i)));
         * }
         *
         * arrayToFile(ang,"angle.out");
         *
         * return ArrayMath.variance(ang);
         * }
         */
        /// <summary>
        /// Writes each array in thisList to the named file, one array per line,
        /// with values in scientific notation separated by two spaces.
        /// </summary>
        /// <param name="thisList">the arrays to write</param>
        /// <param name="fileName">destination file path</param>
        public virtual void ListToFile(IList <double[]> thisList, string fileName)
        {
            PrintWriter  writer    = null;
            NumberFormat formatter = new DecimalFormat("0.000E0");

            try
            {
                writer = new PrintWriter(new FileOutputStream(fileName), true);
            }
            catch (IOException e)
            {
                // Exit on failure, so `writer` is never used while null.
                log.Info("Caught IOException outputing List to file: " + e.Message);
                System.Environment.Exit(1);
            }
            foreach (double[] row in thisList)
            {
                foreach (double entry in row)
                {
                    writer.Print(formatter.Format(entry) + "  ");
                }
                writer.Println(string.Empty);
            }
            writer.Close();
        }
        //
        // Java generator code
        //

        /**
         * Generates Java source code that recreates this constant expression,
         * reusing the LongValue cache for small values.
         *
         * @param @out the writer to the Java source code.
         */
        public override void generate(PrintWriter @out)
        {
            // FIX: the parameter was declared as `out`, which is a reserved C#
            // keyword and must be escaped as `@out` (the body already used `@out`).
            if (_value == 0)
            {
                @out.print("LongValue.ZERO");
            }
            else if (_value == 1)
            {
                @out.print("LongValue.ONE");
            }
            else if (_value == -1)
            {
                @out.print("LongValue.MINUS_ONE");
            }
            else if (STATIC_MIN <= _value && _value <= STATIC_MAX)
            {
                // Values in the preallocated range come from the STATIC_VALUES cache.
                @out.print("LongValue.STATIC_VALUES[" + (_value - STATIC_MIN) + "]");
            }
            else
            {
                @out.print("new LongValue(" + _value + "L)");
            }
        }
Example #8
0
        /// <summary>
        /// <inheritDoc/>
        ///
        /// Serializes the annotated document as JSON to the given output stream,
        /// using the encoding configured in <c>options</c>.
        /// </summary>
        /// <exception cref="System.IO.IOException"/>
        public override void Print(Annotation doc, OutputStream target, AnnotationOutputter.Options options)
        {
            // It's lying; we need the "redundant" casts (as of 2014-09-08)
            PrintWriter writer = new PrintWriter(IOUtils.EncodedOutputStreamWriter(target, options.encoding));

            // The JSONWriter drives the actual serialization of the document.
            JSONOutputter.JSONWriter l0 = new JSONOutputter.JSONWriter(writer, options);
            l0.Object(null);
            // NOTE(review): the comment run below appears to be orphaned remnants of
            // serialization code from the original source; the visible body only
            // opens the top-level JSON object and flushes. Confirm against upstream.
            // Add annotations attached to a Document
            // Add sentences
            // Add a single sentence
            // (metadata)
            // (constituency tree)
            // note the '==' -- we're overwriting the default, but only if it was not explicitly set otherwise
            // strip the trailing newline
            // (dependency trees)
            // (sentiment)
            // (openie)
            // (kbp)
            // (entity mentions)
            //l3.set("originalText", m.get(CoreAnnotations.OriginalTextAnnotation.class));
            //l3.set("lemma", m.get(CoreAnnotations.LemmaAnnotation.class));
            //l3.set("pos", m.get(CoreAnnotations.PartOfSpeechAnnotation.class));
            // Timex
            // (add tokens)
            // Add a single token
            // Timex
            // Add coref values
            // quotes
            // sections
            // Set char start
            // Set char end
            // Set author
            // Set date time
            // add the sentence indexes for the sentences in this section
            l0.Flush();
        }
        /// <summary>
        /// Records the parse produced by a ShiftReduceParserQuery to the recorder's
        /// output, in either binarized or debinarized form depending on `mode`.
        /// </summary>
        /// <param name="query">must be a ShiftReduceParserQuery</param>
        /// <param name="gold">the gold tree (unused here)</param>
        /// <param name="pw">writer for evaluation output (unused here)</param>
        public virtual void Evaluate(IParserQuery query, Tree gold, PrintWriter pw)
        {
            if (!(query is ShiftReduceParserQuery))
            {
                throw new ArgumentException("This evaluator only works for the ShiftReduceParser");
            }
            ShiftReduceParserQuery parserQuery = (ShiftReduceParserQuery)query;

            try
            {
                // Write the tree in the form selected by the recorder's mode.
                if (mode == TreeRecorder.Mode.Binarized)
                {
                    @out.Write(parserQuery.GetBestBinarizedParse().ToString());
                }
                else if (mode == TreeRecorder.Mode.Debinarized)
                {
                    @out.Write(parserQuery.debinarized.ToString());
                }
                else
                {
                    throw new ArgumentException("Unknown mode " + mode);
                }
                @out.NewLine();
            }
            catch (IOException e)
            {
                // Surface I/O failures as unchecked exceptions.
                throw new RuntimeIOException(e);
            }
        }
Example #10
0
        /// <summary>Parse input and re-write results.</summary>
        /// <remarks>
        /// NOTE(review): the "[email protected]" tokens below are almost certainly
        /// scraping/email-protection corruption of a qualified writer call (e.g.
        /// "Util.@out.Print(...)"); restore the original identifiers before compiling.
        /// </remarks>
        /// <param name="inputpath">file to parse</param>
        /// <param name="outputdir">directory for rewritten results, or null to skip rewriting</param>
        /// <returns>the parsed task results, grouped by parameter</returns>
        /// <exception cref="System.IO.IOException"/>
        internal IDictionary <Bellard.Parameter, IList <TaskResult> > Parse(string inputpath
                                                                            , string outputdir)
        {
            //parse input
            [email protected]("\nParsing " + inputpath + " ... ");
            [email protected]();
            IDictionary <Bellard.Parameter, IList <TaskResult> > parsed = Parse(inputpath);

            [email protected]("DONE");
            //re-write the results
            if (outputdir != null)
            {
                [email protected]("\nWriting to " + outputdir + " ...");
                [email protected]();
                // One sorted output file per parameter, named "<parameter>.txt".
                foreach (Bellard.Parameter p in Bellard.Parameter.Values())
                {
                    IList <TaskResult> results = parsed[p];
                    results.Sort();
                    PrintWriter @out = new PrintWriter(new OutputStreamWriter(new FileOutputStream(new
                                                                                                   FilePath(outputdir, p + ".txt")), Charsets.Utf8), true);
                    try
                    {
                        for (int i = 0; i < results.Count; i++)
                        {
                            @out.WriteLine(DistSum.TaskResult2string(p + "." + i, results[i]));
                        }
                    }
                    finally
                    {
                        // Always close the per-parameter writer, even on failure.
                        @out.Close();
                    }
                }
                [email protected]("DONE");
            }
            return(parsed);
        }
Example #11
0
 /// <summary>
 /// Writes this graph to the given path in GDL (VCG graph description) format.
 /// Failures are reported rather than thrown.
 /// NOTE(review): the "[email protected]" tokens in the catch blocks are scraping
 /// corruption of a qualified writer/logger call (e.g. "Trace.@out.Println");
 /// restore the original identifiers before compiling.
 /// </summary>
 /// <param name="path">destination file path for the GDL output</param>
 public virtual void dumpGDL(string path)
 {
     try
     {
         try
         {
             PrintWriter printWriter = new PrintWriter(new FileOutputStream(path));
             printWriter.println("graph: {");
             printWriter.println("    orientation: left_to_right");
             printWriter.println("    layout_algorithm: dfs");
             // Recursively emit the graph body, tracking visited nodes to avoid cycles.
             this.traverseGDL(printWriter, new HashSet());
             printWriter.println("}");
             printWriter.close();
         }
         catch (FileNotFoundException ex)
         {
             [email protected](new StringBuilder().append("Can't write to ").append(path).append(' ').append(ex).toString());
         }
     }
     catch (IOException ex3)
     {
         [email protected](new StringBuilder().append("Trouble writing to ").append(path).append(' ').append(ex3).toString());
     }
 }
Example #12
0
        /// <summary>
        /// Pretty-prints a training tree in Penn Treebank format, optionally preceded
        /// by a message. Falls back to standard output when no writer is supplied.
        /// </summary>
        /// <param name="pw">destination writer, or null to use standard output</param>
        /// <param name="message">optional header line (only printed for console output)</param>
        /// <param name="t">the tree to print</param>
        public static void PrintTrainTree(PrintWriter pw, string message, Tree t)
        {
            PrintWriter target = (pw == null) ? new PrintWriter(System.Console.Out, true) : pw;

            // The message is deliberately suppressed when a caller-supplied writer is
            // used (historically: no message in file output).
            if (message != null && pw == null)
            {
                target.Println(message);
            }
            // Temporarily disable global word/tag printing, restoring the previous
            // state afterwards so other callers are unaffected.
            bool previousState = CategoryWordTag.printWordTag;

            CategoryWordTag.printWordTag = false;
            t.PennPrint(target);
            CategoryWordTag.printWordTag = previousState;
        }
        /// <summary>
        /// Verifies that adding an existing binary file (content contains a NUL byte)
        /// is never subject to CRLF conversion, regardless of the core.autocrlf setting.
        /// </summary>
        public virtual void TestAddExistingSingleBinaryFile()
        {
            FilePath file = new FilePath(db.WorkTree, "a.txt");

            FileUtils.CreateNewFile(file);
            PrintWriter writer = new PrintWriter(file);

            // The NUL byte marks the content as binary for autocrlf detection.
            writer.Write("row1\r\nrow2\u0000");
            writer.Close();
            Git git = new Git(db);

            // The index entry must stay byte-identical for every autocrlf mode.
            // (Was three copy-pasted set/add/assert sequences.)
            foreach (string autocrlf in new string[] { "false", "true", "input" })
            {
                ((FileBasedConfig)db.GetConfig()).SetString("core", null, "autocrlf", autocrlf);
                git.Add().AddFilepattern("a.txt").Call();
                NUnit.Framework.Assert.AreEqual("[a.txt, mode:100644, content:row1\r\nrow2\u0000]"
                                                , IndexState(CONTENT));
            }
        }
Example #14
0
 /// <summary>
 /// Writes predicted dependency trees for the given sentences to
 /// <paramref name="outFile"/> in CoNLL format: one token per line with 1-based
 /// ids, sentences separated by a blank line.
 /// </summary>
 /// <param name="outFile">path of the CoNLL file to create</param>
 /// <param name="sentences">annotated sentences, parallel to <paramref name="trees"/></param>
 /// <param name="trees">predicted dependency tree for each sentence</param>
 public static void WriteConllFile(string outFile, IList <ICoreMap> sentences, IList <DependencyTree> trees)
 {
     try
     {
         PrintWriter output = IOUtils.GetPrintWriter(outFile);
         for (int i = 0; i < sentences.Count; i++)
         {
             ICoreMap          sentence = sentences[i];
             DependencyTree    tree     = trees[i];
             IList <CoreLabel> tokens   = sentence.Get(typeof(CoreAnnotations.TokensAnnotation));
             // FIX: `size` was an undefined identifier (the loop's initializer was
             // lost in conversion); CoNLL token ids run 1..tokens.Count.
             for (int j = 1; j <= tokens.Count; ++j)
             {
                 CoreLabel token = tokens[j - 1];
                 output.Printf("%d\t%s\t_\t%s\t%s\t_\t%d\t%s\t_\t_%n", j, token.Word(), token.Tag(), token.Tag(), tree.GetHead(j), tree.GetLabel(j));
             }
             output.Println();
         }
         output.Close();
     }
     catch (Exception e)
     {
         throw new RuntimeIOException(e);
     }
 }
        /// <summary>
        /// Prints corpus statistics over the given trees: singleton character and word
        /// counts, POS distribution of singleton words, radical distribution of
        /// singleton characters, and the word-length distribution.
        /// </summary>
        /// <param name="trees">the treebank to analyze</param>
        /// <param name="pw">writer receiving the report</param>
        public static void PrintStats(ICollection <Tree> trees, PrintWriter pw)
        {
            ClassicCounter <int>        wordLengthCounter = new ClassicCounter <int>();
            ClassicCounter <TaggedWord> wordCounter       = new ClassicCounter <TaggedWord>();
            ClassicCounter <ChineseCharacterBasedLexicon.Symbol> charCounter = new ClassicCounter <ChineseCharacterBasedLexicon.Symbol>();
            int counter = 0;

            foreach (Tree tree in trees)
            {
                counter++;
                IList <TaggedWord> taggedWords = tree.TaggedYield();
                foreach (TaggedWord taggedWord in taggedWords)
                {
                    string word = taggedWord.Word();
                    // Skip the artificial boundary token.
                    if (word.Equals(LexiconConstants.Boundary))
                    {
                        continue;
                    }
                    wordCounter.IncrementCount(taggedWord);
                    // FIX: was int.Parse(word.Length) — int.Parse takes a string, and
                    // word.Length is already the int key we want to count.
                    wordLengthCounter.IncrementCount(word.Length);
                    // FIX: `length` was an undefined identifier (loop initializer lost
                    // in conversion); iterate over the characters of the word.
                    for (int j = 0; j < word.Length; j++)
                    {
                        ChineseCharacterBasedLexicon.Symbol sym = ChineseCharacterBasedLexicon.Symbol.CannonicalSymbol(word[j]);
                        charCounter.IncrementCount(sym);
                    }
                    charCounter.IncrementCount(ChineseCharacterBasedLexicon.Symbol.EndWord);
                }
            }
            // Items with count < 1.5 (i.e. seen exactly once) are singletons.
            ICollection <ChineseCharacterBasedLexicon.Symbol> singletonChars = Counters.KeysBelow(charCounter, 1.5);
            ICollection <TaggedWord> singletonWords     = Counters.KeysBelow(wordCounter, 1.5);
            ClassicCounter <string>  singletonWordPOSes = new ClassicCounter <string>();

            foreach (TaggedWord taggedWord_1 in singletonWords)
            {
                singletonWordPOSes.IncrementCount(taggedWord_1.Tag());
            }
            Distribution <string> singletonWordPOSDist = Distribution.GetDistribution(singletonWordPOSes);
            ClassicCounter <char> singletonCharRads    = new ClassicCounter <char>();

            foreach (ChineseCharacterBasedLexicon.Symbol s in singletonChars)
            {
                // FIX: char.ValueOf does not exist in C# (Java Character.valueOf
                // artifact); the radical char is already the counter key.
                singletonCharRads.IncrementCount(RadicalMap.GetRadical(s.GetCh()));
            }
            Distribution <char> singletonCharRadDist = Distribution.GetDistribution(singletonCharRads);
            Distribution <int>  wordLengthDist       = Distribution.GetDistribution(wordLengthCounter);
            NumberFormat        percent = new DecimalFormat("##.##%");

            pw.Println("There are " + singletonChars.Count + " singleton chars out of " + (int)charCounter.TotalCount() + " tokens and " + charCounter.Size() + " types found in " + counter + " trees.");
            pw.Println("Thus singletonChars comprise " + percent.Format(singletonChars.Count / charCounter.TotalCount()) + " of tokens and " + percent.Format((double)singletonChars.Count / charCounter.Size()) + " of types.");
            pw.Println();
            pw.Println("There are " + singletonWords.Count + " singleton words out of " + (int)wordCounter.TotalCount() + " tokens and " + wordCounter.Size() + " types.");
            pw.Println("Thus singletonWords comprise " + percent.Format(singletonWords.Count / wordCounter.TotalCount()) + " of tokens and " + percent.Format((double)singletonWords.Count / wordCounter.Size()) + " of types.");
            pw.Println();
            pw.Println("Distribution over singleton word POS:");
            pw.Println(singletonWordPOSDist.ToString());
            pw.Println();
            pw.Println("Distribution over singleton char radicals:");
            pw.Println(singletonCharRadDist.ToString());
            pw.Println();
            pw.Println("Distribution over word length:");
            pw.Println(wordLengthDist);
        }
Example #16
0
 /// <summary>Forwards the stack-trace dump to the wrapped Java throwable.</summary>
 /// <param name="s">writer receiving the stack trace</param>
 public void printStackTrace(PrintWriter s)
 {
     java.Call("printStackTrace", s.java);
 }
Example #17
0
        /// <summary>Segment input and write to output stream.</summary>
        /// <param name="segmenter"/>
        /// <param name="br"/>
        /// <param name="pwOut"/>
        /// <param name="nThreads"/>
        /// <returns>input characters processed per second</returns>
        private static double Decode(Edu.Stanford.Nlp.International.Arabic.Process.ArabicSegmenter segmenter, BufferedReader br, PrintWriter pwOut, int nThreads)
        {
            System.Diagnostics.Debug.Assert(nThreads > 0);
            long nChars    = 0;
            long startTime = Runtime.NanoTime();

            if (nThreads > 1)
            {
                // Multithreaded path: feed lines to the wrapper and drain completed
                // results as they become available (Peek/Poll preserves input order).
                MulticoreWrapper <string, string> wrapper = new MulticoreWrapper <string, string>(nThreads, segmenter);
                try
                {
                    for (string line; (line = br.ReadLine()) != null;)
                    {
                        nChars += line.Length;
                        wrapper.Put(line);
                        while (wrapper.Peek())
                        {
                            pwOut.Println(wrapper.Poll());
                        }
                    }
                    // Wait for all workers, then drain any remaining results.
                    wrapper.Join();
                    while (wrapper.Peek())
                    {
                        pwOut.Println(wrapper.Poll());
                    }
                }
                catch (IOException e)
                {
                    // Best-effort: log the failure and return the throughput so far.
                    log.Warn(e);
                }
            }
            else
            {
                // Single-threaded path: the segmenter processes the stream directly.
                nChars = segmenter.Segment(br, pwOut);
            }
            long   duration    = Runtime.NanoTime() - startTime;
            double charsPerSec = (double)nChars / (duration / 1000000000.0);

            return(charsPerSec);
        }
Example #18
0
        /// <summary>
        /// Command-line entry point: builds a segmenter from the given options and
        /// either evaluates a test file or segments raw text to standard output.
        /// </summary>
        /// <param name="args"/>
        public static void Main(string[] args)
        {
            // ArgsToProperties strips the leading hyphens from option names.
            Properties options = StringUtils.ArgsToProperties(args, OptionArgDefs());

            if (options.Contains("help") || args.Length == 0)
            {
                log.Info(Usage());
                System.Environment.Exit(-1);
            }
            int nThreads = PropertiesUtils.GetInt(options, "nthreads", 1);

            Edu.Stanford.Nlp.International.Arabic.Process.ArabicSegmenter segmenter = GetSegmenter(options);
            // Decode either an evaluation file or raw text
            try
            {
                // Console encoding preference: explicit output encoding, then the
                // input encoding, then the platform default.
                string consoleEncoding = segmenter.flags.outputEncoding;
                if (consoleEncoding == null)
                {
                    consoleEncoding = segmenter.flags.inputEncoding;
                }
                PrintWriter pwOut = (consoleEncoding == null)
                    ? new PrintWriter(System.Console.Out, true)
                    : new PrintWriter(new OutputStreamWriter(System.Console.Out, consoleEncoding), true);
                if (segmenter.flags.testFile != null)
                {
                    // Evaluation mode, with or without a separate gold answer file.
                    if (segmenter.flags.answerFile == null)
                    {
                        segmenter.Evaluate(pwOut);
                    }
                    else
                    {
                        Edu.Stanford.Nlp.International.Arabic.Process.ArabicSegmenter.EvaluateRawText(pwOut);
                    }
                }
                else
                {
                    // Raw-text mode: segment stdin or the configured text file.
                    BufferedReader br          = (segmenter.flags.textFile == null) ? IOUtils.ReaderFromStdin() : IOUtils.ReaderFromString(segmenter.flags.textFile, segmenter.flags.inputEncoding);
                    double         charsPerSec = Decode(segmenter, br, pwOut, nThreads);
                    IOUtils.CloseIgnoringExceptions(br);
                    System.Console.Error.Printf("Done! Processed input text at %.2f input characters/second%n", charsPerSec);
                }
            }
            catch (UnsupportedEncodingException e)
            {
                Sharpen.Runtime.PrintStackTrace(e);
            }
            catch (IOException)
            {
                System.Console.Error.Printf("%s: Could not open %s%n", typeof(Edu.Stanford.Nlp.International.Arabic.Process.ArabicSegmenter).FullName, segmenter.flags.textFile);
            }
        }
Example #19
0
        /// <summary>
        /// Evaluate accuracy when the input is gold segmented text *with* segmentation
        /// markers and morphological analyses.
        /// </summary>
        /// <remarks>
        /// Evaluate accuracy when the input is gold segmented text *with* segmentation
        /// markers and morphological analyses. In other words, the evaluation file has the
        /// same format as the training data.
        /// </remarks>
        /// <param name="pwOut">writer receiving the evaluation report</param>
        private void Evaluate(PrintWriter pwOut)
        {
            log.Info("Starting evaluation...");
            bool hasSegmentationMarkers = true;
            bool hasTags = true;
            IDocumentReaderAndWriter <CoreLabel> docReader = new ArabicDocumentReaderAndWriter(hasSegmentationMarkers, hasTags, hasDomainLabels, domain, tf);
            ObjectBank <IList <CoreLabel> >      lines     = classifier.MakeObjectBankFromFile(flags.testFile, docReader);
            // Optional TEDEval output files (gold/parse trees and segmentations),
            // only created when a tedEvalPrefix is configured.
            PrintWriter tedEvalGoldTree  = null;
            PrintWriter tedEvalParseTree = null;
            PrintWriter tedEvalGoldSeg   = null;
            PrintWriter tedEvalParseSeg  = null;

            if (tedEvalPrefix != null)
            {
                try
                {
                    tedEvalGoldTree  = new PrintWriter(tedEvalPrefix + "_gold.ftree");
                    tedEvalGoldSeg   = new PrintWriter(tedEvalPrefix + "_gold.segmentation");
                    tedEvalParseTree = new PrintWriter(tedEvalPrefix + "_parse.ftree");
                    tedEvalParseSeg  = new PrintWriter(tedEvalPrefix + "_parse.segmentation");
                }
                catch (FileNotFoundException e)
                {
                    // Best-effort: evaluation continues without TEDEval output.
                    System.Console.Error.Printf("%s: %s%n", typeof(Edu.Stanford.Nlp.International.Arabic.Process.ArabicSegmenter).FullName, e.Message);
                }
            }
            ICounter <string> labelTotal   = new ClassicCounter <string>();
            ICounter <string> labelCorrect = new ClassicCounter <string>();
            int total   = 0;
            int correct = 0;

            foreach (IList <CoreLabel> line in lines)
            {
                // Token views of the line: raw input, gold segmentation, and (after
                // classification below) the predicted segmentation.
                string[] inputTokens = TedEvalSanitize(IOBUtils.IOBToString(line).ReplaceAll(":", "#pm#")).Split(" ");
                string[] goldTokens  = TedEvalSanitize(IOBUtils.IOBToString(line, ":")).Split(" ");
                line = classifier.Classify(line);
                string[] parseTokens = TedEvalSanitize(IOBUtils.IOBToString(line, ":")).Split(" ");
                foreach (CoreLabel label in line)
                {
                    // Do not evaluate labeling of whitespace
                    string observation = label.Get(typeof(CoreAnnotations.CharAnnotation));
                    if (!observation.Equals(IOBUtils.GetBoundaryCharacter()))
                    {
                        total++;
                        string hypothesis = label.Get(typeof(CoreAnnotations.AnswerAnnotation));
                        string reference  = label.Get(typeof(CoreAnnotations.GoldAnswerAnnotation));
                        labelTotal.IncrementCount(reference);
                        if (hypothesis.Equals(reference))
                        {
                            correct++;
                            labelCorrect.IncrementCount(reference);
                        }
                    }
                }
                if (tedEvalParseSeg != null)
                {
                    tedEvalGoldTree.Printf("(root");
                    tedEvalParseTree.Printf("(root");
                    // Guard against token-count mismatches between the views; extras
                    // beyond the shortest view are ignored.
                    int safeLength = inputTokens.Length;
                    if (inputTokens.Length != goldTokens.Length)
                    {
                        log.Info("In generating TEDEval files: Input and gold do not have the same number of tokens");
                        log.Info("    (ignoring any extras)");
                        log.Info("  input: " + Arrays.ToString(inputTokens));
                        log.Info("  gold: " + Arrays.ToString(goldTokens));
                        safeLength = Math.Min(inputTokens.Length, goldTokens.Length);
                    }
                    if (inputTokens.Length != parseTokens.Length)
                    {
                        log.Info("In generating TEDEval files: Input and parse do not have the same number of tokens");
                        log.Info("    (ignoring any extras)");
                        log.Info("  input: " + Arrays.ToString(inputTokens));
                        log.Info("  parse: " + Arrays.ToString(parseTokens));
                        safeLength = Math.Min(inputTokens.Length, parseTokens.Length);
                    }
                    for (int i = 0; i < safeLength; i++)
                    {
                        // Segments within a token are ':'-delimited.
                        foreach (string segment in goldTokens[i].Split(":"))
                        {
                            tedEvalGoldTree.Printf(" (seg %s)", segment);
                        }
                        tedEvalGoldSeg.Printf("%s\t%s%n", inputTokens[i], goldTokens[i]);
                        foreach (string segment_1 in parseTokens[i].Split(":"))
                        {
                            tedEvalParseTree.Printf(" (seg %s)", segment_1);
                        }
                        tedEvalParseSeg.Printf("%s\t%s%n", inputTokens[i], parseTokens[i]);
                    }
                    tedEvalGoldTree.Printf(")%n");
                    tedEvalGoldSeg.Println();
                    tedEvalParseTree.Printf(")%n");
                    tedEvalParseSeg.Println();
                }
            }
            double accuracy = ((double)correct) / ((double)total);

            accuracy *= 100.0;
            pwOut.Println("EVALUATION RESULTS");
            pwOut.Printf("#datums:\t%d%n", total);
            pwOut.Printf("#correct:\t%d%n", correct);
            pwOut.Printf("accuracy:\t%.2f%n", accuracy);
            pwOut.Println("==================");
            // Output the per label accuracies
            pwOut.Println("PER LABEL ACCURACIES");
            foreach (string refLabel in labelTotal.KeySet())
            {
                double nTotal   = labelTotal.GetCount(refLabel);
                double nCorrect = labelCorrect.GetCount(refLabel);
                double acc      = (nCorrect / nTotal) * 100.0;
                pwOut.Printf(" %s\t%.2f%n", refLabel, acc);
            }
            if (tedEvalParseSeg != null)
            {
                tedEvalGoldTree.Close();
                tedEvalGoldSeg.Close();
                tedEvalParseTree.Close();
                tedEvalParseSeg.Close();
            }
        }
Example #20
0
        /// <summary>
        /// Runs the same set of searches twice — once against a multi-file segment
        /// index and once against a single-file (compound) index — and asserts that
        /// both produce byte-identical output.
        /// 
        ///  TODO: someone should check that the results of the searches are
        ///        still correct by adding assert statements. Right now, the test
        ///        passes if the results are the same between multi-file and
        ///        single-file formats, even if the results are wrong.
        /// </summary>
        public virtual void TestSearch()
        {
            // First pass: multi-file index segments.
            StringWriter multiWriter = new StringWriter();
            PrintWriter multiPrinter = new PrintWriter(multiWriter, true);
            DoTestSearch(random(), multiPrinter, false);
            multiPrinter.close();
            multiWriter.close();
            string multiFileOutput = multiWriter.ToString();

            // Second pass: identical searches against single-file segments.
            StringWriter singleWriter = new StringWriter();
            PrintWriter singlePrinter = new PrintWriter(singleWriter, true);
            DoTestSearch(random(), singlePrinter, true);
            singlePrinter.close();
            singleWriter.close();
            string singleFileOutput = singleWriter.ToString();

            // Both index formats must yield exactly the same search output.
            Assert.AreEqual(multiFileOutput, singleFileOutput);
        }
        /// <summary>
        /// Converts the given tag-level lattice into a word-level answer lattice and
        /// prints it to <paramref name="out"/> in ATT FSM format.
        /// </summary>
        /// <param name="tagLattice">tag-level lattice produced by decoding</param>
        /// <param name="doc">tokens of the sentence the lattice was built over</param>
        /// <param name="out">destination writer for the ATT FSM representation</param>
        public virtual void PrintLattice(DFSA <string, int> tagLattice, IList <CoreLabel> doc, PrintWriter @out)
        {
            CoreLabel[] docArray = Sharpen.Collections.ToArray(doc, new CoreLabel[doc.Count]);
            // Create answer lattice:
            MutableInteger          nodeId        = new MutableInteger(0);
            DFSA <string, int>      answerLattice = new DFSA <string, int>(null);
            DFSAState <string, int> aInitState    = new DFSAState <string, int>(nodeId, answerLattice);

            answerLattice.SetInitialState(aInitState);
            IDictionary <DFSAState <string, int>, DFSAState <string, int> > stateLinks = Generics.NewHashMap();

            // Convert binary lattice into word lattice:
            TagLatticeToAnswerLattice(tagLattice.InitialState(), aInitState, new StringBuilder(string.Empty), nodeId, 0, 0.0, stateLinks, answerLattice, docArray);
            try
            {
                answerLattice.PrintAttFsmFormat(@out);
            }
            catch (IOException e)
            {
                // FIX: System.Exception has no (Exception) constructor; wrap with a
                // message and preserve the original as the inner exception.
                throw new Exception("Error printing lattice in ATT FSM format", e);
            }
        }
Example #22
0
        /// <summary>
        /// Command-line entry point: loads one or more treebanks, builds a lexicon,
        /// and after seeing half the treebank counts the unknown-word signatures of
        /// rare words in the remainder, printing them most frequent first.
        /// </summary>
        /// <param name="args">[-l language] [-e encoding] treebank-path...</param>
        public static void Main(string[] args)
        {
            if (args.Length < minArgs)
            {
                System.Console.Out.WriteLine(usage.ToString());
                System.Environment.Exit(-1);
            }
            ITreebankLangParserParams tlpp = new EnglishTreebankParserParams();
            DiskTreebank tb       = null;
            string       encoding = "UTF-8";
            Language     lang     = Language.English;

            for (int i = 0; i < args.Length; i++)
            {
                if (args[i].StartsWith("-"))
                {
                    switch (args[i])
                    {
                    case "-l":
                    {
                        lang = Language.ValueOf(args[++i].Trim());
                        tlpp = lang.@params;
                        break;
                    }

                    case "-e":
                    {
                        encoding = args[++i];
                        break;
                    }

                    default:
                    {
                        System.Console.Out.WriteLine(usage.ToString());
                        System.Environment.Exit(-1);
                        break;
                    }
                    }
                }
                else
                {
                    // First non-flag argument lazily initializes the treebank.
                    if (tb == null)
                    {
                        if (tlpp == null)
                        {
                            System.Console.Out.WriteLine(usage.ToString());
                            System.Environment.Exit(-1);
                        }
                        else
                        {
                            tlpp.SetInputEncoding(encoding);
                            tlpp.SetOutputEncoding(encoding);
                            tb = tlpp.DiskTreebank();
                        }
                    }
                    tb.LoadPath(args[i]);
                }
            }
            // FIX: if no treebank path was supplied, tb is still null and the code
            // below would throw a NullReferenceException at tb.Count; bail out cleanly.
            if (tb == null)
            {
                System.Console.Out.WriteLine(usage.ToString());
                System.Environment.Exit(-1);
            }
            PrintWriter pw = tlpp.Pw();
            Options     op = new Options();

            Options.LexOptions lexOptions = op.lexOptions;
            // Language-specific unknown-word model settings.
            if (lang == Language.French)
            {
                lexOptions.useUnknownWordSignatures = 1;
                lexOptions.smartMutation            = false;
                lexOptions.unknownSuffixSize        = 2;
                lexOptions.unknownPrefixSize        = 1;
            }
            else
            {
                if (lang == Language.Arabic)
                {
                    lexOptions.smartMutation            = false;
                    lexOptions.useUnknownWordSignatures = 9;
                    lexOptions.unknownPrefixSize        = 1;
                    lexOptions.unknownSuffixSize        = 1;
                }
            }
            IIndex <string>   wordIndex    = new HashIndex <string>();
            IIndex <string>   tagIndex     = new HashIndex <string>();
            ILexicon          lex          = tlpp.Lex(op, wordIndex, tagIndex);
            // Only start counting unknown-word signatures after half the trees.
            int               computeAfter = (int)(0.50 * tb.Count);
            ICounter <string> vocab        = new ClassicCounter <string>();
            ICounter <string> unkCounter   = new ClassicCounter <string>();
            int               treeId       = 0;

            foreach (Tree t in tb)
            {
                IList <ILabel> yield = t.Yield();
                int            posId = 0;
                foreach (ILabel word in yield)
                {
                    vocab.IncrementCount(word.Value());
                    // A word seen fewer than twice so far is treated as unknown.
                    if (treeId > computeAfter && vocab.GetCount(word.Value()) < 2.0)
                    {
                        unkCounter.IncrementCount(lex.GetUnknownWordModel().GetSignature(word.Value(), posId++));
                    }
                }
                treeId++;
            }
            IList <string> biggestKeys = new List <string>(unkCounter.KeySet());

            biggestKeys.Sort(Counters.ToComparatorDescending(unkCounter));
            foreach (string wordType in biggestKeys)
            {
                pw.Printf("%s\t%d%n", wordType, (int)unkCounter.GetCount(wordType));
            }
            // FIX: pw.Close() was called twice in a row; closing once is sufficient.
            pw.Close();
        }
Example #23
0
 /// <summary>
 /// Builds a JSON writer that sends its output to <paramref name="writer"/>,
 /// formatted according to <paramref name="options"/>.
 /// </summary>
 /// <param name="writer">destination for the JSON output</param>
 /// <param name="options">output-formatting options</param>
 public JSONWriter(PrintWriter writer, AnnotationOutputter.Options options)
 {
     this.options = options;
     this.writer  = writer;
 }
        /// <summary>
        /// Command-line driver: optionally prints treebank statistics (-stats), trains
        /// or loads a parser (-parser) and/or a character-based lexicon (-lex), and
        /// evaluates segmentation and/or parsing on a test treebank (-test), with
        /// optional bracket-based scoring (-eval).
        /// </summary>
        /// <param name="args">flags and their arguments; see flagsToNumArgs below</param>
        /// <exception cref="System.IO.IOException"/>
        public static void Main(string[] args)
        {
            // Flags that consume a fixed number of following arguments.
            IDictionary <string, int> flagsToNumArgs = Generics.NewHashMap();

            // FIX: the transliterated int.Parse(3) does not compile (int.Parse takes a
            // string); the boxed-Integer idiom from Java is just an int literal in C#.
            flagsToNumArgs["-parser"]        = 3;
            flagsToNumArgs["-lex"]           = 3;
            flagsToNumArgs["-test"]          = 2;
            flagsToNumArgs["-out"]           = 1;
            flagsToNumArgs["-lengthPenalty"] = 1;
            flagsToNumArgs["-penaltyType"]   = 1;
            flagsToNumArgs["-maxLength"]     = 1;
            flagsToNumArgs["-stats"]         = 2;
            IDictionary <string, string[]> argMap = StringUtils.ArgsToMap(args, flagsToNumArgs);
            bool        eval = argMap.Contains("-eval");
            PrintWriter pw   = null;

            if (argMap.Contains("-out"))
            {
                pw = new PrintWriter(new OutputStreamWriter(new FileOutputStream((argMap["-out"])[0]), "GB18030"), true);
            }
            log.Info("ChineseCharacterBasedLexicon called with args:");
            ChineseTreebankParserParams ctpp = new ChineseTreebankParserParams();

            for (int i = 0; i < args.Length; i++)
            {
                ctpp.SetOptionFlag(args, i);
                log.Info(" " + args[i]);
            }
            log.Info();
            Options op = new Options(ctpp);

            // -stats mode: load (and optionally annotate) a treebank, print its
            // statistics, and exit without doing anything else.
            if (argMap.Contains("-stats"))
            {
                string[]       statArgs         = (argMap["-stats"]);
                MemoryTreebank rawTrainTreebank = op.tlpParams.MemoryTreebank();
                IFileFilter    trainFilt        = new NumberRangesFileFilter(statArgs[1], false);
                rawTrainTreebank.LoadPath(new File(statArgs[0]), trainFilt);
                log.Info("Done reading trees.");
                MemoryTreebank trainTreebank;
                if (argMap.Contains("-annotate"))
                {
                    trainTreebank = new MemoryTreebank();
                    TreeAnnotator annotator = new TreeAnnotator(ctpp.HeadFinder(), ctpp, op);
                    foreach (Tree tree in rawTrainTreebank)
                    {
                        trainTreebank.Add(annotator.TransformTree(tree));
                    }
                    log.Info("Done annotating trees.");
                }
                else
                {
                    trainTreebank = rawTrainTreebank;
                }
                PrintStats(trainTreebank, pw);
                System.Environment.Exit(0);
            }
            // Sentences longer than maxLength are skipped during testing below.
            int maxLength = 1000000;

            //    Test.verbose = true;
            if (argMap.Contains("-norm"))
            {
                op.testOptions.lengthNormalization = true;
            }
            if (argMap.Contains("-maxLength"))
            {
                maxLength = System.Convert.ToInt32((argMap["-maxLength"])[0]);
            }
            // NOTE(review): the parser's own length limit is hard-coded to 120 and is
            // not affected by -maxLength (which only gates sentence skipping below) —
            // confirm this asymmetry is intended.
            op.testOptions.maxLength = 120;
            bool combo = argMap.Contains("-combo");

            if (combo)
            {
                ctpp.useCharacterBasedLexicon = true;
                op.testOptions.maxSpanForTags = 10;
                op.doDep  = false;
                op.dcTags = false;
            }
            LexicalizedParser lp  = null;
            ILexicon          lex = null;

            // -parser mode: either train from a treebank (2-3 args, optionally
            // serializing the result) or load a serialized parser (1 arg).
            if (argMap.Contains("-parser"))
            {
                string[] parserArgs = (argMap["-parser"]);
                if (parserArgs.Length > 1)
                {
                    IFileFilter trainFilt = new NumberRangesFileFilter(parserArgs[1], false);
                    lp = LexicalizedParser.TrainFromTreebank(parserArgs[0], trainFilt, op);
                    if (parserArgs.Length == 3)
                    {
                        string filename = parserArgs[2];
                        log.Info("Writing parser in serialized format to file " + filename + " ");
                        System.Console.Error.Flush();
                        ObjectOutputStream @out = IOUtils.WriteStreamFromString(filename);
                        @out.WriteObject(lp);
                        @out.Close();
                        log.Info("done.");
                    }
                }
                else
                {
                    string parserFile = parserArgs[0];
                    lp = LexicalizedParser.LoadModel(parserFile, op);
                }
                // Adopt the lexicon and options of the trained/loaded parser.
                lex  = lp.GetLexicon();
                op   = lp.GetOp();
                ctpp = (ChineseTreebankParserParams)op.tlpParams;
            }
            if (argMap.Contains("-rad"))
            {
                ctpp.useUnknownCharacterModel = true;
            }
            if (argMap.Contains("-lengthPenalty"))
            {
                ctpp.lengthPenalty = double.Parse((argMap["-lengthPenalty"])[0]);
            }
            if (argMap.Contains("-penaltyType"))
            {
                ctpp.penaltyType = System.Convert.ToInt32((argMap["-penaltyType"])[0]);
            }
            // -lex mode: either train a lexicon from a treebank (2-3 args, optionally
            // serializing it) or deserialize one from a file (1 arg).
            if (argMap.Contains("-lex"))
            {
                string[] lexArgs = (argMap["-lex"]);
                if (lexArgs.Length > 1)
                {
                    IIndex <string> wordIndex = new HashIndex <string>();
                    IIndex <string> tagIndex  = new HashIndex <string>();
                    lex = ctpp.Lex(op, wordIndex, tagIndex);
                    MemoryTreebank rawTrainTreebank = op.tlpParams.MemoryTreebank();
                    IFileFilter    trainFilt        = new NumberRangesFileFilter(lexArgs[1], false);
                    rawTrainTreebank.LoadPath(new File(lexArgs[0]), trainFilt);
                    log.Info("Done reading trees.");
                    MemoryTreebank trainTreebank;
                    if (argMap.Contains("-annotate"))
                    {
                        trainTreebank = new MemoryTreebank();
                        TreeAnnotator annotator = new TreeAnnotator(ctpp.HeadFinder(), ctpp, op);
                        foreach (Tree tree in rawTrainTreebank)
                        {
                            tree = annotator.TransformTree(tree);
                            trainTreebank.Add(tree);
                        }
                        log.Info("Done annotating trees.");
                    }
                    else
                    {
                        trainTreebank = rawTrainTreebank;
                    }
                    lex.InitializeTraining(trainTreebank.Count);
                    lex.Train(trainTreebank);
                    lex.FinishTraining();
                    log.Info("Done training lexicon.");
                    if (lexArgs.Length == 3)
                    {
                        // FIX: the ternary here was redundant — we are inside the
                        // lexArgs.Length == 3 branch, so the filename is lexArgs[2].
                        string filename = lexArgs[2];
                        log.Info("Writing lexicon in serialized format to file " + filename + " ");
                        System.Console.Error.Flush();
                        ObjectOutputStream @out = IOUtils.WriteStreamFromString(filename);
                        @out.WriteObject(lex);
                        @out.Close();
                        log.Info("done.");
                    }
                }
                else
                {
                    string lexFile = lexArgs.Length == 1 ? lexArgs[0] : "parsers/chineseCharLex.ser.gz";
                    log.Info("Reading Lexicon from file " + lexFile);
                    ObjectInputStream @in = IOUtils.ReadStreamFromString(lexFile);
                    try
                    {
                        lex = (ILexicon)@in.ReadObject();
                    }
                    catch (TypeLoadException)
                    {
                        throw new Exception("Bad serialized file: " + lexFile);
                    }
                    @in.Close();
                }
            }
            // -test mode: segment and/or parse each test sentence, optionally print
            // the result, and optionally score brackets against the gold trees.
            if (argMap.Contains("-test"))
            {
                bool segmentWords = ctpp.segment;
                bool parse        = lp != null;
                System.Diagnostics.Debug.Assert((parse || segmentWords));
                //      WordCatConstituent.collinizeWords = argMap.containsKey("-collinizeWords");
                //      WordCatConstituent.collinizeTags = argMap.containsKey("-collinizeTags");
                IWordSegmenter seg = null;
                if (segmentWords)
                {
                    seg = (IWordSegmenter)lex;
                }
                string[]       testArgs     = (argMap["-test"]);
                MemoryTreebank testTreebank = op.tlpParams.MemoryTreebank();
                IFileFilter    testFilt     = new NumberRangesFileFilter(testArgs[1], false);
                testTreebank.LoadPath(new File(testArgs[0]), testFilt);
                ITreeTransformer          subcategoryStripper = op.tlpParams.SubcategoryStripper();
                ITreeTransformer          collinizer          = ctpp.Collinizer();
                WordCatEquivalenceClasser eqclass             = new WordCatEquivalenceClasser();
                WordCatEqualityChecker    eqcheck             = new WordCatEqualityChecker();
                EquivalenceClassEval      basicEval           = new EquivalenceClassEval(eqclass, eqcheck, "basic");
                EquivalenceClassEval      collinsEval         = new EquivalenceClassEval(eqclass, eqcheck, "collinized");
                IList <string>            evalTypes           = new List <string>(3);
                // goodPOS marks configurations where POS tags are reliable enough to
                // score word+tag brackets.
                bool goodPOS = false;
                if (segmentWords)
                {
                    evalTypes.Add(WordCatConstituent.wordType);
                    if (ctpp.segmentMarkov && !parse)
                    {
                        evalTypes.Add(WordCatConstituent.tagType);
                        goodPOS = true;
                    }
                }
                if (parse)
                {
                    evalTypes.Add(WordCatConstituent.tagType);
                    evalTypes.Add(WordCatConstituent.catType);
                    if (combo)
                    {
                        evalTypes.Add(WordCatConstituent.wordType);
                        goodPOS = true;
                    }
                }
                TreeToBracketProcessor proc = new TreeToBracketProcessor(evalTypes);
                log.Info("Testing...");
                foreach (Tree goldTop in testTreebank)
                {
                    Tree             gold         = goldTop.FirstChild();
                    IList <IHasWord> goldSentence = gold.YieldHasWord();
                    if (goldSentence.Count > maxLength)
                    {
                        log.Info("Skipping sentence; too long: " + goldSentence.Count);
                        continue;
                    }
                    else
                    {
                        log.Info("Processing sentence; length: " + goldSentence.Count);
                    }
                    IList <IHasWord> s;
                    if (segmentWords)
                    {
                        // Re-segment the raw character stream of the gold sentence.
                        StringBuilder goldCharBuf = new StringBuilder();
                        foreach (IHasWord aGoldSentence in goldSentence)
                        {
                            StringLabel word = (StringLabel)aGoldSentence;
                            goldCharBuf.Append(word.Value());
                        }
                        string goldChars = goldCharBuf.ToString();
                        s = seg.Segment(goldChars);
                    }
                    else
                    {
                        s = goldSentence;
                    }
                    Tree tree;
                    if (parse)
                    {
                        tree = lp.ParseTree(s);
                        if (tree == null)
                        {
                            throw new Exception("PARSER RETURNED NULL!!!");
                        }
                    }
                    else
                    {
                        tree = Edu.Stanford.Nlp.Trees.Trees.ToFlatTree(s);
                        tree = subcategoryStripper.TransformTree(tree);
                    }
                    if (pw != null)
                    {
                        if (parse)
                        {
                            tree.PennPrint(pw);
                        }
                        else
                        {
                            // FIX: the transliterated Java iterator loop read Current
                            // before the first MoveNext(), which is invalid for .NET
                            // enumerators. Print the words space-separated instead.
                            bool first = true;
                            foreach (IHasWord hw in s)
                            {
                                if (!first)
                                {
                                    pw.Print(" ");
                                }
                                pw.Print(((Word)hw).Word());
                                first = false;
                            }
                        }
                        pw.Println();
                    }
                    if (eval)
                    {
                        ICollection ourBrackets;
                        ICollection goldBrackets;
                        ourBrackets  = proc.AllBrackets(tree);
                        goldBrackets = proc.AllBrackets(gold);
                        if (goodPOS)
                        {
                            Sharpen.Collections.AddAll(ourBrackets, TreeToBracketProcessor.CommonWordTagTypeBrackets(tree, gold));
                            Sharpen.Collections.AddAll(goldBrackets, TreeToBracketProcessor.CommonWordTagTypeBrackets(gold, tree));
                        }
                        basicEval.Eval(ourBrackets, goldBrackets);
                        System.Console.Out.WriteLine("\nScores:");
                        basicEval.DisplayLast();
                        Tree collinsTree = collinizer.TransformTree(tree);
                        Tree collinsGold = collinizer.TransformTree(gold);
                        ourBrackets  = proc.AllBrackets(collinsTree);
                        goldBrackets = proc.AllBrackets(collinsGold);
                        if (goodPOS)
                        {
                            Sharpen.Collections.AddAll(ourBrackets, TreeToBracketProcessor.CommonWordTagTypeBrackets(collinsTree, collinsGold));
                            Sharpen.Collections.AddAll(goldBrackets, TreeToBracketProcessor.CommonWordTagTypeBrackets(collinsGold, collinsTree));
                        }
                        collinsEval.Eval(ourBrackets, goldBrackets);
                        System.Console.Out.WriteLine("\nCollinized scores:");
                        collinsEval.DisplayLast();
                        System.Console.Out.WriteLine();
                    }
                }
                if (eval)
                {
                    basicEval.Display();
                    System.Console.Out.WriteLine();
                    collinsEval.Display();
                }
            }
        }
        /// <summary>
        /// Builds an index of MAX_DOCS documents — in compound or multi-file segment
        /// format per <paramref name="useCompoundFiles"/> — then runs a term query and
        /// a boolean OR query against it, printing and checking the hits of each.
        /// </summary>
        /// <param name="random">randomness source for the analyzer</param>
        /// <param name="out">writer that receives the query/hit output being compared</param>
        /// <param name="useCompoundFiles">true to force compound-file segments, false for multi-file</param>
        /// <param name="MAX_DOCS">number of documents to index and the search result cap</param>
        private void DoTest(Random random, PrintWriter @out, bool useCompoundFiles, int MAX_DOCS)
        {
            Directory directory = newDirectory();
            Analyzer analyzer = new MockAnalyzer(random);
            IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
            MergePolicy mp = conf.MergePolicy;
            // Force compound files fully on (ratio 1.0) or fully off (0.0).
            mp.NoCFSRatio = useCompoundFiles ? 1.0 : 0.0;
            IndexWriter writer = new IndexWriter(directory, conf);
            if (VERBOSE)
            {
                Console.WriteLine("TEST: now build index MAX_DOCS=" + MAX_DOCS);
            }

            for (int j = 0; j < MAX_DOCS; j++)
            {
                Document d = new Document();
                d.Add(newTextField(PRIORITY_FIELD, HIGH_PRIORITY, Field.Store.YES));
                d.Add(newTextField(ID_FIELD, Convert.ToString(j), Field.Store.YES));
                // FIX: addDocument -> AddDocument, matching the .NET API casing used
                // elsewhere in this method (e.g. writer.Dispose()).
                writer.AddDocument(d);
            }
            writer.Dispose();

            // try a search without OR
            IndexReader reader = DirectoryReader.Open(directory);
            IndexSearcher searcher = newSearcher(reader);

            Query query = new TermQuery(new Term(PRIORITY_FIELD, HIGH_PRIORITY));
            // FIX: println -> Println, matching the PrintWriter API used in this file.
            @out.Println("Query: " + query.ToString(PRIORITY_FIELD));
            if (VERBOSE)
            {
                Console.WriteLine("TEST: search query=" + query);
            }

            // Sort by score first, then by numeric id for a deterministic order.
            Sort sort = new Sort(SortField.FIELD_SCORE, new SortField(ID_FIELD, SortField.Type.INT));

            ScoreDoc[] hits = searcher.Search(query, null, MAX_DOCS, sort).scoreDocs;
            PrintHits(@out, hits, searcher);
            CheckHits(hits, MAX_DOCS, searcher);

            // try a new search with OR
            searcher = newSearcher(reader);

            BooleanQuery booleanQuery = new BooleanQuery();
            booleanQuery.Add(new TermQuery(new Term(PRIORITY_FIELD, HIGH_PRIORITY)), BooleanClause.Occur_e.SHOULD);
            booleanQuery.Add(new TermQuery(new Term(PRIORITY_FIELD, MED_PRIORITY)), BooleanClause.Occur_e.SHOULD);
            @out.Println("Query: " + booleanQuery.ToString(PRIORITY_FIELD));

            // FIX: search -> Search, consistent with the identical call above.
            hits = searcher.Search(booleanQuery, null, MAX_DOCS, sort).scoreDocs;
            PrintHits(@out, hits, searcher);
            CheckHits(hits, MAX_DOCS, searcher);

            reader.Close();
            directory.Close();
        }
Example #26
0
		// Handy dandy for dumping an action list during debugging.
		// Renders the given ActionList to a string via SwfxPrinter, honoring the
		// same command-line style flags as main() (-decompile, -asm, -noactions,
		// -showoffset, -showdebugsource, -nofunctions, -tabbedGlyphs).
		public static System.String actionListToString(ActionList al, System.String[] args)
		{
			// cut and paste arg code from main() could be better but it works
			bool showActions = true;
			bool showOffset = false;
			bool showDebugSource = false;
			bool decompile = false;
			bool defunc = true;
			bool tabbedGlyphs = true;
			int index = 0;
			
			while (args != null && (index < args.Length) && (args[index].StartsWith("-")))
			{
				if (args[index].Equals("-decompile"))
				{
					decompile = true;
					++index;
				}
				else if (args[index].Equals("-nofunctions"))
				{
					defunc = false;
					++index;
				}
				else if (args[index].Equals("-asm"))
				{
					decompile = false;
					++index;
				}
				else if (args[index].Equals("-noactions"))
				{
					showActions = false;
					++index;
				}
				else if (args[index].Equals("-showoffset"))
				{
					showOffset = true;
					++index;
				}
				else if (args[index].Equals("-showdebugsource"))
				{
					showDebugSource = true;
					++index;
				}
				// FIX: compare case-insensitively instead of upper-casing both sides.
				else if (args[index].Equals("-tabbedGlyphs", System.StringComparison.OrdinalIgnoreCase))
				{
					tabbedGlyphs = true;
					++index;
				}
				else
				{
					// FIX: an unrecognized "-" flag previously left index unchanged,
					// which made this loop spin forever; skip the unknown flag.
					++index;
				}
			}
			
			System.IO.StringWriter sw = new System.IO.StringWriter();
			// PrintWriter here is the project's Java-style writer shim (assignable to
			// StreamWriter in this port).
			System.IO.StreamWriter out_Renamed = new PrintWriter(sw);
			SwfxPrinter printer = new SwfxPrinter(out_Renamed);
			printer.showActions = showActions;
			printer.showOffset = showOffset;
			printer.showDebugSource = showDebugSource;
			printer.decompile = decompile;
			printer.defunc = defunc;
			printer.tabbedGlyphs = tabbedGlyphs;
			
			printer.printActions(al);
			out_Renamed.Flush();
			return sw.ToString();
		}
 /// <summary>
 /// Prints the total hit count followed by the id field of a sample of hits:
 /// the first ten plus a window around hit 100 (indices 95-104).
 /// </summary>
 /// <param name="out">destination writer for the hit listing</param>
 /// <param name="hits">score docs returned by the search</param>
 /// <param name="searcher">searcher used to resolve each hit's stored document</param>
 private void PrintHits(PrintWriter @out, ScoreDoc[] hits, IndexSearcher searcher)
 {
     // FIX: println -> Println, matching the PrintWriter API used in this file.
     @out.Println(hits.Length + " total results\n");
     for (int i = 0; i < hits.Length; i++)
     {
         if (i < 10 || (i > 94 && i < 105))
         {
             Document d = searcher.Doc(hits[i].Doc);
             @out.Println(i + " " + d.Get(ID_FIELD));
         }
     }
 }
 /// <summary>Dumps diagnostic state by delegating entirely to the base implementation.</summary>
 /// <param name="fd">file descriptor associated with the dump request</param>
 /// <param name="writer">destination for the dump output</param>
 /// <param name="args">arguments passed through unchanged to the base dump</param>
 public override void Dump(FileDescriptor fd, PrintWriter writer, string[] args)
 {
     base.Dump(fd, writer, args);
 }
Example #29
0
		/// <summary>Writes a listing of state to the given writer by delegating to the wrapped Delegate instance.</summary>
		/// <param name="out">destination writer for the listing</param>
		public virtual void list(PrintWriter @out)
		{
			Delegate.list(@out);
		}
Example #30
0
 /// <summary>
 /// Serializes this classifier's feature-index maps and weight matrices as plain
 /// text, appending to whatever the base class has already written to
 /// <paramref name="pw"/>.
 /// </summary>
 /// <param name="pw">destination writer for the serialized classifier</param>
 /// <exception cref="System.Exception"/>
 protected internal override void SerializeTextClassifier(PrintWriter pw)
 {
     base.SerializeTextClassifier(pw);
     // Node feature indices: a size header, then one "index\tvalue" line per entry.
     pw.Printf("nodeFeatureIndicesMap.size()=\t%d%n", nodeFeatureIndicesMap.Size());
     for (int i = 0; i < nodeFeatureIndicesMap.Size(); i++)
     {
         pw.Printf("%d\t%d%n", i, nodeFeatureIndicesMap.Get(i));
     }
     // Edge feature indices, same layout.
     pw.Printf("edgeFeatureIndicesMap.size()=\t%d%n", edgeFeatureIndicesMap.Size());
     for (int i_1 = 0; i_1 < edgeFeatureIndicesMap.Size(); i_1++)
     {
         pw.Printf("%d\t%d%n", i_1, edgeFeatureIndicesMap.Get(i_1));
     }
     // The same matrix-dump pattern was repeated inline five times; it is now
     // factored into WriteWeightMatrix, which produces byte-identical output.
     if (flags.secondOrderNonLinear)
     {
         WriteWeightMatrix(pw, "inputLayerWeights4Edge", inputLayerWeights4Edge);
         WriteWeightMatrix(pw, "outputLayerWeights4Edge", outputLayerWeights4Edge);
     }
     else
     {
         WriteWeightMatrix(pw, "linearWeights", linearWeights);
     }
     WriteWeightMatrix(pw, "inputLayerWeights", inputLayerWeights);
     WriteWeightMatrix(pw, "outputLayerWeights", outputLayerWeights);
 }

 /// <summary>
 /// Writes one weight matrix: a "name.length" header with the row count, then one
 /// line per row containing the row length and the space-joined weight values.
 /// </summary>
 /// <param name="pw">destination writer</param>
 /// <param name="name">field name to print in the header line</param>
 /// <param name="weights">the matrix to serialize, one row per line</param>
 private static void WriteWeightMatrix(PrintWriter pw, string name, double[][] weights)
 {
     pw.Printf("%s.length=\t%d%n", name, weights.Length);
     foreach (double[] row in weights)
     {
         List <double> values = new List <double>();
         foreach (double w in row)
         {
             values.Add(w);
         }
         pw.Printf("%d\t%s%n", row.Length, StringUtils.Join(values, " "));
     }
 }
Example #31
0
        // = false;
        // not an instantiable class
        /// <summary>Usage: java edu.stanford.nlp.trees.tregex.tsurgeon.Tsurgeon [-s] -treeFile file-with-trees [-po matching-pattern operation] operation-file-1 operation-file-2 ...</summary>
        /// <remarks>
        /// Usage: java edu.stanford.nlp.trees.tregex.tsurgeon.Tsurgeon [-s] -treeFile file-with-trees [-po matching-pattern operation] operation-file-1 operation-file-2 ... operation-file-n
        /// <h4>Arguments:</h4>
        /// Each argument should be the name of a transformation file that contains a list of pattern
        /// and transformation operation list pairs.  That is, it is a sequence of pairs of a
        /// <see cref="Edu.Stanford.Nlp.Trees.Tregex.TregexPattern"/>
        /// pattern on one or more lines, then a
        /// blank line (empty or whitespace), then a list of transformation operations one per line
        /// (as specified by <b>Legal operation syntax</b> below) to apply when the pattern is matched,
        /// and then another blank line (empty or whitespace).
        /// Note the need for blank lines: The code crashes if they are not present as separators
        /// (although the blank line at the end of the file can be omitted).
        /// The script file can include comment lines, either whole comment lines or
        /// trailing comments introduced by %, which extend to the end of line.  A needed percent
        /// mark can be escaped by a preceding backslash.
        /// <p>
        /// For example, if you want to excise an SBARQ node whenever it is the parent of an SQ node,
        /// and relabel the SQ node to S, your transformation file would look like this:
        /// <blockquote>
        /// <code>
        /// SBARQ=n1 &lt; SQ=n2<br />
        /// <br />
        /// excise n1 n1<br />
        /// relabel n2 S
        /// </code>
        /// </blockquote>
        /// <h4>Options:</h4>
        /// <ul>
        /// <li>
        /// <c>-treeFile &lt;filename&gt;</c>
        /// specify the name of the file that has the trees you want to transform.
        /// <li>
        /// <c>-po &lt;matchPattern&gt; &lt;operation&gt;</c>
        /// Apply a single operation to every tree using the specified match pattern and the specified operation.  Use this option
        /// when you want to quickly try the effect of one pattern/surgery combination, and are too lazy to write a transformation file.
        /// <li>
        /// <c>-s</c>
        /// Print each output tree on one line (default is pretty-printing).
        /// <li>
        /// <c>-m</c>
        /// For every tree that had a matching pattern, print "before" (prepended as "Operated on:") and "after" (prepended as "Result:").  Unoperated on trees just pass through the transducer as usual.
        /// <li>
        /// <c>-encoding X</c>
        /// Uses character set X for input and output of trees.
        /// <li>
        /// <c>-macros &lt;filename&gt;</c>
        /// A file of macros to use on the tregex pattern.  Macros should be one per line, with original and replacement separated by tabs.
        /// <li>
        /// <c>-hf &lt;headFinder-class-name&gt;</c>
        /// use the specified
        /// <see cref="Edu.Stanford.Nlp.Trees.IHeadFinder"/>
        /// class to determine headship relations.
        /// <li>
        /// <c>-hfArg &lt;string&gt;</c>
        /// pass a string argument in to the
        /// <see cref="Edu.Stanford.Nlp.Trees.IHeadFinder"/>
        /// class's constructor.
        /// <c>-hfArg</c>
        /// can be used multiple times to pass in multiple arguments.
        /// <li>
        /// <c>-trf &lt;TreeReaderFactory-class-name&gt;</c>
        /// use the specified
        /// <see cref="Edu.Stanford.Nlp.Trees.ITreeReaderFactory"/>
        /// class to read trees from files.
        /// </ul>
        /// <h4>Legal operation syntax:</h4>
        /// <ul>
        /// <li>
        /// <c>delete &lt;name&gt;</c>
        /// deletes the node and everything below it.
        /// <li>
        /// <c>prune &lt;name&gt;</c>
        /// Like delete, but if, after the pruning, the parent has no children anymore, the parent is pruned too.  Pruning continues to affect all ancestors until one is found with remaining children.  This may result in a null tree.
        /// <li>
        /// <c>excise &lt;name1&gt; &lt;name2&gt;</c>
        /// The name1 node should either dominate or be the same as the name2 node.  This excises out everything from
        /// name1 to name2.  All the children of name2 go into the parent of name1, where name1 was.
        /// <li>
        /// <c>relabel &lt;name&gt; &lt;new-label&gt;</c>
        /// Relabels the node to have the new label. <br />
        /// There are three possible forms: <br />
        /// <c>relabel nodeX VP</c>
        /// - for changing a node label to an
        /// alphanumeric string <br />
        /// <c>relabel nodeX /''/</c>
        /// - for relabeling a node to
        /// something that isn't a valid identifier without quoting <br />
        /// <c>relabel nodeX /^VB(.*)$/verb\\/$1/</c>
        /// - for regular
        /// expression based relabeling. In this case, all matches of the
        /// regular expression against the node label are replaced with the
        /// replacement String.  This has the semantics of Java/Perl's
        /// replaceAll: you may use capturing groups and put them in
        /// replacements with $n. For example, if the pattern is /foo/bar/
        /// and the node matched is "foo", the replaceAll semantics result in
        /// "barbar".  If the pattern is /^foo(.*)$/bar$1/ and node matched is
        /// "foofoo", relabel will result in "barfoo".  <br />
        /// When using the regex replacement method, you can also use the
        /// sequences ={node} and %{var} in the replacement string to use
        /// captured nodes or variable strings in the replacement string.
        /// For example, if the Tregex pattern was "duck=bar" and the relabel
        /// is /foo/={bar}/, "foofoo" will be replaced with "duckduck". <br />
        /// To concatenate two nodes named in the tregex pattern, for
        /// example, you can use the pattern /^.*$/={foo}={bar}/.  Note that
        /// the ^.*$ is necessary to make sure the regex pattern only matches
        /// and replaces once on the entire node name. <br />
        /// To get an "=" or a "%" in the replacement, use \ escaping.
        /// Also, as in the example you can escape a slash in the middle of
        /// the second and third forms with \\/ and \\\\. <br />
        /// <li>
        /// <c>insert &lt;name&gt; &lt;position&gt;</c>
        /// or
        /// <c>insert &lt;tree&gt; &lt;position&gt;</c>
        /// inserts the named node or tree into the position specified.
        /// <li>
        /// <c>move &lt;name&gt; &lt;position&gt;</c>
        /// moves the named node into the specified position.
        /// <p>Right now the  only ways to specify position are:
        /// <p>
        /// <c>$+ &lt;name&gt;</c>
        /// the left sister of the named node<br />
        /// <c>$- &lt;name&gt;</c>
        /// the right sister of the named node<br />
        /// <c>&gt;i &lt;name&gt;</c>
        /// the i_th daughter of the named node<br />
        /// <c>&gt;-i &lt;name&gt;</c>
        /// the i_th daughter, counting from the right, of the named node.
        /// <li>
        /// <c>replace &lt;name1&gt; &lt;name2&gt;</c>
        /// deletes name1 and inserts a copy of name2 in its place.
        /// <li>
        /// <c>replace &lt;name&gt; &lt;tree&gt; &lt;tree2&gt;...</c>
        /// deletes name and inserts the new tree(s) in its place.  If
        /// more than one replacement tree is given, each of the new
        /// subtrees will be added in order where the old tree was.
        /// Multiple subtrees at the root is an illegal operation and
        /// will throw an exception.
        /// <li>
        /// <c>createSubtree &lt;auxiliary-tree-or-label&gt; &lt;name1&gt; [&lt;name2&gt;]</c>
        /// Create a subtree out of all the nodes from
        /// <c>&lt;name1&gt;</c>
        /// through
        /// <c>&lt;name2&gt;</c>
        /// . The subtree is moved to the foot of the given
        /// auxiliary tree, and the tree is inserted where the nodes of
        /// the subtree used to reside. If a simple label is provided as
        /// the first argument, the subtree is given a single parent with
        /// a name corresponding to the label.  To limit the operation to
        /// just one node, elide
        /// <c>&lt;name2&gt;</c>
        /// .
        /// <li>
        /// <c>adjoin &lt;auxiliary_tree&gt; &lt;name&gt;</c>
        /// Adjoins the specified auxiliary tree into the named node.
        /// The daughters of the target node will become the daughters of the foot of the auxiliary tree.
        /// <li>
        /// <c>adjoinH &lt;auxiliary_tree&gt; &lt;name&gt;</c>
        /// Similar to adjoin, but preserves the target node
        /// and makes it the root of
        /// <c>&lt;tree&gt;</c>
        /// . (It is still accessible as
        /// <c>name</c>
        /// .  The root of the
        /// auxiliary tree is ignored.)
        /// <li>
        /// <c>adjoinF &lt;auxiliary_tree&gt; &lt;name&gt;</c>
        /// Similar to adjoin,
        /// but preserves the target node and makes it the foot of
        /// <c>&lt;tree&gt;</c>
        /// .
        /// (It is still accessible as
        /// <c>name</c>
        /// , and retains its status as parent of its children.
        /// The root of the auxiliary tree is ignored.)
        /// <li>
        /// <c>coindex &lt;name1&gt; &lt;name2&gt; ... &lt;nameM&gt;</c>
        /// Puts a (Penn Treebank style)
        /// coindexation suffix of the form "-N" on each of nodes name_1 through name_m.  The value of N will be
        /// automatically generated in reference to the existing coindexations in the tree, so that there is never
        /// an accidental clash of indices across things that are not meant to be coindexed.
        /// </ul>
        /// <p>
        /// In the context of
        /// <c>adjoin</c>
        /// ,
        /// <c>adjoinH</c>
        /// ,
        /// <c>adjoinF</c>
        /// , and
        /// <c>createSubtree</c>
        /// , an auxiliary
        /// tree is a tree in Penn Treebank format with
        /// <c>@</c>
        /// on
        /// exactly one of the leaves denoting the foot of the tree.
        /// The operations which use the foot use the labeled node.
        /// For example:
        /// </p>
        /// <blockquote>
        /// Tsurgeon:
        /// <c>adjoin (FOO (BAR@)) foo</c>
        /// <br />
        /// Tregex:
        /// <c>B=foo</c>
        /// <br />
        /// Input:
        /// <c>(A (B 1 2))</c>
        /// Output:
        /// <c>(A (FOO (BAR 1 2)))</c>
        /// </blockquote>
        /// <p>
        /// Tsurgeon applies the same operation to the same tree for as long
        /// as the given tregex operation matches.  This means that infinite
        /// loops are very easy to cause.  One common situation where this comes up
        /// is an insert operation that repeats infinitely many times
        /// unless you add an expression to the tregex that matches against
        /// the inserted pattern.  For example, this pattern will infinite loop:
        /// </p>
        /// <blockquote>
        /// <code>
        /// TregexPattern tregex = TregexPattern.compile("S=node &lt;&lt; NP"); <br />
        /// TsurgeonPattern tsurgeon = Tsurgeon.parseOperation("insert (NP foo) &gt;-1 node");
        /// </code>
        /// </blockquote>
        /// <p>
        /// This pattern, though, will terminate:
        /// </p>
        /// <blockquote>
        /// <code>
        /// TregexPattern tregex = TregexPattern.compile("S=node &lt;&lt; NP !&lt;&lt; foo"); <br />
        /// TsurgeonPattern tsurgeon = Tsurgeon.parseOperation("insert (NP foo) &gt;-1 node");
        /// </code>
        /// </blockquote>
        /// <p>
        /// Tsurgeon has (very) limited support for conditional statements.
        /// If a pattern is prefaced with
        /// <c>if exists &lt;name&gt;</c>
        /// ,
        /// the rest of the pattern will only execute if
        /// the named node was found in the corresponding TregexMatcher.
        /// </p>
        /// </remarks>
        /// <param name="args">
        /// a list of names of files each of which contains a single tregex matching pattern plus a list, one per line,
        /// of transformation operations to apply to the matched pattern.
        /// </param>
        /// <exception cref="System.Exception">If an I/O error or pattern syntax error occurs</exception>
        public static void Main(string[] args)
        {
            // Option names and the values that will be parsed from the command line.
            string headFinderClassName = null;
            string headFinderOption    = "-hf";

            string[] headFinderArgs      = null;
            string   headFinderArgOption = "-hfArg";
            string   encoding            = "UTF-8";
            string   encodingOption      = "-encoding";

            // No arguments at all: print usage and exit normally.
            if (args.Length == 0)
            {
                log.Info("Usage: java edu.stanford.nlp.trees.tregex.tsurgeon.Tsurgeon [-s] -treeFile <file-with-trees> [-po <matching-pattern> <operation>] <operation-file-1> <operation-file-2> ... <operation-file-n>");
                System.Environment.Exit(0);
            }
            string treePrintFormats;
            string singleLineOption = "-s";
            string verboseOption    = "-v";
            string matchedOption    = "-m";
            // if set, then print original form of trees that are matched & thus operated on
            string patternOperationOption = "-po";
            string treeFileOption         = "-treeFile";
            string trfOption     = "-trf";
            string macroOption   = "-macros";
            string macroFilename = string.Empty;
            // Number of argument values each flag consumes when the command line is parsed.
            // NOTE(review): -v and -m are not registered here; presumably ArgsToMap treats
            // unregistered flags as taking 0 values — confirm against StringUtils.ArgsToMap.
            IDictionary <string, int> flagMap = Generics.NewHashMap();

            flagMap[patternOperationOption] = 2;
            flagMap[treeFileOption]         = 1;
            flagMap[trfOption]        = 1;
            flagMap[singleLineOption] = 0;
            flagMap[encodingOption]   = 1;
            flagMap[headFinderOption] = 1;
            flagMap[macroOption]      = 1;
            IDictionary <string, string[]> argsMap = StringUtils.ArgsToMap(args, flagMap);

            // Positional arguments (the operation-file names) are stored under the null key.
            args = argsMap[null];
            // NOTE(review): argsMap.Contains(key) is presumably a key-containment extension
            // (converted from Java's Map.containsKey) — confirm against the Sharpen/compat layer.
            if (argsMap.Contains(headFinderOption))
            {
                headFinderClassName = argsMap[headFinderOption][0];
            }
            if (argsMap.Contains(headFinderArgOption))
            {
                headFinderArgs = argsMap[headFinderArgOption];
            }
            if (argsMap.Contains(verboseOption))
            {
                verbose = true;
            }
            // -s selects one-line tree output; default is Penn pretty-printing.
            if (argsMap.Contains(singleLineOption))
            {
                treePrintFormats = "oneline,";
            }
            else
            {
                treePrintFormats = "penn,";
            }
            if (argsMap.Contains(encodingOption))
            {
                encoding = argsMap[encodingOption][0];
            }
            if (argsMap.Contains(macroOption))
            {
                macroFilename = argsMap[macroOption][0];
            }
            // Writer for output trees, using the requested character encoding.
            TreePrint          tp    = new TreePrint(treePrintFormats, new PennTreebankLanguagePack());
            PrintWriter        pwOut = new PrintWriter(new OutputStreamWriter(System.Console.Out, encoding), true);
            ITreeReaderFactory trf;

            // Use a caller-specified TreeReaderFactory if given; otherwise the Tregex default.
            if (argsMap.Contains(trfOption))
            {
                string trfClass = argsMap[trfOption][0];
                trf = ReflectionLoading.LoadByReflection(trfClass);
            }
            else
            {
                trf = new TregexPattern.TRegexTreeReaderFactory();
            }
            // Load the trees to operate on from -treeFile, if provided.
            Treebank trees = new DiskTreebank(trf, encoding);

            if (argsMap.Contains(treeFileOption))
            {
                trees.LoadPath(argsMap[treeFileOption][0]);
            }
            if (trees.IsEmpty())
            {
                log.Info("Warning: No trees specified to operate on.  Use -treeFile path option.");
            }
            // Build the Tregex pattern compiler, optionally with a custom head finder
            // loaded by reflection (with -hfArg constructor arguments when given).
            TregexPatternCompiler compiler;

            if (headFinderClassName == null)
            {
                compiler = new TregexPatternCompiler();
            }
            else
            {
                IHeadFinder hf;
                if (headFinderArgs == null)
                {
                    hf = ReflectionLoading.LoadByReflection(headFinderClassName);
                }
                else
                {
                    hf = ReflectionLoading.LoadByReflection(headFinderClassName, (object[])headFinderArgs);
                }
                compiler = new TregexPatternCompiler(hf);
            }
            Macros.AddAllMacros(compiler, macroFilename, encoding);
            // Collect (match-pattern, surgery-operation) pairs: either the single pair
            // given with -po, or all pairs read from the positional operation files.
            IList <Pair <TregexPattern, TsurgeonPattern> > ops = new List <Pair <TregexPattern, TsurgeonPattern> >();

            if (argsMap.Contains(patternOperationOption))
            {
                TregexPattern   matchPattern = compiler.Compile(argsMap[patternOperationOption][0]);
                TsurgeonPattern p            = ParseOperation(argsMap[patternOperationOption][1]);
                ops.Add(new Pair <TregexPattern, TsurgeonPattern>(matchPattern, p));
            }
            else
            {
                foreach (string arg in args)
                {
                    IList <Pair <TregexPattern, TsurgeonPattern> > pairs = GetOperationsFromFile(arg, encoding, compiler);
                    foreach (Pair <TregexPattern, TsurgeonPattern> pair in pairs)
                    {
                        if (verbose)
                        {
                            log.Info(pair.Second());
                        }
                        ops.Add(pair);
                    }
                }
            }
            // Apply every operation to every tree; with -m, print the "before" form of
            // any tree that was actually matched before printing the result.
            foreach (Tree t in trees)
            {
                Tree original = t.DeepCopy();
                Tree result   = ProcessPatternsOnTree(ops, t);
                if (argsMap.Contains(matchedOption) && matchedOnTree)
                {
                    pwOut.Println("Operated on: ");
                    DisplayTree(original, tp, pwOut);
                    pwOut.Println("Result: ");
                }
                DisplayTree(result, tp, pwOut);
            }
        }
Example #32
0
 /// <summary>Forwards the display request to the wrapped counting evaluator.</summary>
 /// <param name="verbose"> whether to produce verbose output. </param>
 /// <param name="pw"> writer that receives the evaluation display. </param>
 public virtual void Display(bool verbose, PrintWriter pw) => countingEval.Display(verbose, pw);
Example #33
0
        /// <summary>
        /// Writes a list of StateCU_BlaneyCriddle objects to a list file.  A header is
        /// printed to the top of the file, containing the commands used to generate the
        /// file.  Any strings in the body of the file that contain the field delimiter will be wrapped in "...". </summary>
        /// <param name="filename"> the name of the file to which the data will be written. </param>
        /// <param name="delimiter"> the delimiter to use for separating field values. </param>
        /// <param name="update"> whether to update an existing file, retaining the current
        /// header (true) or to create a new file with a new header. </param>
        /// <param name="data"> the Vector of objects to write. </param>
        /// <param name="outputComments"> additional comment lines for the file header; may be null. </param>
        /// <exception cref="Exception"> if an error occurs. </exception>
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public static void writeListFile(String filename, String delimiter, boolean update, java.util.List<StateCU_BlaneyCriddle> data, java.util.List<String> outputComments) throws Exception
        public static void writeListFile(string filename, string delimiter, bool update, IList <StateCU_BlaneyCriddle> data, IList <string> outputComments)
        {
            string routine = "StateCU_BlaneyCriddle.writeListFile";
            int    size    = (data == null) ? 0 : data.Count;

            // Column names for the list file, in output order.
            IList <string> fields = new List <string>();
            fields.Add("Name");
            fields.Add("CurveType");
            fields.Add("DayPercent");
            fields.Add("Coefficient");
            int fieldCount = fields.Count;

            // Look up the display name and value format for each column.
            string[] names   = new string[fieldCount];
            string[] formats = new string[fieldCount];
            int      comp    = StateCU_DataSet.COMP_BLANEY_CRIDDLE;
            for (int i = 0; i < fieldCount; i++)
            {
                string s = fields[i];
                names[i]   = StateCU_Util.lookupPropValue(comp, "FieldName", s);
                formats[i] = StateCU_Util.lookupPropValue(comp, "Format", s);
            }

            // When updating, reuse the header of the existing file.
            string oldFile = null;
            if (update)
            {
                oldFile = IOUtil.getPathUsingWorkingDir(filename);
            }

            PrintWriter    @out          = null;
            IList <string> commentString = new List <string>(1);
            commentString.Add("#");
            IList <string> ignoreCommentString = new List <string>(1);
            ignoreCommentString.Add("#>");

            try
            {
                // Add some basic comments at the top of the file.  However, do this to a copy of the
                // incoming comments so that they are not modified in the calling code.
                IList <string> newComments2;
                if (outputComments == null)
                {
                    newComments2 = new List <string>();
                }
                else
                {
                    newComments2 = new List <string>(outputComments);
                }
                newComments2.Insert(0, "");
                newComments2.Insert(1, "StateCU Blaney-Criddle crop coefficients as a delimited list file.");
                newComments2.Insert(2, "");
                @out = IOUtil.processFileHeaders(oldFile, IOUtil.getPathUsingWorkingDir(filename), newComments2, commentString, ignoreCommentString, 0);

                // Header row: every column name is quoted unconditionally.
                StringBuilder header = new StringBuilder();
                for (int i = 0; i < fieldCount; i++)
                {
                    if (i > 0)
                    {
                        header.Append(delimiter);
                    }
                    header.Append("\"" + names[i] + "\"");
                }
                @out.println(header.ToString());

                string[] line = new string[fieldCount];
                for (int i = 0; i < size; i++)
                {
                    StateCU_BlaneyCriddle bc = data[i];
                    string flag = bc.getFlag();
                    if (flag.Equals("Percent", StringComparison.OrdinalIgnoreCase))
                    {
                        // "Percent" curves carry 21 values (getNckca/getCkca).
                        for (int j = 0; j < 21; j++)
                        {
                            line[0] = StringUtil.formatString(bc.getName(), formats[0]).Trim();
                            line[1] = StringUtil.formatString(bc.getFlag(), formats[1]).Trim();
                            line[2] = StringUtil.formatString(bc.getNckca(j), formats[2]).Trim();
                            line[3] = StringUtil.formatString(bc.getCkca(j), formats[3]).Trim();
                            writeDelimitedLine(@out, line, delimiter);
                        }
                    }
                    else
                    {
                        // Other curves carry 25 values (getNckcp/getCkcp).
                        for (int j = 0; j < 25; j++)
                        {
                            line[0] = StringUtil.formatString(bc.getName(), formats[0]).Trim();
                            line[1] = StringUtil.formatString(bc.getFlag(), formats[1]).Trim();
                            line[2] = StringUtil.formatString(bc.getNckcp(j), formats[2]).Trim();
                            line[3] = StringUtil.formatString(bc.getCkcp(j), formats[3]).Trim();
                            writeDelimitedLine(@out, line, delimiter);
                        }
                    }
                }
            }
            catch (Exception e)
            {
                Message.printWarning(3, routine, e);
                // Use "throw;" (not "throw e;") so the original stack trace is preserved.
                throw;
            }
            finally
            {
                if (@out != null)
                {
                    @out.flush();
                    @out.close();
                }
            }
        }

        /// <summary>
        /// Writes one delimited record, quoting any field value that contains the delimiter. </summary>
        /// <param name="out"> the open output writer. </param>
        /// <param name="line"> the formatted field values for one record. </param>
        /// <param name="delimiter"> the field delimiter. </param>
        private static void writeDelimitedLine(PrintWriter @out, string[] line, string delimiter)
        {
            StringBuilder buffer = new StringBuilder();
            for (int k = 0; k < line.Length; k++)
            {
                if (k > 0)
                {
                    buffer.Append(delimiter);
                }
                string field = line[k];
                if (field.IndexOf(delimiter, StringComparison.Ordinal) > -1)
                {
                    field = "\"" + field + "\"";
                }
                buffer.Append(field);
            }
            @out.println(buffer.ToString());
        }
Example #34
0
		/// <summary>
		/// Writes the exception's string representation to the given writer.
		/// In .NET, Exception.ToString() includes the message and the stack trace.
		/// </summary>
		public static void PrintStackTrace(Exception e, PrintWriter writer)
		{
			string text = e.ToString();
			writer.Println(text);
		}
Example #35
0
        /// <summary>
        /// Indexes a small fixed document set and runs the built queries against it,
        /// logging every query and its top hits to <paramref name="out"/>.
        /// </summary>
        /// <param name="random"> randomness source for the mock analyzer. </param>
        /// <param name="out"> destination writer whose output callers compare between runs. </param>
        /// <param name="useCompoundFile"> if true, force compound-file segments; otherwise forbid them. </param>
        private void DoTestSearch(Random random, PrintWriter @out, bool useCompoundFile)
        {
            Directory directory = newDirectory();
            Analyzer analyzer = new MockAnalyzer(random);
            IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
            MergePolicy mergePolicy = conf.MergePolicy;
            mergePolicy.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
            IndexWriter writer = new IndexWriter(directory, conf);

            // Index the fixture documents; the "id" field provides a stable sort tiebreak.
            string[] docs = new string[] {"a b c d e", "a b c d e a b c d e", "a b c d e f g h i j", "a c e", "e c a", "a c e a c e", "a c e a b c"};
            for (int docId = 0; docId < docs.Length; docId++)
            {
                Document d = new Document();
                d.add(newTextField("contents", docs[docId], Field.Store.YES));
                d.add(newStringField("id", "" + docId, Field.Store.NO));
                writer.addDocument(d);
            }
            writer.close();

            IndexReader reader = DirectoryReader.open(directory);
            IndexSearcher searcher = newSearcher(reader);

            // Sort by score, then by id, so the printed order is deterministic.
            Sort sort = new Sort(SortField.FIELD_SCORE, new SortField("id", SortField.Type.INT));

            foreach (Query query in BuildQueries())
            {
                @out.println("Query: " + query.ToString("contents"));
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: query=" + query);
                }

                ScoreDoc[] hits = searcher.search(query, null, 1000, sort).scoreDocs;

                @out.println(hits.Length + " total results");
                // Log at most the first 10 hits per query.
                for (int i = 0; i < hits.Length && i < 10; i++)
                {
                    Document d = searcher.doc(hits[i].doc);
                    @out.println(i + " " + hits[i].score + " " + d.get("contents"));
                }
            }
            reader.close();
            directory.close();
        }
Example #36
0
        /// <summary>
        /// Write a list of StateCU_BlaneyCriddle to an opened file. </summary>
        /// <param name="data_Vector"> A Vector of StateCU_BlaneyCriddle to write.  May be null or
        /// empty, in which case a curve count of 0 is written after the header. </param>
        /// <param name="out"> output PrintWriter (assumed already open; this method does not close it). </param>
        /// <param name="props"> Properties to control the output.  The optional Precision property
        /// indicates how many digits after the decimal should be printed (default is 3);
        /// the optional Version property, when "10", selects the older Version 10 format
        /// (no BCMethod/ktsw column and -999 default IDs). </param>
        /// <exception cref="IOException"> if an error occurs. </exception>
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: private static void writeVector(java.util.List<StateCU_BlaneyCriddle> data_Vector, java.io.PrintWriter out, RTi.Util.IO.PropList props) throws java.io.IOException
        private static void writeVector(IList <StateCU_BlaneyCriddle> data_Vector, PrintWriter @out, PropList props)
        {
            int    i, j;
            string cmnt = "#>";   // Indicator for "ignored" comment lines in the header.
            // Missing data are handled by formatting all as strings (blank if necessary).
            bool version10 = false;     // Indicate if old Version 10 format is written

            if (props == null)
            {
                // Use an empty property list so the getValue() calls below are safe.
                props = new PropList("StateCU_BlaneyCriddle");
            }
            string Precision = props.getValue("Precision");
            string Version   = props.getValue("Version");

            if (!string.ReferenceEquals(Version, null) && Version.Equals("10"))
            {
                // Version 10 is an older version.
                version10 = true;
            }

            // Digits after the decimal for coefficient values; default 3.
            int Precision_int = 3;

            if ((!string.ReferenceEquals(Precision, null)) && StringUtil.isInteger(Precision))
            {
                Precision_int = StringUtil.atoi(Precision);
            }

            // Write the fixed header comment block describing the file's record formats.
            @out.println(cmnt);
            @out.println(cmnt + "  StateCU Blaney-Criddle Crop Coefficient (KBC) File");
            @out.println(cmnt);
            @out.println(cmnt + "  Record 1 format (a80)");
            @out.println(cmnt);
            @out.println(cmnt + "  Title     remark:  Title");
            @out.println(cmnt);
            @out.println(cmnt + "  Record 2 format (free format)");
            @out.println(cmnt);
            @out.println(cmnt + "  NumCurves     nc:  Number of crop coefficient curves");
            @out.println(cmnt);
            @out.println(cmnt + "  Record 3 format (free format)");
            @out.println(cmnt);
            @out.println(cmnt + "  ID            id:  Crop number (not used by StateCU)");
            @out.println(cmnt + "  CropName   cropn:  Crop name (e.g., ALFALFA)");
            @out.println(cmnt + "  CurveType   flag:  Growth curve type");
            @out.println(cmnt + "                     Day = perennial; specify 25 values");
            @out.println(cmnt + "                           for start, middle, end of month");
            @out.println(cmnt + "                     Percent = annual; specify 21 values");
            @out.println(cmnt + "                           for 0, 5, ..., 100% of season");
            @out.println(cmnt);
            if (!version10)
            {
                // Include newer format information...
                @out.println(cmnt + "  BCMethod    ktsw:  Blaney-Criddle Method");
                @out.println(cmnt + "                     0 = SCS Modified Blaney-Criddle");
                @out.println(cmnt + "                     1 = Original Blaney-Criddle");
                @out.println(cmnt + "                     2 = Modifed Blaney-Criddle w/ Elev. Adj.");
                @out.println(cmnt + "                     3 = Original Blaney-Criddle w/ Elev. Adj.");
                @out.println(cmnt + "                     4 = Pochop");
                @out.println(cmnt);
            }
            @out.println(cmnt + "  Record 4 format (free format)");
            @out.println(cmnt);
            @out.println(cmnt + "Position     nckca:  Percent (0 to 100) of growing season for annual crop");
            @out.println(cmnt + "             nckcp:  Day of year (1 to 366) for perennial crop");
            @out.println(cmnt + "Coeff         ckca:  Crop coefficient for annual crop");
            @out.println(cmnt + "         OR   ckcp:  Crop coefficient for perennial crop");
            @out.println(cmnt);
            @out.println(cmnt + "Title...");
            @out.println(cmnt + "NumCurves");
            @out.println(cmnt + "ID CropName CurveType");
            @out.println(cmnt + "Position Coeff");
            @out.println(cmnt + "----------------------------");
            @out.println(cmnt + "EndHeader");
            @out.println("Crop Coefficient Curves for Blaney-Criddle");

            // Record 2: number of curves (0 when the input list is null/empty).
            int num = 0;

            if (data_Vector != null)
            {
                num = data_Vector.Count;
            }
            @out.println(num);
            StateCU_BlaneyCriddle kbc = null;

            int[]    nckca        = null;    // Annual curve positions (percent of season).
            int[]    nckcp        = null;    // Perennial curve positions (day of year).
            double[] ckca         = null;    // Annual crop coefficients.
            double[] ckcp         = null;    // Perennial crop coefficients.
            int      size         = 0;
            string   value_format = "%9." + Precision_int + "f";    // e.g. "%9.3f"

            for (i = 0; i < num; i++)
            {
                kbc = (StateCU_BlaneyCriddle)data_Vector[i];
                if (kbc == null)
                {
                    // Skip null list entries rather than failing.
                    continue;
                }

                // Just get all the data.  Null arrays are used as a check
                // below to know what data to output...
                nckca = kbc.getNckca();
                nckcp = kbc.getNckcp();
                ckca  = kbc.getCkca();
                ckcp  = kbc.getCkcp();

                // Do not truncate the name to 20 characters if version 10 because
                // doing so may result in arbitrary cut of the current crop names and
                // result in different output from old anyhow.
                string name = kbc.getName();
                // Since free format, the ID must always have something.  If
                // we don't know, put -999...
                string id = "" + (i + 1);         // Default to sequential number
                if (version10)
                {
                    // Previously used -999
                    id = "-999";
                }
                if (!StateCU_Util.isMissing(kbc.getID()))
                {
                    // Changes elsewhere impact this so also use -999 unless it is a number
                    if (StringUtil.isInteger(kbc.getID()))
                    {
                        id = "" + kbc.getID();
                    }
                    else
                    {
                        id = "-999";
                    }
                    // Can't use the crop name because StateCU expects a number (?)
                    //id = kbc.getID();
                }
                // Output based on the version because file comparisons may be done when verifying files.
                if (version10)
                {
                    // No ktsw...
                    @out.println(id + " " + name + " " + kbc.getFlag());
                }
                else
                {
                    // With ktsw, but OK if blank.
                    @out.println(id + " " + name + " " + kbc.getFlag() + " " + kbc.getKtsw());
                }

                // NOTE(review): assumes each curve has either annual (nckca) or perennial
                // (nckcp) positions set; if both are null the nckcp.Length access below
                // throws a NullReferenceException - confirm the data model guarantees one.
                if (nckca != null)
                {
                    size = nckca.Length;
                }
                else
                {
                    size = nckcp.Length;
                }
                for (j = 0; j < size; j++)
                {
                    if (nckca != null)
                    {
                        // Print annual curve (Percent)...
                        @out.println(StringUtil.formatString(nckca[j], "%-3d") + StringUtil.formatString(ckca[j], value_format));
                    }
                    else
                    {
                        // Print perennial curve (Day)...
                        @out.println(StringUtil.formatString((int)nckcp[j], "%-3d") + StringUtil.formatString(ckcp[j], value_format));
                    }
                }
            }
        }
Example #37
0
        /// <summary>Run the scoring metric on guess/gold input.</summary>
        /// <remarks>
        /// Run the scoring metric on guess/gold input. This method performs "Collinization."
        /// The default language is English.
        /// </remarks>
        /// <param name="args">Option flags followed by the gold and guess treebank paths.</param>
        public static void Main(string[] args)
        {
            if (args.Length < minArgs)
            {
                System.Console.Out.WriteLine(usage.ToString());
                System.Environment.Exit(-1);
            }
            ITreebankLangParserParams tlpp = new EnglishTreebankParserParams();
            int    maxGoldYield            = int.MaxValue;
            bool   Verbose   = false;
            string encoding  = "UTF-8";
            string guessFile = null;
            string goldFile  = null;
            IDictionary <string, string[]> argsMap = StringUtils.ArgsToMap(args, optionArgDefs);

            // Process option flags.  Non-option (positional) arguments live under the
            // null key and are handled after this loop.
            foreach (KeyValuePair <string, string[]> opt in argsMap)
            {
                if (opt.Key == null)
                {
                    continue;
                }
                if (opt.Key.Equals("-l"))
                {
                    Language lang = Language.ValueOf(opt.Value[0].Trim());
                    tlpp = lang.@params;
                }
                else if (opt.Key.Equals("-y"))
                {
                    maxGoldYield = System.Convert.ToInt32(opt.Value[0].Trim());
                }
                else if (opt.Key.Equals("-v"))
                {
                    Verbose = true;
                }
                else if (opt.Key.Equals("-c"))
                {
                    Edu.Stanford.Nlp.Parser.Metrics.TaggingEval.doCatLevelEval = true;
                }
                else if (opt.Key.Equals("-e"))
                {
                    encoding = opt.Value[0];
                }
                else
                {
                    // Unknown option.
                    log.Info(usage.ToString());
                    System.Environment.Exit(-1);
                }
            }
            // BUG FIX: extract the positional arguments AFTER the option loop.  The
            // original code did this inside the loop body, so when no option flags were
            // supplied the only map entry was the null key (skipped by 'continue') and
            // goldFile/guessFile were left null, causing LoadPath(null) below.
            //Non-option arguments located at key null
            string[] rest = argsMap[null];
            if (rest == null || rest.Length < minArgs)
            {
                log.Info(usage.ToString());
                System.Environment.Exit(-1);
            }
            goldFile  = rest[0];
            guessFile = rest[1];

            tlpp.SetInputEncoding(encoding);
            PrintWriter pwOut         = tlpp.Pw();
            Treebank    guessTreebank = tlpp.DiskTreebank();

            guessTreebank.LoadPath(guessFile);
            pwOut.Println("GUESS TREEBANK:");
            pwOut.Println(guessTreebank.TextualSummary());
            Treebank goldTreebank = tlpp.DiskTreebank();

            goldTreebank.LoadPath(goldFile);
            pwOut.Println("GOLD TREEBANK:");
            pwOut.Println(goldTreebank.TextualSummary());
            Edu.Stanford.Nlp.Parser.Metrics.TaggingEval metric = new Edu.Stanford.Nlp.Parser.Metrics.TaggingEval("Tagging LP/LR");
            ITreeTransformer tc = tlpp.Collinizer();
            //The evalb ref implementation assigns status for each tree pair as follows:
            //
            //   0 - Ok (yields match)
            //   1 - length mismatch
            //   2 - null parse e.g. (()).
            //
            //In the cases of 1,2, evalb does not include the tree pair in the LP/LR computation.
            IEnumerator <Tree> goldItr  = goldTreebank.GetEnumerator();
            IEnumerator <Tree> guessItr = guessTreebank.GetEnumerator();
            int goldLineId        = 0;
            int guessLineId       = 0;
            int skippedGuessTrees = 0;

            // Walk the two treebanks in lockstep, evaluating comparable pairs.
            while (guessItr.MoveNext() && goldItr.MoveNext())
            {
                Tree           guessTree  = guessItr.Current;
                IList <ILabel> guessYield = guessTree.Yield();
                guessLineId++;
                Tree           goldTree  = goldItr.Current;
                IList <ILabel> goldYield = goldTree.Yield();
                goldLineId++;
                // Check that we should evaluate this tree
                if (goldYield.Count > maxGoldYield)
                {
                    skippedGuessTrees++;
                    continue;
                }
                // Only trees with equal yields can be evaluated
                if (goldYield.Count != guessYield.Count)
                {
                    pwOut.Printf("Yield mismatch gold: %d tokens vs. guess: %d tokens (lines: gold %d guess %d)%n", goldYield.Count, guessYield.Count, goldLineId, guessLineId);
                    skippedGuessTrees++;
                    continue;
                }
                Tree evalGuess = tc.TransformTree(guessTree);
                Tree evalGold  = tc.TransformTree(goldTree);
                metric.Evaluate(evalGuess, evalGold, ((Verbose) ? pwOut : null));
            }
            // If either iterator still has items the files were different lengths.
            if (guessItr.MoveNext() || goldItr.MoveNext())
            {
                System.Console.Error.Printf("Guess/gold files do not have equal lengths (guess: %d gold: %d)%n.", guessLineId, goldLineId);
            }
            pwOut.Println("================================================================================");
            if (skippedGuessTrees != 0)
            {
                pwOut.Printf("%s %d guess trees\n", "Unable to evaluate", skippedGuessTrees);
            }
            metric.Display(true, pwOut);
            pwOut.Println();
            pwOut.Close();
        }
Example #38
0
        /// <summary>
        /// Writes a list of StateMod_ReservoirAreaCap objects to a list file.  A header
        /// is printed to the top of the file, containing the commands used to generate the
        /// file.  Any strings in the body of the file that contain the field delimiter will be wrapped in "...". </summary>
        /// <param name="filename"> the name of the file to which the data will be written. </param>
        /// <param name="delimiter"> the delimiter to use for separating field values. </param>
        /// <param name="update"> whether to update an existing file, retaining the current
        /// header (true) or to create a new file with a new header. </param>
        /// <param name="data"> the list of objects to write; may be null or empty (only the
        /// header row is written in that case). </param>
        /// <param name="newComments"> new comments to add at the top of the file; may be null. </param>
        /// <exception cref="Exception"> if an error occurs. </exception>
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public static void writeListFile(String filename, String delimiter, boolean update, java.util.List<StateMod_ReservoirAreaCap> data, java.util.List<String> newComments) throws Exception
        public static void writeListFile(string filename, string delimiter, bool update, IList <StateMod_ReservoirAreaCap> data, IList <string> newComments)
        {
            string routine = "StateMod_ReservoirAreaCap.writeListFile";
            int    size    = 0;

            if (data != null)
            {
                size = data.Count;
            }

            // Column definitions for the list file, in output order.
            IList <string> fields = new List <string>();

            fields.Add("ReservoirID");
            fields.Add("Content");
            fields.Add("Area");
            fields.Add("Seepage");
            int fieldCount = fields.Count;

            // Look up the display name and format string for each column.
            string[] names   = new string[fieldCount];
            string[] formats = new string[fieldCount];
            int      comp    = StateMod_Util.COMP_RESERVOIR_AREA_CAP;
            string   s       = null;

            for (int i = 0; i < fieldCount; i++)
            {
                s          = fields[i];
                names[i]   = StateMod_Util.lookupPropValue(comp, "FieldName", s);
                formats[i] = StateMod_Util.lookupPropValue(comp, "Format", s);
            }

            string oldFile = null;

            if (update)
            {
                // Reuse the existing file's header when updating in place.
                oldFile = IOUtil.getPathUsingWorkingDir(filename);
            }

            int         j    = 0;
            PrintWriter @out = null;
            StateMod_ReservoirAreaCap area = null;

            string[]       line = new string[fieldCount];
            IList <string> commentIndicators = new List <string>(1);

            commentIndicators.Add("#");
            IList <string> ignoredCommentIndicators = new List <string>(1);

            ignoredCommentIndicators.Add("#>");
            StringBuilder buffer = new StringBuilder();

            try
            {
                // Add some basic comments at the top of the file.  Do this to a copy of the
                // incoming comments so that they are not modified in the calling code.
                IList <string> newComments2 = null;
                if (newComments == null)
                {
                    newComments2 = new List <string>();
                }
                else
                {
                    newComments2 = new List <string>(newComments);
                }
                newComments2.Insert(0, "");
                newComments2.Insert(1, "StateMod reservoir content/area/seepage data as a delimited list file.");
                newComments2.Insert(2, "See also the associated station, account, precipitation station,");
                newComments2.Insert(3, "evaporation station, and collection files.");
                newComments2.Insert(4, "");
                @out = IOUtil.processFileHeaders(oldFile, IOUtil.getPathUsingWorkingDir(filename), newComments2, commentIndicators, ignoredCommentIndicators, 0);

                // Write the quoted header row of column names.
                for (int i = 0; i < fieldCount; i++)
                {
                    if (i > 0)
                    {
                        buffer.Append(delimiter);
                    }
                    buffer.Append("\"" + names[i] + "\"");
                }

                @out.println(buffer.ToString());

                // Write one delimited row per data object.
                for (int i = 0; i < size; i++)
                {
                    area = data[i];

                    line[0] = StringUtil.formatString(area.getCgoto(), formats[0]).Trim();
                    line[1] = StringUtil.formatString(area.getConten(), formats[1]).Trim();
                    line[2] = StringUtil.formatString(area.getSurarea(), formats[2]).Trim();
                    line[3] = StringUtil.formatString(area.getSeepage(), formats[3]).Trim();

                    buffer = new StringBuilder();
                    for (j = 0; j < fieldCount; j++)
                    {
                        if (j > 0)
                        {
                            buffer.Append(delimiter);
                        }
                        // Quote any value that contains the delimiter so it round-trips.
                        if (line[j].IndexOf(delimiter, StringComparison.Ordinal) > -1)
                        {
                            line[j] = "\"" + line[j] + "\"";
                        }
                        buffer.Append(line[j]);
                    }

                    @out.println(buffer.ToString());
                }
            }
            catch (Exception e)
            {
                Message.printWarning(3, routine, e);
                // Rethrow with 'throw;' (not 'throw e;') so the original stack trace is preserved.
                throw;
            }
            finally
            {
                // Always flush and close the writer, even on error.
                if (@out != null)
                {
                    @out.flush();
                    @out.close();
                }
            }
        }
		/// <summary>
		/// Creates an interceptor that writes its log output to the given writer. </summary>
		/// <param name="out"> destination PrintWriter; stored as-is (this constructor does
		/// not copy, wrap, or close it - the caller retains ownership). </param>
		public PrintWriterLoggerInterceptor(PrintWriter @out)
		{
			this.@out = @out;
		}