/// <summary>Writes out data from this Object.</summary>
/// <param name="w">Data is written to this Writer</param>
public virtual void WriteData(TextWriter w)
{
    // Wrap the raw writer; one rule is emitted per line.
    PrintWriter printWriter = new PrintWriter(w);
    foreach (UnaryRule rule in this)
    {
        printWriter.Println(rule.ToString(index));
    }
    printWriter.Flush();
}
/// <summary>Renders the tree to the writer via the given TreePrint, printing "null" when no tree is supplied.</summary>
private static void DisplayTree(Tree t, TreePrint tp, PrintWriter pw)
{
    if (t != null)
    {
        tp.PrintTree(t, pw);
    }
    else
    {
        pw.Println("null");
    }
}
/// <summary>Reports a tokenization failure for the given lexer text on stderr, including the first one or two offending characters.</summary>
private static void ReportError(string yytext)
{
    try
    {
        PrintWriter writer = new PrintWriter(new OutputStreamWriter(System.Console.Error, "utf-8"), true);
        writer.Println("chtbl.flex tokenization error: \"" + yytext + "\"");
        if (yytext.Length >= 1)
        {
            writer.Println("First character is: " + yytext[0]);
            if (yytext.Length >= 2)
            {
                writer.Println("Second character is: " + yytext[1]);
            }
        }
    }
    catch (UnsupportedEncodingException)
    {
        // Could not even build a utf-8 writer; fall back to a plain message.
        System.Console.Error.WriteLine("chtbl.flex tokenization and encoding present error");
    }
}
/// <summary>Evaluates crossing brackets between the two trees and, when running averages are on, prints the cumulative AvgCB/ZeroCB line.</summary>
public override void Evaluate(Tree t1, Tree t2, PrintWriter pw)
{
    ICollection<Constituent> goldConstituents = (ICollection<Constituent>)MakeObjects(t1);
    ICollection<Constituent> guessConstituents = (ICollection<Constituent>)MakeObjects(t2);
    CheckCrossing(goldConstituents, guessConstituents);
    if (pw != null && runningAverages)
    {
        // Truncate to two decimal places via the int cast, as elsewhere in this file.
        double avgCb = ((int)(10000.0 * cb / num)) / 100.0;
        double zeroCb = ((int)(10000.0 * zeroCB / num)) / 100.0;
        pw.Println("AvgCB: " + avgCb + " ZeroCB: " + zeroCb + " N: " + GetNum());
    }
}
/// <summary>Reads a file from the argument and prints its tokens one per line.</summary>
/// <remarks>
/// Reads a file from the argument and prints its tokens one per line.
/// This is mainly as a testing aid, but it can also be quite useful
/// standalone to turn a corpus into a one token per line file of tokens.
/// Usage:
/// <c>java edu.stanford.nlp.process.WhitespaceTokenizer filename</c>
/// </remarks>
/// <param name="args">Command line arguments</param>
/// <exception cref="System.IO.IOException">If can't open files, etc.</exception>
public static void Main(string[] args)
{
    // "-cr" as the first argument makes end-of-line significant.
    bool eolIsSignificant = args.Length > 0 && args[0].Equals("-cr");
    // The last argument names the input file unless it is the "-cr" flag itself,
    // in which case input comes from stdin.
    Reader reader;
    if (args.Length > 0 && !args[args.Length - 1].Equals("-cr"))
    {
        reader = new InputStreamReader(new FileInputStream(args[args.Length - 1]), "UTF-8");
    }
    else
    {
        reader = new InputStreamReader(Runtime.@in, "UTF-8");
    }
    WhitespaceTokenizer<Word> tokenizer = new WhitespaceTokenizer<Word>(new WordTokenFactory(), reader, eolIsSignificant);
    PrintWriter pw = new PrintWriter(new OutputStreamWriter(System.Console.Out, "UTF-8"), true);
    while (tokenizer.MoveNext())
    {
        Word token = tokenizer.Current;
        if (token.Value().Equals(WhitespaceLexer.Newline))
        {
            pw.Println("***CR***");
        }
        else
        {
            pw.Println(token);
        }
    }
}
/// <summary>
/// Constructs a ParseFiles runner: stores the options, tree printer, and parser
/// factory, wires up the output and error writers, and instantiates whichever
/// likelihood evaluators are enabled in testOptions.evals.
/// </summary>
public ParseFiles(Options op, TreePrint treePrint, LexicalizedParser pqFactory)
{
    this.op = op;
    this.pqFactory = pqFactory;
    this.treePrint = treePrint;
    this.tlp = op.tlpParams.TreebankLanguagePack();
    this.pwOut = op.tlpParams.Pw();
    this.pwErr = op.tlpParams.Pw(System.Console.Error);
    if (op.testOptions.verbose)
    {
        pwErr.Println("Sentence final words are: " + Arrays.AsList(tlp.SentenceFinalPunctuationWords()));
        pwErr.Println("File encoding is: " + op.tlpParams.GetInputEncoding());
    }
    // evaluation setup
    this.runningAverages = bool.ParseBoolean(op.testOptions.evals.GetProperty("runningAverages"));
    this.summary = bool.ParseBoolean(op.testOptions.evals.GetProperty("summary"));
    // Each ScoreEval is only created when its property flag is set; otherwise
    // the field is left null (callers check for null before using them).
    if (bool.ParseBoolean(op.testOptions.evals.GetProperty("pcfgLL")))
    {
        this.pcfgLL = new AbstractEval.ScoreEval("pcfgLL", runningAverages);
    }
    else
    {
        this.pcfgLL = null;
    }
    if (bool.ParseBoolean(op.testOptions.evals.GetProperty("depLL")))
    {
        this.depLL = new AbstractEval.ScoreEval("depLL", runningAverages);
    }
    else
    {
        this.depLL = null;
    }
    if (bool.ParseBoolean(op.testOptions.evals.GetProperty("factLL")))
    {
        this.factLL = new AbstractEval.ScoreEval("factLL", runningAverages);
    }
    else
    {
        this.factLL = null;
    }
}
/// <summary>
/// Writes the document in inline-XML (MUC-style) answer format: each maximal run
/// of a non-"O" answer label is wrapped in an ENAMEX/TIMEX/NUMEX tag whose TYPE
/// attribute is the answer label, while original text and spacing are preserved.
/// </summary>
public virtual void PrintAnswers(IList<CoreLabel> doc, PrintWriter pw)
{
    string prevAnswer = "O";
    string prevClass = string.Empty;
    string afterLast = string.Empty;
    foreach (CoreLabel word in doc)
    {
        // Close the currently open tag when the answer label changes.
        if (!prevAnswer.Equals("O") && !prevAnswer.Equals(word.Get(typeof(CoreAnnotations.AnswerAnnotation))))
        {
            pw.Print("</" + prevClass + ">");
            prevClass = string.Empty;
        }
        pw.Print(word.Get(typeof(CoreAnnotations.BeforeAnnotation)));
        // Open a new tag when a new non-"O" entity begins here.
        if (!word.Get(typeof(CoreAnnotations.AnswerAnnotation)).Equals("O") && !word.Get(typeof(CoreAnnotations.AnswerAnnotation)).Equals(prevAnswer))
        {
            // Map the fine-grained answer label to its MUC supertype tag name.
            if (Sharpen.Runtime.EqualsIgnoreCase(word.Get(typeof(CoreAnnotations.AnswerAnnotation)), "PERSON") || Sharpen.Runtime.EqualsIgnoreCase(word.Get(typeof(CoreAnnotations.AnswerAnnotation)), "ORGANIZATION") || Sharpen.Runtime.EqualsIgnoreCase(word.Get(typeof(CoreAnnotations.AnswerAnnotation)), "LOCATION"))
            {
                prevClass = "ENAMEX";
            }
            else
            {
                if (Sharpen.Runtime.EqualsIgnoreCase(word.Get(typeof(CoreAnnotations.AnswerAnnotation)), "DATE") || Sharpen.Runtime.EqualsIgnoreCase(word.Get(typeof(CoreAnnotations.AnswerAnnotation)), "TIME"))
                {
                    prevClass = "TIMEX";
                }
                else
                {
                    if (Sharpen.Runtime.EqualsIgnoreCase(word.Get(typeof(CoreAnnotations.AnswerAnnotation)), "PERCENT") || Sharpen.Runtime.EqualsIgnoreCase(word.Get(typeof(CoreAnnotations.AnswerAnnotation)), "MONEY"))
                    {
                        prevClass = "NUMEX";
                    }
                    else
                    {
                        // Unrecognized label: log and terminate the process
                        // (pre-existing hard-exit behavior).
                        log.Info("unknown type: " + word.Get(typeof(CoreAnnotations.AnswerAnnotation)));
                        System.Environment.Exit(0);
                    }
                }
            }
            pw.Print("<" + prevClass + " TYPE=\"" + word.Get(typeof(CoreAnnotations.AnswerAnnotation)) + "\">");
        }
        pw.Print(word.Get(typeof(CoreAnnotations.OriginalTextAnnotation)));
        afterLast = word.Get(typeof(CoreAnnotations.AfterAnnotation));
        prevAnswer = word.Get(typeof(CoreAnnotations.AnswerAnnotation));
    }
    // Close any entity still open at the end of the document.
    if (!prevAnswer.Equals("O"))
    {
        pw.Print("</" + prevClass + ">");
        prevClass = string.Empty;
    }
    pw.Println(afterLast);
}
/// <summary>
/// Writes one sentence with true-casing answers applied: each token is upper-cased,
/// lower-cased, or init-capped according to its AnswerAnnotation, and the count of
/// tokens whose answer disagrees with the gold answer is reported on stderr.
/// </summary>
public virtual void PrintAnswers(IList<CoreLabel> doc, PrintWriter @out)
{
    IList<string> sentence = new List<string>();
    int wrong = 0;
    foreach (CoreLabel wi in doc)
    {
        StringBuilder sb = new StringBuilder();
        // Tally disagreements between the predicted answer and the gold answer.
        if (!wi.Get(typeof(CoreAnnotations.AnswerAnnotation)).Equals(wi.Get(typeof(CoreAnnotations.GoldAnswerAnnotation))))
        {
            wrong++;
        }
        // Re-case the token according to the predicted label. Note that "UPPER"
        // is only honored when not running in three-class mode.
        if (!ThreeClasses && wi.Get(typeof(CoreAnnotations.AnswerAnnotation)).Equals("UPPER"))
        {
            sb.Append(wi.Word().ToUpper());
        }
        else
        {
            if (wi.Get(typeof(CoreAnnotations.AnswerAnnotation)).Equals("LOWER"))
            {
                sb.Append(wi.Word().ToLower());
            }
            else
            {
                if (wi.Get(typeof(CoreAnnotations.AnswerAnnotation)).Equals("INIT_UPPER"))
                {
                    sb.Append(Sharpen.Runtime.Substring(wi.Word(), 0, 1).ToUpper()).Append(Sharpen.Runtime.Substring(wi.Word(), 1));
                }
                else
                {
                    if (wi.Get(typeof(CoreAnnotations.AnswerAnnotation)).Equals("O"))
                    {
                        // in this case, if it contains a-z at all, then append "MIX" at the end
                        sb.Append(wi.Word());
                        Matcher alphaMatcher = alphabet.Matcher(wi.Word());
                        if (alphaMatcher.Matches())
                        {
                            sb.Append("/MIX");
                        }
                    }
                }
            }
        }
        // Optionally annotate each token with its gold and guessed labels.
        if (verboseForTrueCasing)
        {
            sb.Append("/GOLD-").Append(wi.Get(typeof(CoreAnnotations.GoldAnswerAnnotation))).Append("/GUESS-").Append(wi.Get(typeof(CoreAnnotations.AnswerAnnotation)));
        }
        sentence.Add(sb.ToString());
    }
    @out.Print(StringUtils.Join(sentence, " "));
    System.Console.Error.Printf("> wrong = %d ; total = %d%n", wrong, doc.Count);
    @out.Println();
}
/// <summary>Recursively pretty-prints this counter one key per line, prefixing each level with the accumulated buffer and indenting nested levels by bufferIncrement.</summary>
private void PrettyPrint(PrintWriter pw, string buffer, string bufferIncrement)
{
    if (depth != 1)
    {
        // Recurse: print each top-level key with its total, then its conditional counter.
        foreach (K key in TopLevelKeySet())
        {
            GeneralizedCounter<K> child = Conditionalize(Arrays.AsList(ErasureUtils.UncheckedCast<K[]>(new object[] { key })));
            pw.Println(buffer + key + "\t" + child.TotalCount());
            child.PrettyPrint(pw, buffer + bufferIncrement, bufferIncrement);
        }
    }
    else
    {
        // Base case: the leaf level holds the raw counts.
        foreach (KeyValuePair<object, double> entry in EntrySet())
        {
            object key = entry.Key;
            double count = entry.Value;
            pw.Println(buffer + key + "\t" + count);
        }
    }
}
/// <summary>
/// Writes the k worst-scoring (gold, guess) tree pairs to four files: the gold
/// trees, the guess trees, and the constituent set differences in each direction.
/// </summary>
private static void EmitSortedTrees(PriorityQueue<Triple<double, Tree, Tree>> queue, int worstKTreesToEmit, string filePrefix)
{
    if (queue == null)
    {
        log.Info("Queue was not initialized properly");
        // Bug fix: previously execution fell through after logging and
        // dereferenced the null queue below.
        return;
    }
    try
    {
        PrintWriter guessPw = new PrintWriter(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filePrefix + ".kworst.guess"), "UTF-8")));
        PrintWriter goldPw = new PrintWriter(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filePrefix + ".kworst.gold"), "UTF-8")));
        IConstituentFactory cFact = new LabeledScoredConstituentFactory();
        PrintWriter guessDepPw = new PrintWriter(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filePrefix + ".kworst.guess.deps"), "UTF-8")));
        PrintWriter goldDepPw = new PrintWriter(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filePrefix + ".kworst.gold.deps"), "UTF-8")));
        System.Console.Out.Printf("F1s of %d worst trees:\n", worstKTreesToEmit);
        for (int i = 0; queue.Peek() != null && i < worstKTreesToEmit; i++)
        {
            Triple<double, Tree, Tree> trees = queue.Poll();
            System.Console.Out.WriteLine(trees.First());
            // Output the trees
            goldPw.Println(trees.Second().ToString());
            guessPw.Println(trees.Third().ToString());
            // Output the set differences: gold constituents missing from the guess...
            ICollection<Constituent> goldDeps = Generics.NewHashSet();
            Sharpen.Collections.AddAll(goldDeps, trees.Second().Constituents(cFact));
            goldDeps.RemoveAll(trees.Third().Constituents(cFact));
            foreach (Constituent c in goldDeps)
            {
                goldDepPw.Print(c.ToString() + " ");
            }
            goldDepPw.Println();
            // ...and guess constituents not present in the gold tree.
            ICollection<Constituent> guessDeps = Generics.NewHashSet();
            Sharpen.Collections.AddAll(guessDeps, trees.Third().Constituents(cFact));
            guessDeps.RemoveAll(trees.Second().Constituents(cFact));
            foreach (Constituent c_1 in guessDeps)
            {
                guessDepPw.Print(c_1.ToString() + " ");
            }
            guessDepPw.Println();
        }
        guessPw.Close();
        goldPw.Close();
        goldDepPw.Close();
        guessDepPw.Close();
    }
    catch (UnsupportedEncodingException e)
    {
        Sharpen.Runtime.PrintStackTrace(e);
    }
    catch (FileNotFoundException e)
    {
        Sharpen.Runtime.PrintStackTrace(e);
    }
}
/// <summary>Recursively pretty-prints this pattern: the node itself at the given indent level, then each child one level deeper.</summary>
private void PrettyPrint(PrintWriter pw, int indent)
{
    for (int level = 0; level < indent; level++)
    {
        pw.Print(" ");
    }
    pw.Println(LocalString());
    foreach (Edu.Stanford.Nlp.Semgraph.Semgrex.SemgrexPattern child in GetChildren())
    {
        child.PrettyPrint(pw, indent + 1);
    }
}
// put a single newline at the end [added 20091024].
/// <summary>
/// Prints each token's original spacing (before/text/after), then a tab and its
/// answer label, one token per line.
/// </summary>
private static void PrintAnswersAsIsTextTsv<In>(IList<In> l, PrintWriter @out)
    where In : ICoreMap
{
    // Bug fix: the loop variable was declared with the non-existent type "IN";
    // C# type parameters are case-sensitive and the declared parameter is "In".
    foreach (In wi in l)
    {
        @out.Print(StringUtils.GetNotNullString(wi.Get(typeof(CoreAnnotations.BeforeAnnotation))));
        @out.Print(StringUtils.GetNotNullString(wi.Get(typeof(CoreAnnotations.TextAnnotation))));
        @out.Print(StringUtils.GetNotNullString(wi.Get(typeof(CoreAnnotations.AfterAnnotation))));
        @out.Print('\t');
        @out.Println(StringUtils.GetNotNullString(wi.Get(typeof(CoreAnnotations.AnswerAnnotation))));
    }
}
/// <summary>
/// Prints tokens as "text/answer" pairs separated by spaces, terminating the
/// line with a newline.
/// </summary>
private static void PrintAnswersTokenizedText<In>(IList<In> l, PrintWriter @out)
    where In : ICoreMap
{
    // Bug fix: the loop variable was declared with the non-existent type "IN";
    // C# type parameters are case-sensitive and the declared parameter is "In".
    foreach (In wi in l)
    {
        @out.Print(StringUtils.GetNotNullString(wi.Get(typeof(CoreAnnotations.TextAnnotation))));
        @out.Print('/');
        @out.Print(StringUtils.GetNotNullString(wi.Get(typeof(CoreAnnotations.AnswerAnnotation))));
        @out.Print(' ');
    }
    @out.Println();
}
/// <summary>
/// Saves the segmenter's lexicon in text grammar format to the given file,
/// gzip-compressing the output when the filename ends in ".gz".
/// </summary>
internal static void SaveSegmenterDataToText(Edu.Stanford.Nlp.Parser.Lexparser.ChineseLexiconAndWordSegmenter cs, string filename)
{
    try
    {
        log.Info("Writing parser in text grammar format to file " + filename);
        OutputStream os;
        if (filename.EndsWith(".gz"))
        {
            // it's faster to do the buffering _outside_ the gzipping as here
            os = new BufferedOutputStream(new GZIPOutputStream(new FileOutputStream(filename)));
        }
        else
        {
            os = new BufferedOutputStream(new FileOutputStream(filename));
        }
        PrintWriter @out = new PrintWriter(os);
        string prefix = "BEGIN ";
        // The OPTIONS section from the original Java is intentionally disabled:
        // out.println(prefix + "OPTIONS");
        // if (pd.pt != null) {
        //   pd.pt.writeData(out);
        // }
        // out.println();
        // log.info(".");
        @out.Println(prefix + "LEXICON");
        if (cs != null)
        {
            cs.WriteData(@out);
        }
        @out.Println();
        log.Info(".");
        @out.Flush();
        @out.Close();
        log.Info("done.");
    }
    catch (IOException e)
    {
        // Best effort: log the failure rather than propagate it.
        log.Info("Trouble saving segmenter data to ASCII format.");
        Sharpen.Runtime.PrintStackTrace(e);
    }
}
/// <summary>Segment input and write to output stream.</summary>
/// <param name="segmenter">the trained segmenter that performs the work</param>
/// <param name="br">source of raw input lines</param>
/// <param name="pwOut">destination for segmented output</param>
/// <param name="nThreads">number of decoding threads; must be positive</param>
/// <returns>input characters processed per second</returns>
private static double Decode(Edu.Stanford.Nlp.International.Arabic.Process.ArabicSegmenter segmenter, BufferedReader br, PrintWriter pwOut, int nThreads)
{
    System.Diagnostics.Debug.Assert(nThreads > 0);
    long nChars = 0;
    long startTime = Runtime.NanoTime();
    if (nThreads > 1)
    {
        // Multithreaded path: feed lines to the wrapper and drain any completed
        // results between submissions (presumably in submission order — see
        // MulticoreWrapper's contract to confirm).
        MulticoreWrapper<string, string> wrapper = new MulticoreWrapper<string, string>(nThreads, segmenter);
        try
        {
            for (string line; (line = br.ReadLine()) != null;)
            {
                nChars += line.Length;
                wrapper.Put(line);
                while (wrapper.Peek())
                {
                    pwOut.Println(wrapper.Poll());
                }
            }
            // Wait for outstanding jobs to finish, then drain the remainder.
            wrapper.Join();
            while (wrapper.Peek())
            {
                pwOut.Println(wrapper.Poll());
            }
        }
        catch (IOException e)
        {
            log.Warn(e);
        }
    }
    else
    {
        // Single-threaded path: the segmenter consumes the reader directly and
        // reports how many characters it processed.
        nChars = segmenter.Segment(br, pwOut);
    }
    long duration = Runtime.NanoTime() - startTime;
    // duration is in nanoseconds; convert to seconds for the throughput rate.
    double charsPerSec = (double)nChars / (duration / 1000000000.0);
    return (charsPerSec);
}
/// <summary>Writes out data from this Object to the Writer w.</summary>
/// <exception cref="System.IO.IOException"/>
public override void WriteData(PrintWriter @out)
{
    // One argument-dependency rule per line; skip wildcard entries and
    // entries whose word id is the -1 sentinel.
    foreach (IntDependency dep in argCounter.KeySet())
    {
        bool isConcrete = dep.head != wildTW && dep.arg != wildTW && dep.head.word != -1 && dep.arg.word != -1;
        if (isConcrete)
        {
            double count = argCounter.GetCount(dep);
            @out.Println(dep.ToString(wordIndex, tagIndex) + " " + count);
        }
    }
    // Marker separating the argument section from the stop section.
    @out.Println("BEGIN_STOP");
    foreach (IntDependency stopDep in stopCounter.KeySet())
    {
        if (stopDep.head.word != -1)
        {
            double count = stopCounter.GetCount(stopDep);
            @out.Println(stopDep.ToString(wordIndex, tagIndex) + " " + count);
        }
    }
    @out.Flush();
}
// this one is all side effects
/// <summary>Scores one example against gold, records the per-example P/R/F1, folds the counts into the cumulative tallies, and prints both the per-example and cumulative lines.</summary>
public virtual void Eval(ICollection<T> guesses, ICollection<T> golds, PrintWriter pw)
{
    double precision = EvalPrecision(guesses, golds);
    lastPrecision = precision;
    double recall = EvalRecall(guesses, golds);
    lastRecall = recall;
    double f1 = (2 * precision * recall) / (precision + recall);
    lastF1 = f1;
    // Accumulate weighted-correct counts so cumulative P/R can be recovered.
    guessed += guesses.Count;
    guessedCorrect += guesses.Count == 0.0 ? 0.0 : precision * guesses.Count;
    gold += golds.Count;
    goldCorrect += golds.Count == 0.0 ? 0.0 : recall * golds.Count;
    pw.Println("This example:\tP:\t" + precision + " R:\t" + recall + " F1:\t" + f1);
    double cumPrecision = guessedCorrect / guessed;
    double cumRecall = goldCorrect / gold;
    double cumF1 = (2 * cumPrecision * cumRecall) / (cumPrecision + cumRecall);
    pw.Println("Cumulative:\tP:\t" + cumPrecision + " R:\t" + cumRecall + " F1:\t" + cumF1);
}
/// <summary>Writes out a lot of redundant data from this Object.</summary>
/// <param name="w">Data is written to this Writer</param>
public virtual void WriteAllData(TextWriter w)
{
    int numStates = index.Size();
    PrintWriter @out = new PrintWriter(w);
    // Each section header is followed by one rule per line.
    @out.Println("Unary ruleIterator");
    for (IEnumerator<UnaryRule> it = RuleIterator(); it.MoveNext();)
    {
        @out.Println(it.Current.ToString(index));
    }
    @out.Println("Unary closedRuleIterator");
    for (IEnumerator<UnaryRule> closedIt = ClosedRuleIterator(); closedIt.MoveNext();)
    {
        @out.Println(closedIt.Current.ToString(index));
    }
    // Per-parent sections: the state name, then its rules indented beneath it.
    @out.Println("Unary rulesWithParentIterator");
    for (int state = 0; state < numStates; state++)
    {
        @out.Println(index.Get(state));
        for (IEnumerator<UnaryRule> byParent = RuleIteratorByParent(state); byParent.MoveNext();)
        {
            @out.Print(" ");
            @out.Println(byParent.Current.ToString(index));
        }
    }
    @out.Println("Unary closedRulesWithParentIterator");
    for (int state = 0; state < numStates; state++)
    {
        @out.Println(index.Get(state));
        for (IEnumerator<UnaryRule> closedByParent = ClosedRuleIteratorByParent(state); closedByParent.MoveNext();)
        {
            @out.Print(" ");
            @out.Println(closedByParent.Current.ToString(index));
        }
    }
    @out.Flush();
}
/// <summary>Prints the summary evalb line: labeled precision, labeled recall, F1, exact-match rate, and sentence count.</summary>
public virtual void Display(bool verbose, PrintWriter pw)
{
    double prec = precision2 / pnum2;
    double rec = recall2 / rnum2;
    // F1 as the harmonic mean of precision and recall.
    double f = 2.0 / (1.0 / prec + 1.0 / rec);
    // Truncate each value to two decimal places via the int cast.
    double lp = ((int)(10000.0 * prec)) / 100.0;
    double lr = ((int)(10000.0 * rec)) / 100.0;
    double f1 = ((int)(10000.0 * f)) / 100.0;
    double exactPct = ((int)(10000.0 * exact / num)) / 100.0;
    pw.Println(str + " summary evalb: LP: " + lp + " LR: " + lr + " F1: " + f1 + " Exact: " + exactPct + " N: " + GetNum());
}
/// <summary>Writes one formatted log line, then a stack trace for each Throwable found among the arguments.</summary>
public virtual void Log(Level loggingLevel, string method, object[] args)
{
    IList throwables = TranslateArguments(args);
    @out.Println(FormatLine(Platform4.Now(), loggingLevel, method, args));
    if (throwables == null)
    {
        return;
    }
    for (IEnumerator it = throwables.GetEnumerator(); it.MoveNext();)
    {
        Exception throwable = (Exception)it.Current;
        Platform4.PrintStackTrace(throwable, @out);
    }
}
/// <summary>Need to sort the counter by feature keys and dump it</summary>
public static void PrintSVMLightFormat(PrintWriter pw, ClassicCounter<int> c, int classNo)
{
    int[] features = Sharpen.Collections.ToArray(c.KeySet(), new int[c.KeySet().Count]);
    Arrays.Sort(features);
    StringBuilder line = new StringBuilder();
    line.Append(classNo).Append(' ');
    foreach (int f in features)
    {
        // SVMlight feature ids are 1-based, hence the +1.
        line.Append(f + 1).Append(':').Append(c.GetCount(f)).Append(' ');
    }
    pw.Println(line.ToString());
}
/// <summary>
/// Returns a human-readable summary of this dataset: the number of data points,
/// the full label set, and the feature type/token counts.
/// </summary>
public virtual string ToSummaryString()
{
    StringWriter sw = new StringWriter();
    PrintWriter pw = new PrintWriter(sw);
    pw.Println("Number of data points: " + Size());
    pw.Print("Number of labels: " + labelIndex.Size() + " [");
    // Bug fix: the old loop called MoveNext() both in the while condition and
    // again to test for a following element (a mistranslation of Java's
    // hasNext()), which advanced the enumerator twice per iteration and
    // skipped every other label. Track the separator explicitly instead.
    IEnumerator<L> iter = labelIndex.GetEnumerator();
    bool first = true;
    while (iter.MoveNext())
    {
        if (!first)
        {
            pw.Print(", ");
        }
        pw.Print(iter.Current);
        first = false;
    }
    pw.Println("]");
    pw.Println("Number of features (Phi(X) types): " + featureIndex.Size());
    pw.Println("Number of active feature types: " + NumFeatureTypes());
    pw.Println("Number of active feature tokens: " + NumFeatureTokens());
    return (sw.ToString());
}
/// <summary>Writes this dataset in SVMlight format: one datum per line as the label index followed by "feature:value" pairs.</summary>
public virtual void WriteSVMLightFormat(PrintWriter writer)
{
    foreach (RVFDatum<L, F> datum in this)
    {
        writer.Print(this.labelIndex.IndexOf(datum.Label()));
        ICounter<F> features = datum.AsFeaturesCounter();
        foreach (F feature in features.KeySet())
        {
            double count = features.GetCount(feature);
            writer.Format(Locale.English, " %s:%f", this.featureIndex.IndexOf(feature), count);
        }
        writer.Println();
    }
}
/// <summary>
/// Modification of printFullFeatureMatrix to correct bugs and print values
/// (Rajat).
/// </summary>
/// <remarks>
/// Modification of printFullFeatureMatrix to correct bugs and print values
/// (Rajat). Prints the full feature matrix in tab-delimited form. These can be
/// BIG matrices, so be careful!
/// </remarks>
public virtual void PrintFullFeatureMatrixWithValues(PrintWriter pw)
{
    string sep = "\t";
    // Header row: one tab-separated column per feature.
    for (int i = 0; i < featureIndex.Size(); i++)
    {
        pw.Print(sep + featureIndex.Get(i));
    }
    pw.Println();
    for (int i_1 = 0; i_1 < size; i_1++)
    {
        // changed labels.length to size
        // Row label: the datum's class label.
        pw.Print(labelIndex.Get(labels[i_1]));
        // changed i to labels[i]
        // Collect this datum's sparse feature-id -> value pairs.
        // NOTE(review): int.Parse on an int looks like a Sharpen artifact for
        // Java's Integer.valueOf boxing — confirm against the Java original.
        IDictionary<int, double> feats = Generics.NewHashMap();
        for (int j = 0; j < data[i_1].Length; j++)
        {
            int feature = data[i_1][j];
            double val = values[i_1][j];
            feats[int.Parse(feature)] = val;
        }
        // Emit a dense row: the stored value where present, a blank cell otherwise.
        for (int j_1 = 0; j_1 < featureIndex.Size(); j_1++)
        {
            if (feats.Contains(int.Parse(j_1)))
            {
                pw.Print(sep + feats[int.Parse(j_1)]);
            }
            else
            {
                pw.Print(sep);
                pw.Print(' ');
            }
        }
        pw.Println();
    }
    pw.Flush();
}
/// <summary>Actually perform the GET request, given all the relevant information (already sanity checked).</summary>
/// <remarks>
/// Actually perform the GET request, given all the relevant information (already sanity checked).
/// This is the meat of the servlet code.
/// </remarks>
/// <param name="out">The writer to write the output to.</param>
/// <param name="q">The query string.</param>
private void DoGet(PrintWriter @out, string q)
{
    // Normalize the query: trim it and make sure it ends with
    // sentence-final punctuation.
    q = q.Trim();
    if (q.Length == 0)
    {
        return;
    }
    char lastChar = q[q.Length - 1];
    if (lastChar != '.' && lastChar != '!' && lastChar != '?')
    {
        q = q + ".";
    }
    Annotation ann = new Annotation(q);
    try
    {
        ICollection<string> entailments = new HashSet<string>();
        ICollection<string> triples = new LinkedHashSet<string>();
        // The primary pipeline must run before the backoff pipeline; the
        // backoff is only consulted when the primary produced no triples.
        RunWithPipeline(pipeline, ann, triples, entailments);
        if (triples.Count == 0)
        {
            RunWithPipeline(backoff, ann, triples, entailments);
        }
        // Emit the JSON response.
        @out.Println("{ " + "\"ok\":true, " + "\"entailments\": [" + StringUtils.Join(entailments, ",") + "], " + "\"triples\": [" + StringUtils.Join(triples, ",") + "], " + "\"msg\": \"\"" + " }");
    }
    catch (Exception t)
    {
        @out.Println("{ok:false, entailments:[], triples:[], msg:" + Quote(t.Message) + "}");
    }
}
/// <summary>
/// Runs the requested time annotator over the "q" request parameter (with the
/// "d" parameter as the reference date) and writes the annotated HTML, or an
/// error/warning message, to the response.
/// </summary>
/// <exception cref="System.IO.IOException"/>
private void AddResults(IHttpServletRequest request, IHttpServletResponse response)
{
    // if we can't handle UTF-8, need to do something like this...
    //String originalQuery = request.getParameter("q");
    //String query = WebappUtil.convertString(originalQuery);
    string query = request.GetParameter("q");
    string dateString = request.GetParameter("d");
    // TODO: this always returns true...
    bool dateError = !pipeline.IsDateOkay(dateString);
    bool includeOffsets = ParseBoolean(request.GetParameter("includeOffsets"));
    PrintWriter @out = response.GetWriter();
    if (dateError)
    {
        @out.Println("<br><br>Warning: unparseable date " + StringEscapeUtils.EscapeHtml4(dateString));
    }
    if (!StringUtils.IsNullOrEmpty(query))
    {
        Properties props = GetTimeAnnotatorProperties(request);
        // Default to the "sutime" annotator when none was requested.
        string annotatorType = request.GetParameter("annotator");
        if (annotatorType == null)
        {
            annotatorType = "sutime";
        }
        IAnnotator timeAnnotator = pipeline.GetTimeAnnotator(annotatorType, props);
        if (timeAnnotator != null)
        {
            Annotation anno = pipeline.Process(query, dateString, timeAnnotator);
            @out.Println("<h3>Annotated Text</h3> <em>(tagged using " + annotatorType + "</em>)");
            DisplayAnnotation(@out, query, anno, includeOffsets);
        }
        else
        {
            @out.Println("<br><br>Error creating annotator for " + StringEscapeUtils.EscapeHtml4(annotatorType));
        }
    }
}
/// <summary>
/// <inheritDoc/>
///
/// </summary>
public override void PrintSparseFeatureMatrix(PrintWriter pw)
{
    string sep = "\t";
    // One row per datum: the label, then each active feature, tab-separated.
    for (int row = 0; row < size; row++)
    {
        pw.Print(labelIndex.Get(labels[row]));
        foreach (int featureId in data[row])
        {
            pw.Print(sep + featureIndex.Get(featureId));
        }
        pw.Println();
    }
}
/// <summary>Dumps the sorted weight vector one "key => value" line at a time, to the writer if given, otherwise to the Redwood log.</summary>
public virtual void PrintWeightVector(PrintWriter writer)
{
    SortedDictionary<string, double> sortedWeights = GetWeightVector();
    foreach (KeyValuePair<string, double> entry in sortedWeights)
    {
        string line = entry.Key + " => " + entry.Value;
        if (writer == null)
        {
            Redwood.Log("scoref.train", line);
        }
        else
        {
            writer.Println(line);
        }
    }
}
/// <summary>Prints up to num keys of the counter in descending-count order, one "key count" pair per line.</summary>
private static void Display<T>(ClassicCounter<T> c, int num, PrintWriter pw)
{
    IList<T> rules = new List<T>(c.KeySet());
    rules.Sort(Counters.ToComparatorDescending(c));
    // Clamp the requested count to the number of available keys.
    int limit = num > rules.Count ? rules.Count : num;
    for (int i = 0; i < limit; i++)
    {
        pw.Println(rules[i] + " " + c.GetCount(rules[i]));
    }
}
/// <summary>Accumulates the parser's best score into the running totals and, when a writer is given, prints it (plus the running average when enabled).</summary>
public virtual void RecordScore(IKBestViterbiParser parser, PrintWriter pw)
{
    double score = parser.GetBestScore();
    totScore += score;
    n++;
    if (pw == null)
    {
        return;
    }
    pw.Print(str + " score: " + nf.Format(score));
    if (runningAverages)
    {
        pw.Print(" average score: " + nf.Format(totScore / n));
    }
    pw.Println();
}
/// <summary>Writes the exception's string form to the writer (note: no stack frames are emitted, only ToString()).</summary>
public static void PrintStackTrace(Exception e, PrintWriter writer)
{
    string description = e.ToString();
    writer.Println(description);
}