/// <summary>
/// Loads word embeddings for the tokens appearing in <paramref name="sentences"/>, caching the
/// trimmed-down set in <paramref name="cacheFilename"/> so the massive source file is only read once.
/// </summary>
/// <param name="cacheFilename">Path of the GZIP-serialized cache file to read, or create if absent.</param>
/// <param name="sentences">Sentences whose tokens select which embeddings are kept.</param>
/// <returns>Map from token string to its embedding vector.</returns>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.TypeLoadException"/>
public virtual IDictionary<string, double[]> GetEmbeddings(string cacheFilename, IList<CoNLLBenchmark.CoNLLSentence> sentences)
{
    File f = new File(cacheFilename);
    IDictionary<string, double[]> trimmedSet;
    if (!f.Exists())
    {
        trimmedSet = new Dictionary<string, double[]>();
        // NOTE(review): source path is hard-coded; confirm "../google-300.txt" is right relative to the working dir
        IDictionary<string, double[]> massiveSet = LoadEmbeddingsFromFile("../google-300.txt");
        log.Info("Got massive embedding set size " + massiveSet.Count);
        // Keep only embeddings for tokens that actually occur in the given sentences.
        foreach (CoNLLBenchmark.CoNLLSentence sentence in sentences)
        {
            foreach (string token in sentence.token)
            {
                if (massiveSet.Contains(token))
                {
                    trimmedSet[token] = massiveSet[token];
                }
            }
        }
        log.Info("Got trimmed embedding set size " + trimmedSet.Count);
        f.CreateNewFile();
        ObjectOutputStream oos = new ObjectOutputStream(new GZIPOutputStream(new FileOutputStream(cacheFilename)));
        try
        {
            oos.WriteObject(trimmedSet);
        }
        finally
        {
            // Close in finally so the file handle is released and the GZIP trailer is
            // flushed even if WriteObject throws (previously the stream leaked).
            oos.Close();
        }
        log.Info("Wrote trimmed set to file");
    }
    else
    {
        ObjectInputStream ois = new ObjectInputStream(new GZIPInputStream(new FileInputStream(cacheFilename)));
        try
        {
            trimmedSet = (IDictionary<string, double[]>)ois.ReadObject();
        }
        finally
        {
            // Previously leaked when ReadObject threw (e.g. corrupt cache file).
            ois.Close();
        }
    }
    return trimmedSet;
}
/// <summary>Loads the model from disk.</summary>
/// <param name="path">The location of model that was saved to disk</param>
/// <param name="entityClassifier">Concrete classifier type passed through to <c>MachineReading.MakeEntityExtractor</c></param>
/// <param name="preferDefaultGazetteer">If true, ignore the serialized gazetteer location and use <c>DefaultPaths.DefaultNflGazetteer</c></param>
/// <exception cref="System.InvalidCastException">if model is the wrong format</exception>
/// <exception cref="System.IO.IOException">
/// if the model file doesn't exist or is otherwise
/// unavailable/incomplete
/// </exception>
/// <exception cref="System.TypeLoadException">this would probably indicate a serious classpath problem</exception>
public static Edu.Stanford.Nlp.IE.Machinereading.BasicEntityExtractor Load(string path, Type entityClassifier, bool preferDefaultGazetteer)
{
    // load the additional arguments
    // try to load the extra file from the CLASSPATH first
    InputStream @is = typeof(Edu.Stanford.Nlp.IE.Machinereading.BasicEntityExtractor).GetClassLoader().GetResourceAsStream(path + ".extra");
    // if not found in the CLASSPATH, load from the file system
    if (@is == null)
    {
        @is = new FileInputStream(path + ".extra");
    }
    ObjectInputStream @in = new ObjectInputStream(@is);
    string gazetteerLocation;
    ICollection<string> annotationsToSkip;
    bool useSubTypes;
    bool useBIO;
    try
    {
        // The reads below are positional and must stay in the exact order the
        // extra arguments were serialized: gazetteer, annotationsToSkip, useSubTypes, useBIO.
        gazetteerLocation = ErasureUtils.UncheckedCast<string>(@in.ReadObject());
        if (preferDefaultGazetteer)
        {
            gazetteerLocation = DefaultPaths.DefaultNflGazetteer;
        }
        annotationsToSkip = ErasureUtils.UncheckedCast<ICollection<string>>(@in.ReadObject());
        useSubTypes = ErasureUtils.UncheckedCast<bool>(@in.ReadObject());
        useBIO = ErasureUtils.UncheckedCast<bool>(@in.ReadObject());
    }
    finally
    {
        // Close in finally so the streams are released even when a read throws
        // (previously both leaked on any deserialization failure).
        @in.Close();
        @is.Close();
    }
    Edu.Stanford.Nlp.IE.Machinereading.BasicEntityExtractor extractor = (Edu.Stanford.Nlp.IE.Machinereading.BasicEntityExtractor)MachineReading.MakeEntityExtractor(entityClassifier, gazetteerLocation);
    // load the CRF classifier (this works from any resource, e.g., classpath or file system)
    extractor.classifier = CRFClassifier.GetClassifier(path);
    // copy the extra arguments
    extractor.annotationsToSkip = annotationsToSkip;
    extractor.useSubTypes = useSubTypes;
    extractor.useBIO = useBIO;
    return extractor;
}
/// <summary>
/// Deserializes a list of trees from the given GZIP-compressed, object-serialized byte array.
/// The payload is a count followed by that many raw tree strings, each parsed with <c>Tree.ValueOf</c>.
/// </summary>
/// <param name="input">GZIP-compressed serialized tree data.</param>
/// <returns>The deserialized trees, with spans set.</returns>
public static IList<Tree> ConvertToTrees(byte[] input)
{
    try
    {
        IList<Tree> output = new List<Tree>();
        ByteArrayInputStream bis = new ByteArrayInputStream(input);
        GZIPInputStream gis = new GZIPInputStream(bis);
        ObjectInputStream ois = new ObjectInputStream(gis);
        try
        {
            int size = ErasureUtils.UncheckedCast<int>(ois.ReadObject());
            for (int i = 0; i < size; ++i)
            {
                // Explicit <string> type argument: matches the <int> call above
                // (the original call had no type argument to infer from).
                string rawTree = ErasureUtils.UncheckedCast<string>(ois.ReadObject());
                Tree tree = Tree.ValueOf(rawTree, trf);
                tree.SetSpans();
                output.Add(tree);
            }
        }
        finally
        {
            // Close in finally so the streams are released even when a read/parse
            // throws (previously they leaked on the TypeLoadException path).
            ois.Close();
            gis.Close();
            bis.Close();
        }
        return output;
    }
    catch (IOException e)
    {
        throw new RuntimeIOException(e);
    }
    catch (TypeLoadException e)
    {
        // was: new Exception(e) — System.Exception has no (Exception) constructor;
        // keep the cause as InnerException instead of dropping it.
        throw new Exception(e.Message, e);
    }
}
/// <summary>
/// Demonstrates that a ColumnDataClassifier serialized with serializeTo and then
/// deserialized makes the same predictions as the original classifier.
/// </summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.TypeLoadException"/>
private static void DemonstrateSerializationColumnDataClassifier()
{
    System.Console.Out.WriteLine();
    System.Console.Out.WriteLine("Demonstrating working with a serialized classifier using serializeTo");
    ColumnDataClassifier cdc = new ColumnDataClassifier(where + "examples/cheese2007.prop");
    cdc.TrainClassifier(where + "examples/cheeseDisease.train");
    // Exhibit serialization and deserialization working. Serialized to bytes in memory for simplicity
    System.Console.Out.WriteLine();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ObjectOutputStream oos = new ObjectOutputStream(baos);
    cdc.SerializeClassifier(oos);
    oos.Close();
    byte[] @object = baos.ToByteArray();
    ByteArrayInputStream bais = new ByteArrayInputStream(@object);
    ObjectInputStream ois = new ObjectInputStream(bais);
    ColumnDataClassifier cdc2 = ColumnDataClassifier.GetClassifier(ois);
    ois.Close();
    // We compare the output of the deserialized classifier cdc2 versus the original one cdc.
    // For both we use a ColumnDataClassifier to convert text lines to examples.
    System.Console.Out.WriteLine("Making predictions with both classifiers");
    foreach (string line in ObjectBank.GetLineIterator(where + "examples/cheeseDisease.test", "utf-8"))
    {
        IDatum<string, string> d = cdc.MakeDatumFromLine(line);
        IDatum<string, string> d2 = cdc2.MakeDatumFromLine(line);
        System.Console.Out.Printf("%s =origi=> %s (%.4f)%n", line, cdc.ClassOf(d), cdc.ScoresOf(d).GetCount(cdc.ClassOf(d)));
        // BUG FIX: the deserialized classifier must score its own datum d2
        // (was cdc2.ScoresOf(d) — a copy-paste slip), so the printed score
        // genuinely comes from the deserialized model.
        System.Console.Out.Printf("%s =deser=> %s (%.4f)%n", line, cdc2.ClassOf(d2), cdc2.ScoresOf(d2).GetCount(cdc2.ClassOf(d2)));
    }
}
/// <summary>
/// Reads a serialized MultinomialLogisticClassifier back from <paramref name="path"/>:
/// a weight matrix followed by the feature index and the label index.
/// </summary>
/// <param name="path">File the classifier was serialized to.</param>
/// <returns>The reconstructed classifier.</returns>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.TypeLoadException"/>
private static Edu.Stanford.Nlp.Classify.MultinomialLogisticClassifier<LL, FF> Load<Ll, Ff>(string path)
{
    System.Console.Error.Write("Loading classifier from " + path + "... ");
    ObjectInputStream stream = new ObjectInputStream(new FileInputStream(path));
    // Objects must be read back in exactly the order they were written:
    // weights, then feature index, then label index.
    double[][] weights = ErasureUtils.UncheckedCast(stream.ReadObject());
    IIndex<FF> featureIndex = ErasureUtils.UncheckedCast(stream.ReadObject());
    IIndex<LL> labelIndex = ErasureUtils.UncheckedCast(stream.ReadObject());
    stream.Close();
    System.Console.Error.WriteLine("done.");
    return new Edu.Stanford.Nlp.Classify.MultinomialLogisticClassifier<LL, FF>(weights, featureIndex, labelIndex);
}
/// <summary>
/// Round-trips two objects (an int and a string) through a serialized
/// object stream on disk and checks they come back equal.
/// </summary>
public virtual void TestReadWriteStreamFromString()
{
    string path = dirPath + "/objs.obj";
    ObjectOutputStream writer = IOUtils.WriteStreamFromString(path);
    writer.WriteObject(int.Parse(42));
    writer.WriteObject("forty two");
    writer.Close();
    ObjectInputStream reader = IOUtils.ReadStreamFromString(path);
    // Objects come back in the order they were written.
    object first = reader.ReadObject();
    object second = reader.ReadObject();
    NUnit.Framework.Assert.IsTrue(int.Parse(42).Equals(first));
    NUnit.Framework.Assert.IsTrue("forty two".Equals(second));
    reader.Close();
}
/// <summary>
/// Prints out the pair of
/// <c>Extractors</c>
/// objects found in the
/// file that is the first and only argument.
/// </summary>
/// <param name="args">
/// Filename of extractors file (standardly written with
/// <c>.ex</c>
/// extension)
/// </param>
public static void Main(string[] args)
{
    try
    {
        ObjectInputStream @in = new ObjectInputStream(new FileInputStream(args[0]));
        try
        {
            // The file holds two serialized Extractors objects, in this order:
            // extractors for all words, then extractors for rare words.
            Edu.Stanford.Nlp.Tagger.Maxent.Extractors extrs = (Edu.Stanford.Nlp.Tagger.Maxent.Extractors)@in.ReadObject();
            Edu.Stanford.Nlp.Tagger.Maxent.Extractors extrsRare = (Edu.Stanford.Nlp.Tagger.Maxent.Extractors)@in.ReadObject();
            System.Console.Out.WriteLine("All words: " + extrs);
            System.Console.Out.WriteLine("Rare words: " + extrsRare);
        }
        finally
        {
            // Close in finally so the stream is released even if reading throws.
            @in.Close();
        }
    }
    catch (Exception e)
    {
        // was: new Exception(e) — System.Exception has no (Exception) constructor;
        // keep the original message and the cause as InnerException.
        throw new Exception(e.Message, e);
    }
}
/// <summary>
/// Retrieves the object stored under <paramref name="key"/> in the shared preferences:
/// the stored string is Base64-decoded and deserialized back into a <typeparamref name="T"/>.
/// Returns null when the key is absent or deserialization fails; failures are only logged.
/// </summary>
public T GetObject<T>(string key) where T : Java.Lang.Object
{
    // Guard clause: nothing stored under this key.
    if (!mSP.Contains(key))
    {
        return null;
    }
    string encoded = mSP.GetString(key, null);
    byte[] raw = Base64.Decode(encoded, Base64Flags.Default);
    System.IO.MemoryStream byteStream = new System.IO.MemoryStream(raw);
    ObjectInputStream objectStream = null;
    try
    {
        objectStream = new ObjectInputStream(byteStream);
        return (T)objectStream.ReadObject();
    }
    catch (StreamCorruptedException e)
    {
        e.PrintStackTrace();
    }
    catch (IOException e)
    {
        e.PrintStackTrace();
    }
    catch (ClassNotFoundException e)
    {
        e.PrintStackTrace();
    }
    finally
    {
        // Best-effort cleanup; a close failure is logged but never propagated.
        try
        {
            if (byteStream != null)
            {
                byteStream.Close();
            }
            if (objectStream != null)
            {
                objectStream.Close();
            }
        }
        catch (IOException e)
        {
            e.PrintStackTrace();
        }
    }
    return null;
}
/// <summary>
/// Loads the serialized quote-to-mention classifier from <paramref name="modelPath"/>.
/// </summary>
/// <param name="modelPath">Path (or resource string understood by IOUtils) of the serialized model.</param>
public ExtractQuotesClassifier(string modelPath)
{
    try
    {
        ObjectInputStream si = IOUtils.ReadStreamFromString(modelPath);
        quoteToMentionClassifier = (IClassifier<string, string>)si.ReadObject();
        si.Close();
    }
    catch (FileNotFoundException e)
    {
        Sharpen.Runtime.PrintStackTrace(e);
        // was: bare "throw new Exception()" — callers got neither a message nor the cause.
        throw new Exception("Quote-to-mention classifier model not found: " + modelPath, e);
    }
    catch (TypeLoadException e)
    {
        Sharpen.Runtime.PrintStackTrace(e);
        throw new Exception("Bad serialized quote-to-mention classifier model: " + modelPath, e);
    }
    catch (IOException e)
    {
        Sharpen.Runtime.PrintStackTrace(e);
        throw new Exception("Could not read quote-to-mention classifier model: " + modelPath, e);
    }
}
/// <summary>
/// Command-line driver: optionally prints treebank statistics, trains or loads a
/// (Chinese character-based) lexicon and/or a lexicalized parser, then evaluates
/// segmentation/tagging/parsing against a gold test treebank.
/// </summary>
/// <param name="args">
/// Flags and their arguments; see <c>flagsToNumArgs</c> below for the flags that
/// consume arguments (-parser, -lex, -test, -out, -stats, -lengthPenalty,
/// -penaltyType, -maxLength) plus boolean flags (-eval, -norm, -combo, -rad, -annotate).
/// </param>
/// <exception cref="System.IO.IOException"/>
public static void Main(string[] args)
{
    // How many arguments each flag consumes, for StringUtils.ArgsToMap.
    IDictionary<string, int> flagsToNumArgs = Generics.NewHashMap();
    flagsToNumArgs["-parser"] = int.Parse(3);
    flagsToNumArgs["-lex"] = int.Parse(3);
    flagsToNumArgs["-test"] = int.Parse(2);
    flagsToNumArgs["-out"] = int.Parse(1);
    flagsToNumArgs["-lengthPenalty"] = int.Parse(1);
    flagsToNumArgs["-penaltyType"] = int.Parse(1);
    flagsToNumArgs["-maxLength"] = int.Parse(1);
    flagsToNumArgs["-stats"] = int.Parse(2);
    IDictionary<string, string[]> argMap = StringUtils.ArgsToMap(args, flagsToNumArgs);
    bool eval = argMap.Contains("-eval");
    // Output writer: GB18030-encoded file given by -out, or null (no tree/sentence output).
    PrintWriter pw = null;
    if (argMap.Contains("-out"))
    {
        pw = new PrintWriter(new OutputStreamWriter(new FileOutputStream((argMap["-out"])[0]), "GB18030"), true);
    }
    log.Info("ChineseCharacterBasedLexicon called with args:");
    ChineseTreebankParserParams ctpp = new ChineseTreebankParserParams();
    for (int i = 0; i < args.Length; i++)
    {
        ctpp.SetOptionFlag(args, i);
        log.Info(" " + args[i]);
    }
    log.Info();
    Options op = new Options(ctpp);
    // -stats mode: read (and optionally annotate) a treebank, print stats, and exit.
    if (argMap.Contains("-stats"))
    {
        string[] statArgs = (argMap["-stats"]);
        MemoryTreebank rawTrainTreebank = op.tlpParams.MemoryTreebank();
        IFileFilter trainFilt = new NumberRangesFileFilter(statArgs[1], false);
        rawTrainTreebank.LoadPath(new File(statArgs[0]), trainFilt);
        log.Info("Done reading trees.");
        MemoryTreebank trainTreebank;
        if (argMap.Contains("-annotate"))
        {
            trainTreebank = new MemoryTreebank();
            TreeAnnotator annotator = new TreeAnnotator(ctpp.HeadFinder(), ctpp, op);
            foreach (Tree tree in rawTrainTreebank)
            {
                trainTreebank.Add(annotator.TransformTree(tree));
            }
            log.Info("Done annotating trees.");
        }
        else
        {
            trainTreebank = rawTrainTreebank;
        }
        PrintStats(trainTreebank, pw);
        System.Environment.Exit(0);
    }
    int maxLength = 1000000;
    // Test.verbose = true;
    if (argMap.Contains("-norm"))
    {
        op.testOptions.lengthNormalization = true;
    }
    if (argMap.Contains("-maxLength"))
    {
        maxLength = System.Convert.ToInt32((argMap["-maxLength"])[0]);
    }
    // NOTE(review): this unconditionally overrides testOptions.maxLength regardless
    // of -maxLength (which only affects the local maxLength skip threshold) — confirm intended.
    op.testOptions.maxLength = 120;
    bool combo = argMap.Contains("-combo");
    if (combo)
    {
        ctpp.useCharacterBasedLexicon = true;
        op.testOptions.maxSpanForTags = 10;
        op.doDep = false;
        op.dcTags = false;
    }
    LexicalizedParser lp = null;
    ILexicon lex = null;
    // -parser mode: train from a treebank (2-3 args) or load a serialized parser (1 arg).
    if (argMap.Contains("-parser"))
    {
        string[] parserArgs = (argMap["-parser"]);
        if (parserArgs.Length > 1)
        {
            IFileFilter trainFilt = new NumberRangesFileFilter(parserArgs[1], false);
            lp = LexicalizedParser.TrainFromTreebank(parserArgs[0], trainFilt, op);
            if (parserArgs.Length == 3)
            {
                // Optional third argument: where to serialize the trained parser.
                string filename = parserArgs[2];
                log.Info("Writing parser in serialized format to file " + filename + " ");
                System.Console.Error.Flush();
                ObjectOutputStream @out = IOUtils.WriteStreamFromString(filename);
                @out.WriteObject(lp);
                @out.Close();
                log.Info("done.");
            }
        }
        else
        {
            string parserFile = parserArgs[0];
            lp = LexicalizedParser.LoadModel(parserFile, op);
        }
        // Loaded/trained parser supplies the lexicon and (possibly different) options.
        lex = lp.GetLexicon();
        op = lp.GetOp();
        ctpp = (ChineseTreebankParserParams)op.tlpParams;
    }
    if (argMap.Contains("-rad"))
    {
        ctpp.useUnknownCharacterModel = true;
    }
    if (argMap.Contains("-lengthPenalty"))
    {
        ctpp.lengthPenalty = double.Parse((argMap["-lengthPenalty"])[0]);
    }
    if (argMap.Contains("-penaltyType"))
    {
        ctpp.penaltyType = System.Convert.ToInt32((argMap["-penaltyType"])[0]);
    }
    // -lex mode: train a lexicon from a treebank (2-3 args) or load a serialized one (1 arg).
    if (argMap.Contains("-lex"))
    {
        string[] lexArgs = (argMap["-lex"]);
        if (lexArgs.Length > 1)
        {
            IIndex<string> wordIndex = new HashIndex<string>();
            IIndex<string> tagIndex = new HashIndex<string>();
            lex = ctpp.Lex(op, wordIndex, tagIndex);
            MemoryTreebank rawTrainTreebank = op.tlpParams.MemoryTreebank();
            IFileFilter trainFilt = new NumberRangesFileFilter(lexArgs[1], false);
            rawTrainTreebank.LoadPath(new File(lexArgs[0]), trainFilt);
            log.Info("Done reading trees.");
            MemoryTreebank trainTreebank;
            if (argMap.Contains("-annotate"))
            {
                trainTreebank = new MemoryTreebank();
                TreeAnnotator annotator = new TreeAnnotator(ctpp.HeadFinder(), ctpp, op);
                foreach (Tree tree in rawTrainTreebank)
                {
                    tree = annotator.TransformTree(tree);
                    trainTreebank.Add(tree);
                }
                log.Info("Done annotating trees.");
            }
            else
            {
                trainTreebank = rawTrainTreebank;
            }
            lex.InitializeTraining(trainTreebank.Count);
            lex.Train(trainTreebank);
            lex.FinishTraining();
            log.Info("Done training lexicon.");
            if (lexArgs.Length == 3)
            {
                // NOTE(review): the ternary is redundant inside "Length == 3" — left as written.
                string filename = lexArgs.Length == 3 ? lexArgs[2] : "parsers/chineseCharLex.ser.gz";
                log.Info("Writing lexicon in serialized format to file " + filename + " ");
                System.Console.Error.Flush();
                ObjectOutputStream @out = IOUtils.WriteStreamFromString(filename);
                @out.WriteObject(lex);
                @out.Close();
                log.Info("done.");
            }
        }
        else
        {
            string lexFile = lexArgs.Length == 1 ? lexArgs[0] : "parsers/chineseCharLex.ser.gz";
            log.Info("Reading Lexicon from file " + lexFile);
            ObjectInputStream @in = IOUtils.ReadStreamFromString(lexFile);
            try
            {
                lex = (ILexicon)@in.ReadObject();
            }
            catch (TypeLoadException)
            {
                throw new Exception("Bad serialized file: " + lexFile);
            }
            @in.Close();
        }
    }
    // -test mode: segment and/or parse each gold sentence, optionally printing and evaluating.
    if (argMap.Contains("-test"))
    {
        bool segmentWords = ctpp.segment;
        bool parse = lp != null;
        System.Diagnostics.Debug.Assert((parse || segmentWords));
        // WordCatConstituent.collinizeWords = argMap.containsKey("-collinizeWords");
        // WordCatConstituent.collinizeTags = argMap.containsKey("-collinizeTags");
        IWordSegmenter seg = null;
        if (segmentWords)
        {
            // When segmenting, the lexicon doubles as the word segmenter.
            seg = (IWordSegmenter)lex;
        }
        string[] testArgs = (argMap["-test"]);
        MemoryTreebank testTreebank = op.tlpParams.MemoryTreebank();
        IFileFilter testFilt = new NumberRangesFileFilter(testArgs[1], false);
        testTreebank.LoadPath(new File(testArgs[0]), testFilt);
        ITreeTransformer subcategoryStripper = op.tlpParams.SubcategoryStripper();
        ITreeTransformer collinizer = ctpp.Collinizer();
        WordCatEquivalenceClasser eqclass = new WordCatEquivalenceClasser();
        WordCatEqualityChecker eqcheck = new WordCatEqualityChecker();
        EquivalenceClassEval basicEval = new EquivalenceClassEval(eqclass, eqcheck, "basic");
        EquivalenceClassEval collinsEval = new EquivalenceClassEval(eqclass, eqcheck, "collinized");
        // Which bracket types to score depends on whether we segment and/or parse.
        IList<string> evalTypes = new List<string>(3);
        bool goodPOS = false;
        if (segmentWords)
        {
            evalTypes.Add(WordCatConstituent.wordType);
            if (ctpp.segmentMarkov && !parse)
            {
                evalTypes.Add(WordCatConstituent.tagType);
                goodPOS = true;
            }
        }
        if (parse)
        {
            evalTypes.Add(WordCatConstituent.tagType);
            evalTypes.Add(WordCatConstituent.catType);
            if (combo)
            {
                evalTypes.Add(WordCatConstituent.wordType);
                goodPOS = true;
            }
        }
        TreeToBracketProcessor proc = new TreeToBracketProcessor(evalTypes);
        log.Info("Testing...");
        foreach (Tree goldTop in testTreebank)
        {
            Tree gold = goldTop.FirstChild();
            IList<IHasWord> goldSentence = gold.YieldHasWord();
            if (goldSentence.Count > maxLength)
            {
                log.Info("Skipping sentence; too long: " + goldSentence.Count);
                continue;
            }
            else
            {
                log.Info("Processing sentence; length: " + goldSentence.Count);
            }
            IList<IHasWord> s;
            if (segmentWords)
            {
                // Re-segment the raw character string of the gold sentence.
                StringBuilder goldCharBuf = new StringBuilder();
                foreach (IHasWord aGoldSentence in goldSentence)
                {
                    StringLabel word = (StringLabel)aGoldSentence;
                    goldCharBuf.Append(word.Value());
                }
                string goldChars = goldCharBuf.ToString();
                s = seg.Segment(goldChars);
            }
            else
            {
                s = goldSentence;
            }
            Tree tree;
            if (parse)
            {
                tree = lp.ParseTree(s);
                if (tree == null)
                {
                    throw new Exception("PARSER RETURNED NULL!!!");
                }
            }
            else
            {
                // No parser: build a flat tree over the (possibly re-segmented) words.
                tree = Edu.Stanford.Nlp.Trees.Trees.ToFlatTree(s);
                tree = subcategoryStripper.TransformTree(tree);
            }
            if (pw != null)
            {
                if (parse)
                {
                    tree.PennPrint(pw);
                }
                else
                {
                    // NOTE(review): Current is read before the first MoveNext(); in .NET
                    // enumerators Current is undefined before MoveNext() — likely a
                    // Java-iterator conversion bug (next() vs hasNext()). Confirm and fix upstream.
                    IEnumerator sentIter = s.GetEnumerator();
                    for (; ;)
                    {
                        Word word = (Word)sentIter.Current;
                        pw.Print(word.Word());
                        if (sentIter.MoveNext())
                        {
                            pw.Print(" ");
                        }
                        else
                        {
                            break;
                        }
                    }
                }
                pw.Println();
            }
            if (eval)
            {
                // Score plain brackets, then Collins-style (collinized) brackets.
                ICollection ourBrackets;
                ICollection goldBrackets;
                ourBrackets = proc.AllBrackets(tree);
                goldBrackets = proc.AllBrackets(gold);
                if (goodPOS)
                {
                    Sharpen.Collections.AddAll(ourBrackets, TreeToBracketProcessor.CommonWordTagTypeBrackets(tree, gold));
                    Sharpen.Collections.AddAll(goldBrackets, TreeToBracketProcessor.CommonWordTagTypeBrackets(gold, tree));
                }
                basicEval.Eval(ourBrackets, goldBrackets);
                System.Console.Out.WriteLine("\nScores:");
                basicEval.DisplayLast();
                Tree collinsTree = collinizer.TransformTree(tree);
                Tree collinsGold = collinizer.TransformTree(gold);
                ourBrackets = proc.AllBrackets(collinsTree);
                goldBrackets = proc.AllBrackets(collinsGold);
                if (goodPOS)
                {
                    Sharpen.Collections.AddAll(ourBrackets, TreeToBracketProcessor.CommonWordTagTypeBrackets(collinsTree, collinsGold));
                    Sharpen.Collections.AddAll(goldBrackets, TreeToBracketProcessor.CommonWordTagTypeBrackets(collinsGold, collinsTree));
                }
                collinsEval.Eval(ourBrackets, goldBrackets);
                System.Console.Out.WriteLine("\nCollinized scores:");
                collinsEval.DisplayLast();
                System.Console.Out.WriteLine();
            }
        }
        if (eval)
        {
            // Aggregate scores over all sentences.
            basicEval.Display();
            System.Console.Out.WriteLine();
            collinsEval.Display();
        }
    }
}
/// <summary>
/// Historical "main"-style regression test exercising a counter's basic operations:
/// set/increment, min/max/argmin/argmax, comparator sorting, in-place arithmetic
/// (add/subtract/divide/multiply), keysAbove/At/Below, retainNonZeros, and finally
/// serialization round-tripping. The assertions are order-dependent: each step
/// builds on the counter state left by the previous one.
/// </summary>
public virtual void TestClassicCounterHistoricalMain()
{
    // Start state: p=0, q=2; snapshot as small_c for later AddAll/SubtractInPlace steps.
    c.SetCount("p", 0);
    c.SetCount("q", 2);
    ClassicCounter<string> small_c = new ClassicCounter<string>(c);
    ICounter<string> c7 = c.GetFactory().Create();
    c7.AddAll(c);
    NUnit.Framework.Assert.AreEqual(c.TotalCount(), 2.0);
    c.IncrementCount("p");
    NUnit.Framework.Assert.AreEqual(c.TotalCount(), 3.0);
    c.IncrementCount("p", 2.0);
    NUnit.Framework.Assert.AreEqual(Counters.Min(c), 2.0);
    NUnit.Framework.Assert.AreEqual(Counters.Argmin(c), "q");
    // Now p is p=3.0, q=2.0
    c.SetCount("w", -5.0);
    c.SetCount("x", -4.5);
    // Sort keys by descending count: p=3, q=2, x=-4.5, w=-5 → but the comparator
    // flags below produce w, x, p, q as asserted.
    IList<string> biggestKeys = new List<string>(c.KeySet());
    NUnit.Framework.Assert.AreEqual(biggestKeys.Count, 4);
    biggestKeys.Sort(Counters.ToComparator(c, false, true));
    NUnit.Framework.Assert.AreEqual("w", biggestKeys[0]);
    NUnit.Framework.Assert.AreEqual("x", biggestKeys[1]);
    NUnit.Framework.Assert.AreEqual("p", biggestKeys[2]);
    NUnit.Framework.Assert.AreEqual("q", biggestKeys[3]);
    NUnit.Framework.Assert.AreEqual(Counters.Min(c), -5.0, Tolerance);
    NUnit.Framework.Assert.AreEqual(Counters.Argmin(c), "w");
    NUnit.Framework.Assert.AreEqual(Counters.Max(c), 3.0, Tolerance);
    NUnit.Framework.Assert.AreEqual(Counters.Argmax(c), "p");
    if (integral)
    {
        NUnit.Framework.Assert.AreEqual(Counters.Mean(c), -1.0);
    }
    else
    {
        NUnit.Framework.Assert.AreEqual(Counters.Mean(c), -1.125, Tolerance);
    }
    if (!integral)
    {
        // only do this for floating point counters. Too much bother to rewrite
        c.SetCount("x", -2.5);
        ClassicCounter<string> c2 = new ClassicCounter<string>(c);
        NUnit.Framework.Assert.AreEqual(3.0, c2.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(2.0, c2.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(-5.0, c2.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(-2.5, c2.GetCount("x"));
        // c3: every key of c2 counted once.
        ICounter<string> c3 = c.GetFactory().Create();
        foreach (string str in c2.KeySet())
        {
            c3.IncrementCount(str);
        }
        NUnit.Framework.Assert.AreEqual(1.0, c3.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(1.0, c3.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(1.0, c3.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(1.0, c3.GetCount("x"));
        // c2 += 10 * c3 (per key).
        Counters.AddInPlace(c2, c3, 10.0);
        NUnit.Framework.Assert.AreEqual(13.0, c2.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(12.0, c2.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(5.0, c2.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(7.5, c2.GetCount("x"));
        c3.AddAll(c);
        NUnit.Framework.Assert.AreEqual(4.0, c3.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(3.0, c3.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(-4.0, c3.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(-1.5, c3.GetCount("x"));
        Counters.SubtractInPlace(c3, c);
        NUnit.Framework.Assert.AreEqual(1.0, c3.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(1.0, c3.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(1.0, c3.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(1.0, c3.GetCount("x"));
        foreach (string str_1 in c.KeySet())
        {
            c3.IncrementCount(str_1);
        }
        NUnit.Framework.Assert.AreEqual(2.0, c3.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(2.0, c3.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(2.0, c3.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(2.0, c3.GetCount("x"));
        // c2 /= c3 (per key; every c3 count is 2 here).
        Counters.DivideInPlace(c2, c3);
        NUnit.Framework.Assert.AreEqual(6.5, c2.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(6.0, c2.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(2.5, c2.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(3.75, c2.GetCount("x"));
        // Scalar divide/multiply round-trip: /0.5, *2, /2.
        Counters.DivideInPlace(c2, 0.5);
        NUnit.Framework.Assert.AreEqual(13.0, c2.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(12.0, c2.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(5.0, c2.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(7.5, c2.GetCount("x"));
        Counters.MultiplyInPlace(c2, 2.0);
        NUnit.Framework.Assert.AreEqual(26.0, c2.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(24.0, c2.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(10.0, c2.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(15.0, c2.GetCount("x"));
        Counters.DivideInPlace(c2, 2.0);
        NUnit.Framework.Assert.AreEqual(13.0, c2.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(12.0, c2.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(5.0, c2.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(7.5, c2.GetCount("x"));
        foreach (string str_2 in c2.KeySet())
        {
            c2.IncrementCount(str_2);
        }
        NUnit.Framework.Assert.AreEqual(14.0, c2.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(13.0, c2.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(6.0, c2.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(8.5, c2.GetCount("x"));
        foreach (string str_3 in c.KeySet())
        {
            c2.IncrementCount(str_3);
        }
        NUnit.Framework.Assert.AreEqual(15.0, c2.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(14.0, c2.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(7.0, c2.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(9.5, c2.GetCount("x"));
        // small_c still holds the original snapshot {p=0, q=2}.
        c2.AddAll(small_c);
        NUnit.Framework.Assert.AreEqual(15.0, c2.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(16.0, c2.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(7.0, c2.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(9.5, c2.GetCount("x"));
        NUnit.Framework.Assert.AreEqual(new HashSet<string>(Arrays.AsList("p", "q")), Counters.KeysAbove(c2, 14));
        NUnit.Framework.Assert.AreEqual(new HashSet<string>(Arrays.AsList("q")), Counters.KeysAt(c2, 16));
        NUnit.Framework.Assert.AreEqual(new HashSet<string>(Arrays.AsList("x", "w")), Counters.KeysBelow(c2, 9.5));
        Counters.AddInPlace(c2, small_c, -6);
        NUnit.Framework.Assert.AreEqual(15.0, c2.GetCount("p"));
        NUnit.Framework.Assert.AreEqual(4.0, c2.GetCount("q"));
        NUnit.Framework.Assert.AreEqual(7.0, c2.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(9.5, c2.GetCount("x"));
        // Drive q to exactly zero, then drop zero-count keys.
        Counters.SubtractInPlace(c2, small_c);
        Counters.SubtractInPlace(c2, small_c);
        Counters.RetainNonZeros(c2);
        NUnit.Framework.Assert.AreEqual(15.0, c2.GetCount("p"));
        NUnit.Framework.Assert.IsFalse(c2.ContainsKey("q"));
        NUnit.Framework.Assert.AreEqual(7.0, c2.GetCount("w"));
        NUnit.Framework.Assert.AreEqual(9.5, c2.GetCount("x"));
    }
    // serialize to Stream
    if (c is ISerializable)
    {
        try
        {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            ObjectOutputStream @out = new ObjectOutputStream(new BufferedOutputStream(baos));
            @out.WriteObject(c);
            @out.Close();
            // reconstitute
            byte[] bytes = baos.ToByteArray();
            ObjectInputStream @in = new ObjectInputStream(new BufferedInputStream(new ByteArrayInputStream(bytes)));
            c = IOUtils.ReadObjectFromObjectStream(@in);
            @in.Close();
            if (!this.integral)
            {
                // The round-tripped counter keeps the pre-serialization state of c.
                NUnit.Framework.Assert.AreEqual(-2.5, c.TotalCount());
                NUnit.Framework.Assert.AreEqual(-5.0, Counters.Min(c));
                NUnit.Framework.Assert.AreEqual("w", Counters.Argmin(c));
            }
            c.Clear();
            if (!this.integral)
            {
                NUnit.Framework.Assert.AreEqual(0.0, c.TotalCount());
            }
        }
        catch (IOException ioe)
        {
            Fail("IOException: " + ioe);
        }
        catch (TypeLoadException cce)
        {
            Fail("ClassNotFoundException: " + cce);
        }
    }
}
/// <summary>Closes the underlying object input stream, releasing its resources.</summary>
/// <exception cref="System.IO.IOException"/>
public virtual void Close()
{
    ois.Close();
}