public SequenceMap() { sequences = new TreeSet<long>(); values = new AList<string>(100); firstValueSequence = 1; lastSequence = 0; }
private static void DontModifyInner() { Console.WriteLine("\nMake a snapshot and add it to outer"); ICollection<ISequenced<int>> outer = new HashSet<ISequenced<int>>(); for (int i = 0; i < 100; i++) { ISequenced<int> inner = new TreeSet<int>(); inner.Add(i); inner.Add(i + 1); outer.Add(inner); } IPersistentSorted<int> inner1 = new TreeSet<int>(), inner2 = new TreeSet<int>(), inner3 = new TreeSet<int>(); inner1.AddAll(new[] { 2, 3, 5, 7, 11 }); inner2.AddAll(inner1); inner2.Add(13); inner3.AddAll(inner1); // Take a snapshot and add it to outer: outer.Add(inner1.Snapshot()); Console.WriteLine("inner1 in outer: {0}", outer.Contains(inner1)); Console.WriteLine("inner2 in outer: {0}", outer.Contains(inner2)); Console.WriteLine("inner3 in outer: {0}", outer.Contains(inner3)); inner1.Add(13); Console.WriteLine("inner1 equals inner2: {0}", outer.EqualityComparer.Equals(inner1, inner2)); Console.WriteLine("inner1 equals inner3: {0}", outer.EqualityComparer.Equals(inner1, inner3)); Console.WriteLine("inner1 in outer: {0}", outer.Contains(inner1)); Console.WriteLine("inner2 in outer: {0}", outer.Contains(inner2)); Console.WriteLine("inner3 in outer: {0}", outer.Contains(inner3)); Console.WriteLine("outer.Count: {0}", outer.Count); }
public TSPAgenda() { agenda = new Stack<CityState>(); candidates = new TreeSet<CityState>(); PruneCount = 0; MaxSize = 0; }
// Anti-pattern: modifying an inner collection while it is a // member of an outer one may cause it to be lost from the outer // collection. private static void ModifyInner() { Console.WriteLine("\nAnti-pattern: Add to outer, modify, lose"); ICollection<ISequenced<int>> outer = new HashSet<ISequenced<int>>(); for (int i = 0; i < 100; i++) { ISequenced<int> inner = new TreeSet<int>(); inner.Add(i); inner.Add(i + 1); outer.Add(inner); } ISequenced<int> inner1 = new TreeSet<int>(), inner2 = new TreeSet<int>(), inner3 = new TreeSet<int>(); inner1.AddAll(new[] { 2, 3, 5, 7, 11 }); inner2.AddAll(inner1); inner2.Add(13); inner3.AddAll(inner1); outer.Add(inner1); Console.WriteLine("inner1 in outer: {0}", outer.Contains(inner1)); Console.WriteLine("inner2 in outer: {0}", outer.Contains(inner2)); Console.WriteLine("inner3 in outer: {0}", outer.Contains(inner3)); inner1.Add(13); Console.WriteLine("inner1 equals inner2: {0}", outer.EqualityComparer.Equals(inner1, inner2)); Console.WriteLine("inner1 equals inner3: {0}", outer.EqualityComparer.Equals(inner1, inner3)); Console.WriteLine("inner1 in outer: {0}", outer.Contains(inner1)); Console.WriteLine("inner2 in outer: {0}", outer.Contains(inner2)); Console.WriteLine("inner3 in outer: {0}", outer.Contains(inner3)); Console.WriteLine("outer.Count: {0}", outer.Count); }
public void testCollection() { ICollection<String> data = new TreeSet<String>(); data.Add("foo"); data.Add("bar"); data.Add("baz"); Assert.AreEqual(TEST_DATA, IteratorUtils.iterableToList(data)); }
public SequenceMap() { sequences = new TreeSet<long>(); /* sequence numbers currently in the map */ values = new List<string>(100); /* values of remaining sequences */ firstValueSequence = 1; /* sequence # of first item in _values */ lastSequence = 0; /* last generated sequence */ }
public MetaGdl(Parser p) { _relations = new TreeDictionary<int, RelationInfo>(); _functionSymbols = new TreeSet<int>(); _objectSymbols = new TreeSet<int>(); _parser = p; _rules = new TreeDictionary<int, List<Implication>>(); _groundFacts = new TreeDictionary<int, List<GroundFact>>(); _roles = new List<TermObject>(); InsertReservedKeywords(); }
// Given a sequence of strings, return all non-trivial anagram // classes. // Using HashBag<char> and an unsequenced equalityComparer, this performs as // follows on 1600 MHz Mobile P4 and .Net 2.0 beta 1 (wall-clock // time): // 50 000 words 2 822 classes 2.0 sec // 100 000 words 5 593 classes 4.3 sec // 200 000 words 11 705 classes 8.8 sec // 300 000 words 20 396 classes 52.0 sec includes swapping // 347 165 words 24 428 classes 146.0 sec includes swapping // The maximal memory consumption is less than 180 MB. public static SCG.IEnumerable<SCG.IEnumerable<String>> AnagramClasses(SCG.IEnumerable<String> ss) { IDictionary<HashBag<char>, TreeSet<String>> classes = new HashDictionary<HashBag<char>, TreeSet<String>>(); foreach (String s in ss) { HashBag<char> anagram = AnagramClass(s); TreeSet<String> anagramClass; if (!classes.Find(ref anagram, out anagramClass)) classes[anagram] = anagramClass = new TreeSet<String>(); anagramClass.Add(s); } foreach (TreeSet<String> anagramClass in classes.Values) if (anagramClass.Count > 1) yield return anagramClass; }
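// Hedged driver sketch for AnagramClasses above (not from the original corpus).
// AnagramClass is assumed to map a word to the HashBag<char> of its letters,
// matching the call site above; "SCG" is the usual alias for System.Collections.Generic.
static HashBag<char> AnagramClass(String s)
{
    HashBag<char> anagram = new HashBag<char>();
    foreach (char c in s)
        anagram.Add(char.ToLower(c)); // case-insensitive letter multiset
    return anagram;
}

static void Main()
{
    String[] words = { "listen", "silent", "enlist", "google", "banana" };
    foreach (SCG.IEnumerable<String> cls in AnagramClasses(words))
        Console.WriteLine(String.Join(" ", cls));
    // Only { enlist, listen, silent } is printed; singleton classes are filtered out.
}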
/** * creates a Delaunay Triangulation from all the points. Note: duplicated * points are ignored. */ public Delaunay_Triangulation(Point_dt[] ps) { _modCount = 0; _modCount2 = 0; _bb_min = null; _bb_max = null; this._vertices = new TreeSet<Point_dt>(Point_dt.getComparator()); _triangles = new List<Triangle_dt>(); allCollinear = true; for (int i = 0; ps != null && i < ps.Length && ps[i] != null; i++) { this.insertPoint(ps[i]); } // build grid points to make find faster gridPoints = new PointsGridDT(5, this); }
public static void IntSetSet() { ICollection<ISequenced<int>> outer = new HashSet<ISequenced<int>>(); int[] ss = { 2, 3, 5, 7 }; TreeSet<int> inner = new TreeSet<int>(); outer.Add(inner.Snapshot()); foreach (int i in ss) { inner.Add(i); outer.Add(inner.Snapshot()); } foreach (ISequenced<int> s in outer) { int sum = 0; s.Apply(delegate(int x) { sum += x; }); Console.WriteLine("Set has {0} elements and sum {1}", s.Count, sum); } }
private static SCG.IEnumerable <SCG.IEnumerable <String> > AnagramClasses(SCG.IEnumerable <String> ss) { IDictionary <HashBag <char>, TreeSet <String> > classes = new HashDictionary <HashBag <char>, TreeSet <String> >(); foreach (String s in ss) { HashBag <char> anagram = AnagramClass(s); if (!classes.Find(ref anagram, out TreeSet <String> anagramClass)) { classes[anagram] = anagramClass = new TreeSet <String>(); } anagramClass.Add(s); } foreach (TreeSet <String> anagramClass in classes.Values) { if (anagramClass.Count > 1) { yield return(anagramClass); } } }
static IDictionary<String, TreeSet<int>> IndexFile(String filename) { IDictionary<String, TreeSet<int>> index = new TreeDictionary<String, TreeSet<int>>(); Regex delim = new Regex("[^a-zA-Z0-9]+"); using (TextReader rd = new StreamReader(filename)) { int lineno = 0; for (String line = rd.ReadLine(); line != null; line = rd.ReadLine()) { String[] res = delim.Split(line); lineno++; foreach (String s in res) if (s != "") { if (!index.Contains(s)) index[s] = new TreeSet<int>(); index[s].Add(lineno); } } } return index; }
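// Hedged usage sketch for IndexFile above; "input.txt" is an illustrative file
// name, not taken from the original. TreeDictionary enumerates its keys in
// sorted order, and each TreeSet prints its line numbers in ascending order.
static void Main()
{
    IDictionary<String, TreeSet<int>> index = IndexFile("input.txt");
    foreach (KeyValuePair<String, TreeSet<int>> entry in index)
        Console.WriteLine("{0}: {1}", entry.Key, entry.Value);
}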
public void readFromSortedMap(SortedMap <Integer, String> areaCodeMap) { SortedSet <String> descriptionsSet = new TreeSet <String>(); numOfEntries = areaCodeMap.size(); prefixSizeInBytes = getOptimalNumberOfBytesForValue(areaCodeMap.lastKey()); phoneNumberPrefixes = ByteBuffer.allocate(numOfEntries * prefixSizeInBytes); // Fill the phone number prefixes byte buffer, the set of possible lengths of prefixes and the // description set. int index = 0; foreach (Entry <Integer, String> entry in areaCodeMap.entrySet()) { int prefix = entry.getKey(); storeWordInBuffer(phoneNumberPrefixes, prefixSizeInBytes, index, prefix); possibleLengths.add((int)Math.log10(prefix) + 1); descriptionsSet.add(entry.getValue()); ++index; } createDescriptionPool(descriptionsSet, areaCodeMap); }
/// <exception cref="Org.Apache.Hadoop.Yarn.Server.Resourcemanager.Reservation.Exceptions.PlanningException /// "/> public virtual void Plan(Org.Apache.Hadoop.Yarn.Server.Resourcemanager.Reservation.Plan plan, IList <ReservationDefinition> contracts) { if (contracts != null) { throw new RuntimeException("SimpleCapacityReplanner cannot handle new reservation contracts" ); } ResourceCalculator resCalc = plan.GetResourceCalculator(); Org.Apache.Hadoop.Yarn.Api.Records.Resource totCap = plan.GetTotalCapacity(); long now = clock.GetTime(); // loop on all moment in time from now to the end of the check Zone // or the end of the planned sessions whichever comes first for (long t = now; (t < plan.GetLastEndTime() && t < (now + lengthOfCheckZone)); t += plan.GetStep()) { Org.Apache.Hadoop.Yarn.Api.Records.Resource excessCap = Resources.Subtract(plan.GetTotalCommittedResources (t), totCap); // if we are violating if (Resources.GreaterThan(resCalc, totCap, excessCap, ZeroResource)) { // sorted on reverse order of acceptance, so newest reservations first ICollection <ReservationAllocation> curReservations = new TreeSet <ReservationAllocation >(plan.GetReservationsAtTime(t)); for (IEnumerator <ReservationAllocation> resIter = curReservations.GetEnumerator() ; resIter.HasNext() && Resources.GreaterThan(resCalc, totCap, excessCap, ZeroResource );) { ReservationAllocation reservation = resIter.Next(); plan.DeleteReservation(reservation.GetReservationId()); excessCap = Resources.Subtract(excessCap, reservation.GetResourcesAtTime(t)); Log.Info("Removing reservation " + reservation.GetReservationId() + " to repair physical-resource constraints in the plan: " + plan.GetQueueName()); } } } }
public static void Main(String[] args) { ISorted <int> sorted = new TreeSet <int>(); sorted.AddAll(new[] { 23, 29, 31, 37, 41, 43, 47, 53 }); Console.WriteLine(sorted); if (args.Length == 1) { int n = int.Parse(args[0]); int res; if (Predecessor(sorted, n, out res)) { Console.WriteLine("{0} has predecessor {1}", n, res); } if (WeakPredecessor(sorted, n, out res)) { Console.WriteLine("{0} has weak predecessor {1}", n, res); } if (Successor(sorted, n, out res)) { Console.WriteLine("{0} has successor {1}", n, res); } if (WeakSuccessor(sorted, n, out res)) { Console.WriteLine("{0} has weak successor {1}", n, res); } } IterBeginEnd(sorted); IterBeginEndBackwards(sorted); IterIncExc(sorted, 29, 47); IterIncExcBackwards(sorted, 29, 47); IterIncEnd(sorted, 29); IterBeginExc(sorted, 47); IterIncInc(sorted, 29, 47); IterBeginInc(sorted, 47); IterExcExc(sorted, 29, 47); IterExcEnd(sorted, 29); IterExcInc(sorted, 29, 47); }
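// The Predecessor/WeakPredecessor/Successor/WeakSuccessor helpers called in
// Main above are not shown in this corpus; a plausible sketch, assuming C5 2.x's
// ISorted<T> try-methods. A (strict) predecessor is less than n; a weak
// predecessor may also equal n, and dually for successors.
static bool Predecessor(ISorted<int> sorted, int n, out int res) => sorted.TryPredecessor(n, out res);
static bool WeakPredecessor(ISorted<int> sorted, int n, out int res) => sorted.TryWeakPredecessor(n, out res);
static bool Successor(ISorted<int> sorted, int n, out int res) => sorted.TrySuccessor(n, out res);
static bool WeakSuccessor(ISorted<int> sorted, int n, out int res) => sorted.TryWeakSuccessor(n, out res);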
private static void DontModifyInner() { Console.WriteLine("\nMake a snapshot and add it to outer"); var outer = new HashSet <ISequenced <int> >(); for (var i = 0; i < 100; i++) { var inner = new TreeSet <int> { i, i + 1 }; outer.Add(inner); } IPersistentSorted <int> inner1 = new TreeSet <int>(), inner2 = new TreeSet <int>(), inner3 = new TreeSet <int>(); inner1.AddAll(new[] { 2, 3, 5, 7, 11 }); inner2.AddAll(inner1); inner2.Add(13); inner3.AddAll(inner1); // Take a snapshot and add it to outer: outer.Add(inner1.Snapshot()); Console.WriteLine($"inner1 in outer: {outer.Contains(inner1)}"); Console.WriteLine($"inner2 in outer: {outer.Contains(inner2)}"); Console.WriteLine($"inner3 in outer: {outer.Contains(inner3)}"); inner1.Add(13); Console.WriteLine($"inner1 equals inner2: {outer.EqualityComparer.Equals(inner1, inner2)}"); Console.WriteLine($"inner1 equals inner3: {outer.EqualityComparer.Equals(inner1, inner3)}"); Console.WriteLine($"inner1 in outer: {outer.Contains(inner1)}"); Console.WriteLine($"inner2 in outer: {outer.Contains(inner2)}"); Console.WriteLine($"inner3 in outer: {outer.Contains(inner3)}"); Console.WriteLine($"outer.Count: {outer.Count}"); }
internal int AddOccurrence(IOccurrence occ) { if (occ == null) { throw new ArgumentNullException("occ"); } if (occ.Depth != depth) { throw new InvalidOperationException("Depth mismatch."); } if (!TreeSet.ContainsKey(occ.TreeId)) { TreeSet.Add(occ.TreeId, new TreeOccSet(occ.TreeId)); } var temp = TreeSet[occ.TreeId].AddOccurrence(occ); RootOccurrenceCount += temp; return(temp); }
public bool Equals(TreeSet <T>?x, TreeSet <T>?y) { if (x is null) { return(y is null); } else if (y is null) { return(false); } if (x.Comparer.Equals(y.Comparer)) { return(x.SetEquals(y)); } bool found = false; foreach (T item1 in x) { found = false; foreach (T item2 in y) { if (_comparer.Equals(item1, item2)) { found = true; break; } } if (!found) { return(false); } } return(true); }
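// Hedged usage sketch for the comparer above; the enclosing type's name
// (TreeSetEqualityComparer) and its IEqualityComparer<T> constructor are
// assumptions, and TreeSet<T> is taken to be the SortedSet-style tree with an
// IComparer<T> constructor. Different Comparer instances force the pairwise
// fallback path instead of SetEquals.
var a = new TreeSet<string>(StringComparer.Ordinal) { "x", "y" };
var b = new TreeSet<string>(StringComparer.OrdinalIgnoreCase) { "X", "Y" };
var cmp = new TreeSetEqualityComparer<string>(StringComparer.OrdinalIgnoreCase);
// a.Comparer != b.Comparer, so Equals scans element by element with the
// supplied equality comparer and reports the sets equal.
Console.WriteLine(cmp.Equals(a, b)); // True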
// Anti-pattern: modifying an inner collection while it is a // member of an outer one may cause it to be lost from the outer // collection. private static void ModifyInner() { Console.WriteLine("\nAnti-pattern: Add to outer, modify, lose"); var outer = new HashSet <ISequenced <int> >(); for (var i = 0; i < 100; i++) { ISequenced <int> inner = new TreeSet <int> { i, i + 1 }; outer.Add(inner); } ISequenced <int> inner1 = new TreeSet <int>(), inner2 = new TreeSet <int>(), inner3 = new TreeSet <int>(); inner1.AddAll(new[] { 2, 3, 5, 7, 11 }); inner2.AddAll(inner1); inner2.Add(13); inner3.AddAll(inner1); outer.Add(inner1); Console.WriteLine($"inner1 in outer: {outer.Contains(inner1)}"); Console.WriteLine($"inner2 in outer: {outer.Contains(inner2)}"); Console.WriteLine($"inner3 in outer: {outer.Contains(inner3)}"); inner1.Add(13); Console.WriteLine($"inner1 equals inner2: {outer.EqualityComparer.Equals(inner1, inner2)}"); Console.WriteLine($"inner1 equals inner3: {outer.EqualityComparer.Equals(inner1, inner3)}"); Console.WriteLine($"inner1 in outer: {outer.Contains(inner1)}"); Console.WriteLine($"inner2 in outer: {outer.Contains(inner2)}"); Console.WriteLine($"inner3 in outer: {outer.Contains(inner3)}"); Console.WriteLine($"outer.Count: {outer.Count}"); }
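// Hedged illustration of why the anti-pattern above loses items: a C5
// ISequenced<int> hashes by content (GetSequencedHashCode), so mutating the
// inner set after insertion changes the hash the outer HashSet filed it under.
TreeSet<int> s = new TreeSet<int> { 2, 3 };
int before = s.GetSequencedHashCode();
s.Add(5);
int after = s.GetSequencedHashCode();
Console.WriteLine(before != after); // True: lookups now probe the wrong bucket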
/** * Prefix search. * * @param key the query string * @return the matching key-value entries */ public Set <Map.Entry <String, V> > prefixSearch(String key) { Set <Map.Entry <String, V> > entrySet = new TreeSet <Map.Entry <String, V> >(); StringBuilder sb = new StringBuilder(key.substring(0, key.length() - 1)); BaseNode branch = this; char[] chars = key.toCharArray(); for (char aChar : chars) { if (branch == null) { return(entrySet); } branch = branch.getChild(aChar); } if (branch == null) { return(entrySet); } branch.walk(sb, entrySet); return(entrySet); }
private void Enumerate(IPriorityQueue <int> pq) { var set = new TreeSet <int>(); Fill(pq, set, 5000); var count = 0; foreach (var item in pq) { count++; Assert.True(set.Contains(item)); } Assert.Equal(5000, count); for (var i = 0; i < 2500; i++) { Assert.True(set.Contains(pq.Pop())); } Assert.Equal(2500, pq.Count); count = 0; foreach (var item in pq) { count++; Assert.True(set.Contains(item)); } Assert.Equal(2500, count); Fill(pq, set, 1000); count = 0; foreach (var item in pq) { count++; Assert.True(set.Contains(item)); } Assert.Equal(3500, count); }
private void assertPOSToken(TokenStream ts, String term, params String[] tags) { ts.IncrementToken(); assertEquals(term, ts.GetAttribute <ICharTermAttribute>().ToString()); TreeSet <String> actual = new TreeSet <String>(); TreeSet <String> expected = new TreeSet <String>(); foreach (StringBuilder b in ts.GetAttribute <IMorphosyntacticTagsAttribute>().Tags) { actual.Add(b.ToString()); } foreach (String s in tags) { expected.Add(s); } if (!expected.SequencedEquals(actual)) { Console.WriteLine("Expected:\n" + expected); Console.WriteLine("Actual:\n" + actual); assertEquals(expected, actual); } }
static void Main(String[] args) { ICollection <Person> nameColl = new HashSet <Person>(new Person.NameEqualityComparer()); ICollection <Person> dateColl = new TreeSet <Person>(new Person.DateComparer()); MakeLockstep(nameColl, dateColl); Person p1 = new Person("Peter", 19620625), p2 = new Person("Carsten", 19640627), p3 = new Person("Carsten", 19640628); nameColl.Add(p1); nameColl.Add(p2); dateColl.Add(p3); Console.WriteLine("dateColl = {0}", dateColl); Console.WriteLine("nameColl = {0}", nameColl); dateColl.Remove(p1); Console.WriteLine("dateColl = {0}", dateColl); Console.WriteLine("nameColl = {0}", nameColl); dateColl.Clear(); Console.WriteLine("dateColl = {0}", dateColl); Console.WriteLine("nameColl = {0}", nameColl); }
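// MakeLockstep is called above but not shown; a hedged sketch, assuming C5's
// listenable collection events (ItemsAdded/ItemsRemoved/CollectionCleared).
// A guard flag stops the mirrored update from re-triggering the handlers.
static void MakeLockstep(ICollection<Person> coll1, ICollection<Person> coll2)
{
    bool propagating = false;
    Action<ICollection<Person>, ICollection<Person>> wire = (src, dst) =>
    {
        src.ItemsAdded += (sender, args) =>
        {
            if (propagating) return;
            propagating = true; dst.Add(args.Item); propagating = false;
        };
        src.ItemsRemoved += (sender, args) =>
        {
            if (propagating) return;
            propagating = true; dst.Remove(args.Item); propagating = false;
        };
        src.CollectionCleared += (sender, args) =>
        {
            if (propagating) return;
            propagating = true; dst.Clear(); propagating = false;
        };
    };
    wire(coll1, coll2);
    wire(coll2, coll1);
}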
public IGeometry Union() { PointLocator locater = new PointLocator(); // use a set to eliminate duplicates, as required for union #if Goletas HashSet <ICoordinate> exteriorCoords = new HashSet <ICoordinate>(); #else TreeSet exteriorCoords = new TreeSet(); #endif foreach (IPoint point in PointExtracter.GetPoints(_pointGeom)) { ICoordinate coord = point.Coordinate; Locations loc = locater.Locate(coord, _otherGeom); if (loc == Locations.Exterior) { exteriorCoords.Add(coord); } } // if no points are in exterior, return the other geom if (exteriorCoords.Count == 0) { return(_otherGeom); } // make a puntal geometry of appropriate size IGeometry ptComp = null; ICoordinateSequence coords = _geomFact.CoordinateSequenceFactory.Create(exteriorCoords.ToArray()); ptComp = coords.Count == 1 ? (IGeometry)_geomFact.CreatePoint(coords.GetCoordinate(0)) : _geomFact.CreateMultiPoint(coords); // add point component to the other geometry return(GeometryCombiner.Combine(ptComp, _otherGeom)); }
internal static bool visitStates(SentenceHMMStateVisitor visitor, SentenceHMMState start, bool sorted) { object obj = SentenceHMMState.collectStates(start); if (sorted) { TreeSet treeSet = new TreeSet(new SentenceHMMState_3()); treeSet.addAll((Set)obj); obj = treeSet; } object obj2 = obj; Set set; if (obj2 != null) { if ((set = (obj2 as Set)) == null) { throw new IncompatibleClassChangeError(); } } else { set = null; } Iterator iterator = set.iterator(); while (iterator.hasNext()) { SentenceHMMState sentenceHMMState = (SentenceHMMState)iterator.next(); if (visitor.visit(sentenceHMMState)) { return(true); } } return(false); }
// Given a sequence of strings, return all non-trivial anagram // classes. // Using HashBag<char> and an unsequenced equalityComparer, this performs as // follows on 1600 MHz Mobile P4 and .Net 2.0 beta 1 (wall-clock // time): // 50 000 words 2 822 classes 2.0 sec // 100 000 words 5 593 classes 4.3 sec // 200 000 words 11 705 classes 8.8 sec // 300 000 words 20 396 classes 52.0 sec includes swapping // 347 165 words 24 428 classes 146.0 sec includes swapping // The maximal memory consumption is less than 180 MB. private static SCG.IEnumerable <SCG.IEnumerable <string> > AnagramClasses(SCG.IEnumerable <string> ss) { var classes = new HashDictionary <HashBag <char>, TreeSet <string> >(); foreach (var s in ss) { var anagram = AnagramClass(s); if (!classes.Find(ref anagram, out var anagramClass)) { classes[anagram] = anagramClass = new TreeSet <string>(); } anagramClass.Add(s); } foreach (var anagramClass in classes.Values) { if (anagramClass.Count > 1) { yield return(anagramClass); } } }
public override String[] childrenNames() { // throws BackingStoreException { lock (lockJ) { checkState(); TreeSet<String> result = new TreeSet<String>(cachedNode.keySet()); String[] names = childrenNamesSpi(); for (int i = 0; i < names.Length; i++) { result.add(names[i]); } return result.toArray(new String[result.size()]); } }
// test.cleanup(); // clean up after all to restore the system state /// <exception cref="System.IO.IOException"/> private void AnalyzeResult(long execTime, string resFileName, bool viewStats) { Path reduceFile = new Path(ReadDir, "part-00000"); DataInputStream @in; @in = new DataInputStream(fs.Open(reduceFile)); BufferedReader lines; lines = new BufferedReader(new InputStreamReader(@in)); long blocks = 0; long size = 0; long time = 0; float rate = 0; StringTokenizer badBlocks = null; long nrBadBlocks = 0; string line; while ((line = lines.ReadLine()) != null) { StringTokenizer tokens = new StringTokenizer(line, " \t\n\r\f%"); string attr = tokens.NextToken(); if (attr.EndsWith("blocks")) { blocks = long.Parse(tokens.NextToken()); } else { if (attr.EndsWith("size")) { size = long.Parse(tokens.NextToken()); } else { if (attr.EndsWith("time")) { time = long.Parse(tokens.NextToken()); } else { if (attr.EndsWith("rate")) { rate = float.ParseFloat(tokens.NextToken()); } else { if (attr.EndsWith("badBlocks")) { badBlocks = new StringTokenizer(tokens.NextToken(), ";"); nrBadBlocks = badBlocks.CountTokens(); } } } } } } Vector <string> resultLines = new Vector <string>(); resultLines.AddItem("----- DistributedFSCheck ----- : "); resultLines.AddItem(" Date & time: " + Sharpen.Extensions.CreateDate (Runtime.CurrentTimeMillis())); resultLines.AddItem(" Total number of blocks: " + blocks); resultLines.AddItem(" Total number of files: " + nrFiles); resultLines.AddItem("Number of corrupted blocks: " + nrBadBlocks); int nrBadFilesPos = resultLines.Count; TreeSet <string> badFiles = new TreeSet <string>(); long nrBadFiles = 0; if (nrBadBlocks > 0) { resultLines.AddItem(string.Empty); resultLines.AddItem("----- Corrupted Blocks (file@offset) ----- : "); while (badBlocks.HasMoreTokens()) { string curBlock = badBlocks.NextToken(); resultLines.AddItem(curBlock); badFiles.AddItem(Sharpen.Runtime.Substring(curBlock, 0, curBlock.IndexOf('@'))); } nrBadFiles = badFiles.Count; } resultLines.InsertElementAt(" Number of corrupted files: " + nrBadFiles, nrBadFilesPos ); if (viewStats) { resultLines.AddItem(string.Empty); resultLines.AddItem("----- Performance ----- : "); resultLines.AddItem(" Total MBytes read: " + size / Mega); resultLines.AddItem(" Throughput mb/sec: " + (float)size * 1000.0 / (time * Mega)); resultLines.AddItem(" Average IO rate mb/sec: " + rate / 1000 / blocks); resultLines.AddItem(" Test exec time sec: " + (float)execTime / 1000); } TextWriter res = new TextWriter(new FileOutputStream(new FilePath(resFileName), true )); for (int i = 0; i < resultLines.Count; i++) { string cur = resultLines[i]; Log.Info(cur); res.WriteLine(cur); } }
TreeDictionary(SCG.IComparer <K> comparer, SCG.IEqualityComparer <K> equalityComparer) : base(comparer, equalityComparer) { pairs = sortedpairs = new TreeSet <KeyValuePair <K, V> >(new KeyValuePairComparer <K, V>(comparer)); }
private bool Load(InputStream stream) { DocumentBuilder parser = XMLUtils.GetXmlParser(); if (parser == null) { return(false); } try { IDocument xmlDocument = parser.Parse(stream); IElement root = xmlDocument.GetDocumentElement(); INodeList sentences = root.GetElementsByTagName(Sentence); for (int i = 0; i < sentences.GetLength(); i++) { IElement sentence = (IElement)sentences.Item(i); Lattice lattice = new Lattice(); //Create the node map ISortedSet <int> nodes = new TreeSet <int>(); INodeList xmlNodes = sentence.GetElementsByTagName(Node); for (int nodeIdx = 0; nodeIdx < xmlNodes.GetLength(); nodeIdx++) { IElement xmlNode = (IElement)xmlNodes.Item(nodeIdx); int nodeName = System.Convert.ToInt32(xmlNode.GetAttribute(NodeId)); nodes.Add(nodeName); } IDictionary <int, int> nodeMap = Generics.NewHashMap(); int realNodeIdx = 0; int lastBoundaryNode = -1; foreach (int nodeName_1 in nodes) { if (lastBoundaryNode == -1) { System.Diagnostics.Debug.Assert(nodeName_1 % NodeOffset == 0); lastBoundaryNode = realNodeIdx; } else { if (nodeName_1 % NodeOffset == 0) { ParserConstraint c = new ParserConstraint(lastBoundaryNode, realNodeIdx, ".*"); lattice.AddConstraint(c); } } nodeMap[nodeName_1] = realNodeIdx; realNodeIdx++; } //Read the edges INodeList xmlEdges = sentence.GetElementsByTagName(Edge); for (int edgeIdx = 0; edgeIdx < xmlEdges.GetLength(); edgeIdx++) { IElement xmlEdge = (IElement)xmlEdges.Item(edgeIdx); string segment = xmlEdge.GetAttribute(Segment); double weight = double.Parse(xmlEdge.GetAttribute(Weight)); //Input weights should be log scale int from = System.Convert.ToInt32(xmlEdge.GetAttribute(FromNode)); int normFrom = nodeMap[from]; int to = System.Convert.ToInt32(xmlEdge.GetAttribute(ToNode)); int normTo = nodeMap[to]; LatticeEdge e = new LatticeEdge(segment, weight, normFrom, normTo); // Set attributes below here INodeList xmlAttrs = xmlEdge.GetElementsByTagName(EAttrNode); for (int attrIdx = 0; attrIdx < xmlAttrs.GetLength(); attrIdx++) { IElement xmlAttr = (IElement)xmlAttrs.Item(attrIdx); string key = xmlAttr.GetAttribute(EAttr); string value = xmlAttr.GetAttribute(EAttrVal); e.SetAttr(key, value); } lattice.AddEdge(e); } //Configure for parsing in ExhaustivePCFG parser lattice.AddBoundary(); lattices.Add(lattice); } } catch (IOException e) { System.Console.Error.Printf("%s: Error reading XML from input stream.%n", this.GetType().FullName); Sharpen.Runtime.PrintStackTrace(e); return(false); } catch (SAXException e) { Sharpen.Runtime.PrintStackTrace(e); return(false); } return(true); }
//////////////////////////////////////////////////////////////////////////// // // Expression Parsing Functions // //////////////////////////////////////////////////////////////////////////// /// <summary> /// Builds the expression. /// </summary> /// <param name="expression">The expression.</param> protected void BuildExpression(string expression) { expressionParsed = true; try { if (seconds == null) { seconds = new TreeSet<int>(); } if (minutes == null) { minutes = new TreeSet<int>(); } if (hours == null) { hours = new TreeSet<int>(); } if (daysOfMonth == null) { daysOfMonth = new TreeSet<int>(); } if (months == null) { months = new TreeSet<int>(); } if (daysOfWeek == null) { daysOfWeek = new TreeSet<int>(); } if (years == null) { years = new TreeSet<int>(); } int exprOn = Second; string[] exprsTok = expression.Trim().Split(new char[] { ' ', '\t', '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries); foreach (string exprTok in exprsTok) { string expr = exprTok.Trim(); if (expr.Length == 0) { continue; } if (exprOn > Year) { break; } // throw an exception if L is used with other days of the month if (exprOn == DayOfMonth && expr.IndexOf('L') != -1 && expr.Length > 1 && expr.IndexOf(",") >= 0) { throw new FormatException("Support for specifying 'L' and 'LW' with other days of the month is not implemented"); } // throw an exception if L is used with other days of the week if (exprOn == DayOfWeek && expr.IndexOf('L') != -1 && expr.Length > 1 && expr.IndexOf(",") >= 0) { throw new FormatException("Support for specifying 'L' with other days of the week is not implemented"); } if (exprOn == DayOfWeek && expr.IndexOf('#') != -1 && expr.IndexOf('#', expr.IndexOf('#') + 1) != -1) { throw new FormatException("Support for specifying multiple \"nth\" days is not implemented."); } string[] vTok = expr.Split(','); foreach (string v in vTok) { StoreExpressionVals(0, v, exprOn); } exprOn++; } if (exprOn <= DayOfWeek) { throw new FormatException("Unexpected end of expression."); } if (exprOn <= Year) { StoreExpressionVals(0, "*", Year); } ISortedSet<int> dow = GetSet(DayOfWeek); ISortedSet<int> dom = GetSet(DayOfMonth); // Copying the logic from the FormatException thrown below bool dayOfMSpec = !dom.Contains(NoSpec); bool dayOfWSpec = !dow.Contains(NoSpec); if (dayOfMSpec && !dayOfWSpec) { // skip } else if (dayOfWSpec && !dayOfMSpec) { // skip } else { throw new FormatException("Support for specifying both a day-of-week AND a day-of-month parameter is not implemented."); } } catch (FormatException) { throw; } catch (Exception e) { throw new FormatException(string.Format(CultureInfo.InvariantCulture, "Illegal cron expression format ({0})", e)); } }
public override void allocate() { this.vocabulary.clear(); this.logProbs.clear(); this.logBackoffs.clear(); HashMap hashMap = new HashMap(); HashMap hashMap2 = new HashMap(); HashMap hashMap3 = new HashMap(); int num = 0; Iterator iterator = this.sentences.iterator(); while (iterator.hasNext()) { string text = (string)iterator.next(); string[] array = String.instancehelper_split(text, "\\s+"); ArrayList arrayList = new ArrayList(); arrayList.add(this.dictionary.getSentenceStartWord()); string[] array2 = array; int num2 = array2.Length; for (int i = 0; i < num2; i++) { string text2 = array2[i]; if (String.instancehelper_length(text2) != 0) { this.vocabulary.add(text2); Word word = this.dictionary.getWord(text2); if (word == null) { arrayList.add(Word.__UNKNOWN); } else { arrayList.add(word); } } } arrayList.add(this.dictionary.getSentenceEndWord()); if (arrayList.size() > 0) { HashMap hashMap4 = hashMap; this.addSequence(hashMap4, new WordSequence(new Word[] { (Word)arrayList.get(0) })); num++; } if (arrayList.size() > 1) { num++; HashMap hashMap5 = hashMap; this.addSequence(hashMap5, new WordSequence(new Word[] { (Word)arrayList.get(1) })); HashMap hashMap6 = hashMap2; this.addSequence(hashMap6, new WordSequence(new Word[] { (Word)arrayList.get(0), (Word)arrayList.get(1) })); } for (int j = 2; j < arrayList.size(); j++) { num++; HashMap hashMap7 = hashMap; this.addSequence(hashMap7, new WordSequence(new Word[] { (Word)arrayList.get(j) })); HashMap hashMap8 = hashMap2; this.addSequence(hashMap8, new WordSequence(new Word[] { (Word)arrayList.get(j - 1), (Word)arrayList.get(j) })); HashMap hashMap9 = hashMap3; this.addSequence(hashMap9, new WordSequence(new Word[] { (Word)arrayList.get(j - 2), (Word)arrayList.get(j - 1), (Word)arrayList.get(j) })); } } float num3 = 0.5f; float num4 = 1f - num3; HashMap hashMap10 = new HashMap(); Iterator iterator2 = hashMap.entrySet().iterator(); while (iterator2.hasNext()) { Map.Entry entry = (Map.Entry)iterator2.next(); hashMap10.put(entry.getKey(), Float.valueOf((float)((Integer)entry.getValue()).intValue() * num4 / (float)num)); } LogMath logMath = LogMath.getLogMath(); float num5 = logMath.linearToLog((double)this.unigramWeight); float num6 = logMath.linearToLog((double)(1f - this.unigramWeight)); float num7 = -logMath.linearToLog((double)hashMap10.size()); TreeSet treeSet = new TreeSet(hashMap.keySet()); Iterator iterator3 = new TreeSet(hashMap2.keySet()).iterator(); WordSequence wordSequence = (!iterator3.hasNext()) ? null : ((WordSequence)iterator3.next()); Iterator iterator4 = treeSet.iterator(); while (iterator4.hasNext()) { WordSequence wordSequence2 = (WordSequence)iterator4.next(); float num8 = logMath.linearToLog((double)((Float)hashMap10.get(wordSequence2)).floatValue()); num8 += num5; num8 = logMath.addAsLinear(num8, num7 + num6); this.logProbs.put(wordSequence2, Float.valueOf(num8)); float num9 = 0f; while (wordSequence != null) { int num10 = wordSequence.getOldest().compareTo(wordSequence2); if (num10 > 0) { break; } if (num10 == 0) { num9 += ((Float)hashMap10.get(wordSequence.getNewest())).floatValue(); } wordSequence = ((!iterator3.hasNext()) ? 
null : ((WordSequence)iterator3.next())); } this.logBackoffs.put(wordSequence2, Float.valueOf(logMath.linearToLog((double)(num3 / (1f - num9))))); } HashMap hashMap11 = new HashMap(); Iterator iterator5 = hashMap2.entrySet().iterator(); while (iterator5.hasNext()) { Map.Entry entry2 = (Map.Entry)iterator5.next(); int num11 = ((Integer)hashMap.get(((WordSequence)entry2.getKey()).getOldest())).intValue(); hashMap11.put(entry2.getKey(), Float.valueOf((float)((Integer)entry2.getValue()).intValue() * num4 / (float)num11)); } TreeSet treeSet2 = new TreeSet(hashMap2.keySet()); iterator3 = new TreeSet(hashMap3.keySet()).iterator(); wordSequence = ((!iterator3.hasNext()) ? null : ((WordSequence)iterator3.next())); Iterator iterator6 = treeSet2.iterator(); while (iterator6.hasNext()) { WordSequence wordSequence3 = (WordSequence)iterator6.next(); this.logProbs.put(wordSequence3, Float.valueOf(logMath.linearToLog((double)((Float)hashMap11.get(wordSequence3)).floatValue()))); float num12 = 0f; while (wordSequence != null) { int num13 = wordSequence.getOldest().compareTo(wordSequence3); if (num13 > 0) { break; } if (num13 == 0) { num12 += ((Float)hashMap11.get(wordSequence.getNewest())).floatValue(); } wordSequence = ((!iterator3.hasNext()) ? null : ((WordSequence)iterator3.next())); } this.logBackoffs.put(wordSequence3, Float.valueOf(logMath.linearToLog((double)(num3 / (1f - num12))))); } iterator6 = hashMap3.entrySet().iterator(); while (iterator6.hasNext()) { Map.Entry entry3 = (Map.Entry)iterator6.next(); float num12 = (float)((Integer)entry3.getValue()).intValue() * num4; num12 /= (float)((Integer)hashMap2.get(((WordSequence)entry3.getKey()).getOldest())).intValue(); this.logProbs.put(entry3.getKey(), Float.valueOf(logMath.linearToLog((double)num12))); } }
protected IpObject(string _name) { this.Name = _name; this.Keyframes = new TreeSet <Pose3D>(); }
public void FilterList(List <Report> filteredList, TreeSet sectionHeaders) { MatchedList = filteredList; sectionHeaderTreeSet = sectionHeaders; NotifyDataSetChanged(); }
/// <summary> /// Initializes a new instance of the <see cref="Option"/> class. /// </summary> /// <param name="attribute">The attribute describing this option.</param> /// <param name="memberInfo">The <see cref="MemberInfo"/> object pointing to the member to which the attribute was applied.</param> /// <param name="cmdLineObject">The command line manager object.</param> /// <param name="optionGroups">A complete list of all available option groups.</param> /// <param name="numberFormatInfo">The number format info to use for parsing numerical arguments.</param> public Option(CommandLineOptionAttribute attribute, MemberInfo memberInfo, object cmdLineObject, ICollection<OptionGroup> optionGroups, NumberFormatInfo numberFormatInfo) { mObject = cmdLineObject; mMember = memberInfo; mUsage = attribute.BoolFunction; mDescription = attribute.Description; mNumberFormatInfo = numberFormatInfo ?? CultureInfo.CurrentCulture.NumberFormat; mDefaultValue = attribute.DefaultAssignmentValue; mMinValue = attribute.MinValue; mMaxValue = attribute.MaxValue; // Check the validity of the member for which this attribute was defined switch (memberInfo.MemberType) { case MemberTypes.Field: FieldInfo fieldInfo = (FieldInfo)memberInfo; if (fieldInfo.IsInitOnly || fieldInfo.IsLiteral) throw new AttributeException(typeof(CommandLineOptionAttribute), memberInfo, "Illegal field for this attribute; field must be writeable"); mOptionType = fieldInfo.FieldType; break; case MemberTypes.Method: MethodInfo method = (MethodInfo)memberInfo; ParameterInfo[] parameters = method.GetParameters(); if (parameters.Length != 1) throw new AttributeException(typeof(CommandLineOptionAttribute), memberInfo, "Illegal method for this attribute; the method must accept exactly one parameter"); if (parameters[0].IsOut) throw new AttributeException(typeof(CommandLineOptionAttribute), memberInfo, "Illegal method for this attribute; the parameter of the method must not be an out parameter"); if (IsArray(parameters[0].ParameterType) || IsCollectionType(parameters[0].ParameterType)) throw new AttributeException(typeof(CommandLineOptionAttribute), memberInfo, "Illegal method for this attribute; the parameter of the method must be a non-array and non-collection type"); mOptionType = parameters[0].ParameterType; break; case MemberTypes.Property: PropertyInfo propInfo = (PropertyInfo)memberInfo; if (!propInfo.CanWrite && !IsCollectionType(propInfo.PropertyType)) throw new AttributeException(typeof(CommandLineOptionAttribute), memberInfo, "Illegal property for this attribute; property for non-collection type must be writable"); if (!propInfo.CanRead && IsCollectionType(propInfo.PropertyType)) throw new AttributeException(typeof(CommandLineOptionAttribute), memberInfo, "Illegal property for this attribute; property for collection type must be readable"); if (!(propInfo.CanRead && propInfo.CanWrite) && IsArray(propInfo.PropertyType)) throw new AttributeException(typeof(CommandLineOptionAttribute), memberInfo, "Illegal property for this attribute; property representing array type must be both readable and writeable"); mOptionType = propInfo.PropertyType; break; default: throw new AttributeException(typeof(CommandLineOptionAttribute), memberInfo, "Illegal member for this attribute; member must be a property, method (accepting one parameter) or a field"); } mMinOccurs = attribute.MinOccurs; // MaxOccurs does not have a default value (since this is different for various types), so we set it here. 
if (!attribute.IsMaxOccursSet) { // Use default setting for MaxOccurs if (IsArray(mOptionType) || IsCollectionType(mOptionType)) mMaxOccurs = 0; // Unlimited else mMaxOccurs = 1; } else { mMaxOccurs = attribute.MaxOccurs; } if (mMinOccurs > mMaxOccurs && mMaxOccurs > 0) throw new AttributeException(typeof(CommandLineOptionAttribute), memberInfo, String.Format(CultureInfo.CurrentUICulture, "MinOccurs ({0}) must not be larger than MaxOccurs ({1})", mMinOccurs, mMaxOccurs)); if (mMaxOccurs != 1 && !(IsArray(mOptionType) || IsCollectionType(mOptionType)) && mMember.MemberType != MemberTypes.Method) throw new AttributeException(typeof(CommandLineOptionAttribute), memberInfo, "Invalid cardinality for member; MaxOccurs must be equal to one (1) for any non-array or non-collection type"); CommandLineManagerAttribute objectAttr = (CommandLineManagerAttribute)Attribute.GetCustomAttribute(mObject.GetType(), typeof(CommandLineManagerAttribute)); if (objectAttr == null) throw new AttributeException(String.Format(CultureInfo.CurrentUICulture, "Class {0} contains a CommandLineOptionAttribute, but does not have the attribute CommandLineObjectAttribute", mObject.GetType().FullName)); // Assign the name of this option from the member itself if no name is explicitly provided if (attribute.Name == null) { mName = memberInfo.Name; } else { mName = attribute.Name; } // Find the group (if any) that this option belongs to in the list of available option groups if (attribute.GroupId != null) { if (!optionGroups.Find(new Fun<OptionGroup, bool>( delegate(OptionGroup searchGroup) { return attribute.GroupId.Equals(searchGroup.Id); }), out mGroup)) { throw new LogicException(String.Format(CultureInfo.CurrentUICulture, "Undefined group {0} referenced from member {1} in {2}", attribute.GroupId, memberInfo.Name, cmdLineObject.GetType().FullName)); } mGroup.Options.Add(mName, this); } // Recursively find out if this option requires explicit assignment if (attribute.DoesRequireExplicitAssignment.HasValue) { mRequireExplicitAssignment = attribute.DoesRequireExplicitAssignment.Value; } else if (mGroup != null) { mRequireExplicitAssignment = mGroup.RequireExplicitAssignment; } else { mRequireExplicitAssignment = objectAttr.RequireExplicitAssignment; } // Make sure the type of the field, property or method is supported if (!IsTypeSupported(mOptionType)) throw new AttributeException(typeof(CommandLineOptionAttribute), mMember, "Unsupported type for command line option."); // Make sure MinValue and MaxValue is not specified for any non-numerical type. 
if (mMinValue != null || mMaxValue != null) { if (!IsNumericalType) { throw new AttributeException(typeof(CommandLineOptionAttribute), mMember, "MinValue and MaxValue must not be specified for a non-numerical type"); } else if (!mMinValue.GetType().IsAssignableFrom(GetBaseType(mOptionType))) { throw new AttributeException(typeof(CommandLineOptionAttribute), mMember, "Illegal value for MinValue or MaxValue, not the same type as option"); } } // Some special checks for numerical types if (IsNumericalType) { // Assign the default MinValue if it was not set and this is a numerical type if (IsNumericalType && mMinValue == null) { mMinValue = GetBaseType(mOptionType).GetField("MinValue", BindingFlags.Static | BindingFlags.Public).GetValue(null); } // Assign the default MaxValue if it was not set and this is a numerical type if (IsNumericalType && mMaxValue == null) { mMaxValue = GetBaseType(mOptionType).GetField("MaxValue", BindingFlags.Static | BindingFlags.Public).GetValue(null); } // Check that MinValue <= MaxValue if (IsNumericalType && ((IComparable)MinValue).CompareTo(MaxValue) > 0) { throw new AttributeException(typeof(CommandLineOptionAttribute), mMember, "MinValue must not be greater than MaxValue"); } } // Check that the DefaultValue is not set if the option does not require explicit assignment. // If it were allowed, it would be ambiguous for an option separated from a value by a white space character // since we wouldn't know whether that would set the default value or assign it to the following value. if (mDefaultValue != null && !mRequireExplicitAssignment) { throw new AttributeException(typeof(CommandLineOptionAttribute), mMember, "DefaultValue must not be specified when RequireExplicitAssignment is set to false"); } // Check that the type of any set default value matches that of this option, or is string, and // convert it to the type of this option. if (mDefaultValue != null) { if (mDefaultValue.GetType() == typeof(string)) { try { mDefaultValue = GetCheckedValueForSetOperation(mDefaultValue); } catch (OverflowException) { throw new AttributeException(typeof(CommandLineOptionAttribute), mMember, "DefaultValue was less than MinValue or greater than MaxValue for this option"); } catch (FormatException) { throw new AttributeException(typeof(CommandLineOptionAttribute), mMember, "DefaultValue was not specified in the correct format for the type of this option"); } } else if (GetBaseType(mOptionType) != mDefaultValue.GetType()) { try { mDefaultValue = Convert.ChangeType(mDefaultValue, GetBaseType(mOptionType), mNumberFormatInfo); } catch (InvalidCastException) { throw new AttributeException(typeof(CommandLineOptionAttribute), mMember, "The type of the DefaultValue specified is not compatible with the type of the member to which this attribute applies"); } } } // If this is an enum, check that it doesn't have members only distinguishable by case, and // add the members to the mEnumerationValues set for speedy access when checking values assigned // to this member. Type type = GetBaseType(mOptionType); if (type.IsEnum) { mEnumerationValues = new TreeSet<string>(StringComparer.OrdinalIgnoreCase); foreach (FieldInfo field in type.GetFields()) { if (field.IsLiteral) { if (mEnumerationValues.Contains(field.Name)) { throw new AttributeException(typeof(CommandLineOptionAttribute), mMember, "This enumeration is not allowed as a command line option since it contains fields that differ only by case"); } mEnumerationValues.Add(field.Name); } } } }
public virtual void TestCleanWithPaths() { // create status StatusCommand command = git.Status(); Status status = command.Call(); ICollection<string> files = status.GetUntracked(); NUnit.Framework.Assert.IsTrue(files.Count > 0); // run clean with setPaths ICollection<string> paths = new TreeSet<string>(); paths.AddItem("File3.txt"); ICollection<string> cleanedFiles = git.Clean().SetPaths(paths).Call(); status = git.Status().Call(); files = status.GetUntracked(); NUnit.Framework.Assert.AreEqual(1, files.Count); NUnit.Framework.Assert.IsTrue(cleanedFiles.Contains("File3.txt")); NUnit.Framework.Assert.IsFalse(cleanedFiles.Contains("File2.txt")); }
public MTMessagesContainer(MTMessage[] messages) { // decompiler residue repaired: the synthetic "TreeSet.__<clinit>()" call is dropped, and the // anonymous comparator "new 1(this)" is given the placeholder name MTMessageComparator this.messages = new TreeSet(new MTMessageComparator(this)); Collections.addAll(this.messages, messages); }
/// <exception cref="System.IO.IOException"></exception> /// <exception cref="System.InvalidOperationException"></exception> /// <exception cref="System.Exception"></exception> public virtual void TestRacyGitDetection() { TreeSet<long> modTimes = new TreeSet<long>(); FilePath lastFile; // wait to ensure that modtimes of the file doesn't match last index // file modtime modTimes.AddItem(Sharpen.Extensions.ValueOf(FsTick(db.GetIndexFile()))); // create two files AddToWorkDir("a", "a"); lastFile = AddToWorkDir("b", "b"); // wait to ensure that file-modTimes and therefore index entry modTime // doesn't match the modtime of index-file after next persistance modTimes.AddItem(Sharpen.Extensions.ValueOf(FsTick(lastFile))); // now add both files to the index. No racy git expected ResetIndex(new FileTreeIteratorWithTimeControl(db, modTimes)); NUnit.Framework.Assert.AreEqual("[a, mode:100644, time:t0, length:1, content:a]" + "[b, mode:100644, time:t0, length:1, content:b]", IndexState(SMUDGE | MOD_TIME | LENGTH | CONTENT)); // Remember the last modTime of index file. All modifications times of // further modification are translated to this value so it looks that // files have been modified in the same time slot as the index file modTimes.AddItem(Sharpen.Extensions.ValueOf(db.GetIndexFile().LastModified())); // modify one file AddToWorkDir("a", "a2"); // now update the index the index. 'a' has to be racily clean -- because // it's modification time is exactly the same as the previous index file // mod time. ResetIndex(new FileTreeIteratorWithTimeControl(db, modTimes)); db.ReadDirCache(); // although racily clean a should not be reported as being dirty NUnit.Framework.Assert.AreEqual("[a, mode:100644, time:t1, smudged, length:0, content:a2]" + "[b, mode:100644, time:t0, length:1, content:b]", IndexState(SMUDGE | MOD_TIME | LENGTH | CONTENT)); }
/// <exception cref="System.InvalidOperationException"></exception> /// <exception cref="System.IO.IOException"></exception> /// <exception cref="System.Exception"></exception> public virtual void TestIterator() { TreeSet<long> modTimes = new TreeSet<long>(); FilePath lastFile = null; for (int i = 0; i < 10; i++) { lastFile = new FilePath(db.WorkTree, "0." + i); FileUtils.CreateNewFile(lastFile); if (i == 5) { FsTick(lastFile); } } modTimes.AddItem(Sharpen.Extensions.ValueOf(FsTick(lastFile))); for (int i_1 = 0; i_1 < 10; i_1++) { lastFile = new FilePath(db.WorkTree, "1." + i_1); FileUtils.CreateNewFile(lastFile); } modTimes.AddItem(Sharpen.Extensions.ValueOf(FsTick(lastFile))); for (int i_2 = 0; i_2 < 10; i_2++) { lastFile = new FilePath(db.WorkTree, "2." + i_2); FileUtils.CreateNewFile(lastFile); if (i_2 % 4 == 0) { FsTick(lastFile); } } FileTreeIteratorWithTimeControl fileIt = new FileTreeIteratorWithTimeControl(db, modTimes); NameConflictTreeWalk tw = new NameConflictTreeWalk(db); tw.AddTree(fileIt); tw.Recursive = true; FileTreeIterator t; long t0 = 0; for (int i_3 = 0; i_3 < 10; i_3++) { NUnit.Framework.Assert.IsTrue(tw.Next()); t = tw.GetTree<FileTreeIterator>(0); if (i_3 == 0) { t0 = t.GetEntryLastModified(); } else { NUnit.Framework.Assert.AreEqual(t0, t.GetEntryLastModified()); } } long t1 = 0; for (int i_4 = 0; i_4 < 10; i_4++) { NUnit.Framework.Assert.IsTrue(tw.Next()); t = tw.GetTree<FileTreeIterator>(0); if (i_4 == 0) { t1 = t.GetEntryLastModified(); NUnit.Framework.Assert.IsTrue(t1 > t0); } else { NUnit.Framework.Assert.AreEqual(t1, t.GetEntryLastModified()); } } long t2 = 0; for (int i_5 = 0; i_5 < 10; i_5++) { NUnit.Framework.Assert.IsTrue(tw.Next()); t = tw.GetTree<FileTreeIterator>(0); if (i_5 == 0) { t2 = t.GetEntryLastModified(); NUnit.Framework.Assert.IsTrue(t2 > t1); } else { NUnit.Framework.Assert.AreEqual(t2, t.GetEntryLastModified()); } } }
internal bool ContainsTree(TreeId treeId) { return(TreeSet.ContainsKey(treeId)); }
//$goals 5 //$benchmark public void containsTest(TreeSet treeSet, int aKey) { if (treeSet!=null && treeSet.repOK()) { boolean ret_val = treeSet.contains(aKey); } }
/// <exception cref="SSLException"/> public virtual void Check(string[] hosts, string[] cns, string[] subjectAlts, bool ie6, bool strictWithSubDomains) { // Build up lists of allowed hosts For logging/debugging purposes. StringBuilder buf = new StringBuilder(32); buf.Append('<'); for (int i = 0; i < hosts.Length; i++) { string h = hosts[i]; h = h != null?StringUtils.ToLowerCase(h.Trim()) : string.Empty; hosts[i] = h; if (i > 0) { buf.Append('/'); } buf.Append(h); } buf.Append('>'); string hostnames = buf.ToString(); // Build the list of names we're going to check. Our DEFAULT and // STRICT implementations of the HostnameVerifier only use the // first CN provided. All other CNs are ignored. // (Firefox, wget, curl, Sun Java 1.4, 5, 6 all work this way). ICollection <string> names = new TreeSet <string>(); if (cns != null && cns.Length > 0 && cns[0] != null) { names.AddItem(cns[0]); if (ie6) { for (int i_1 = 1; i_1 < cns.Length; i_1++) { names.AddItem(cns[i_1]); } } } if (subjectAlts != null) { for (int i_1 = 0; i_1 < subjectAlts.Length; i_1++) { if (subjectAlts[i_1] != null) { names.AddItem(subjectAlts[i_1]); } } } if (names.IsEmpty()) { string msg = "Certificate for " + hosts[0] + " doesn't contain CN or DNS subjectAlt"; throw new SSLException(msg); } // StringBuffer for building the error message. buf = new StringBuilder(); bool match = false; for (IEnumerator <string> it = names.GetEnumerator(); it.HasNext();) { // Don't trim the CN, though! string cn = StringUtils.ToLowerCase(it.Next()); // Store CN in StringBuffer in case we need to report an error. buf.Append(" <"); buf.Append(cn); buf.Append('>'); if (it.HasNext()) { buf.Append(" OR"); } // The CN better have at least two dots if it wants wildcard // action. It also can't be [*.co.uk] or [*.co.jp] or // [*.org.uk], etc... bool doWildcard = cn.StartsWith("*.") && cn.LastIndexOf('.') >= 0 && !IsIP4Address (cn) && AcceptableCountryWildcard(cn); for (int i_1 = 0; i_1 < hosts.Length; i_1++) { string hostName = StringUtils.ToLowerCase(hosts[i_1].Trim()); if (doWildcard) { match = hostName.EndsWith(Runtime.Substring(cn, 1)); if (match && strictWithSubDomains) { // If we're in strict mode, then [*.foo.com] is not // allowed to match [a.b.foo.com] match = CountDots(hostName) == CountDots(cn); } } else { match = hostName.Equals(cn); } if (match) { goto out_break; } } out_continue :; } out_break :; if (!match) { throw new SSLException("hostname in certificate didn't match: " + hostnames + " !=" + buf); } }
public FileTreeIteratorWithTimeControl(FileTreeIterator p, Repository repo, TreeSet <long> modTimes) : base(p, repo.WorkTree, repo.FileSystem) { this.modTimes = modTimes; }
/// <summary> /// Expert: highlights the top-N passages from multiple fields, /// for the provided int[] docids, to custom object as /// returned by the <see cref="PassageFormatter"/>. Use /// this API to render to something other than <see cref="string"/>. /// </summary> /// <param name="fieldsIn">field names to highlight. Must have a stored string value and also be indexed with offsets.</param> /// <param name="query">query to highlight.</param> /// <param name="searcher">searcher that was previously used to execute the query.</param> /// <param name="docidsIn">containing the document IDs to highlight.</param> /// <param name="maxPassagesIn">The maximum number of top-N ranked passages per-field used to form the highlighted snippets.</param> /// <returns> /// <see cref="T:IDictionary{string, object[]}"/> keyed on field name, containing the array of formatted snippets /// corresponding to the documents in <paramref name="docidsIn"/>. /// If no highlights were found for a document, the /// first <paramref name="maxPassagesIn"/> from the field will /// be returned. /// </returns> /// <exception cref="IOException">if an I/O error occurred during processing</exception> /// <exception cref="ArgumentException">if <c>field</c> was indexed without <see cref="IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS"/></exception> protected internal virtual IDictionary <string, object[]> HighlightFieldsAsObjects(string[] fieldsIn, Query query, IndexSearcher searcher, int[] docidsIn, int[] maxPassagesIn) { if (fieldsIn.Length < 1) { throw new ArgumentException("fieldsIn must not be empty"); } if (fieldsIn.Length != maxPassagesIn.Length) { throw new ArgumentException("invalid number of maxPassagesIn"); } IndexReader reader = searcher.IndexReader; Query rewritten = Rewrite(query); TreeSet <Term> queryTerms = new TreeSet <Term>(); rewritten.ExtractTerms(queryTerms); IndexReaderContext readerContext = reader.Context; IList <AtomicReaderContext> leaves = readerContext.Leaves; // Make our own copies because we sort in-place: int[] docids = new int[docidsIn.Length]; System.Array.Copy(docidsIn, 0, docids, 0, docidsIn.Length); string[] fields = new string[fieldsIn.Length]; System.Array.Copy(fieldsIn, 0, fields, 0, fieldsIn.Length); int[] maxPassages = new int[maxPassagesIn.Length]; System.Array.Copy(maxPassagesIn, 0, maxPassages, 0, maxPassagesIn.Length); // sort for sequential io ArrayUtil.TimSort(docids); new InPlaceMergeSorterAnonymousHelper(fields, maxPassages).Sort(0, fields.Length); // pull stored data: IList <string[]> contents = LoadFieldValues(searcher, fields, docids, maxLength); IDictionary <string, object[]> highlights = new Dictionary <string, object[]>(); for (int i = 0; i < fields.Length; i++) { string field = fields[i]; int numPassages = maxPassages[i]; Term floor = new Term(field, ""); Term ceiling = new Term(field, UnicodeUtil.BIG_TERM); // LUCENENET NOTE: System.Collections.Generic.SortedSet<T>.GetViewBetween ceiling is inclusive. // However, in Java, subSet ceiling is exclusive. Also, // SortedSet<T> doesn't seem to have the correct logic, but C5.TreeSet<T> does. var fieldTerms = queryTerms.RangeFromTo(floor, ceiling); //SubSet(floor, ceiling); // TODO: should we have some reasonable defaults for term pruning? (e.g. 
stopwords) // Strip off the redundant field: BytesRef[] terms = new BytesRef[fieldTerms.Count]; int termUpto = 0; foreach (Term term in fieldTerms) { terms[termUpto++] = term.Bytes; } IDictionary <int, object> fieldHighlights = HighlightField(field, contents[i], GetBreakIterator(field), terms, docids, leaves, numPassages, query); object[] result = new object[docids.Length]; for (int j = 0; j < docidsIn.Length; j++) { fieldHighlights.TryGetValue(docidsIn[j], out result[j]); } highlights[field] = result; } return(highlights); }
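// The LUCENENET comment above relies on C5's range semantics; a tiny hedged
// sketch: TreeSet.RangeFromTo(bot, top) is inclusive of the lower bound and
// exclusive of the upper bound, like Java's SortedSet.subSet.
var set = new TreeSet<int> { 1, 3, 5, 7, 9 };
foreach (int i in set.RangeFromTo(3, 7))
    Console.Write("{0} ", i); // prints: 3 5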
public FileTreeIteratorWithTimeControl(FileTreeIterator p, FilePath f, FS fs, TreeSet <long> modTimes) : base(p, f, fs) { this.modTimes = modTimes; }
public FileStatus[] listStatus(Path path) { path = path.makeQualified(this); List<FileStatus> result = new ArrayList<FileStatus>(); String pathname = path.toString(); String pathnameAsDir = pathname + "/"; Set<String> dirs = new TreeSet<String>(); for(MockFile file: files) { String filename = file.path.toString(); if (pathname.equals(filename)) { return new FileStatus[]{createStatus(file)}; } else if (filename.startsWith(pathnameAsDir)) { String tail = filename.substring(pathnameAsDir.length()); int nextSlash = tail.indexOf('/'); if (nextSlash > 0) { dirs.add(tail.substring(0, nextSlash)); } else { result.add(createStatus(file)); } } } // for each directory add it once for(String dir: dirs) { result.add(createDirectory(new MockPath(this, pathnameAsDir + dir))); } return result.toArray(new FileStatus[result.size()]); }
public FileTreeIteratorWithTimeControl(Repository repo, TreeSet<long> modTimes) : base(repo) { this.modTimes = modTimes; }
public BasicCookieStore() : base() { this.cookies = new TreeSet <Org.Apache.Http.Cookie.Cookie>(new CookieIdentityComparator ()); }
public FileTreeIteratorWithTimeControl(FilePath f, FS fs, TreeSet<long> modTimes) : base(f, fs, new Config().Get(WorkingTreeOptions.KEY)) { this.modTimes = modTimes; }
/** * @see <a href="http://en.wikipedia.org/wiki/Sweep_line_algorithm">Sweep line algorithm</a> */ private void SweepCleanColumns(CT_Cols cols, CT_Col[] flattenedColsArray, CT_Col overrideColumn) { List <CT_Col> flattenedCols = new List <CT_Col>(flattenedColsArray); TreeSet <CT_Col> currentElements = new TreeSet <CT_Col>(CTColComparator.BY_MAX); IEnumerator <CT_Col> flIter = flattenedCols.GetEnumerator(); CT_Col haveOverrideColumn = null; long lastMaxIndex = 0; long currentMax = 0; IList <CT_Col> toRemove = new List <CT_Col>(); int pos = -1; //while (flIter.hasNext()) while ((pos + 1) < flattenedCols.Count) { //CTCol col = flIter.next(); pos++; CT_Col col = flattenedCols[pos]; long currentIndex = col.min; long colMax = col.max; long nextIndex = (colMax > currentMax) ? colMax : currentMax; //if (flIter.hasNext()) { if ((pos + 1) < flattenedCols.Count) { //nextIndex = flIter.next().getMin(); nextIndex = flattenedCols[pos + 1].min; //flIter.previous(); } IEnumerator <CT_Col> iter = currentElements.GetEnumerator(); toRemove.Clear(); while (iter.MoveNext()) { CT_Col elem = iter.Current; if (currentIndex <= elem.max) { break; // all passed elements have been purged } toRemove.Add(elem); } foreach (CT_Col rc in toRemove) { currentElements.Remove(rc); } if (!(currentElements.Count == 0) && lastMaxIndex < currentIndex) { // we need to process previous elements first CT_Col[] copyCols = new CT_Col[currentElements.Count]; currentElements.CopyTo(copyCols); insertCol(cols, lastMaxIndex, currentIndex - 1, copyCols, true, haveOverrideColumn); } currentElements.Add(col); if (colMax > currentMax) { currentMax = colMax; } if (col.Equals(overrideColumn)) { haveOverrideColumn = overrideColumn; } while (currentIndex <= nextIndex && !(currentElements.Count == 0)) { NPOI.Util.Collections.HashSet <CT_Col> currentIndexElements = new NPOI.Util.Collections.HashSet <CT_Col>(); long currentElemIndex; { // narrow scope of currentElem CT_Col currentElem = currentElements.First(); currentElemIndex = currentElem.max; currentIndexElements.Add(currentElem); while (true) { CT_Col higherElem = currentElements.Higher(currentElem); if (higherElem == null || higherElem.max != currentElemIndex) { break; } currentElem = higherElem; currentIndexElements.Add(currentElem); if (colMax > currentMax) { currentMax = colMax; } if (col.Equals(overrideColumn)) { haveOverrideColumn = overrideColumn; } } } //if (currentElemIndex < nextIndex || !flIter.hasNext()) { if (currentElemIndex < nextIndex || !((pos + 1) < flattenedCols.Count)) { CT_Col[] copyCols = new CT_Col[currentElements.Count]; currentElements.CopyTo(copyCols); insertCol(cols, currentIndex, currentElemIndex, copyCols, true, haveOverrideColumn); //if (flIter.hasNext()) { if ((pos + 1) < flattenedCols.Count) { if (nextIndex > currentElemIndex) { //currentElements.removeAll(currentIndexElements); foreach (CT_Col rc in currentIndexElements) { currentElements.Remove(rc); } if (currentIndexElements.Contains(overrideColumn)) { haveOverrideColumn = null; } } } else { //currentElements.removeAll(currentIndexElements); foreach (CT_Col rc in currentIndexElements) { currentElements.Remove(rc); } if (currentIndexElements.Contains(overrideColumn)) { haveOverrideColumn = null; } } lastMaxIndex = currentIndex = currentElemIndex + 1; } else { lastMaxIndex = currentIndex; currentIndex = nextIndex + 1; } } } SortColumns(cols); }
public BasicCookieStore() : base() { this.cookies = new TreeSet<Apache.Http.Cookie.Cookie>(new CookieIdentityComparator()); }
public void setColumnFormats(TreeSet<ColumnInfoRecord> cf) { columnFormats = cf; }
/// <summary>
/// Fills a <see cref="T:IDictionary{string, WeightedSpanTerm}"/> with <see cref="WeightedSpanTerm"/>s
/// using the terms from the supplied <see cref="SpanQuery"/>.
/// </summary>
/// <param name="terms"><see cref="T:IDictionary{string, WeightedSpanTerm}"/> to place created <see cref="WeightedSpanTerm"/>s in</param>
/// <param name="spanQuery"><see cref="SpanQuery"/> to extract Terms from</param>
/// <exception cref="System.IO.IOException">If there is a low-level I/O error</exception>
protected virtual void ExtractWeightedSpanTerms(IDictionary<string, WeightedSpanTerm> terms, SpanQuery spanQuery)
{
    HashSet<string> fieldNames;
    if (fieldName == null)
    {
        fieldNames = new HashSet<string>();
        CollectSpanQueryFields(spanQuery, fieldNames);
    }
    else
    {
        fieldNames = new HashSet<string>();
        fieldNames.Add(fieldName);
    }
    // To support the use of the default field name
    if (defaultField != null)
    {
        fieldNames.Add(defaultField);
    }
    IDictionary<string, SpanQuery> queries = new HashMap<string, SpanQuery>();
    var nonWeightedTerms = Support.Compatibility.SetFactory.CreateHashSet<Term>();
    bool mustRewriteQuery = MustRewriteQuery(spanQuery);
    if (mustRewriteQuery)
    {
        foreach (string field in fieldNames)
        {
            SpanQuery rewrittenQuery = (SpanQuery)spanQuery.Rewrite(GetLeafContext().Reader);
            queries[field] = rewrittenQuery;
            rewrittenQuery.ExtractTerms(nonWeightedTerms);
        }
    }
    else
    {
        spanQuery.ExtractTerms(nonWeightedTerms);
    }
    List<PositionSpan> spanPositions = new List<PositionSpan>();
    foreach (string field in fieldNames)
    {
        SpanQuery q = mustRewriteQuery ? queries[field] : spanQuery;
        AtomicReaderContext context = GetLeafContext();
        var termContexts = new HashMap<Term, TermContext>();
        TreeSet<Term> extractedTerms = new TreeSet<Term>();
        q.ExtractTerms(extractedTerms);
        foreach (Term term in extractedTerms)
        {
            termContexts[term] = TermContext.Build(context, term);
        }
        IBits acceptDocs = context.AtomicReader.LiveDocs;
        Spans.Spans spans = q.GetSpans(context, acceptDocs, termContexts);
        // collect span positions
        while (spans.Next())
        {
            spanPositions.Add(new PositionSpan(spans.Start, spans.End - 1));
        }
    }
    if (spanPositions.Count == 0)
    {
        // no spans found
        return;
    }
    foreach (Term queryTerm in nonWeightedTerms)
    {
        if (FieldNameComparer(queryTerm.Field))
        {
            WeightedSpanTerm weightedSpanTerm;
            if (!terms.TryGetValue(queryTerm.Text(), out weightedSpanTerm) || weightedSpanTerm == null)
            {
                weightedSpanTerm = new WeightedSpanTerm(spanQuery.Boost, queryTerm.Text());
                weightedSpanTerm.AddPositionSpans(spanPositions);
                weightedSpanTerm.IsPositionSensitive = true;
                terms[queryTerm.Text()] = weightedSpanTerm;
            }
            else if (spanPositions.Count > 0)
            {
                weightedSpanTerm.AddPositionSpans(spanPositions);
            }
        }
    }
}
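// The final loop above uses a find-or-create merge: look the term up, create
// and register a WeightedSpanTerm on a miss, append spans on a hit. A minimal
// BCL-only sketch of the same pattern (illustrative; names are made up):
using System.Collections.Generic;

static class FindOrCreateDemo
{
    public static void AddSpan(IDictionary<string, List<int>> terms, string term, int position)
    {
        List<int> positions;
        if (!terms.TryGetValue(term, out positions) || positions == null)
        {
            // first sighting: create and register the entry
            positions = new List<int>();
            terms[term] = positions;
        }
        // existing entry: just accumulate
        positions.Add(position);
    }
}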
// NOTE: the Java-to-C# converter fused the bodies of several distinct methods
// into this InitBlock() and dropped every statement it could not translate
// (its jlca1153/jlca1156 markers). The converter debris has been removed
// below; where a statement was lost, a hedged comment marks the gap. The code
// after each `return` is unreachable and belongs to the next fused fragment.
private void InitBlock()
{
    // --- Fragment 1: cache Head(g) and Tail(h) ---
    if (!Head.containsKey(g.toString()))
    {
        H = new TreeSet<Integer>();
        // [lost statement: presumably arc_g_it = g.iterator()]
        while (arc_g_it.hasNext())
        {
            Arc arc_g = arc_g_it.next();
            if (arc_g.From == init)
            {
                H.add(arc_g.To);
            }
        }
        Head.put(g.toString(), H);
    }
    if (!Tail.containsKey(h.toString()))
    {
        FiniteAutomaton fa = new FiniteAutomaton();
        OneToOneTreeMap<Integer, FAState> st = new OneToOneTreeMap<Integer, FAState>();
        // [lost statement: presumably arc_h_it = h.iterator()]
        while (arc_h_it.hasNext())
        {
            Arc arc_h = arc_h_it.next();
            if (!st.containsKey(arc_h.From))
            {
                st.put(arc_h.From, fa.createState());
            }
            if (!st.containsKey(arc_h.To))
            {
                st.put(arc_h.To, fa.createState());
            }
            fa.addTransition(st.getValue(arc_h.From), st.getValue(arc_h.To), arc_h.Label ? "1" : "0");
        }
        SCC s = new SCC(fa);
        T = new TreeSet<Integer>();
        // [lost statement: initialization of s_it, an iterator over the SCC states]
        while (s_it.hasNext())
        {
            T.add(st.getKey(s_it.next()));
        }
        int TailSize = 0;
        while (TailSize != T.size())
        {
            TailSize = T.size();
            TreeSet<Arc> isolatedArcsTemp = new TreeSet<Arc>();
            // [lost statement: presumably arc_it = isolatedArcs.iterator()]
            while (arc_it.hasNext())
            {
                Arc arc = arc_it.next();
                if (!T.contains(arc.To))
                {
                    isolatedArcsTemp.add(arc);
                }
                else
                {
                    T.add(arc.From);
                }
            }
            isolatedArcs = isolatedArcsTemp;
        }
        Tail.put(h.toString(), T);
    }

    // --- Fragment 2: tail of a boolean method testing Head(g) ∩ Tail(h) ---
    TreeSet<Integer> intersection = new TreeSet<Integer>();
    intersection.addAll(Head.get_Renamed(g.toString()));
    intersection.retainAll(Tail.get_Renamed(h.toString()));
    if (debug_Renamed_Field)
    {
        if (intersection.isEmpty())
        {
            debug("g:" + g + ", Head: " + Head.get_Renamed(g.toString()));
            debug("h:" + h + ", Tail: " + Tail.get_Renamed(h.toString()));
        }
    }
    return (!intersection.isEmpty());

    // --- Fragment 3: build one arc graph per alphabet symbol, keeping only
    //     graphs that are minimal w.r.t. smallerThan ---
    ArrayList<TreeSet<Arc>> graphs = new ArrayList<TreeSet<Arc>>();
    // [lost statement: initialization of symbol_it, an iterator over the alphabet]
    while (symbol_it.hasNext())
    {
        TreeSet<Arc> graph = new TreeSet<Arc>();
        System.String sym = symbol_it.next();
        // [lost statement: initialization of from_it, an iterator over the states]
        while (from_it.hasNext())
        {
            cav2010.automata.FAState from = from_it.next();
            if (from.getNext(sym) != null)
            {
                // [lost statement: initialization of to_it, an iterator over from's sym-successors]
                while (to_it.hasNext())
                {
                    cav2010.automata.FAState to = to_it.next();
                    if (input.F.contains(from) || input.F.contains(to))
                    {
                        graph.add(new Arc(from.id, true, to.id));
                    }
                    else
                    {
                        graph.add(new Arc(from.id, false, to.id));
                    }
                }
            }
        }
        ArrayList<TreeSet<Arc>> toRemove = new ArrayList<TreeSet<Arc>>();
        bool canAdd = true;
        // [lost statements: presumably old_it = graphs.iterator(), and
        //  old = old_it.next() at the top of the loop]
        while (old_it.hasNext())
        {
            if (smallerThan(old, graph))
            {
                canAdd = false;
                break;
            }
            else if (smallerThan(graph, old))
            {
                toRemove.add(old);
            }
        }
        if (canAdd)
        {
            graphs.add(graph);
            graphs.removeAll(toRemove);
        }
    }
    return (graphs);

    // --- Fragment 4: compose two arc graphs g;h into f ---
    TreeSet<Arc> f = new TreeSet<Arc>();
    // [lost statements: presumably arc_g_it = g.iterator() here, and
    //  arc_h_it = h.iterator() inside the outer loop]
    while (arc_g_it.hasNext())
    {
        Arc arc_g = arc_g_it.next();
        while (arc_h_it.hasNext())
        {
            Arc arc_h = arc_h_it.next();
            if (arc_g.To == arc_h.From)
            {
                if (arc_g.Label || arc_h.Label)
                {
                    f.add(new Arc(arc_g.From, true, arc_h.To));
                    f.remove(new Arc(arc_g.From, false, arc_h.To));
                }
                else if (!f.contains(new Arc(arc_g.From, true, arc_h.To)))
                {
                    f.add(new Arc(arc_g.From, false, arc_h.To));
                }
            }
        }
    }
    return (f);

    // --- Fragment 5: smallerThan-style test -- every arc of g must have a
    //     "larger" matching arc in h ---
    while (arc_g_it.hasNext())
    {
        Arc arc_g = arc_g_it.next();
        bool has_larger = false;
        while (arc_h_it.hasNext())
        {
            Arc arc_h = arc_h_it.next();
            if (arc_g.From == arc_h.From)
            {
                if (!arc_g.Label || arc_h.Label)
                {
                    if (arc_g.To == arc_h.To)
                    {
                        has_larger = true;
                        break;
                    }
                }
            }
        }
        if (!has_larger)
        {
            return (false);
        }
    }
    return (true);
}
// Given a sequence of strings, return all non-trivial anagram classes.
//
// This should use a *sequenced* equalityComparer on a TreeBag<char>, not an
// unsequenced one: characters have a natural total order, so a TreeBag<char>
// keeps them sorted and sequence equality is enough to compare anagram
// classes. On 347,000 distinct Danish words this takes 70 cpu seconds, 180 MB
// memory, and 263 wall-clock seconds (the gap being due to swapping); another
// run with a TreeBag<char> and a sequenced equalityComparer took 82 cpu
// seconds and 180 MB RAM to find the 26,058 anagram classes.
//
// Using an unsequenced equalityComparer on TreeBag<char> or HashBag<char>
// makes it criminally slow: at least 1200 cpu seconds. Presumably many bags
// get the same hash code so there are many collisions, but it is not clear
// exactly how the unsequenced equalityComparer works -- or whether unsequenced
// equality itself is the bottleneck.
public static SCG.IEnumerable<SCG.IEnumerable<String>> AnagramClasses(SCG.IEnumerable<String> ss)
{
    bool unseq = true; // flip to false to use the (faster) sequenced comparer
    IDictionary<TreeBag<char>, TreeSet<String>> classes;
    if (unseq)
    {
        SCG.IEqualityComparer<TreeBag<char>> unsequencedTreeBagEqualityComparer =
            UnsequencedCollectionEqualityComparer<TreeBag<char>, char>.Default;
        classes = new HashDictionary<TreeBag<char>, TreeSet<String>>(unsequencedTreeBagEqualityComparer);
    }
    else
    {
        SCG.IEqualityComparer<TreeBag<char>> sequencedTreeBagEqualityComparer =
            SequencedCollectionEqualityComparer<TreeBag<char>, char>.Default;
        classes = new HashDictionary<TreeBag<char>, TreeSet<String>>(sequencedTreeBagEqualityComparer);
    }
    foreach (String s in ss)
    {
        TreeBag<char> anagram = AnagramClass(s);
        TreeSet<String> anagramClass;
        if (!classes.Find(ref anagram, out anagramClass))
            classes[anagram] = anagramClass = new TreeSet<String>();
        anagramClass.Add(s);
    }
    foreach (TreeSet<String> anagramClass in classes.Values)
        if (anagramClass.Count > 1)
            yield return anagramClass;
}
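// AnagramClass is referenced above but not shown. A minimal sketch of a
// plausible implementation (an assumption consistent with the C5 types used):
// the canonical form of a word is the bag of its characters, which a
// TreeBag<char> keeps in sorted order, so two words are anagrams exactly when
// their bags are equal.
public static TreeBag<char> AnagramClass(String s)
{
    TreeBag<char> anagram = new TreeBag<char>();
    foreach (char c in s)
        anagram.Add(c);
    return anagram;
}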
/// <summary>Represent the state of the index in one String.</summary>
/// <remarks>
/// Represent the state of the index in one String. This representation is
/// useful when writing tests which do assertions on the state of the index.
/// By default information about path, mode and stage (if different from 0) is
/// included. A bitmask controls which additional info about modification
/// times, smudge state and length is included.
/// <p>
/// The format of the returned string is described by this BNF:
/// <pre>
/// result  = ( "[" path mode stage? time? smudge? length? sha1? content? "]" )* .
/// mode    = ", mode:" number .
/// stage   = ", stage:" number .
/// time    = ", time:t" timestamp-index .
/// smudge  = "" | ", smudged" .
/// length  = ", length:" number .
/// sha1    = ", sha1:" hex-sha1 .
/// content = ", content:" blob-data .
/// </pre>
/// 'stage' is only present when the stage differs from 0. All reported time
/// stamps are mapped to strings like "t0", "t1", ..., "tn"; the smallest
/// reported time stamp is called "t0". This makes it possible to write
/// assertions against the string even though the concrete values of the time
/// stamps are unknown.
/// </remarks>
/// <param name="repo">the repository the index state should be determined for</param>
/// <param name="includedOptions">
/// a bitmask constructed out of the constants
/// <see cref="MOD_TIME">MOD_TIME</see>, <see cref="SMUDGE">SMUDGE</see>,
/// <see cref="LENGTH">LENGTH</see>, <see cref="CONTENT_ID">CONTENT_ID</see>,
/// <see cref="CONTENT">CONTENT</see> and <see cref="ASSUME_UNCHANGED">ASSUME_UNCHANGED</see>
/// controlling which info is present in the resulting string.
/// </param>
/// <returns>a string encoding the index state</returns>
/// <exception cref="System.InvalidOperationException"></exception>
/// <exception cref="System.IO.IOException"></exception>
public virtual string IndexState(Repository repo, int includedOptions)
{
    DirCache dc = repo.ReadDirCache();
    StringBuilder sb = new StringBuilder();
    TreeSet<long> timeStamps = null;
    // iterate once over the dircache just to collect all time stamps
    if (0 != (includedOptions & MOD_TIME))
    {
        timeStamps = new TreeSet<long>();
        for (int i = 0; i < dc.GetEntryCount(); ++i)
        {
            timeStamps.AddItem(Sharpen.Extensions.ValueOf(dc.GetEntry(i).LastModified));
        }
    }
    // iterate again, now produce the result string
    for (int i = 0; i < dc.GetEntryCount(); ++i)
    {
        DirCacheEntry entry = dc.GetEntry(i);
        sb.Append("[" + entry.PathString + ", mode:" + entry.FileMode);
        int stage = entry.Stage;
        if (stage != 0)
        {
            sb.Append(", stage:" + stage);
        }
        if (0 != (includedOptions & MOD_TIME))
        {
            // the entry's rank among all collected time stamps is its "tn" label
            sb.Append(", time:t" + timeStamps.HeadSet(Sharpen.Extensions.ValueOf(entry.LastModified)).Count);
        }
        if (0 != (includedOptions & SMUDGE))
        {
            if (entry.IsSmudged)
            {
                sb.Append(", smudged");
            }
        }
        if (0 != (includedOptions & LENGTH))
        {
            sb.Append(", length:" + Sharpen.Extensions.ToString(entry.Length));
        }
        if (0 != (includedOptions & CONTENT_ID))
        {
            sb.Append(", sha1:" + ObjectId.ToString(entry.GetObjectId()));
        }
        if (0 != (includedOptions & CONTENT))
        {
            sb.Append(", content:" + Sharpen.Runtime.GetStringForBytes(
                db.Open(entry.GetObjectId(), Constants.OBJ_BLOB).GetCachedBytes(), "UTF-8"));
        }
        if (0 != (includedOptions & ASSUME_UNCHANGED))
        {
            sb.Append(", assume-unchanged:" + entry.IsAssumeValid.ToString().ToLower());
        }
        sb.Append("]");
    }
    return sb.ToString();
}
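// For illustration only (hypothetical file names and values): an index with
// two entries rendered with includedOptions = MOD_TIME | LENGTH could produce
//
//   [a.txt, mode:100644, time:t0, length:1][b.txt, mode:100644, time:t1, length:2]
//
// where "t0" labels the smallest modification time collected in the first pass.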
//$goals 33
//$benchmark
public void removeTest(TreeSet treeSet, int aKey) {
    if (treeSet != null && treeSet.repOK()) {
        boolean ret_val = treeSet.remove(aKey);
    }
}
private IEnumerable<BrowseFacet> BuildDynamicRanges()
{
    TreeSet<BrowseFacet> facetSet = new TreeSet<BrowseFacet>(new RangeComparator());
    int minCount = ospec.MinHitCount;
    // skip the element at index 0, which represents "no value"
    for (int i = 1; i < count.Length; ++i)
    {
        if (count[i] >= minCount)
        {
            object val = dataCache.valArray.GetRawValue(i);
            facetSet.Add(new BrowseFacet(val, count[i]));
        }
    }
    if (ospec.MaxCount <= 0)
    {
        ospec.MaxCount = 5;
    }
    int maxCount = ospec.MaxCount;
    BrowseFacet[] facets = facetSet.ToArray();
    if (facetSet.Count < maxCount)
    {
        ConvertFacets(facets);
    }
    else
    {
        facets = FoldChoices(facets, maxCount);
    }
    return facets;
}
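// RangeComparator is referenced above but not defined here. A minimal sketch
// of one plausible implementation (an assumption, not the original BoboBrowse
// code; the BrowseFacet property name is assumed as well): keep the facets
// ordered by the string form of their range value, which is what the TreeSet
// constructor above relies on.
internal sealed class RangeComparator : System.Collections.Generic.IComparer<BrowseFacet>
{
    public int Compare(BrowseFacet x, BrowseFacet y)
    {
        // Ordinal comparison of the raw values' string forms.
        return string.CompareOrdinal(
            System.Convert.ToString(x.Value),
            System.Convert.ToString(y.Value));
    }
}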
// Decompiled constructor: the original contained a spurious static-initializer
// call (TreeSet.__<clinit>(), which the runtime performs automatically) and an
// anonymous comparator class that the decompiler rendered as "new 1(this)".
// The comparator name below is a hypothetical stand-in for that lost
// anonymous class.
public MTMessagesContainer()
{
    this.messages = new TreeSet(new MessageComparator(this));
}