/// <summary>Appends the given file groups to the shared group list, serialized on the list itself.</summary>
protected void AddGroups(IEnumerable<FileGroup> fileGroups)
{
    lock (_groups)
    {
        _groups.AddAll(fileGroups);
    }
}
/// <summary>
/// Derives the sorted set of schedule values for a unit parameter. Returns null for
/// cron parameters (not handled as number sets) and for number sets that cover the
/// unit's entire range (wildcard).
/// </summary>
private static SortedSet<int> ComputeValues(Object unitParameter, ScheduleUnit unit)
{
    if (unitParameter.IsInt())
    {
        // A single integer parameter yields a one-element set.
        return new SortedSet<int> { unitParameter.AsInt() };
    }
    // cron parameters not handled as number sets
    if (unitParameter is CronParameter)
    {
        return null;
    }
    var numberSetParam = (NumberSetParameter)unitParameter;
    if (numberSetParam.IsWildcard(unit.Min(), unit.Max()))
    {
        return null;
    }
    var sorted = new SortedSet<int>();
    sorted.AddAll(numberSetParam.GetValuesInRange(unit.Min(), unit.Max()));
    return sorted;
}
/// <summary>
/// Reads the PDF at <paramref name="path"/> and extracts text page by page,
/// accumulating vertical runs into Verticals and per-page text into Pages.
/// Stops early if a page cannot be retrieved.
/// </summary>
private void Convert(string path)
{
    using (PdfReader reader = new PdfReader(path))
    {
        var document = new PdfDocument(reader);
        int pageCount = document.GetNumberOfPages();
        for (int pageNumber = 1; pageNumber <= pageCount; pageNumber++)
        {
            var page = document.GetPage(pageNumber);
            if (page == null)
            {
                // No further pages available — stop processing.
                break;
            }
            var strategy = new TextExtractStrategy(page.GetPageSize());
            PdfTextExtractor.GetTextFromPage(page, strategy);
            Verticals.AddAll(strategy.Verticals);
            Pages.Add(new PdfPageText(strategy.Verticals, strategy.Lines));
        }
    }
}
/// <summary>Returns the distinct items of <paramref name="items"/> in sorted order,
/// using the default comparer for <typeparamref name="T"/>.</summary>
/// <param name="items">The sequence to sort; must not be null.</param>
/// <returns>A sorted, de-duplicated view of the input.</returns>
public static IEnumerable<T> GetSorted<T>(this IEnumerable<T> items)
{
    // SortedSet's IEnumerable constructor copies and sorts the sequence directly;
    // no need to create an empty set and bulk-add afterwards.
    return new SortedSet<T>(items);
}
/// <summary>
/// Routes to target names with the configured complex-keys sharding algorithm.
/// Route values are split per column into equality (list) values and range values
/// before being handed to the algorithm; the result is returned as a
/// case-insensitive sorted set.
/// </summary>
public ICollection<string> DoSharding(ICollection<string> availableTargetNames, ICollection<IRouteValue> shardingValues, ConfigurationProperties properties)
{
    IDictionary<string, ICollection<IComparable>> listValuesByColumn = new Dictionary<string, ICollection<IComparable>>(shardingValues.Count);
    IDictionary<string, Range<IComparable>> rangeValuesByColumn = new Dictionary<string, Range<IComparable>>(shardingValues.Count);
    string logicTableName = "";
    foreach (var routeValue in shardingValues)
    {
        switch (routeValue)
        {
            case ListRouteValue listValue:
                listValuesByColumn.Add(routeValue.GetColumnName(), listValue.GetValues());
                break;
            case RangeRouteValue rangeValue:
                rangeValuesByColumn.Add(routeValue.GetColumnName(), rangeValue.GetValueRange());
                break;
        }
        // The last route value seen supplies the logic table name.
        logicTableName = routeValue.GetTableName();
    }
    var shardingResult = shardingAlgorithm.DoSharding(availableTargetNames,
        new ComplexKeysShardingValue<IComparable>(logicTableName, listValuesByColumn, rangeValuesByColumn));
    var result = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);
    result.AddAll(shardingResult);
    return result;
}
/// <summary>
/// Derives the sorted set of schedule values for a unit parameter. Returns null
/// for cron parameters and for number sets spanning the unit's whole range
/// (wildcard).
/// </summary>
private static ICollection<int> ComputeValues(
    object unitParameter,
    ScheduleUnit unit)
{
    // A plain integer becomes a one-element set.
    if (unitParameter is int singleValue)
    {
        return new SortedSet<int> { singleValue };
    }

    // cron parameters not handled as number sets
    if (unitParameter is CronParameter)
    {
        return null;
    }

    var numberSet = (NumberSetParameter) unitParameter;
    if (numberSet.IsWildcard(unit.Min(), unit.Max()))
    {
        return null;
    }

    var sorted = new SortedSet<int>();
    sorted.AddAll(numberSet.GetValuesInRange(unit.Min(), unit.Max()));
    return sorted;
}
/// <summary>Replaces the stored domains with <paramref name="domains"/>; a null argument leaves the list empty.</summary>
public void SetDomains(Uri[] domains)
{
    _domains.Clear();
    if (domains == null)
    {
        return;
    }
    _domains.AddAll(domains);
}
/// <summary>Replaces the stored referenced service provider catalogs; a null argument leaves the list empty.</summary>
public void SetReferencedServiceProviderCatalogs(Uri[] referencedServiceProviderCatalogs)
{
    _referencedServiceProviderCatalogs.Clear();
    if (referencedServiceProviderCatalogs == null)
    {
        return;
    }
    _referencedServiceProviderCatalogs.AddAll(referencedServiceProviderCatalogs);
}
/// <summary>Replaces the stored usages with <paramref name="usages"/>; a null argument leaves the list empty.</summary>
public void SetUsages(Uri[] usages)
{
    _usages.Clear();
    if (usages == null)
    {
        return;
    }
    _usages.AddAll(usages);
}
/// <summary>Replaces the stored resource types; a null argument leaves the list empty.</summary>
public void SetResourceTypes(Uri[] resourceTypes)
{
    _resourceTypes.Clear();
    if (resourceTypes == null)
    {
        return;
    }
    _resourceTypes.AddAll(resourceTypes);
}
/// <summary>
/// Creates a complex sharding strategy from its configuration. The comma-separated
/// sharding column list is split (empty entries dropped) into a case-insensitive
/// sorted set.
/// </summary>
public ComplexShardingStrategy(ComplexShardingStrategyConfiguration complexShardingStrategyConfig)
{
    ShardingAssert.ShouldBeNotNull(complexShardingStrategyConfig.ShardingColumns, "Sharding columns cannot be null.");
    ShardingAssert.ShouldBeNotNull(complexShardingStrategyConfig.ShardingAlgorithm, "Sharding algorithm cannot be null.");
    var columnNames = complexShardingStrategyConfig.ShardingColumns.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
    shardingColumns = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);
    shardingColumns.AddAll(columnNames);
    shardingAlgorithm = complexShardingStrategyConfig.ShardingAlgorithm;
}
/// <summary>Replaces the stored details with <paramref name="details"/>; a null argument leaves the list empty.</summary>
public void SetDetails(Uri[] details)
{
    _details.Clear();
    if (details == null)
    {
        return;
    }
    _details.AddAll(details);
}
/// <summary>Gets the names of all the added resources.</summary>
/// <returns>the name of all the added resources.</returns>
public virtual ICollection<PdfName> GetResourceNames()
{
    ICollection<PdfName> allNames = new SortedSet<PdfName>();
    foreach (PdfName resourceType in GetPdfObject().KeySet())
    {
        // Collect the names registered under each resource type.
        allNames.AddAll(GetResourceNames(resourceType));
    }
    return allNames;
}
/// <summary>
/// Adds picture files to the collection. A new sorted set is built from the current
/// files plus the additions and then swapped in under the lock, so readers never see
/// a partially-updated set; the Changed flag is raised afterwards.
/// </summary>
protected internal void AddFiles(IEnumerable<PictureFile.PictureFile> files)
{
    lock (_filesLock)
    {
        var merged = new SortedSet<PictureFile.PictureFile>(_files);
        merged.AddAll(files);
        _files = merged;
        Changed = true;
    }
}
/// <summary>
/// Routes with the hint sharding algorithm, driven by the first route value only.
/// The algorithm result is returned as a case-insensitive sorted set.
/// </summary>
public ICollection<string> DoSharding(ICollection<string> availableTargetNames, ICollection<IRouteValue> shardingValues, ConfigurationProperties properties)
{
    var listValue = (ListRouteValue)shardingValues.First();
    var hintValue = new HintShardingValue<IComparable>(listValue.GetTableName(), listValue.GetColumnName(), listValue.GetValues());
    var shardingResult = ShardingAlgorithm.DoSharding(availableTargetNames, hintValue);
    var result = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);
    result.AddAll(shardingResult);
    return result;
}
/// <summary>Gets the names of all the added resources.</summary>
/// <returns>the name of all the added resources.</returns>
public virtual ICollection<PdfName> GetResourceNames()
{
    // TODO: isn't it better to use HashSet? Do we really need certain order?
    ICollection<PdfName> resourceNames = new SortedSet<PdfName>();
    foreach (PdfName type in GetPdfObject().KeySet())
    {
        resourceNames.AddAll(GetResourceNames(type));
    }
    return resourceNames;
}
/// <summary>
/// Routes by the first route value, dispatching to the list-based or range-based
/// overload, and returns the targets as a case-insensitive sorted set.
/// </summary>
public ICollection<string> DoSharding(ICollection<string> availableTargetNames, ICollection<IRouteValue> shardingValues, ConfigurationProperties properties)
{
    IRouteValue firstValue = shardingValues.First();
    ICollection<string> shardingResult;
    if (firstValue is ListRouteValue listValue)
    {
        shardingResult = DoSharding(availableTargetNames, listValue);
    }
    else
    {
        shardingResult = DoSharding(availableTargetNames, (RangeRouteValue)firstValue);
    }
    ICollection<string> result = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);
    result.AddAll(shardingResult);
    return result;
}
/// <summary>
/// Updates queueToAsk with new data, returns if we found peers closer than we already know.
/// </summary>
/// <param name="queueToAsk">The queue to get updated.</param>
/// <param name="newPeers">The new peers reported from remote peers. Since the remote peers
/// do not know what we know, we need to filter this information.</param>
/// <param name="alreadyAsked">The peers we already know.</param>
/// <returns>True, if we added peers that are closer to the target than we already knew.
/// Please note, it will return false if we add new peers that are not closer to a target.</returns>
private static bool Merge(SortedSet<PeerAddress> queueToAsk, ICollection<PeerAddress> newPeers, ICollection<PeerAddress> alreadyAsked)
{
    // Keep only reported peers we have not asked yet, ordered like the queue.
    var unknownPeers = new SortedSet<PeerAddress>(queueToAsk.Comparer);
    Utils.Utils.Difference(newPeers, unknownPeers, alreadyAsked);
    if (unknownPeers.Count == 0)
    {
        return false;
    }
    // Judge closeness by the best (minimum) new peer before merging into the queue.
    var closestNewPeer = unknownPeers.Min;
    var foundCloser = IsNew(queueToAsk, closestNewPeer);
    queueToAsk.AddAll(unknownPeers);
    return foundCloser;
}
/// <summary>
/// Tests whether a <see cref="Geometry" /> is sequenced correctly.
/// <see cref="LineString" />s are trivially sequenced.
/// <see cref="MultiLineString" />s are checked for correct sequencing.
/// Otherwise, <c>IsSequenced</c> is defined
/// to be <c>true</c> for geometries that are not lineal.
/// </summary>
/// <param name="geom">The <see cref="Geometry" /> to test.</param>
/// <returns>
/// <c>true</c> if the <see cref="Geometry" /> is sequenced or is not lineal.
/// </returns>
public static bool IsSequenced(IGeometry geom)
{
    // Only MultiLineStrings need checking; everything else is trivially sequenced.
    if (!(geom is IMultiLineString))
    {
        return(true);
    }
    IMultiLineString mls = geom as IMultiLineString;
    // The nodes in all subgraphs which have been completely scanned
    ISet<ICoordinate> prevSubgraphNodes = new SortedSet<ICoordinate>();
    ICoordinate lastNode = null;
    IList<ICoordinate> currNodes = new List<ICoordinate>();
    for (int i = 0; i < mls.NumGeometries; i++)
    {
        ILineString line = (ILineString)mls.GetGeometryN(i);
        ICoordinate startNode = line.GetCoordinateN(0);
        ICoordinate endNode = line.GetCoordinateN(line.NumPoints - 1);
        /*
         * If this linestring is connected to a previous subgraph, geom is not sequenced
         */
        if (prevSubgraphNodes.Contains(startNode))
        {
            return(false);
        }
        if (prevSubgraphNodes.Contains(endNode))
        {
            return(false);
        }
        // NOTE(review): `!=` here is a reference comparison on ICoordinate — presumably
        // coordinates are shared/canonical instances; confirm this matches the intended
        // value-equality semantics.
        if (lastNode != null && startNode != lastNode)
        {
            // start new connected sequence: archive the nodes of the finished subgraph
            prevSubgraphNodes.AddAll(currNodes);
            currNodes.Clear();
        }
        currNodes.Add(startNode);
        currNodes.Add(endNode);
        lastNode = endNode;
    }
    return(true);
}
/**
 * Judge logic tables is all belong to binding encryptors.
 *
 * @param logicTableNames logic table names
 * @return logic tables is all belong to binding encryptors or not
 */
public bool IsAllBindingTables(ICollection<string> logicTableNames)
{
    if (logicTableNames.IsEmpty())
    {
        return false;
    }
    var bindingTableRule = FindBindingTableRule(logicTableNames);
    if (bindingTableRule == null)
    {
        return false;
    }
    // Case-insensitive membership check against the binding rule's full table list.
    ICollection<string> bindingTables = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);
    bindingTables.AddAll(bindingTableRule.GetAllLogicTables());
    return !bindingTables.IsEmpty() && logicTableNames.All(bindingTables.Contains);
}
// Builds 2..10 PrefixCodedTerms blocks from random terms, then verifies that the
// MergedIterator over the per-block enumerators yields exactly the sorted union
// (superSet) of all terms.
public virtual void TestMergeRandom()
{
    PrefixCodedTerms[] pb = new PrefixCodedTerms[TestUtil.NextInt(Random(), 2, 10)];
    SortedSet<Term> superSet = new SortedSet<Term>();
    for (int i = 0; i < pb.Length; i++)
    {
        // Each block gets 0..10000 random terms; superSet accumulates the union.
        SortedSet<Term> terms = new SortedSet<Term>();
        int nterms = TestUtil.NextInt(Random(), 0, 10000);
        for (int j = 0; j < nterms; j++)
        {
            Term term = new Term(TestUtil.RandomUnicodeString(Random(), 2), TestUtil.RandomUnicodeString(Random(), 4));
            terms.Add(term);
        }
        superSet.AddAll(terms);
        PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
        foreach (Term @ref in terms)
        {
            b.Add(@ref);
        }
        pb[i] = b.Finish();
    }
    List<IEnumerator<Term>> subs = new List<IEnumerator<Term>>();
    for (int i = 0; i < pb.Length; i++)
    {
        subs.Add(pb[i].GetEnumerator());
    }
    // The merged iteration must match the sorted union element-for-element.
    IEnumerator<Term> expected = superSet.GetEnumerator();
    IEnumerator<Term> actual = new MergedIterator<Term>(subs.ToArray());
    while (actual.MoveNext())
    {
        Assert.IsTrue(expected.MoveNext());
        Assert.AreEqual(expected.Current, actual.Current);
    }
    Assert.IsFalse(expected.MoveNext());
}
/// <summary>Visits all of the states reachable from <paramref name="start"/> with the given visitor.</summary>
/// <param name="visitor">the state visitor</param>
/// <param name="start">the place to start the search</param>
/// <param name="sorted">if true, states are sorted (by state number, via FirstComparer) before being visited</param>
/// <returns>true if the visiting was terminated before all nodes were visited</returns>
public static Boolean VisitStates(ISentenceHMMStateVisitor visitor, SentenceHMMState start, Boolean sorted)
{
    IEnumerable<SentenceHMMState> states = CollectStates(start);
    if (sorted)
    {
        // sort the states by stateNumber
        var ordered = new SortedSet<SentenceHMMState>(new FirstComparer());
        ordered.AddAll(states);
        states = ordered;
    }
    foreach (var state in states)
    {
        if (visitor.Visit(state))
        {
            // Visitor requested early termination.
            return true;
        }
    }
    return false;
}
// Builds 2..10 PrefixCodedTerms blocks from random terms, then verifies that the
// MergedIterator over the per-block enumerators yields exactly the sorted union
// (superSet) of all terms.
public virtual void TestMergeRandom()
{
    PrefixCodedTerms[] pb = new PrefixCodedTerms[TestUtil.NextInt(Random(), 2, 10)];
    SortedSet<Term> superSet = new SortedSet<Term>();
    for (int i = 0; i < pb.Length; i++)
    {
        // Each block gets 0..10000 random terms; superSet accumulates the union.
        SortedSet<Term> terms = new SortedSet<Term>();
        int nterms = TestUtil.NextInt(Random(), 0, 10000);
        for (int j = 0; j < nterms; j++)
        {
            Term term = new Term(TestUtil.RandomUnicodeString(Random(), 2), TestUtil.RandomUnicodeString(Random(), 4));
            terms.Add(term);
        }
        superSet.AddAll(terms);
        PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
        foreach (Term @ref in terms)
        {
            b.Add(@ref);
        }
        pb[i] = b.Finish();
    }
    List<IEnumerator<Term>> subs = new List<IEnumerator<Term>>();
    for (int i = 0; i < pb.Length; i++)
    {
        subs.Add(pb[i].GetEnumerator());
    }
    // The merged iteration must match the sorted union element-for-element.
    IEnumerator<Term> expected = superSet.GetEnumerator();
    IEnumerator<Term> actual = new MergedIterator<Term>(subs.ToArray());
    while (actual.MoveNext())
    {
        Assert.IsTrue(expected.MoveNext());
        Assert.AreEqual(expected.Current, actual.Current);
    }
    Assert.IsFalse(expected.MoveNext());
}
/// <summary>
/// Looks for a route to the given peer address.
/// </summary>
/// <param name="peerAddresses">Nodes that should be asked first for a route.</param>
/// <param name="routingBuilder">Carries the routing parameters (location/domain/content keys and ranges).</param>
/// <param name="type">Request2 triggers a storage digest lookup, Request3 a tracker digest lookup.</param>
/// <param name="cc">The channel creator used for the recursive routing requests.</param>
/// <returns>A task object that is set to complete if the route has been found.</returns>
private TcsRouting Routing(ICollection<PeerAddress> peerAddresses, RoutingBuilder routingBuilder,
    Message.Message.MessageType type, ChannelCreator cc)
{
    try
    {
        if (peerAddresses == null)
        {
            throw new ArgumentException("Some nodes/addresses need to be specified.");
        }
        // Without a location key we do a random search; otherwise order by distance to the key.
        bool randomSearch = routingBuilder.LocationKey == null;
        IComparer<PeerAddress> comparer;
        if (randomSearch)
        {
            comparer = _peerBean.PeerMap.CreateComparer();
        }
        else
        {
            comparer = PeerMap.CreateComparer(routingBuilder.LocationKey);
        }
        var queueToAsk = new SortedSet<PeerAddress>(comparer);
        var alreadyAsked = new SortedSet<PeerAddress>(comparer);

        // As presented by Kazuyuki Shudo at AIMS 2009, it is better to ask random
        // peers with the data than ask peers that are ordered by distance.
        // -> this balances load
        var directHits = new SortedDictionary<PeerAddress, DigestInfo>(comparer);
        var potentialHits = new SortedSet<PeerAddress>(comparer);

        // fill initially
        queueToAsk.AddAll(peerAddresses);
        alreadyAsked.Add(_peerBean.ServerPeerAddress);
        potentialHits.Add(_peerBean.ServerPeerAddress);

        // domain key can be null if we bootstrap
        if (type == Message.Message.MessageType.Request2 && routingBuilder.DomainKey != null && !randomSearch && _peerBean.DigestStorage != null)
        {
            Number640 from;
            Number640 to;
            if (routingBuilder.From != null && routingBuilder.To != null)
            {
                // An explicit range was requested.
                from = routingBuilder.From;
                to = routingBuilder.To;
            }
            else if (routingBuilder.ContentKey == null)
            {
                // No content key: digest the entire content range of location/domain.
                from = new Number640(routingBuilder.LocationKey, routingBuilder.DomainKey, Number160.Zero, Number160.Zero);
                to = new Number640(routingBuilder.LocationKey, routingBuilder.DomainKey, Number160.MaxValue, Number160.MaxValue);
            }
            else
            {
                // Content key given: digest all versions of that specific content only.
                from = new Number640(routingBuilder.LocationKey, routingBuilder.DomainKey, routingBuilder.ContentKey, Number160.Zero);
                to = new Number640(routingBuilder.LocationKey, routingBuilder.DomainKey, routingBuilder.ContentKey, Number160.MaxValue);
            }
            var digestBean = _peerBean.DigestStorage.Digest(from, to, -1, true);
            if (digestBean.Size > 0)
            {
                // We hold matching data ourselves — record ourselves as a direct hit.
                directHits.Add(_peerBean.ServerPeerAddress, digestBean);
            }
        }
        else if (type == Message.Message.MessageType.Request3 && !randomSearch && _peerBean.DigestTracker != null)
        {
            var digestInfo = _peerBean.DigestTracker.Digest(routingBuilder.LocationKey, routingBuilder.DomainKey,
                routingBuilder.ContentKey);
            // we always put ourselfs to the tracker list, so we need to check
            // if we know also other peers on our trackers
            if (digestInfo.Size > 0)
            {
                directHits.Add(_peerBean.ServerPeerAddress, digestInfo);
            }
        }
        var tcsRouting = new TcsRouting();
        if (peerAddresses.Count == 0)
        {
            // Nobody to ask: complete immediately with the locally-known results.
            tcsRouting.SetNeighbors(directHits, potentialHits, alreadyAsked, routingBuilder.IsBootstrap, false);
        }
        else
        {
            // If a peer bootstraps to itself, then the size of peer addresses is 1
            // and it contains itself. Check for that because we need to know if we
            // are routing, bootstrapping and bootstrapping to ourselfs, to return
            // the correct status for the task.
            var isRoutingOnlyToSelf = peerAddresses.Count == 1 && peerAddresses.First().Equals(_peerBean.ServerPeerAddress);
            var routingMechanism = routingBuilder.CreateRoutingMechanism(tcsRouting);
            routingMechanism.SetQueueToAsk(queueToAsk);
            routingMechanism.SetPotentialHits(potentialHits);
            routingMechanism.SetDirectHits(directHits);
            routingMechanism.SetAlreadyAsked(alreadyAsked);
            routingBuilder.SetIsRoutingOnlyToSelf(isRoutingOnlyToSelf);
            RoutingRec(routingBuilder, routingMechanism, type, cc);
        }
        return(tcsRouting);
    }
    catch (Exception ex)
    {
        Logger.Error("An exception occurred during routing.", ex);
        throw;
    }
}
/// <summary>
/// Routes each logic table through the standard routing engine, skipping tables whose
/// binding-table group has already been routed, and combines multiple results with the
/// cartesian routing engine.
/// </summary>
public RouteResult Route(ShardingRule shardingRule)
{
    ICollection<RouteResult> routedResults = new List<RouteResult>(logicTables.Count);
    ICollection<String> bindingTableNames = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);
    foreach (var logicTable in logicTables)
    {
        var tableRule = shardingRule.FindTableRule(logicTable);
        if (tableRule == null)
        {
            continue;
        }
        // Tables already covered by a routed binding group are not routed again.
        if (!bindingTableNames.Contains(logicTable))
        {
            routedResults.Add(new ShardingStandardRoutingEngine(tableRule.LogicTable, sqlStatementContext, shardingConditions, properties).Route(shardingRule));
        }
        shardingRule.FindBindingTableRule(logicTable).IfPresent(bindingTableRule =>
            bindingTableNames.AddAll(bindingTableRule.GetTableRules().Select(o => o.LogicTable).ToList()));
    }
    if (routedResults.IsEmpty())
    {
        throw new ShardingException($"Cannot find table rule and default data source with logic tables: '{logicTables}'");
    }
    if (routedResults.Count == 1)
    {
        return routedResults.First();
    }
    return new ShardingCartesianRoutingEngine(routedResults).Route(shardingRule);
}
/// <summary>
/// Compute a deployment order among the modules passed in considering their uses-dependency declarations.
/// <para />The operation also checks and reports circular dependencies.
/// <para />Pass in @{link ModuleOrderOptions} to customize the behavior if this method. When passing no options
/// or passing default options, the default behavior checks uses-dependencies and circular dependencies.
/// </summary>
/// <param name="modules">to determine ordering for</param>
/// <param name="options">operation options or null for default options</param>
/// <param name="deployedModules">deployed modules</param>
/// <returns>ordered modules</returns>
/// <throws>ModuleOrderException when any module dependencies are not satisfied</throws>
public static ModuleOrder GetModuleOrder(
    ICollection<Module> modules,
    ISet<string> deployedModules,
    ModuleOrderOptions options)
{
    if (options == null)
    {
        options = new ModuleOrderOptions();
    }

    IList<Module> proposedModules = new List<Module>();
    proposedModules.AddAll(modules);

    // Names declared by the proposed modules themselves.
    ISet<string> availableModuleNames = new HashSet<string>();
    foreach (Module proposedModule in proposedModules)
    {
        if (proposedModule.Name != null)
        {
            availableModuleNames.Add(proposedModule.Name);
        }
    }

    // Collect all deployed modules
    ISet<string> allDeployedModules = new HashSet<string>();
    allDeployedModules.AddAll(deployedModules);
    foreach (Module proposedModule in proposedModules)
    {
        allDeployedModules.Add(proposedModule.Name);
    }

    // Collect uses-dependencies of proposed modules
    IDictionary<string, ISet<string>> usesPerModuleName = new Dictionary<string, ISet<string>>();
    foreach (Module proposedModule in proposedModules)
    {
        // check uses-dependency is available: it must be either proposed or already deployed
        if (options.IsCheckUses)
        {
            if (proposedModule.Uses != null)
            {
                foreach (string uses in proposedModule.Uses)
                {
                    if (availableModuleNames.Contains(uses))
                    {
                        continue;
                    }
                    bool deployed = allDeployedModules.Contains(uses);
                    if (deployed)
                    {
                        continue;
                    }
                    string message = "Module-dependency not found";
                    if (proposedModule.Name != null)
                    {
                        message += " as declared by module '" + proposedModule.Name + "'";
                    }
                    message += " for uses-declaration '" + uses + "'";
                    throw new ModuleOrderException(message);
                }
            }
        }
        if ((proposedModule.Name == null) || (proposedModule.Uses == null))
        {
            continue;
        }
        ISet<string> usesSet = usesPerModuleName.Get(proposedModule.Name);
        if (usesSet == null)
        {
            usesSet = new HashSet<string>();
            usesPerModuleName.Put(proposedModule.Name, usesSet);
        }
        usesSet.AddAll(proposedModule.Uses);
    }

    // Map each module name (null keys allowed for anonymous modules) to the list
    // positions at which it appears.
    IDictionary<string, SortedSet<int>> proposedModuleNames = new Dictionary<string, SortedSet<int>>()
        .WithNullKeySupport();
    int count = 0;
    foreach (Module proposedModule in proposedModules)
    {
        SortedSet<int> moduleNumbers = proposedModuleNames.Get(proposedModule.Name);
        if (moduleNumbers == null)
        {
            moduleNumbers = new SortedSet<int>();
            proposedModuleNames.Put(proposedModule.Name, moduleNumbers);
        }
        moduleNumbers.Add(count);
        count++;
    }

    // Build the dependency graph: each module index depends on the indices of the
    // modules it uses (self-dependencies removed).
    DependencyGraph graph = new DependencyGraph(proposedModules.Count, false);
    int fromModule = 0;
    foreach (Module proposedModule in proposedModules)
    {
        if ((proposedModule.Uses == null) || (proposedModule.Uses.IsEmpty()))
        {
            fromModule++;
            continue;
        }
        SortedSet<int> dependentModuleNumbers = new SortedSet<int>();
        foreach (string use in proposedModule.Uses)
        {
            SortedSet<int> moduleNumbers = proposedModuleNames.Get(use);
            if (moduleNumbers == null)
            {
                continue;
            }
            dependentModuleNumbers.AddAll(moduleNumbers);
        }
        dependentModuleNumbers.Remove(fromModule);
        graph.AddDependency(fromModule, dependentModuleNumbers);
        fromModule++;
    }

    if (options.IsCheckCircularDependency)
    {
        // Stack<int>
        var circular = graph.FirstCircularDependency;
        if (circular != null)
        {
            string message = "";
            string delimiter = "";
            foreach (int i in circular)
            {
                message += delimiter;
                message += "module '" + proposedModules[i].Name + "'";
                delimiter = " uses (depends on) ";
            }
            throw new ModuleOrderException(
                "Circular dependency detected in module uses-relationships: " + message);
        }
    }

    // Peel off root nodes (no unresolved dependencies) repeatedly; the resulting
    // list is in reverse deployment order.
    IList<Module> reverseDeployList = new List<Module>();
    ISet<int> ignoreList = new HashSet<int>();
    while (ignoreList.Count < proposedModules.Count)
    {
        // secondary sort according to the order of listing
        ISet<int> rootNodes = new SortedSet<int>(
            new ProxyComparer<int>() {
                ProcCompare = (
                    o1,
                    o2) => { return -1 * o1.CompareTo(o2); },
            });
        rootNodes.AddAll(graph.GetRootNodes(ignoreList));
        if (rootNodes.IsEmpty())
        {
            // circular dependency could cause this; force progress with the first
            // not-yet-handled module
            for (int i = 0; i < proposedModules.Count; i++)
            {
                if (!ignoreList.Contains(i))
                {
                    rootNodes.Add(i);
                    break;
                }
            }
        }
        foreach (int root in rootNodes)
        {
            ignoreList.Add(root);
            reverseDeployList.Add(proposedModules[root]);
        }
    }
    reverseDeployList.Reverse();
    return new ModuleOrder(reverseDeployList);
}
/// <summary>
/// Updates queueToAsk with new data, returns if we found peers closer than we already know.
/// </summary>
/// <param name="queueToAsk">The queue to get updated.</param>
/// <param name="newPeers">The new peers reported from remote peers. Since the remote peers
/// do not know what we know, we need to filter this information.</param>
/// <param name="alreadyAsked">The peers we already know.</param>
/// <returns>True, if we added peers that are closer to the target than we already knew.
/// Please note, it will return false if we add new peers that are not closer to a target.</returns>
private static bool Merge(SortedSet<PeerAddress> queueToAsk, ICollection<PeerAddress> newPeers, ICollection<PeerAddress> alreadyAsked)
{
    // Filter out peers we already asked, keeping the queue's ordering.
    var unknownPeers = new SortedSet<PeerAddress>(queueToAsk.Comparer);
    Utils.Utils.Difference(newPeers, unknownPeers, alreadyAsked);
    if (unknownPeers.Count == 0)
    {
        return false;
    }
    // Determine closeness from the best (minimum) new peer before merging.
    var closestNewPeer = unknownPeers.Min;
    var foundCloser = IsNew(queueToAsk, closestNewPeer);
    queueToAsk.AddAll(unknownPeers);
    return foundCloser;
}
/// <summary>
/// Builds a randomized from/to index pair for join testing and pre-computes, for
/// every unique random value, the expected per-document join scores.
/// </summary>
private IndexIterationContext CreateContext(int nDocs, RandomIndexWriter fromWriter, RandomIndexWriter toWriter,
    bool multipleValuesPerDocument, bool scoreDocsInOrder)
{
    IndexIterationContext context = new IndexIterationContext();
    int numRandomValues = nDocs / 2;
    context.RandomUniqueValues = new string[numRandomValues];
    ISet<string> trackSet = new HashSet<string>();
    context.RandomFrom = new bool[numRandomValues];
    for (int i = 0; i < numRandomValues; i++)
    {
        string uniqueRandomValue;
        do
        {
            uniqueRandomValue = TestUtil.RandomRealisticUnicodeString(Random());
            // uniqueRandomValue = TestUtil.randomSimpleString(random);
        } while ("".Equals(uniqueRandomValue, StringComparison.Ordinal) || trackSet.Contains(uniqueRandomValue));
        // Generate unique values and empty strings aren't allowed.
        trackSet.Add(uniqueRandomValue);
        context.RandomFrom[i] = Random().NextBoolean();
        context.RandomUniqueValues[i] = uniqueRandomValue;
    }

    // Index nDocs documents, each picking a random unique value and 1..11 link values.
    RandomDoc[] docs = new RandomDoc[nDocs];
    for (int i = 0; i < nDocs; i++)
    {
        string id = Convert.ToString(i);
        int randomI = Random().Next(context.RandomUniqueValues.Length);
        string value = context.RandomUniqueValues[randomI];
        Document document = new Document();
        document.Add(NewTextField(Random(), "id", id, Field.Store.NO));
        document.Add(NewTextField(Random(), "value", value, Field.Store.NO));
        bool from = context.RandomFrom[randomI];
        int numberOfLinkValues = multipleValuesPerDocument ? 2 + Random().Next(10) : 1;
        docs[i] = new RandomDoc(id, numberOfLinkValues, value, from);
        for (int j = 0; j < numberOfLinkValues; j++)
        {
            string linkValue = context.RandomUniqueValues[Random().Next(context.RandomUniqueValues.Length)];
            docs[i].LinkValues.Add(linkValue);
            if (from)
            {
                // "from" side: track documents per link value and per random value.
                if (!context.FromDocuments.ContainsKey(linkValue))
                {
                    context.FromDocuments[linkValue] = new List<RandomDoc>();
                }
                if (!context.RandomValueFromDocs.ContainsKey(value))
                {
                    context.RandomValueFromDocs[value] = new List<RandomDoc>();
                }
                context.FromDocuments[linkValue].Add(docs[i]);
                context.RandomValueFromDocs[value].Add(docs[i]);
                document.Add(NewTextField(Random(), "from", linkValue, Field.Store.NO));
            }
            else
            {
                // "to" side: mirror bookkeeping for the other join direction.
                if (!context.ToDocuments.ContainsKey(linkValue))
                {
                    context.ToDocuments[linkValue] = new List<RandomDoc>();
                }
                if (!context.RandomValueToDocs.ContainsKey(value))
                {
                    context.RandomValueToDocs[value] = new List<RandomDoc>();
                }
                context.ToDocuments[linkValue].Add(docs[i]);
                context.RandomValueToDocs[value].Add(docs[i]);
                document.Add(NewTextField(Random(), "to", linkValue, Field.Store.NO));
            }
        }
        RandomIndexWriter w;
        if (from)
        {
            w = fromWriter;
        }
        else
        {
            w = toWriter;
        }
        w.AddDocument(document);
        // Occasionally commit to exercise multi-segment behavior.
        if (Random().Next(10) == 4)
        {
            w.Commit();
        }
        if (VERBOSE)
        {
            Console.WriteLine("Added document[" + docs[i].Id + "]: " + document);
        }
    }

    // Pre-compute all possible hits for all unique random values. On top of this also compute all possible score for
    // any ScoreMode.
    IndexSearcher fromSearcher = NewSearcher(fromWriter.Reader);
    IndexSearcher toSearcher = NewSearcher(toWriter.Reader);
    for (int i = 0; i < context.RandomUniqueValues.Length; i++)
    {
        string uniqueRandomValue = context.RandomUniqueValues[i];
        string fromField;
        string toField;
        IDictionary<string, IDictionary<int, JoinScore>> queryVals;
        if (context.RandomFrom[i])
        {
            fromField = "from";
            toField = "to";
            queryVals = context.FromHitsToJoinScore;
        }
        else
        {
            // Reverse direction: join from "to" into "from".
            fromField = "to";
            toField = "from";
            queryVals = context.ToHitsToJoinScore;
        }
        IDictionary<BytesRef, JoinScore> joinValueToJoinScores = new Dictionary<BytesRef, JoinScore>();
        if (multipleValuesPerDocument)
        {
            fromSearcher.Search(new TermQuery(new Term("value", uniqueRandomValue)),
                new CollectorAnonymousInnerClassHelper3(this, context, fromField, joinValueToJoinScores));
        }
        else
        {
            fromSearcher.Search(new TermQuery(new Term("value", uniqueRandomValue)),
                new CollectorAnonymousInnerClassHelper4(this, context, fromField, joinValueToJoinScores));
        }
        IDictionary<int, JoinScore> docToJoinScore = new Dictionary<int, JoinScore>();
        if (multipleValuesPerDocument)
        {
            if (scoreDocsInOrder)
            {
                // Walk the terms of the target field in sorted join-value order.
                AtomicReader slowCompositeReader = SlowCompositeReaderWrapper.Wrap(toSearcher.IndexReader);
                Terms terms = slowCompositeReader.GetTerms(toField);
                if (terms != null)
                {
                    DocsEnum docsEnum = null;
                    TermsEnum termsEnum = null;
                    SortedSet<BytesRef> joinValues = new SortedSet<BytesRef>(BytesRef.UTF8SortedAsUnicodeComparer);
                    joinValues.AddAll(joinValueToJoinScores.Keys);
                    foreach (BytesRef joinValue in joinValues)
                    {
                        termsEnum = terms.GetIterator(termsEnum);
                        if (termsEnum.SeekExact(joinValue))
                        {
                            docsEnum = termsEnum.Docs(slowCompositeReader.LiveDocs, docsEnum, DocsFlags.NONE);
                            JoinScore joinScore = joinValueToJoinScores[joinValue];
                            for (int doc = docsEnum.NextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = docsEnum.NextDoc())
                            {
                                // First encountered join value determines the score.
                                // Something to keep in mind for many-to-many relations.
                                if (!docToJoinScore.ContainsKey(doc))
                                {
                                    docToJoinScore[doc] = joinScore;
                                }
                            }
                        }
                    }
                }
            }
            else
            {
                toSearcher.Search(new MatchAllDocsQuery(),
                    new CollectorAnonymousInnerClassHelper5(this, context, toField, joinValueToJoinScores, docToJoinScore));
            }
        }
        else
        {
            toSearcher.Search(new MatchAllDocsQuery(),
                new CollectorAnonymousInnerClassHelper6(this, toField, joinValueToJoinScores, docToJoinScore));
        }
        queryVals[uniqueRandomValue] = docToJoinScore;
    }

    fromSearcher.IndexReader.Dispose();
    toSearcher.IndexReader.Dispose();
    return(context);
}
/// <summary>
/// Looks for a route to the given peer address.
/// </summary>
/// <param name="peerAddresses">Nodes that should be asked first for a route.</param>
/// <param name="routingBuilder">Carries the routing parameters (location/domain/content keys and ranges).</param>
/// <param name="type">Request2 triggers a storage digest lookup, Request3 a tracker digest lookup.</param>
/// <param name="cc">The channel creator used for the recursive routing requests.</param>
/// <returns>A task object that is set to complete if the route has been found.</returns>
private TcsRouting Routing(ICollection<PeerAddress> peerAddresses, RoutingBuilder routingBuilder,
    Message.Message.MessageType type, ChannelCreator cc)
{
    try
    {
        if (peerAddresses == null)
        {
            throw new ArgumentException("Some nodes/addresses need to be specified.");
        }
        // Without a location key we do a random search; otherwise order by distance to the key.
        bool randomSearch = routingBuilder.LocationKey == null;
        IComparer<PeerAddress> comparer;
        if (randomSearch)
        {
            comparer = _peerBean.PeerMap.CreateComparer();
        }
        else
        {
            comparer = PeerMap.CreateComparer(routingBuilder.LocationKey);
        }
        var queueToAsk = new SortedSet<PeerAddress>(comparer);
        var alreadyAsked = new SortedSet<PeerAddress>(comparer);

        // As presented by Kazuyuki Shudo at AIMS 2009, it is better to ask random
        // peers with the data than ask peers that are ordered by distance.
        // -> this balances load
        var directHits = new SortedDictionary<PeerAddress, DigestInfo>(comparer);
        var potentialHits = new SortedSet<PeerAddress>(comparer);

        // fill initially
        queueToAsk.AddAll(peerAddresses);
        alreadyAsked.Add(_peerBean.ServerPeerAddress);
        potentialHits.Add(_peerBean.ServerPeerAddress);

        // domain key can be null if we bootstrap
        if (type == Message.Message.MessageType.Request2 && routingBuilder.DomainKey != null && !randomSearch && _peerBean.DigestStorage != null)
        {
            Number640 from;
            Number640 to;
            if (routingBuilder.From != null && routingBuilder.To != null)
            {
                // An explicit range was requested.
                from = routingBuilder.From;
                to = routingBuilder.To;
            }
            else if (routingBuilder.ContentKey == null)
            {
                // No content key: digest the entire content range of location/domain.
                from = new Number640(routingBuilder.LocationKey, routingBuilder.DomainKey, Number160.Zero, Number160.Zero);
                to = new Number640(routingBuilder.LocationKey, routingBuilder.DomainKey, Number160.MaxValue, Number160.MaxValue);
            }
            else
            {
                // Content key given: digest all versions of that specific content only.
                from = new Number640(routingBuilder.LocationKey, routingBuilder.DomainKey, routingBuilder.ContentKey, Number160.Zero);
                to = new Number640(routingBuilder.LocationKey, routingBuilder.DomainKey, routingBuilder.ContentKey, Number160.MaxValue);
            }
            var digestBean = _peerBean.DigestStorage.Digest(from, to, -1, true);
            if (digestBean.Size > 0)
            {
                // We hold matching data ourselves — record ourselves as a direct hit.
                directHits.Add(_peerBean.ServerPeerAddress, digestBean);
            }
        }
        else if (type == Message.Message.MessageType.Request3 && !randomSearch && _peerBean.DigestTracker != null)
        {
            var digestInfo = _peerBean.DigestTracker.Digest(routingBuilder.LocationKey, routingBuilder.DomainKey,
                routingBuilder.ContentKey);
            // we always put ourselfs to the tracker list, so we need to check
            // if we know also other peers on our trackers
            if (digestInfo.Size > 0)
            {
                directHits.Add(_peerBean.ServerPeerAddress, digestInfo);
            }
        }
        var tcsRouting = new TcsRouting();
        if (peerAddresses.Count == 0)
        {
            // Nobody to ask: complete immediately with the locally-known results.
            tcsRouting.SetNeighbors(directHits, potentialHits, alreadyAsked, routingBuilder.IsBootstrap, false);
        }
        else
        {
            // If a peer bootstraps to itself, then the size of peer addresses is 1
            // and it contains itself. Check for that because we need to know if we
            // are routing, bootstrapping and bootstrapping to ourselfs, to return
            // the correct status for the task.
            var isRoutingOnlyToSelf = peerAddresses.Count == 1 && peerAddresses.First().Equals(_peerBean.ServerPeerAddress);
            var routingMechanism = routingBuilder.CreateRoutingMechanism(tcsRouting);
            routingMechanism.SetQueueToAsk(queueToAsk);
            routingMechanism.SetPotentialHits(potentialHits);
            routingMechanism.SetDirectHits(directHits);
            routingMechanism.SetAlreadyAsked(alreadyAsked);
            routingBuilder.SetIsRoutingOnlyToSelf(isRoutingOnlyToSelf);
            RoutingRec(routingBuilder, routingMechanism, type, cc);
        }
        return tcsRouting;
    }
    catch (Exception ex)
    {
        Logger.Error("An exception occurred during routing.", ex);
        throw;
    }
}
/**
 * Resolve compound (composite) glyph references.
 *
 * Scans the raw 'glyf' table data for every glyph id currently in glyphIds;
 * whenever a glyph's data marks it as a composite glyph, the ids of its
 * component glyphs are added to glyphIds as well. Because a component can
 * itself be a composite, the whole scan repeats until a full pass adds no
 * new ids. Runs at most once per instance (guarded by
 * hasAddedCompoundReferences).
 */
private void AddCompoundReferences()
{
    if (hasAddedCompoundReferences)
    {
        return;
    }
    hasAddedCompoundReferences = true;
    bool hasNested;
    do
    {
        GlyphTable g = ttf.Glyph;
        // offsets[i] is the byte offset of glyph i inside the 'glyf' table;
        // offsets[i + 1] - offsets[i] is the length of glyph i's data.
        long[] offsets = ttf.IndexToLocation.Offsets;
        Bytes.Buffer input = ttf.OriginalData;
        ISet<int> glyphIdsToAdd = null;  // lazily created; null means "nothing new found"
        try
        {
            // Position the stream at the start of the 'glyf' table.
            long isResult = input.Skip(g.Offset);
            if (isResult.CompareTo(g.Offset) != 0)
            {
                Debug.WriteLine($"debug: Tried skipping {g.Offset} bytes but skipped only {isResult} bytes");
            }
            long lastOff = 0L;
            // NOTE(review): positioning uses forward skips only, so this assumes
            // glyphIds iterates in ascending order — confirm glyphIds is a sorted set.
            foreach (int glyphId in glyphIds)
            {
                long offset = offsets[glyphId];
                long len = offsets[glyphId + 1] - offset;
                // Skip forward from the end of the previously read glyph.
                isResult = input.Skip(offset - lastOff);
                if (isResult.CompareTo(offset - lastOff) != 0)
                {
                    Debug.WriteLine($"debug: Tried skipping {(offset - lastOff)} bytes but skipped only {isResult} bytes");
                }
                sbyte[] buf = new sbyte[(int)len];
                isResult = input.Read(buf);
                if (isResult.CompareTo(len) != 0)
                {
                    Debug.WriteLine($"debug: Tried reading {len} bytes but only {isResult} bytes read");
                }
                // rewrite glyphIds for compound glyphs: a composite glyph starts with
                // numberOfContours == -1, i.e. its first two bytes are 0xFF 0xFF
                // (which is sbyte -1, -1).
                if (buf.Length >= 2 && buf[0] == -1 && buf[1] == -1)
                {
                    // Skip the 10-byte glyph header (numberOfContours + 4 bbox int16s).
                    int off = 2 * 5;
                    int flags;
                    do
                    {
                        // Each component record: uint16 flags, uint16 glyphIndex,
                        // then variable-length arguments/transform selected by flags.
                        flags = (buf[off] & 0xff) << 8 | buf[off + 1] & 0xff;
                        off += 2;
                        int ogid = (buf[off] & 0xff) << 8 | buf[off + 1] & 0xff;
                        if (!glyphIds.Contains(ogid))
                        {
                            if (glyphIdsToAdd == null)
                            {
                                glyphIdsToAdd = new HashSet<int>();
                            }
                            glyphIdsToAdd.Add(ogid);
                        }
                        off += 2;
                        // ARG_1_AND_2_ARE_WORDS: the two arguments are words, else bytes.
                        if ((flags & 1 << 0) != 0)
                        {
                            off += 2 * 2;
                        }
                        else
                        {
                            off += 2;
                        }
                        // WE_HAVE_A_TWO_BY_TWO: full 2x2 transform, four words.
                        if ((flags & 1 << 7) != 0)
                        {
                            off += 2 * 4;
                        }
                        // WE_HAVE_AN_X_AND_Y_SCALE: two words.
                        else if ((flags & 1 << 6) != 0)
                        {
                            off += 2 * 2;
                        }
                        // WE_HAVE_A_SCALE: one word.
                        else if ((flags & 1 << 3) != 0)
                        {
                            off += 2;
                        }
                    }
                    while ((flags & 1 << 5) != 0); // MORE_COMPONENTS: another record follows
                }
                lastOff = offsets[glyphId + 1];
            }
        }
        finally
        {
            input.Dispose();
        }
        if (glyphIdsToAdd != null)
        {
            glyphIds.AddAll(glyphIdsToAdd);
        }
        // Newly added components may themselves be composites: rescan if anything was added.
        hasNested = glyphIdsToAdd != null;
    }
    while (hasNested);
}
/// <summary>
/// Computes the order in which the given modules should be deployed, honoring
/// their uses-declarations (a module deploys after the modules it uses).
/// </summary>
/// <param name="modules">proposed modules to order</param>
/// <param name="options">ordering options; may be null, in which case defaults apply</param>
/// <returns>deployment order for the proposed modules</returns>
/// <exception cref="DeploymentOrderException">
/// when a uses-dependency cannot be satisfied (if options.IsCheckUses) or a
/// circular dependency is detected (if options.IsCheckCircularDependency)
/// </exception>
public DeploymentOrder GetDeploymentOrder(ICollection <Module> modules, DeploymentOrderOptions options)
{
    using (_iLock.Acquire())
    {
        if (options == null)
        {
            options = new DeploymentOrderOptions();
        }

        var proposedModules = new List <Module>();
        proposedModules.AddAll(modules);

        // Validates uses-declarations (throws DeploymentOrderException on failure).
        ValidateAndCollectUses(proposedModules, options);

        var proposedModuleNames = AssignModuleNumbers(proposedModules);
        var graph = BuildUsesGraph(proposedModules, proposedModuleNames);

        if (options.IsCheckCircularDependency)
        {
            ThrowIfCircular(graph, proposedModules);
        }

        return new DeploymentOrder(ComputeDeployOrder(graph, proposedModules));
    }
}

/// <summary>
/// Checks each proposed module's uses-declarations against the proposed set and the
/// already-deployed modules, and collects uses-dependencies per module name.
/// </summary>
private void ValidateAndCollectUses(List <Module> proposedModules, DeploymentOrderOptions options)
{
    var deployments = _deploymentStateService.Deployments;

    // Names of modules in the proposed set (unnamed modules cannot be depended on).
    ICollection <String> availableModuleNames = new HashSet <String>();
    foreach (var proposedModule in proposedModules)
    {
        if (proposedModule.Name != null)
        {
            availableModuleNames.Add(proposedModule.Name);
        }
    }

    // Collect all uses-dependencies of existing (deployed) modules.
    // NOTE(review): this map is populated (deployed + proposed) but never read
    // afterwards in this method; kept for parity with the original implementation.
    IDictionary <String, ICollection <String> > usesPerModuleName =
        new Dictionary <String, ICollection <String> >();
    foreach (var deployment in deployments)
    {
        var info = _deploymentStateService.GetDeployment(deployment);
        if (info == null)
        {
            continue;
        }
        if ((info.Module.Name == null) || (info.Module.Uses == null))
        {
            continue;
        }
        var usesSet = usesPerModuleName.Get(info.Module.Name);
        if (usesSet == null)
        {
            usesSet = new HashSet <String>();
            usesPerModuleName.Put(info.Module.Name, usesSet);
        }
        usesSet.AddAll(info.Module.Uses);
    }

    // Collect uses-dependencies of proposed modules, validating each if requested.
    foreach (var proposedModule in proposedModules)
    {
        if (options.IsCheckUses && proposedModule.Uses != null)
        {
            foreach (var uses in proposedModule.Uses)
            {
                // A dependency is satisfied by another proposed module or a deployed one.
                if (availableModuleNames.Contains(uses))
                {
                    continue;
                }
                if (IsDeployed(uses))
                {
                    continue;
                }
                var message = "Module-dependency not found";
                if (proposedModule.Name != null)
                {
                    message += " as declared by module '" + proposedModule.Name + "'";
                }
                message += " for uses-declaration '" + uses + "'";
                throw new DeploymentOrderException(message);
            }
        }

        if ((proposedModule.Name == null) || (proposedModule.Uses == null))
        {
            continue;
        }
        var proposedUsesSet = usesPerModuleName.Get(proposedModule.Name);
        if (proposedUsesSet == null)
        {
            proposedUsesSet = new HashSet <String>();
            usesPerModuleName.Put(proposedModule.Name, proposedUsesSet);
        }
        proposedUsesSet.AddAll(proposedModule.Uses);
    }
}

/// <summary>
/// Maps each module name to the set of positions (module numbers) it occupies in the
/// proposed list; the same name may appear more than once.
/// </summary>
private static HashMap <String, SortedSet <int> > AssignModuleNumbers(List <Module> proposedModules)
{
    var proposedModuleNames = new HashMap <String, SortedSet <int> >();
    var count = 0;
    foreach (var proposedModule in proposedModules)
    {
        var moduleNumbers = proposedModuleNames.Get(proposedModule.Name);
        if (moduleNumbers == null)
        {
            moduleNumbers = new SortedSet <int>();
            proposedModuleNames.Put(proposedModule.Name, moduleNumbers);
        }
        moduleNumbers.Add(count);
        count++;
    }
    return proposedModuleNames;
}

/// <summary>
/// Builds the dependency graph over module numbers from each module's uses-declarations.
/// Uses-targets outside the proposed set are ignored (they are already deployed or absent).
/// </summary>
private static DependencyGraph BuildUsesGraph(
    List <Module> proposedModules,
    HashMap <String, SortedSet <int> > proposedModuleNames)
{
    var graph = new DependencyGraph(proposedModules.Count, false);
    var fromModule = 0;
    foreach (var proposedModule in proposedModules)
    {
        if ((proposedModule.Uses == null) || (proposedModule.Uses.IsEmpty()))
        {
            fromModule++;
            continue;
        }
        var dependentModuleNumbers = new SortedSet <int>();
        foreach (var use in proposedModule.Uses)
        {
            var moduleNumbers = proposedModuleNames.Get(use);
            if (moduleNumbers == null)
            {
                continue;
            }
            dependentModuleNumbers.AddAll(moduleNumbers);
        }
        // A module never depends on itself.
        dependentModuleNumbers.Remove(fromModule);
        graph.AddDependency(fromModule, dependentModuleNumbers);
        fromModule++;
    }
    return graph;
}

/// <summary>
/// Throws a DeploymentOrderException describing the cycle if the graph contains a
/// circular uses-dependency.
/// </summary>
private static void ThrowIfCircular(DependencyGraph graph, List <Module> proposedModules)
{
    var circular = graph.FirstCircularDependency;
    if (circular == null)
    {
        return;
    }
    var message = "";
    var delimiter = "";
    foreach (var i in circular)
    {
        message += delimiter;
        message += "module '" + proposedModules[i].Name + "'";
        delimiter = " uses (depends on) ";
    }
    throw new DeploymentOrderException(
        "Circular dependency detected in module uses-relationships: " + message);
}

/// <summary>
/// Repeatedly peels off root nodes (modules nothing else depends on) to produce the
/// deploy order: dependencies first, dependents last.
/// </summary>
private static List <Module> ComputeDeployOrder(DependencyGraph graph, List <Module> proposedModules)
{
    var reverseDeployList = new List <Module>();
    var ignoreList = new HashSet <int>();
    while (ignoreList.Count < proposedModules.Count)
    {
        // Secondary sort according to the order of listing: roots are taken in
        // descending index order here; the final Reverse() below restores
        // listing order among peers.
        ICollection <int> rootNodes = new SortedSet <int>(
            new StandardComparer <int>((o1, o2) => - 1 * o1.CompareTo(o2)));
        rootNodes.AddAll(graph.GetRootNodes(ignoreList));

        if (rootNodes.IsEmpty())
        {
            // A circular dependency could cause this: force progress by taking the
            // first not-yet-handled module.
            for (var i = 0; i < proposedModules.Count; i++)
            {
                if (!ignoreList.Contains(i))
                {
                    rootNodes.Add(i);
                    break;
                }
            }
        }

        foreach (var root in rootNodes)
        {
            ignoreList.Add(root);
            reverseDeployList.Add(proposedModules[root]);
        }
    }
    reverseDeployList.Reverse();
    return reverseDeployList;
}